forked from extern/nushell
Compare commits
297 Commits
SHA1 | Author | Date | |
---|---|---|---|
5676713b1f | |||
b59231d32b | |||
e530cf0a9d | |||
1ba69e4b11 | |||
f10390b1be | |||
c2b1908644 | |||
0a93335f6d | |||
8e7acd1094 | |||
c6ee6273db | |||
c77059f891 | |||
5bdda06ca6 | |||
d8303dd6d6 | |||
60ec68b097 | |||
deae66c194 | |||
0bdb6e735a | |||
7933e01e77 | |||
b443a2d713 | |||
7a28ababd1 | |||
ddb9d3a864 | |||
186b75a848 | |||
8cedd2ee5b | |||
0845572878 | |||
2e4b0b0b17 | |||
9f42d7693f | |||
3424334ce5 | |||
c68d236fd7 | |||
7c6e82c990 | |||
eb5d0d295b | |||
2eae5a2a89 | |||
595c9f2999 | |||
70d63e34e9 | |||
83ac65ced3 | |||
be140382cf | |||
d320ffe742 | |||
fbc6f01cfb | |||
3008434c0f | |||
5fbea31d15 | |||
f70c6d5d48 | |||
71e7eb7cfc | |||
339ec46961 | |||
fe53c37654 | |||
06857fbc52 | |||
1c830b5c95 | |||
a74145961e | |||
91698b2657 | |||
40fd8070a9 | |||
4d5f1f6023 | |||
bc2d65cd2e | |||
1a0b339897 | |||
8d3a937413 | |||
e85e1b2c9e | |||
c8aa8cb842 | |||
88c4473283 | |||
f4d9975dab | |||
6e8b768d79 | |||
cdb0eeafa2 | |||
388fc24191 | |||
b3c021899c | |||
bff50c6987 | |||
111fcf188e | |||
015693aea7 | |||
03a52f1988 | |||
372f6c16b3 | |||
c04da4c232 | |||
a070cb8154 | |||
bf4273776f | |||
95ca3ed4fa | |||
54c0603263 | |||
c598cd4255 | |||
2bb03d9813 | |||
9c41f581a9 | |||
6231367bc8 | |||
a7d7098b1a | |||
90aeb700ea | |||
9dfc647386 | |||
f992f5de95 | |||
946f7256e4 | |||
57d425d929 | |||
dd36bf07f4 | |||
406fb8d1d9 | |||
2d4a225e2a | |||
db218e06dc | |||
17e8a5ce38 | |||
07db14f72e | |||
412831cb9c | |||
f4dc79f4ba | |||
9cb573b3b4 | |||
ce106bfda9 | |||
a3ffc4baf0 | |||
3c3637b674 | |||
bcecd08825 | |||
55f99073ad | |||
008c60651c | |||
63667d9e46 | |||
08b770719c | |||
e0d27ebf84 | |||
0756145caf | |||
036860770b | |||
aa1ef39da3 | |||
7c8969d4ea | |||
87d58535ff | |||
1060ba2206 | |||
0401087175 | |||
f8dc06ef49 | |||
282cb46ff1 | |||
a3ff5f1246 | |||
5bb822dcd4 | |||
00b3c2036a | |||
3163b0d362 | |||
21f48577ae | |||
11e4410d1c | |||
27a950d28e | |||
f3d056110a | |||
b39c2e2f75 | |||
7cf3c6eb95 | |||
cdec0254ec | |||
02f3330812 | |||
6f013d0225 | |||
1f06f57de3 | |||
0f405f24c7 | |||
5a8128dd30 | |||
50616cc62c | |||
9d345cab07 | |||
59ab11e932 | |||
df302d4bac | |||
6bbfd0f4f6 | |||
943e0045e7 | |||
62a5250554 | |||
9043970e97 | |||
d32c9ce1b6 | |||
73d8478678 | |||
bab58576b4 | |||
41212c1ad1 | |||
4a6122905b | |||
15986c598a | |||
078342442d | |||
8855c54391 | |||
5dfc81a157 | |||
c42d97fb97 | |||
13314ad1e7 | |||
ff6026ca79 | |||
c6c6c0f295 | |||
1cca5557b1 | |||
76208110b9 | |||
56dd0282f0 | |||
c01b602b86 | |||
d6f46236e9 | |||
104b30142f | |||
f3a885d920 | |||
60445b0559 | |||
01d6287a8f | |||
0462b2db80 | |||
4cb399ed70 | |||
7ef9f7702f | |||
44a1686a76 | |||
15c6d24178 | |||
3b84e3ccfe | |||
da7d6beb22 | |||
f012eb7bdd | |||
f966394b63 | |||
889d2bb378 | |||
a2c4e485ba | |||
8860d8de8d | |||
d7b768ee9f | |||
6ea8e42331 | |||
1b784cb77a | |||
4a0ec1207c | |||
ffb2fedca9 | |||
382b1ba85f | |||
3b42655b51 | |||
e43e906f86 | |||
e51d9d0935 | |||
f57489ed92 | |||
503e521820 | |||
c317094947 | |||
243df63978 | |||
05ff102e09 | |||
cd30fac050 | |||
f589d3c795 | |||
51879d022e | |||
2260b3dda3 | |||
aa64442453 | |||
129ee45944 | |||
2fe7d105b0 | |||
136c8acba6 | |||
e92d4b2ccb | |||
6e91c96dd7 | |||
7801c03e2d | |||
763bbe1c01 | |||
0f67569cc3 | |||
0ea3527544 | |||
20dfca073f | |||
a3679f0f4e | |||
e75fdc2865 | |||
4be88ff572 | |||
992789af26 | |||
b822e13f12 | |||
cd058db046 | |||
1b3143d3d4 | |||
e31ed66610 | |||
7f18ff10b2 | |||
65ae24fbf1 | |||
b54ce921dd | |||
4935129c5a | |||
7614ce4b49 | |||
9d34ec9153 | |||
fd92271884 | |||
cea8fab307 | |||
2d44b7d296 | |||
faccb0627f | |||
a9cd6b4f7a | |||
81691e07c6 | |||
26f40dcabc | |||
3820fef801 | |||
392ff286b2 | |||
b6824d8b88 | |||
e09160e80d | |||
8ba5388438 | |||
30b6eac03d | |||
17ad07ce27 | |||
53911ebecd | |||
bc309705a9 | |||
1de80aeac3 | |||
1eaaf368ee | |||
36e40ebb85 | |||
3f600c5b82 | |||
fbd980f8b0 | |||
7d383421c6 | |||
aed386b3cd | |||
540cc4016e | |||
1b3a09495d | |||
b7af34371b | |||
105762e1c3 | |||
2706ae076d | |||
07ceec3e0b | |||
72fd1b047f | |||
178b6d4d8d | |||
d160e834eb | |||
3e8b9e7e8b | |||
c34ebfe739 | |||
571b33a11c | |||
07b90f4b4b | |||
f1630da2cc | |||
16751b5dee | |||
29ec9a436a | |||
6a7c00eaef | |||
82b24d9beb | |||
a317072e4e | |||
5b701cd197 | |||
8f035616a0 | |||
81f8ba9e4c | |||
380ab19910 | |||
4329629ee9 | |||
39fde52d8e | |||
0611f56776 | |||
8923e91e39 | |||
d6e6811bb9 | |||
f24bc5c826 | |||
c209d0d487 | |||
74dddc880d | |||
f3c41bbdf1 | |||
c45ddc8f22 | |||
84a98995bf | |||
ed83449514 | |||
9eda573a43 | |||
4f91d2512a | |||
2f5eeab567 | |||
f9fbb0eb3c | |||
43fbf4345d | |||
8262c2dd33 | |||
0e86430ea3 | |||
fc1301c92d | |||
e913e26c01 | |||
5ce4b12cc1 | |||
94429d781f | |||
321629a693 | |||
f21405399c | |||
305ca11eb5 | |||
9b1ff9b566 | |||
a0ed6ea3c8 | |||
4a6529973e | |||
9a02fac0e5 | |||
2c6a9e9e48 | |||
d91b735442 | |||
f8d337ad29 | |||
47150efc14 | |||
3e14de158b | |||
c8671c719f | |||
0412c3a2f8 | |||
ef3e8eb778 | |||
fb8cfeb70d | |||
93ae5043cc | |||
b134394319 | |||
b163775112 | |||
8bd035f51d | |||
fa859f1461 | |||
556f4b2f12 |
@@ -3,13 +3,13 @@ trigger:

strategy:
matrix:
linux-nightly:
linux-stable:
image: ubuntu-16.04
style: 'unflagged'
macos-nightly:
macos-stable:
image: macos-10.14
style: 'unflagged'
windows-nightly:
windows-stable:
image: vs2017-win2016
style: 'unflagged'
linux-nightly-canary:
@@ -35,11 +35,12 @@ steps:
then
sudo apt-get -y install libxcb-composite0-dev libx11-dev
fi
curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain `cat rust-toolchain`
curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "stable"
export PATH=$HOME/.cargo/bin:$PATH
rustup update
rustc -Vv
echo "##vso[task.prependpath]$HOME/.cargo/bin"
rustup component add rustfmt --toolchain `cat rust-toolchain`
rustup component add rustfmt --toolchain "stable"
displayName: Install Rust
- bash: RUSTFLAGS="-D warnings" cargo test --all-features
condition: eq(variables['style'], 'unflagged')
@@ -0,0 +1,3 @@
[build]

#rustflags = ["--cfg", "coloring_in_tokens"]
30
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
@@ -0,0 +1,30 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''

---

**Describe the bug**
A clear and concise description of what the bug is.

**To Reproduce**
Steps to reproduce the behavior:
1.
2.
3.

**Expected behavior**
A clear and concise description of what you expected to happen.

**Screenshots**
If applicable, add screenshots to help explain your problem.

**Configuration (please complete the following information):**
- OS: [e.g. Windows]
- Version [e.g. 0.4.0]
- Optional features (if any)

Add any other context about the problem here.
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.
70
.github/workflows/docker-publish.yml
vendored
@ -2,7 +2,7 @@ name: Publish consumable Docker images
|
||||
|
||||
on:
|
||||
push:
|
||||
tags: ['*.*.*']
|
||||
tags: ['v?[0-9]+.[0-9]+.[0-9]+*']
|
||||
|
||||
jobs:
|
||||
compile:
|
||||
@ -14,7 +14,11 @@ jobs:
|
||||
- x86_64-unknown-linux-gnu
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- run: cargo install cross
|
||||
- name: Install rust-embedded/cross
|
||||
env: { VERSION: v0.1.16 }
|
||||
run: >-
|
||||
wget -nv https://github.com/rust-embedded/cross/releases/download/${VERSION}/cross-${VERSION}-x86_64-unknown-linux-gnu.tar.gz
|
||||
-O- | sudo tar xz -C /usr/local/bin/
|
||||
- name: compile for specific target
|
||||
env: { arch: '${{ matrix.arch }}' }
|
||||
run: |
|
||||
@ -31,6 +35,10 @@ jobs:
|
||||
name: Build and publish docker images
|
||||
needs: compile
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
DOCKER_REGISTRY: quay.io/nushell
|
||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_REGISTRY }}
|
||||
DOCKER_USER: ${{ secrets.DOCKER_USER }}
|
||||
strategy:
|
||||
matrix:
|
||||
tag:
|
||||
@ -58,41 +66,53 @@ jobs:
|
||||
- uses: actions/download-artifact@master
|
||||
with: { name: '${{ matrix.arch }}', path: target/release }
|
||||
- name: Build and publish exact version
|
||||
run: |
|
||||
REGISTRY=${REGISTRY,,}; export TAG=${GITHUB_REF##*/}-${{ matrix.tag }};
|
||||
run: |-
|
||||
export DOCKER_TAG=${GITHUB_REF##*/}-${{ matrix.tag }}
|
||||
export NU_BINS=target/release/$( [ ${{ matrix.plugin }} = true ] && echo nu* || echo nu )
|
||||
export PATCH=$([ ${{ matrix.use-patch }} = true ] && echo .${{ matrix.tag }} || echo '')
|
||||
chmod +x $NU_BINS
|
||||
|
||||
echo ${{ secrets.DOCKER_REGISTRY }} | docker login docker.pkg.github.com -u ${{ github.actor }} --password-stdin
|
||||
echo ${DOCKER_PASSWORD} | docker login ${DOCKER_REGISTRY} -u ${DOCKER_USER} --password-stdin
|
||||
docker-compose --file docker/docker-compose.package.yml build
|
||||
docker-compose --file docker/docker-compose.package.yml push # exact version
|
||||
env:
|
||||
BASE_IMAGE: ${{ matrix.base-image }}
|
||||
REGISTRY: docker.pkg.github.com/${{ github.repository }}
|
||||
|
||||
#region semantics tagging
|
||||
- name: Retag and push without suffixing version
|
||||
run: |
|
||||
- name: Retag and push with suffixed version
|
||||
run: |-
|
||||
VERSION=${GITHUB_REF##*/}
|
||||
docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${{ matrix.tag }}
|
||||
docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION%%.*}-${{ matrix.tag }}
|
||||
docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION%.*}-${{ matrix.tag }}
|
||||
docker push ${REGISTRY,,}/nu:${VERSION%.*}-${{ matrix.tag }} # latest patch
|
||||
docker push ${REGISTRY,,}/nu:${VERSION%%.*}-${{ matrix.tag }} # latest features
|
||||
docker push ${REGISTRY,,}/nu:${{ matrix.tag }} # latest version
|
||||
env: { REGISTRY: 'docker.pkg.github.com/${{ github.repository }}' }
|
||||
|
||||
latest_version=${VERSION%%%.*}-${{ matrix.tag }}
|
||||
latest_feature=${VERSION%%.*}-${{ matrix.tag }}
|
||||
latest_patch=${VERSION%.*}-${{ matrix.tag }}
|
||||
exact_version=${VERSION}-${{ matrix.tag }}
|
||||
|
||||
tags=( ${latest_version} ${latest_feature} ${latest_patch} ${exact_version} )
|
||||
|
||||
for tag in ${tags[@]}; do
|
||||
docker tag ${DOCKER_REGISTRY}/nu:${VERSION}-${{ matrix.tag }} ${DOCKER_REGISTRY}/nu:${tag}
|
||||
docker push ${DOCKER_REGISTRY}/nu:${tag}
|
||||
done
|
||||
|
||||
# latest version
|
||||
docker tag ${DOCKER_REGISTRY}/nu:${VERSION}-${{ matrix.tag }} ${DOCKER_REGISTRY}/nu:${{ matrix.tag }}
|
||||
docker push ${DOCKER_REGISTRY}/nu:${{ matrix.tag }}
|
||||
|
||||
- name: Retag and push debian as latest
|
||||
if: matrix.tag == 'debian'
|
||||
run: |
|
||||
run: |-
|
||||
VERSION=${GITHUB_REF##*/}
|
||||
docker tag ${REGISTRY,,}/nu:${{ matrix.tag }} ${REGISTRY,,}/nu:latest
|
||||
docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION%.*}
|
||||
docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION%%.*}
|
||||
docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION}
|
||||
docker push ${REGISTRY,,}/nu:${VERSION} # exact version
|
||||
docker push ${REGISTRY,,}/nu:${VERSION%%.*} # latest features
|
||||
docker push ${REGISTRY,,}/nu:${VERSION%.*} # latest patch
|
||||
docker push ${REGISTRY,,}/nu:latest # latest version
|
||||
env: { REGISTRY: 'docker.pkg.github.com/${{ github.repository }}' }
|
||||
|
||||
# ${latest features} ${latest patch} ${exact version}
|
||||
tags=( ${VERSION%%.*} ${VERSION%.*} ${VERSION} )
|
||||
|
||||
for tag in ${tags[@]}; do
|
||||
docker tag ${DOCKER_REGISTRY}/nu:${VERSION}-${{ matrix.tag }} ${DOCKER_REGISTRY}/nu:${tag}
|
||||
docker push ${DOCKER_REGISTRY}/nu:${tag}
|
||||
done
|
||||
|
||||
# latest version
|
||||
docker tag ${DOCKER_REGISTRY}/nu:${{ matrix.tag }} ${DOCKER_REGISTRY}/nu:latest
|
||||
docker push ${DOCKER_REGISTRY}/nu:latest
|
||||
#endregion semantics tagging
|
||||
|
@@ -1,8 +1,8 @@
image:
file: .gitpod.Dockerfile
tasks:
- init: cargo build
command: cargo run
- init: cargo install nu
command: nu
github:
prebuilds:
# enable for the master/default branch (defaults to true)
2828
Cargo.lock
generated
File diff suppressed because it is too large
85
Cargo.toml
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "nu"
|
||||
version = "0.4.0"
|
||||
version = "0.6.1"
|
||||
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
||||
description = "A shell for the GitHub era"
|
||||
license = "MIT"
|
||||
@ -11,70 +11,85 @@ repository = "https://github.com/nushell/nushell"
|
||||
homepage = "https://www.nushell.sh"
|
||||
documentation = "https://book.nushell.sh"
|
||||
|
||||
[workspace]
|
||||
|
||||
members = ["crates/nu-source"]
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
rustyline = "5.0.3"
|
||||
nu-source = { version = "0.1.0", path = "./crates/nu-source" }
|
||||
|
||||
rustyline = "5.0.4"
|
||||
chrono = { version = "0.4.9", features = ["serde"] }
|
||||
derive-new = "0.5.8"
|
||||
prettytable-rs = "0.8.0"
|
||||
itertools = "0.8.0"
|
||||
itertools = "0.8.1"
|
||||
ansi_term = "0.12.1"
|
||||
nom = "5.0.0"
|
||||
nom = "5.0.1"
|
||||
dunce = "1.0.0"
|
||||
indexmap = { version = "1.2.0", features = ["serde-1"] }
|
||||
indexmap = { version = "1.3.0", features = ["serde-1"] }
|
||||
chrono-humanize = "0.0.11"
|
||||
byte-unit = "3.0.1"
|
||||
base64 = "0.10.1"
|
||||
futures-preview = { version = "=0.3.0-alpha.18", features = ["compat", "io-compat"] }
|
||||
async-stream = "0.1.1"
|
||||
byte-unit = "3.0.3"
|
||||
base64 = "0.11"
|
||||
futures-preview = { version = "=0.3.0-alpha.19", features = ["compat", "io-compat"] }
|
||||
async-stream = "0.1.2"
|
||||
futures_codec = "0.2.5"
|
||||
num-traits = "0.2.8"
|
||||
term = "0.5.2"
|
||||
bytes = "0.4.12"
|
||||
log = "0.4.8"
|
||||
pretty_env_logger = "0.3.1"
|
||||
serde = { version = "1.0.100", features = ["derive"] }
|
||||
serde = { version = "1.0.102", features = ["derive"] }
|
||||
bson = { version = "0.14.0", features = ["decimal128"] }
|
||||
serde_json = "1.0.40"
|
||||
serde_json = "1.0.41"
|
||||
serde-hjson = "0.9.1"
|
||||
serde_yaml = "0.8"
|
||||
serde_bytes = "0.11.2"
|
||||
getset = "0.0.8"
|
||||
getset = "0.0.9"
|
||||
language-reporting = "0.4.0"
|
||||
app_dirs = "1.2.1"
|
||||
csv = "1.1"
|
||||
toml = "0.5.3"
|
||||
toml = "0.5.5"
|
||||
clap = "2.33.0"
|
||||
git2 = { version = "0.10.1", default_features = false }
|
||||
dirs = "2.0.2"
|
||||
glob = "0.3.0"
|
||||
ctrlc = "3.1.3"
|
||||
surf = "1.0.2"
|
||||
surf = "1.0.3"
|
||||
url = "2.1.0"
|
||||
roxmltree = "0.7.0"
|
||||
roxmltree = "0.7.2"
|
||||
nom_locate = "1.0.0"
|
||||
nom-tracable = "0.4.0"
|
||||
nom-tracable = "0.4.1"
|
||||
unicode-xid = "0.2.0"
|
||||
serde_ini = "0.2.0"
|
||||
subprocess = "0.1.18"
|
||||
mime = "0.3.14"
|
||||
pretty-hex = "0.1.0"
|
||||
hex = "0.3.2"
|
||||
pretty-hex = "0.1.1"
|
||||
hex = "0.4"
|
||||
tempfile = "3.1.0"
|
||||
semver = "0.9.0"
|
||||
which = "2.0.1"
|
||||
which = "3.1"
|
||||
textwrap = {version = "0.11.0", features = ["term_size"]}
|
||||
shellexpand = "1.0.0"
|
||||
futures-timer = "0.4.0"
|
||||
futures-timer = "2.0.0"
|
||||
pin-utils = "0.1.0-alpha.4"
|
||||
num-bigint = { version = "0.2.3", features = ["serde"] }
|
||||
bigdecimal = { version = "0.1.0", features = ["serde"] }
|
||||
natural = "0.3.0"
|
||||
serde_urlencoded = "0.6.1"
|
||||
sublime_fuzzy = "0.5"
|
||||
sublime_fuzzy = "0.6"
|
||||
trash = "1.0.0"
|
||||
regex = "1"
|
||||
cfg-if = "0.1"
|
||||
strip-ansi-escapes = "0.1.0"
|
||||
calamine = "0.16"
|
||||
umask = "0.1"
|
||||
futures-util = "0.3.0"
|
||||
pretty = "0.5.2"
|
||||
termcolor = "1.0.5"
|
||||
console = "0.9.1"
|
||||
|
||||
regex = {version = "1", optional = true }
|
||||
neso = { version = "0.5.0", optional = true }
|
||||
crossterm = { version = "0.10.2", optional = true }
|
||||
syntect = {version = "3.2.0", optional = true }
|
||||
@ -83,8 +98,9 @@ heim = {version = "0.0.8", optional = true }
|
||||
battery = {version = "0.7.4", optional = true }
|
||||
rawkey = {version = "0.1.2", optional = true }
|
||||
clipboard = {version = "0.5", optional = true }
|
||||
ptree = {version = "0.2", optional = true }
|
||||
ptree = {version = "0.2" }
|
||||
image = { version = "0.22.2", default_features = false, features = ["png_codec", "jpeg"], optional = true }
|
||||
starship = { version = "0.26.4", optional = true}
|
||||
|
||||
[features]
|
||||
default = ["textview", "sys", "ps"]
|
||||
@ -93,8 +109,8 @@ textview = ["syntect", "onig_sys", "crossterm"]
|
||||
binaryview = ["image", "crossterm"]
|
||||
sys = ["heim", "battery"]
|
||||
ps = ["heim"]
|
||||
starship-prompt = ["starship"]
|
||||
# trace = ["nom-tracable/trace"]
|
||||
all = ["raw-key", "textview", "binaryview", "sys", "ps", "clipboard", "ptree"]
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "0.20.0"
|
||||
@ -104,8 +120,8 @@ features = ["bundled", "blob"]
|
||||
pretty_assertions = "0.6.1"
|
||||
|
||||
[build-dependencies]
|
||||
toml = "0.5.3"
|
||||
serde = { version = "1.0.101", features = ["derive"] }
|
||||
toml = "0.5.5"
|
||||
serde = { version = "1.0.102", features = ["derive"] }
|
||||
|
||||
[lib]
|
||||
name = "nu"
|
||||
@ -119,18 +135,30 @@ path = "src/plugins/inc.rs"
|
||||
name = "nu_plugin_sum"
|
||||
path = "src/plugins/sum.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_average"
|
||||
path = "src/plugins/average.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_embed"
|
||||
path = "src/plugins/embed.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_add"
|
||||
path = "src/plugins/add.rs"
|
||||
name = "nu_plugin_insert"
|
||||
path = "src/plugins/insert.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_edit"
|
||||
path = "src/plugins/edit.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_format"
|
||||
path = "src/plugins/format.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_parse"
|
||||
path = "src/plugins/parse.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_str"
|
||||
path = "src/plugins/str.rs"
|
||||
@ -142,7 +170,6 @@ path = "src/plugins/skip.rs"
|
||||
[[bin]]
|
||||
name = "nu_plugin_match"
|
||||
path = "src/plugins/match.rs"
|
||||
required-features = ["regex"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_sys"
|
||||
|
@@ -4,21 +4,12 @@ command = "lalrpop"
args = ["src/parser/parser.lalrpop"]

[tasks.baseline]
dependencies = ["lalrpop"]

[tasks.build]
command = "cargo"
args = ["build"]
dependencies = ["lalrpop"]
args = ["build", "--bins"]

[tasks.run]
command = "cargo"
args = ["run", "--release"]
dependencies = ["baseline"]

[tasks.release]
command = "cargo"
args = ["build", "--release"]
args = ["run"]
dependencies = ["baseline"]

[tasks.test]
49
README.md
@ -32,9 +32,9 @@ Try it in Gitpod.
|
||||
|
||||
## Local
|
||||
|
||||
Up-to-date installation instructions can be found in the [installation chapter of the book](https://book.nushell.sh/en/installation).
|
||||
Up-to-date installation instructions can be found in the [installation chapter of the book](https://book.nushell.sh/en/installation). **Windows users**: please note that Nu works on Windows 10 and does not currently have Windows 7/8.1 support.
|
||||
|
||||
To build Nu, you will need to use the **nightly** version of the compiler.
|
||||
To build Nu, you will need to use the **latest stable (1.39 or later)** version of the compiler.
|
||||
|
||||
Required dependencies:
|
||||
|
||||
@ -46,16 +46,16 @@ Optional dependencies:
|
||||
* To use Nu with all possible optional features enabled, you'll also need the following:
|
||||
* on Linux (on Debian/Ubuntu): `apt install libxcb-composite0-dev libx11-dev`
|
||||
|
||||
To install Nu via cargo (make sure you have installed [rustup](https://rustup.rs/) and the beta compiler via `rustup install beta`):
|
||||
To install Nu via cargo (make sure you have installed [rustup](https://rustup.rs/) and the latest stable compiler via `rustup install stable`):
|
||||
|
||||
```
|
||||
cargo +beta install nu
|
||||
cargo install nu
|
||||
```
|
||||
|
||||
You can also install Nu with all the bells and whistles (be sure to have installed the [dependencies](https://book.nushell.sh/en/installation#dependencies) for your platform):
|
||||
|
||||
```
|
||||
cargo +beta install nu --all-features
|
||||
cargo install nu --all-features
|
||||
```
|
||||
|
||||
## Docker
|
||||
@ -173,7 +173,7 @@ We can pipeline this into a command that gets the contents of one of the columns
|
||||
━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━┯━━━━━━┯━━━━━━━━━
|
||||
authors │ description │ edition │ license │ name │ version
|
||||
─────────────────┼────────────────────────────┼─────────┼─────────┼──────┼─────────
|
||||
[table: 3 rows] │ A shell for the GitHub era │ 2018 │ MIT │ nu │ 0.4.0
|
||||
[table: 3 rows] │ A shell for the GitHub era │ 2018 │ MIT │ nu │ 0.6.1
|
||||
━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━┷━━━━━━┷━━━━━━━━━
|
||||
```
|
||||
|
||||
@ -181,11 +181,30 @@ Finally, we can use commands outside of Nu once we have the data we want:
|
||||
|
||||
```
|
||||
/home/jonathan/Source/nushell(master)> open Cargo.toml | get package.version | echo $it
|
||||
0.4.0
|
||||
0.6.1
|
||||
```
|
||||
|
||||
Here we use the variable `$it` to refer to the value being piped to the external command.
|
||||
|
||||
## Configuration
|
||||
|
||||
Nu has early support for configuring the shell. It currently supports the following settings:
|
||||
|
||||
| Variable | Type | Description |
|
||||
| ------------- | ------------- | ----- |
|
||||
| path | table of strings | PATH to use to find binaries |
|
||||
| env | row | the environment variables to pass to external commands |
|
||||
| ctrlc_exit | boolean | whether or not to exit Nu after multiple ctrl-c presses |
|
||||
| table_mode | "light" or other | enable lightweight or normal tables |
|
||||
| edit_mode | "vi" or "emacs" | changes line editing to "vi" or "emacs" mode |
|
||||
|
||||
To set one of these variables, you can use `config --set`. For example:
|
||||
|
||||
```
|
||||
> config --set [edit_mode "vi"]
|
||||
> config --set [path $nu:path]
|
||||
```
|
||||
|
||||
## Shells
|
||||
|
||||
Nu will work inside of a single directory and allow you to navigate around your filesystem by default. Nu also offers a way of adding additional working directories that you can jump between, allowing you to work in multiple directories at the same time.
|
||||
@ -248,20 +267,29 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat
|
||||
## Filters on tables (structured data)
|
||||
| command | description |
|
||||
| ------------- | ------------- |
|
||||
| add column-or-column-path value | Add a new column to the table |
|
||||
| append row-data | Append a row to the end of the table |
|
||||
| compact ...columns | Remove rows where given columns are empty |
|
||||
| count | Show the total number of rows |
|
||||
| default column row-data | Sets a default row's column if missing |
|
||||
| edit column-or-column-path value | Edit an existing column to have a new value |
|
||||
| embed column | Creates a new table of one column with the given name, and places the current table inside of it |
|
||||
| first amount | Show only the first number of rows |
|
||||
| format pattern | Format table row data as a string following the given pattern |
|
||||
| get column-or-column-path | Open column and get data from the corresponding cells |
|
||||
| group-by column | Creates a new table with the data from the table rows grouped by the column given |
|
||||
| histogram column ...column-names | Creates a new table with a histogram based on the column name passed in, optionally give the frequency column name
|
||||
| inc (column-or-column-path) | Increment a value or version. Optionally use the column of a table |
|
||||
| insert column-or-column-path value | Insert a new column to the table |
|
||||
| last amount | Show only the last number of rows |
|
||||
| nth row-number | Return only the selected row |
|
||||
| nth ...row-numbers | Return only the selected rows |
|
||||
| pick ...columns | Down-select table to only these columns |
|
||||
| pivot --header-row <headers> | Pivot the tables, making columns into rows and vice versa |
|
||||
| prepend row-data | Prepend a row to the beginning of the table |
|
||||
| reject ...columns | Remove the given columns from the table |
|
||||
| reverse | Reverses the table. |
|
||||
| skip amount | Skip a number of rows |
|
||||
| skip-while condition | Skips rows while the condition matches. |
|
||||
| skip-while condition | Skips rows while the condition matches |
|
||||
| split-by column | Creates a new table with the data from the inner tables split by the column given |
|
||||
| sort-by ...columns | Sort by the given columns |
|
||||
| str (column) | Apply string function. Optionally use the column of a table |
|
||||
| sum | Sum a column of values |
|
||||
@ -291,6 +319,7 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat
|
||||
| from-xml | Parse text as .xml and create a table |
|
||||
| from-yaml | Parse text as a .yaml/.yml and create a table |
|
||||
| lines | Split single string into rows, one per line |
|
||||
| parse pattern | Convert text to a table by matching the given pattern |
|
||||
| size | Gather word count statistics on the text |
|
||||
| split-column sep ...column-names | Split row contents across multiple columns via the separator, optionally give the columns names |
|
||||
| split-row sep | Split row contents over multiple rows via the separator |
|
||||
|
48
TODO.md
Normal file
@@ -0,0 +1,48 @@
This pattern is extremely repetitive and can be abstracted:

```rs
let args = args.evaluate_once(registry)?;
let tag = args.name_tag();
let input = args.input;

let stream = async_stream! {
    let values: Vec<Value> = input.values.collect().await;

    let mut concat_string = String::new();
    let mut latest_tag: Option<Tag> = None;

    for value in values {
        latest_tag = Some(value.tag.clone());
        let value_span = value.tag.span;

        match &value.value {
            UntaggedValue::Primitive(Primitive::String(s)) => {
                concat_string.push_str(&s);
                concat_string.push_str("\n");
            }
            _ => yield Err(ShellError::labeled_error_with_secondary(
                "Expected a string from pipeline",
                "requires string input",
                tag.span,
                "value originates from here",
                value_span,
            )),
        }
    }
};
```

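One possible shape for that abstraction, as a rough sketch: a plain helper that folds the pipeline's string values into a single buffer and reports the last value's tag, leaving only the `async_stream!` wiring in each command. The helper name `collect_string_values` and the `CollectedStrings` struct below are invented for illustration and are not part of the current codebase.

```rs
// Hypothetical helper capturing the repeated pattern above.
// Assumes the crate's Value, UntaggedValue, Primitive, Tag, Span, and
// ShellError types are in scope; names here are illustrative only.
pub struct CollectedStrings {
    pub item: String,
    pub tag: Option<Tag>,
}

pub fn collect_string_values(
    values: Vec<Value>,
    name_span: Span,
) -> Result<CollectedStrings, ShellError> {
    let mut concat_string = String::new();
    let mut latest_tag: Option<Tag> = None;

    for value in values {
        latest_tag = Some(value.tag.clone());
        let value_span = value.tag.span;

        match &value.value {
            UntaggedValue::Primitive(Primitive::String(s)) => {
                concat_string.push_str(s);
                concat_string.push_str("\n");
            }
            _ => {
                // Same error as the inline version, produced in one place.
                return Err(ShellError::labeled_error_with_secondary(
                    "Expected a string from pipeline",
                    "requires string input",
                    name_span,
                    "value originates from here",
                    value_span,
                ));
            }
        }
    }

    Ok(CollectedStrings {
        item: concat_string,
        tag: latest_tag,
    })
}
```

Each command body would then reduce to collecting its input values, calling the helper, and yielding either the concatenated string or the error.
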
Mandatory and Optional in parse_command

trace_remaining?

select_fields and select_fields take unnecessary Tag

Value#value should be Value#untagged

Unify dictionary building, probably around a macro

sys plugin in own crate

textview in own crate
20
crates/nu-source/Cargo.toml
Normal file
@ -0,0 +1,20 @@
|
||||
[package]
|
||||
name = "nu-source"
|
||||
version = "0.1.0"
|
||||
authors = ["Yehuda Katz <wycats@gmail.com>"]
|
||||
edition = "2018"
|
||||
description = "A source string characterizer for Nushell"
|
||||
license = "MIT"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
|
||||
serde = { version = "1.0.102", features = ["derive"] }
|
||||
derive-new = "0.5.8"
|
||||
getset = "0.0.9"
|
||||
nom_locate = "1.0.0"
|
||||
nom-tracable = "0.4.1"
|
||||
language-reporting = "0.4.0"
|
||||
termcolor = "1.0.5"
|
||||
pretty = "0.5.2"
|
15
crates/nu-source/src/lib.rs
Normal file
@ -0,0 +1,15 @@
|
||||
mod meta;
|
||||
mod pretty;
|
||||
mod term_colored;
|
||||
mod text;
|
||||
mod tracable;
|
||||
|
||||
pub use self::meta::{
|
||||
span_for_spanned_list, tag_for_tagged_list, AnchorLocation, HasFallibleSpan, HasSpan, HasTag,
|
||||
Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem,
|
||||
};
|
||||
pub use self::pretty::{
|
||||
b, DebugDoc, DebugDocBuilder, PrettyDebug, PrettyDebugWithSource, ShellAnnotation,
|
||||
};
|
||||
pub use self::text::Text;
|
||||
pub use self::tracable::{nom_input, NomSpan, TracableContext};
|
@ -1,12 +1,24 @@
|
||||
use crate::context::AnchorLocation;
|
||||
use crate::parser::parse::parser::TracableContext;
|
||||
use crate::prelude::*;
|
||||
use crate::pretty::{b, DebugDocBuilder, PrettyDebugWithSource};
|
||||
use crate::text::Text;
|
||||
use crate::tracable::TracableContext;
|
||||
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub enum AnchorLocation {
|
||||
Url(String),
|
||||
File(String),
|
||||
Source(Text),
|
||||
}
|
||||
|
||||
pub trait HasTag {
|
||||
fn tag(&self) -> Tag;
|
||||
}
|
||||
|
||||
#[derive(new, Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
|
||||
pub struct Spanned<T> {
|
||||
pub span: Span,
|
||||
@ -22,6 +34,21 @@ impl<T> Spanned<T> {
|
||||
}
|
||||
}
|
||||
|
||||
impl Spanned<String> {
|
||||
pub fn items<'a, U>(
|
||||
items: impl Iterator<Item = &'a Spanned<String>>,
|
||||
) -> impl Iterator<Item = &'a str> {
|
||||
items.into_iter().map(|item| &item.item[..])
|
||||
}
|
||||
}
|
||||
|
||||
impl Spanned<String> {
|
||||
pub fn borrow_spanned(&self) -> Spanned<&str> {
|
||||
let span = self.span;
|
||||
self.item[..].spanned(span)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait SpannedItem: Sized {
|
||||
fn spanned(self, span: impl Into<Span>) -> Spanned<Self> {
|
||||
Spanned {
|
||||
@ -46,12 +73,30 @@ impl<T> std::ops::Deref for Spanned<T> {
|
||||
&self.item
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(new, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
|
||||
pub struct Tagged<T> {
|
||||
pub tag: Tag,
|
||||
pub item: T,
|
||||
}
|
||||
|
||||
impl Tagged<String> {
|
||||
pub fn borrow_spanned(&self) -> Spanned<&str> {
|
||||
let span = self.tag.span;
|
||||
self.item[..].spanned(span)
|
||||
}
|
||||
|
||||
pub fn borrow_tagged(&self) -> Tagged<&str> {
|
||||
self.item[..].tagged(self.tag.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Tagged<Vec<T>> {
|
||||
pub fn items(&self) -> impl Iterator<Item = &T> {
|
||||
self.item.iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> HasTag for Tagged<T> {
|
||||
fn tag(&self) -> Tag {
|
||||
self.tag.clone()
|
||||
@ -104,6 +149,24 @@ impl<T> Tagged<T> {
|
||||
mapped.tagged(tag)
|
||||
}
|
||||
|
||||
pub fn map_anchored(self, anchor: &Option<AnchorLocation>) -> Tagged<T> {
|
||||
let mut tag = self.tag;
|
||||
|
||||
tag.anchor = anchor.clone();
|
||||
|
||||
Tagged {
|
||||
item: self.item,
|
||||
tag: tag,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn transpose(&self) -> Tagged<&T> {
|
||||
Tagged {
|
||||
item: &self.item,
|
||||
tag: self.tag.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn tag(&self) -> Tag {
|
||||
self.tag.clone()
|
||||
}
|
||||
@ -139,14 +202,8 @@ impl From<&Tag> for Tag {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<nom_locate::LocatedSpanEx<&str, TracableContext>> for Span {
|
||||
fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Span {
|
||||
Span::new(input.offset, input.offset + input.fragment.len())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<nom_locate::LocatedSpanEx<&str, u64>> for Span {
|
||||
fn from(input: nom_locate::LocatedSpanEx<&str, u64>) -> Span {
|
||||
impl<T> From<nom_locate::LocatedSpanEx<&str, T>> for Span {
|
||||
fn from(input: nom_locate::LocatedSpanEx<&str, T>) -> Span {
|
||||
Span::new(input.offset, input.offset + input.fragment.len())
|
||||
}
|
||||
}
|
||||
@ -285,6 +342,10 @@ impl Tag {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn anchor(&self) -> Option<AnchorLocation> {
|
||||
self.anchor.clone()
|
||||
}
|
||||
|
||||
pub fn until(&self, other: impl Into<Tag>) -> Tag {
|
||||
let other = other.into();
|
||||
debug_assert!(
|
||||
@ -331,6 +392,14 @@ impl Tag {
|
||||
pub fn tagged_string<'a>(&self, source: &'a str) -> Tagged<String> {
|
||||
self.span.slice(source).to_string().tagged(self)
|
||||
}
|
||||
|
||||
pub fn anchor_name(&self) -> Option<String> {
|
||||
match self.anchor {
|
||||
Some(AnchorLocation::File(ref file)) => Some(file.clone()),
|
||||
Some(AnchorLocation::Url(ref url)) => Some(url.clone()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
@ -350,12 +419,35 @@ pub fn tag_for_tagged_list(mut iter: impl Iterator<Item = Tag>) -> Tag {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
pub fn span_for_spanned_list(mut iter: impl Iterator<Item = Span>) -> Span {
|
||||
let first = iter.next();
|
||||
|
||||
let first = match first {
|
||||
None => return Span::unknown(),
|
||||
Some(first) => first,
|
||||
};
|
||||
|
||||
let last = iter.last();
|
||||
|
||||
match last {
|
||||
None => first,
|
||||
Some(last) => first.until(last),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
|
||||
pub struct Span {
|
||||
start: usize,
|
||||
end: usize,
|
||||
}
|
||||
|
||||
impl From<&Span> for Span {
|
||||
fn from(span: &Span) -> Span {
|
||||
*span
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Option<Span>> for Span {
|
||||
fn from(input: Option<Span>) -> Span {
|
||||
match input {
|
||||
@ -417,16 +509,6 @@ impl Span {
|
||||
self.slice(source).to_string().spanned(*self)
|
||||
}
|
||||
|
||||
/*
|
||||
pub fn unknown_with_uuid(uuid: Uuid) -> Span {
|
||||
Span {
|
||||
start: 0,
|
||||
end: 0,
|
||||
source: Some(uuid),
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
pub fn start(&self) -> usize {
|
||||
self.start
|
||||
}
|
||||
@ -461,3 +543,113 @@ impl language_reporting::ReportingSpan for Span {
|
||||
self.end
|
||||
}
|
||||
}
|
||||
|
||||
pub trait HasSpan: PrettyDebugWithSource {
|
||||
fn span(&self) -> Span;
|
||||
}
|
||||
|
||||
pub trait HasFallibleSpan: PrettyDebugWithSource {
|
||||
fn maybe_span(&self) -> Option<Span>;
|
||||
}
|
||||
|
||||
impl<T: HasSpan> HasFallibleSpan for T {
|
||||
fn maybe_span(&self) -> Option<Span> {
|
||||
Some(HasSpan::span(self))
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> HasSpan for Spanned<T>
|
||||
where
|
||||
Spanned<T>: PrettyDebugWithSource,
|
||||
{
|
||||
fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for Option<Span> {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
match self {
|
||||
None => b::description("no span"),
|
||||
Some(span) => span.pretty_debug(source),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HasFallibleSpan for Option<Span> {
|
||||
fn maybe_span(&self) -> Option<Span> {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for Span {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::typed(
|
||||
"spanned",
|
||||
b::keyword("for") + b::space() + b::description(format!("{:?}", source)),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for Span {
|
||||
fn span(&self) -> Span {
|
||||
*self
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> PrettyDebugWithSource for Option<Spanned<T>>
|
||||
where
|
||||
Spanned<T>: PrettyDebugWithSource,
|
||||
{
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
match self {
|
||||
None => b::description("nothing"),
|
||||
Some(v) => v.pretty_debug(source),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> HasFallibleSpan for Option<Spanned<T>>
|
||||
where
|
||||
Spanned<T>: PrettyDebugWithSource,
|
||||
{
|
||||
fn maybe_span(&self) -> Option<Span> {
|
||||
match self {
|
||||
None => None,
|
||||
Some(value) => Some(value.span),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> PrettyDebugWithSource for Option<Tagged<T>>
|
||||
where
|
||||
Tagged<T>: PrettyDebugWithSource,
|
||||
{
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
match self {
|
||||
None => b::description("nothing"),
|
||||
Some(d) => d.pretty_debug(source),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> HasFallibleSpan for Option<Tagged<T>>
|
||||
where
|
||||
Tagged<T>: PrettyDebugWithSource,
|
||||
{
|
||||
fn maybe_span(&self) -> Option<Span> {
|
||||
match self {
|
||||
None => None,
|
||||
Some(value) => Some(value.tag.span),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> HasSpan for Tagged<T>
|
||||
where
|
||||
Tagged<T>: PrettyDebugWithSource,
|
||||
{
|
||||
fn span(&self) -> Span {
|
||||
self.tag.span
|
||||
}
|
||||
}
|
495
crates/nu-source/src/pretty.rs
Normal file
@ -0,0 +1,495 @@
|
||||
use crate::term_colored::TermColored;
|
||||
use crate::text::Text;
|
||||
use derive_new::new;
|
||||
use pretty::{BoxAllocator, DocAllocator};
|
||||
use std::hash::Hash;
|
||||
use termcolor::{Color, ColorSpec};
|
||||
|
||||
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd, Hash)]
|
||||
pub enum ShellStyle {
|
||||
Delimiter,
|
||||
Key,
|
||||
Value,
|
||||
Equals,
|
||||
Kind,
|
||||
Keyword,
|
||||
Operator,
|
||||
Variable,
|
||||
Primitive,
|
||||
Opaque,
|
||||
Description,
|
||||
Error,
|
||||
}
|
||||
|
||||
impl From<ShellAnnotation> for ColorSpec {
|
||||
fn from(ann: ShellAnnotation) -> ColorSpec {
|
||||
match ann.style {
|
||||
ShellStyle::Delimiter => ColorSpec::new()
|
||||
.set_fg(Some(Color::White))
|
||||
.set_intense(false)
|
||||
.clone(),
|
||||
ShellStyle::Key => ColorSpec::new()
|
||||
.set_fg(Some(Color::Black))
|
||||
.set_intense(true)
|
||||
.clone(),
|
||||
ShellStyle::Value => ColorSpec::new()
|
||||
.set_fg(Some(Color::White))
|
||||
.set_intense(true)
|
||||
.clone(),
|
||||
ShellStyle::Equals => ColorSpec::new()
|
||||
.set_fg(Some(Color::Black))
|
||||
.set_intense(true)
|
||||
.clone(),
|
||||
ShellStyle::Kind => ColorSpec::new().set_fg(Some(Color::Cyan)).clone(),
|
||||
ShellStyle::Variable => ColorSpec::new()
|
||||
.set_fg(Some(Color::Green))
|
||||
.set_intense(true)
|
||||
.clone(),
|
||||
ShellStyle::Keyword => ColorSpec::new().set_fg(Some(Color::Magenta)).clone(),
|
||||
ShellStyle::Operator => ColorSpec::new().set_fg(Some(Color::Yellow)).clone(),
|
||||
ShellStyle::Primitive => ColorSpec::new()
|
||||
.set_fg(Some(Color::Green))
|
||||
.set_intense(true)
|
||||
.clone(),
|
||||
ShellStyle::Opaque => ColorSpec::new()
|
||||
.set_fg(Some(Color::Yellow))
|
||||
.set_intense(true)
|
||||
.clone(),
|
||||
ShellStyle::Description => ColorSpec::new()
|
||||
.set_fg(Some(Color::Black))
|
||||
.set_intense(true)
|
||||
.clone(),
|
||||
ShellStyle::Error => ColorSpec::new()
|
||||
.set_fg(Some(Color::Red))
|
||||
.set_intense(true)
|
||||
.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Eq, Ord, PartialEq, PartialOrd, Hash, new)]
|
||||
pub struct ShellAnnotation {
|
||||
style: ShellStyle,
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for ShellAnnotation {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{:?}", self.style)
|
||||
}
|
||||
}
|
||||
|
||||
impl ShellAnnotation {
|
||||
pub fn style(style: impl Into<ShellStyle>) -> ShellAnnotation {
|
||||
ShellAnnotation {
|
||||
style: style.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub type PrettyDebugDoc =
|
||||
pretty::Doc<'static, pretty::BoxDoc<'static, ShellAnnotation>, ShellAnnotation>;
|
||||
|
||||
pub type PrettyDebugDocBuilder = pretty::DocBuilder<'static, pretty::BoxAllocator, ShellAnnotation>;
|
||||
|
||||
pub use self::DebugDocBuilder as b;
|
||||
|
||||
#[derive(Clone, new)]
|
||||
pub struct DebugDocBuilder {
|
||||
pub inner: PrettyDebugDocBuilder,
|
||||
}
|
||||
|
||||
impl PrettyDebug for DebugDocBuilder {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
self.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl std::ops::Add for DebugDocBuilder {
|
||||
type Output = DebugDocBuilder;
|
||||
|
||||
fn add(self, rhs: DebugDocBuilder) -> DebugDocBuilder {
|
||||
DebugDocBuilder::new(self.inner.append(rhs.inner))
|
||||
}
|
||||
}
|
||||
|
||||
impl DebugDocBuilder {
|
||||
pub fn from_doc(doc: DebugDoc) -> DebugDocBuilder {
|
||||
DebugDocBuilder {
|
||||
inner: BoxAllocator.nil().append(doc),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn blank() -> DebugDocBuilder {
|
||||
BoxAllocator.nil().into()
|
||||
}
|
||||
|
||||
pub fn delimiter(string: impl std::fmt::Display) -> DebugDocBuilder {
|
||||
DebugDocBuilder::styled(string, ShellStyle::Delimiter)
|
||||
}
|
||||
|
||||
pub fn key(string: impl std::fmt::Display) -> DebugDocBuilder {
|
||||
DebugDocBuilder::styled(string, ShellStyle::Key)
|
||||
}
|
||||
|
||||
pub fn value(string: impl std::fmt::Display) -> DebugDocBuilder {
|
||||
DebugDocBuilder::styled(string, ShellStyle::Value)
|
||||
}
|
||||
|
||||
pub fn as_value(self) -> DebugDocBuilder {
|
||||
self.inner
|
||||
.annotate(ShellAnnotation::style(ShellStyle::Value))
|
||||
.into()
|
||||
}
|
||||
|
||||
pub fn equals() -> DebugDocBuilder {
|
||||
DebugDocBuilder::styled("=", ShellStyle::Equals)
|
||||
}
|
||||
|
||||
pub fn kind(string: impl std::fmt::Display) -> DebugDocBuilder {
|
||||
DebugDocBuilder::styled(string, ShellStyle::Kind)
|
||||
}
|
||||
|
||||
pub fn as_kind(self) -> DebugDocBuilder {
|
||||
self.inner
|
||||
.annotate(ShellAnnotation::style(ShellStyle::Kind))
|
||||
.into()
|
||||
}
|
||||
|
||||
pub fn typed(kind: &str, value: DebugDocBuilder) -> DebugDocBuilder {
|
||||
b::delimit("(", b::kind(kind) + b::space() + value.group(), ")").group()
|
||||
}
|
||||
|
||||
pub fn subtyped(
|
||||
kind: &str,
|
||||
subkind: impl std::fmt::Display,
|
||||
value: DebugDocBuilder,
|
||||
) -> DebugDocBuilder {
|
||||
b::delimit(
|
||||
"(",
|
||||
(b::kind(kind) + b::delimit("[", b::kind(format!("{}", subkind)), "]")).group()
|
||||
+ b::space()
|
||||
+ value.group(),
|
||||
")",
|
||||
)
|
||||
.group()
|
||||
}
|
||||
|
||||
pub fn keyword(string: impl std::fmt::Display) -> DebugDocBuilder {
|
||||
DebugDocBuilder::styled(string, ShellStyle::Keyword)
|
||||
}
|
||||
|
||||
pub fn var(string: impl std::fmt::Display) -> DebugDocBuilder {
|
||||
DebugDocBuilder::styled(string, ShellStyle::Variable)
|
||||
}
|
||||
|
||||
pub fn operator(string: impl std::fmt::Display) -> DebugDocBuilder {
|
||||
DebugDocBuilder::styled(string, ShellStyle::Operator)
|
||||
}
|
||||
|
||||
pub fn primitive(string: impl std::fmt::Display) -> DebugDocBuilder {
|
||||
DebugDocBuilder::styled(format!("{}", string), ShellStyle::Primitive)
|
||||
}
|
||||
|
||||
pub fn opaque(string: impl std::fmt::Display) -> DebugDocBuilder {
|
||||
DebugDocBuilder::styled(string, ShellStyle::Opaque)
|
||||
}
|
||||
|
||||
pub fn description(string: impl std::fmt::Display) -> DebugDocBuilder {
|
||||
DebugDocBuilder::styled(string, ShellStyle::Description)
|
||||
}
|
||||
|
||||
pub fn error(string: impl std::fmt::Display) -> DebugDocBuilder {
|
||||
DebugDocBuilder::styled(string, ShellStyle::Error)
|
||||
}
|
||||
|
||||
pub fn delimit(start: &str, doc: DebugDocBuilder, end: &str) -> DebugDocBuilder {
|
||||
DebugDocBuilder::delimiter(start) + doc + DebugDocBuilder::delimiter(end)
|
||||
}
|
||||
|
||||
pub fn preceded(before: DebugDocBuilder, body: DebugDocBuilder) -> DebugDocBuilder {
|
||||
if body.is_empty() {
|
||||
body
|
||||
} else {
|
||||
before + body
|
||||
}
|
||||
}
|
||||
|
||||
pub fn surrounded_option(
|
||||
before: Option<DebugDocBuilder>,
|
||||
builder: Option<DebugDocBuilder>,
|
||||
after: Option<DebugDocBuilder>,
|
||||
) -> DebugDocBuilder {
|
||||
match builder {
|
||||
None => DebugDocBuilder::blank(),
|
||||
Some(b) => b::option(before) + b + b::option(after),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn preceded_option(
|
||||
before: Option<DebugDocBuilder>,
|
||||
builder: Option<DebugDocBuilder>,
|
||||
) -> DebugDocBuilder {
|
||||
DebugDocBuilder::surrounded_option(before, builder, None)
|
||||
}
|
||||
|
||||
pub fn option(builder: Option<DebugDocBuilder>) -> DebugDocBuilder {
|
||||
match builder {
|
||||
None => DebugDocBuilder::blank(),
|
||||
Some(b) => b,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn space() -> DebugDocBuilder {
|
||||
BoxAllocator.space().into()
|
||||
}
|
||||
|
||||
pub fn newline() -> DebugDocBuilder {
|
||||
BoxAllocator.newline().into()
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
match &self.inner.1 {
|
||||
pretty::Doc::Nil => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn or(self, doc: DebugDocBuilder) -> DebugDocBuilder {
|
||||
if self.is_empty() {
|
||||
doc
|
||||
} else {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
pub fn group(self) -> DebugDocBuilder {
|
||||
self.inner.group().into()
|
||||
}
|
||||
|
||||
pub fn nest(self) -> DebugDocBuilder {
|
||||
self.inner.nest(1).group().into()
|
||||
}
|
||||
|
||||
pub fn intersperse_with_source<'a, T: PrettyDebugWithSource + 'a>(
|
||||
list: impl IntoIterator<Item = &'a T>,
|
||||
separator: DebugDocBuilder,
|
||||
source: &str,
|
||||
) -> DebugDocBuilder {
|
||||
BoxAllocator
|
||||
.intersperse(
|
||||
list.into_iter().filter_map(|item| {
|
||||
let item = item.pretty_debug(source);
|
||||
if item.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(item)
|
||||
}
|
||||
}),
|
||||
separator,
|
||||
)
|
||||
.into()
|
||||
}
|
||||
|
||||
pub fn intersperse<T: PrettyDebug>(
|
||||
list: impl IntoIterator<Item = T>,
|
||||
separator: DebugDocBuilder,
|
||||
) -> DebugDocBuilder {
|
||||
BoxAllocator
|
||||
.intersperse(
|
||||
list.into_iter().filter_map(|item| {
|
||||
let item = item.pretty();
|
||||
if item.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(item)
|
||||
}
|
||||
}),
|
||||
separator,
|
||||
)
|
||||
.into()
|
||||
}
|
||||
|
||||
pub fn list(list: impl IntoIterator<Item = DebugDocBuilder>) -> DebugDocBuilder {
|
||||
let mut result: DebugDocBuilder = BoxAllocator.nil().into();
|
||||
|
||||
for item in list {
|
||||
result = result + item;
|
||||
}
|
||||
|
||||
result.into()
|
||||
}
|
||||
|
||||
fn styled(string: impl std::fmt::Display, style: ShellStyle) -> DebugDocBuilder {
|
||||
BoxAllocator
|
||||
.text(string.to_string())
|
||||
.annotate(ShellAnnotation::style(style))
|
||||
.into()
|
||||
}
|
||||
}
|
||||
|
||||
impl std::ops::Deref for DebugDocBuilder {
|
||||
type Target = PrettyDebugDocBuilder;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.inner
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, Ord, PartialEq, PartialOrd, new)]
|
||||
pub struct DebugDoc {
|
||||
pub inner: PrettyDebugDoc,
|
||||
}
|
||||
|
||||
pub trait PrettyDebugWithSource: Sized {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder;
|
||||
|
||||
// This is a transitional convenience method
|
||||
fn debug(&self, source: impl Into<Text>) -> String
|
||||
where
|
||||
Self: Clone,
|
||||
{
|
||||
self.clone().debuggable(source).display()
|
||||
}
|
||||
|
||||
fn debuggable(self, source: impl Into<Text>) -> DebuggableWithSource<Self> {
|
||||
DebuggableWithSource {
|
||||
inner: self,
|
||||
source: source.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: PrettyDebug> PrettyDebugWithSource for T {
|
||||
fn pretty_debug(&self, _source: &str) -> DebugDocBuilder {
|
||||
self.pretty()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DebuggableWithSource<T: PrettyDebugWithSource> {
|
||||
inner: T,
|
||||
source: Text,
|
||||
}
|
||||
|
||||
impl<T> PrettyDebug for DebuggableWithSource<T>
|
||||
where
|
||||
T: PrettyDebugWithSource,
|
||||
{
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
self.inner.pretty_debug(&self.source)
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebug for DebugDoc {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
DebugDocBuilder::new(BoxAllocator.nil().append(self.inner.clone()))
|
||||
}
|
||||
}
|
||||
|
||||
pub trait PrettyDebug {
|
||||
fn pretty(&self) -> DebugDocBuilder;
|
||||
|
||||
fn to_doc(&self) -> DebugDoc {
|
||||
DebugDoc::new(self.pretty().into())
|
||||
}
|
||||
|
||||
fn pretty_doc(&self) -> PrettyDebugDoc {
|
||||
let builder = self.pretty();
|
||||
builder.inner.into()
|
||||
}
|
||||
|
||||
fn pretty_builder(&self) -> PrettyDebugDocBuilder {
|
||||
let doc = self.pretty();
|
||||
doc.inner
|
||||
}
|
||||
|
||||
/// A convenience method that prints out the document without colors in
|
||||
/// 70 columns. Generally, you should use plain_string or colored_string
|
||||
/// if possible, but display() can be useful for trace lines and things
|
||||
/// like that, where you don't have control over the terminal.
|
||||
fn display(&self) -> String {
|
||||
self.plain_string(70)
|
||||
}
|
||||
|
||||
fn plain_string(&self, width: usize) -> String {
|
||||
let doc = self.pretty_doc();
|
||||
let mut buffer = termcolor::Buffer::no_color();
|
||||
|
||||
doc.render_raw(width, &mut TermColored::new(&mut buffer))
|
||||
.unwrap();
|
||||
|
||||
String::from_utf8_lossy(buffer.as_slice()).to_string()
|
||||
}
|
||||
|
||||
fn colored_string(&self, width: usize) -> String {
|
||||
let doc = self.pretty_doc();
|
||||
let mut buffer = termcolor::Buffer::ansi();
|
||||
|
||||
doc.render_raw(width, &mut TermColored::new(&mut buffer))
|
||||
.unwrap();
|
||||
|
||||
String::from_utf8_lossy(buffer.as_slice()).to_string()
|
||||
}
|
||||
}
|
||||
|
||||
impl Into<DebugDocBuilder> for PrettyDebugDocBuilder {
|
||||
fn into(self) -> DebugDocBuilder {
|
||||
DebugDocBuilder { inner: self }
|
||||
}
|
||||
}
|
||||
|
||||
impl std::ops::Deref for DebugDoc {
|
||||
type Target = PrettyDebugDoc;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.inner
|
||||
}
|
||||
}
|
||||
|
||||
impl From<DebugDoc> for PrettyDebugDoc {
|
||||
fn from(input: DebugDoc) -> PrettyDebugDoc {
|
||||
input.inner
|
||||
}
|
||||
}
|
||||
|
||||
impl Into<PrettyDebugDoc> for DebugDocBuilder {
|
||||
fn into(self) -> PrettyDebugDoc {
|
||||
self.inner.into()
|
||||
}
|
||||
}
|
||||
|
||||
fn hash_doc<H: std::hash::Hasher>(doc: &PrettyDebugDoc, state: &mut H) {
|
||||
match doc {
|
||||
pretty::Doc::Nil => 0u8.hash(state),
|
||||
pretty::Doc::Append(a, b) => {
|
||||
1u8.hash(state);
|
||||
hash_doc(&*a, state);
|
||||
hash_doc(&*b, state);
|
||||
}
|
||||
pretty::Doc::Group(a) => {
|
||||
2u8.hash(state);
|
||||
hash_doc(&*a, state);
|
||||
}
|
||||
pretty::Doc::Nest(a, b) => {
|
||||
3u8.hash(state);
|
||||
a.hash(state);
|
||||
hash_doc(&*b, state);
|
||||
}
|
||||
pretty::Doc::Space => 4u8.hash(state),
|
||||
pretty::Doc::Newline => 5u8.hash(state),
|
||||
pretty::Doc::Text(t) => {
|
||||
6u8.hash(state);
|
||||
t.hash(state);
|
||||
}
|
||||
pretty::Doc::Annotated(a, b) => {
|
||||
7u8.hash(state);
|
||||
a.hash(state);
|
||||
hash_doc(&*b, state);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::hash::Hash for DebugDoc {
|
||||
fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
|
||||
hash_doc(&self.inner, state);
|
||||
}
|
||||
}
|
51
crates/nu-source/src/term_colored.rs
Normal file
@ -0,0 +1,51 @@
|
||||
use crate::pretty::ShellAnnotation;
|
||||
use pretty::{Render, RenderAnnotated};
|
||||
use std::io;
|
||||
use termcolor::WriteColor;
|
||||
|
||||
pub struct TermColored<'a, W> {
|
||||
color_stack: Vec<ShellAnnotation>,
|
||||
upstream: &'a mut W,
|
||||
}
|
||||
|
||||
impl<'a, W> TermColored<'a, W> {
|
||||
pub fn new(upstream: &'a mut W) -> TermColored<'a, W> {
|
||||
TermColored {
|
||||
color_stack: Vec::new(),
|
||||
upstream,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, W> Render for TermColored<'a, W>
|
||||
where
|
||||
W: io::Write,
|
||||
{
|
||||
type Error = io::Error;
|
||||
|
||||
fn write_str(&mut self, s: &str) -> io::Result<usize> {
|
||||
self.upstream.write(s.as_bytes())
|
||||
}
|
||||
|
||||
fn write_str_all(&mut self, s: &str) -> io::Result<()> {
|
||||
self.upstream.write_all(s.as_bytes())
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, W> RenderAnnotated<ShellAnnotation> for TermColored<'a, W>
|
||||
where
|
||||
W: WriteColor,
|
||||
{
|
||||
fn push_annotation(&mut self, ann: &ShellAnnotation) -> Result<(), Self::Error> {
|
||||
self.color_stack.push(*ann);
|
||||
self.upstream.set_color(&(*ann).into())
|
||||
}
|
||||
|
||||
fn pop_annotation(&mut self) -> Result<(), Self::Error> {
|
||||
self.color_stack.pop();
|
||||
match self.color_stack.last() {
|
||||
Some(previous) => self.upstream.set_color(&(*previous).into()),
|
||||
None => self.upstream.reset(),
|
||||
}
|
||||
}
|
||||
}
|
@ -74,6 +74,12 @@ impl From<&str> for Text {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&Text> for Text {
|
||||
fn from(text: &Text) -> Self {
|
||||
text.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl std::borrow::Borrow<str> for Text {
|
||||
fn borrow(&self) -> &str {
|
||||
&*self
|
32
crates/nu-source/src/tracable.rs
Normal file
@ -0,0 +1,32 @@
|
||||
use derive_new::new;
|
||||
use nom_locate::LocatedSpanEx;
|
||||
use nom_tracable::{HasTracableInfo, TracableInfo};
|
||||
|
||||
pub type NomSpan<'a> = LocatedSpanEx<&'a str, TracableContext>;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, new)]
|
||||
pub struct TracableContext {
|
||||
pub(crate) info: TracableInfo,
|
||||
}
|
||||
|
||||
impl HasTracableInfo for TracableContext {
|
||||
fn get_tracable_info(&self) -> TracableInfo {
|
||||
self.info
|
||||
}
|
||||
|
||||
fn set_tracable_info(self, info: TracableInfo) -> Self {
|
||||
TracableContext { info }
|
||||
}
|
||||
}
|
||||
|
||||
impl std::ops::Deref for TracableContext {
|
||||
type Target = TracableInfo;
|
||||
|
||||
fn deref(&self) -> &TracableInfo {
|
||||
&self.info
|
||||
}
|
||||
}
|
||||
|
||||
pub fn nom_input(s: &str) -> NomSpan<'_> {
|
||||
LocatedSpanEx::new_extra(s, TracableContext::new(TracableInfo::new()))
|
||||
}
|
@ -11,9 +11,9 @@ RUN apt-get update && apt-get install -y libssl-dev \
|
||||
|
||||
ARG RELEASE=false
|
||||
WORKDIR /code
|
||||
COPY ./rust-toolchain ./rust-toolchain
|
||||
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain `cat rust-toolchain`
|
||||
RUN curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "stable"
|
||||
ENV PATH=/root/.cargo/bin:$PATH
|
||||
RUN rustup update
|
||||
COPY . /code
|
||||
RUN echo "##vso[task.prependpath]/root/.cargo/bin" && \
|
||||
rustc -Vv && \
|
||||
|
@ -2,7 +2,7 @@ version: '3'
|
||||
|
||||
services:
|
||||
nushell:
|
||||
image: ${REGISTRY}/nu:${TAG}
|
||||
image: ${DOCKER_REGISTRY}/nu:${DOCKER_TAG}
|
||||
build:
|
||||
context: ..
|
||||
dockerfile: docker/Package${PATCH}.Dockerfile
|
||||
|
53
docs/commands/append.md
Normal file
@ -0,0 +1,53 @@
|
||||
# append
|
||||
This command allows you to append the given row to the table.
|
||||
|
||||
**Note**:
|
||||
- `append` does not change a file itself. If you want to save your changes, you need to run the `save` command
|
||||
- if you want to add something containing a whitespace character, you need to put it in quotation marks
|
||||
|
||||
## Examples
|
||||
|
||||
Let's add more cities to this table:
|
||||
|
||||
```shell
|
||||
> open cities.txt | lines
|
||||
━━━┯━━━━━━━━━━━━
|
||||
# │ <value>
|
||||
───┼────────────
|
||||
0 │ Canberra
|
||||
1 │ London
|
||||
2 │ Nairobi
|
||||
3 │ Washington
|
||||
━━━┷━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
You can add a new row by using `append`:
|
||||
|
||||
```shell
|
||||
> open cities.txt | lines | append Beijing
|
||||
━━━┯━━━━━━━━━━━━
|
||||
# │ <value>
|
||||
───┼────────────
|
||||
0 │ Canberra
|
||||
1 │ London
|
||||
2 │ Nairobi
|
||||
3 │ Washington
|
||||
4 │ Beijing
|
||||
━━━┷━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
It's not possible to add multiple rows at once, so you'll need to call `append` multiple times:
|
||||
|
||||
```shell
|
||||
> open cities.txt | lines | append Beijing | append "Buenos Aires"
|
||||
━━━┯━━━━━━━━━━━━━━
|
||||
# │ <value>
|
||||
───┼──────────────
|
||||
0 │ Canberra
|
||||
1 │ London
|
||||
2 │ Nairobi
|
||||
3 │ Washington
|
||||
4 │ Beijing
|
||||
5 │ Buenos Aires
|
||||
━━━┷━━━━━━━━━━━━━━
|
||||
```
|
45
docs/commands/average.md
Normal file
@ -0,0 +1,45 @@
|
||||
# average
|
||||
This command allows you to calculate the average of values in a column.
|
||||
|
||||
## Examples
|
||||
To get the average of the file sizes in a directory, simply pipe the size column from the ls command to the average command.
|
||||
|
||||
```shell
|
||||
> ls | get size | average
|
||||
━━━━━━━━━
|
||||
<value>
|
||||
━━━━━━━━━
|
||||
2282.727272727273
|
||||
━━━━━━━━━
|
||||
```
|
||||
|
||||
```shell
|
||||
> pwd | split-row / | size | get chars | average
|
||||
━━━━━━━━━
|
||||
<value>
|
||||
━━━━━━━━━
|
||||
5.250000000000000
|
||||
━━━━━━━━━
|
||||
```
|
||||
|
||||
Note that average only works for integer and byte values. If the shell doesn't recognize the values in a column as one of those types, it will return an error.
|
||||
One way to solve this is to convert each row to an integer when possible and then pipe the result to `average`
|
||||
|
||||
```shell
|
||||
> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | average
|
||||
error: Unrecognized type in stream: Primitive(String("2509000000"))
|
||||
- shell:1:0
|
||||
1 | open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | average
|
||||
| ^^^^ source
|
||||
```
|
||||
|
||||
```shell
|
||||
> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | str --to-int | average
|
||||
━━━━━━━━━━━━━━━━━━━
|
||||
<value>
|
||||
───────────────────
|
||||
3239404444.000000
|
||||
━━━━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
|
48
docs/commands/count.md
Normal file
@ -0,0 +1,48 @@
|
||||
# count
|
||||
|
||||
This command counts the number of rows in a table.
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
> ls
|
||||
━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━
|
||||
# │ name │ type │ readonly │ size │ created │ accessed │ modified
|
||||
────┼──────────────────────────────┼───────────┼──────────┼─────────┼──────────────┼──────────────┼──────────────
|
||||
0 │ Desktop │ Directory │ │ 4.1 KB │ 2 months ago │ 2 months ago │ 2 months ago
|
||||
1 │ aur │ Directory │ │ 4.1 KB │ 4 hours ago │ 4 hours ago │ 4 hours ago
|
||||
...
|
||||
75 │ .emulator_console_auth_token │ File │ │ 16 B │ 2 months ago │ 2 months ago │ 2 months ago
|
||||
76 │ bin │ Directory │ │ 4.1 KB │ 2 months ago │ 2 months ago │ 2 months ago
|
||||
━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━
|
||||
> ls | count
|
||||
━━━━━━━━━
|
||||
<value>
|
||||
─────────
|
||||
77
|
||||
━━━━━━━━━
|
||||
> ls | get name | count
|
||||
━━━━━━━━━
|
||||
<value>
|
||||
─────────
|
||||
77
|
||||
━━━━━━━━━
|
||||
> ls | where type == File | count
|
||||
━━━━━━━━━
|
||||
<value>
|
||||
─────────
|
||||
29
|
||||
━━━━━━━━━
|
||||
> ls | where type == Directory | count
|
||||
━━━━━━━━━
|
||||
<value>
|
||||
─────────
|
||||
48
|
||||
━━━━━━━━━
|
||||
> ls | where size > 2KB | count
|
||||
━━━━━━━━━
|
||||
<value>
|
||||
─────────
|
||||
57
|
||||
━━━━━━━━━
|
||||
```
|
@ -9,22 +9,22 @@ Exits the nu shell. If you have multiple nu shells, use `exit --now` to exit all
|
||||
```
|
||||
|
||||
```
|
||||
/home/username/stuff/books> shells
|
||||
---+---+------------+----------------------------
|
||||
# | | name | path
|
||||
---+---+------------+----------------------------
|
||||
0 | | filesystem | /home/username/stuff/notes
|
||||
1 | | filesystem | /home/username/stuff/videos
|
||||
2 | X | filesystem | /home/username/stuff/books
|
||||
---+---+------------+----------------------------
|
||||
/home/username/stuff/books> exit
|
||||
/home/username/stuff/videos> shells
|
||||
---+---+------------+----------------------------
|
||||
# | | name | path
|
||||
---+---+------------+----------------------------
|
||||
0 | | filesystem | /home/username/stuff/notes
|
||||
1 | X | filesystem | /home/username/stuff/videos
|
||||
---+---+------------+----------------------------
|
||||
/home/username/stuff/videos> exit --now
|
||||
> shells
|
||||
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
# │ │ name │ path
|
||||
───┼───┼────────────┼─────────────────────────────────────
|
||||
0 │ │ filesystem │ /home/jonathanturner/Source/nushell
|
||||
1 │ │ filesystem │ /home
|
||||
2 │ X │ filesystem │ /usr
|
||||
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
> exit
|
||||
> shells
|
||||
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
# │ │ name │ path
|
||||
───┼───┼────────────┼─────────────────────────────────────
|
||||
0 │ │ filesystem │ /home/jonathanturner/Source/nushell
|
||||
1 │ X │ filesystem │ /home
|
||||
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
> exit --now
|
||||
exits both the shells
|
||||
```
|
||||
|
116
docs/commands/from-csv.md
Normal file
@ -0,0 +1,116 @@
|
||||
# from-csv
|
||||
|
||||
Converts csv data into a table. Use this when nushell cannot determine the input file extension.
|
||||
|
||||
## Example
|
||||
|
||||
Let's say we have the following file :
|
||||
|
||||
```shell
|
||||
> cat pets.txt
|
||||
animal, name, age
|
||||
cat, Tom, 7
|
||||
dog, Alfred, 10
|
||||
chameleon, Linda, 1
|
||||
```
|
||||
|
||||
`pets.txt` is actually a .csv file, but because it has the .txt extension, `open` is not able to convert it into a table:
|
||||
|
||||
```shell
|
||||
> open pets.txt
|
||||
animal, name, age
|
||||
cat, Tom, 7
|
||||
dog, Alfred, 10
|
||||
chameleon, Linda, 1
|
||||
```
|
||||
|
||||
To get a table from `pets.txt`, we need to use the `from-csv` command:
|
||||
|
||||
```shell
|
||||
> open pets.txt | from-csv
|
||||
━━━┯━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━
|
||||
# │ animal │ name │ age
|
||||
───┼───────────┼─────────┼──────
|
||||
0 │ cat │ Tom │ 7
|
||||
1 │ dog │ Alfred │ 10
|
||||
2 │ chameleon │ Linda │ 1
|
||||
━━━┷━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━
|
||||
```
|
||||
|
||||
To ignore the csv headers, use `--headerless`:
|
||||
|
||||
```shell
> open pets.txt | from-csv --headerless
|
||||
━━━┯━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━
|
||||
# │ Column1 │ Column2 │ Column3
|
||||
───┼───────────┼─────────┼─────────
|
||||
0 │ dog │ Alfred │ 10
|
||||
1 │ chameleon │ Linda │ 1
|
||||
━━━┷━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━
|
||||
```
|
||||
|
||||
To split on a character other than ',', use `--separator`:
|
||||
|
||||
```shell
|
||||
> open pets.txt
|
||||
animal; name; age
|
||||
cat; Tom; 7
|
||||
dog; Alfred; 10
|
||||
chameleon; Linda; 1
|
||||
```
|
||||
|
||||
```shell
|
||||
> open pets.txt | from-csv --separator ';'
|
||||
━━━┯━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━
|
||||
# │ animal │ name │ age
|
||||
───┼───────────┼─────────┼──────
|
||||
0 │ cat │ Tom │ 7
|
||||
1 │ dog │ Alfred │ 10
|
||||
2 │ chameleon │ Linda │ 1
|
||||
━━━┷━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━
|
||||
```
|
||||
|
||||
To use this command to open a csv with separators other than a comma, use the `--raw` switch of `open` to open the csv; otherwise the csv will enter `from-csv` as a table already split on commas rather than as raw text.
|
||||
|
||||
```shell
|
||||
> mv pets.txt pets.csv
|
||||
> open pets.csv | from-csv --separator ';'
|
||||
error: Expected a string from pipeline
|
||||
- shell:1:16
|
||||
1 | open pets.csv | from-csv --separator ';'
|
||||
| ^^^^^^^^ requires string input
|
||||
- shell:1:0
|
||||
1 | open pets.csv | from-csv --separator ';'
|
||||
| value originates from here
|
||||
|
||||
> open pets.csv --raw | from-csv --separator ';'
|
||||
━━━┯━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━
|
||||
# │ animal │ name │ age
|
||||
───┼───────────┼─────────┼──────
|
||||
0 │ cat │ Tom │ 7
|
||||
1 │ dog │ Alfred │ 10
|
||||
2 │ chameleon │ Linda │ 1
|
||||
━━━┷━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━
|
||||
```
|
||||
|
||||
The string '\t' can be used to separate on tabs. Note that this is the same as using the from-tsv command.
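
For instance, a minimal sketch of splitting on tabs, assuming `pets.txt` is tab-separated (output omitted); this is equivalent to piping the text through `from-tsv`:

```shell
> open pets.txt | from-csv --separator '\t'
```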
|
||||
|
||||
Newlines '\n' are not acceptable separators.
|
||||
|
||||
Note that separators are currently provided as strings and need to be wrapped in quotes.
|
||||
|
||||
```shell
|
||||
> open pets.csv --raw | from-csv --separator ;
|
||||
- shell:1:43
|
||||
1 | open pets.csv --raw | from-csv --separator ;
|
||||
| ^
|
||||
```
|
||||
|
||||
It is also considered an error to use a separator longer than one character:
|
||||
|
||||
```shell
|
||||
> open pets.txt | from-csv --separator '123'
|
||||
error: Expected a single separator char from --separator
|
||||
- shell:1:37
|
||||
1 | open pets.txt | from-csv --separator '123'
|
||||
| ^^^^^ requires a single character string input
|
||||
```
|
33
docs/commands/from-json.md
Normal file
@ -0,0 +1,33 @@
|
||||
# from-json
|
||||
|
||||
Parse text as `.json` and create a table. Use this when nushell cannot determine the input file extension.
|
||||
|
||||
Syntax: `from-json {flags}`
|
||||
|
||||
### Flags:
|
||||
|
||||
--objects
|
||||
treat each line as a separate value
|
||||
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
> open command_from-json
|
||||
[
|
||||
{
|
||||
title: "from-json",
|
||||
type: "command",
|
||||
flags: true
|
||||
}
|
||||
]
|
||||
```
|
||||
|
||||
```shell
|
||||
> open command_from-json | from-json
|
||||
━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━
|
||||
title │ type │ flags
|
||||
───────────┼─────────┼───────
|
||||
from-json │ command │ Yes
|
||||
━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━
|
||||
```
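
The `--objects` flag listed above is meant for input that holds one JSON value per line rather than a single document. A minimal sketch, assuming a hypothetical `events.jsonl` file with one object per line (output omitted):

```shell
> open events.jsonl --raw | from-json --objects
```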
|
23
docs/commands/from-toml.md
Normal file
@ -0,0 +1,23 @@
|
||||
# from-toml
|
||||
Converts toml data into a table. Use this when nushell cannot determine the input file extension.
|
||||
|
||||
## Example
|
||||
Let's say we have the following Rust .lock file:
|
||||
```shell
|
||||
> open Cargo.lock
|
||||
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.

[[package]]
name = "adler32"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
...
|
||||
```
|
||||
|
||||
The "Cargo.lock" file is actually a .toml file, but the file extension isn't .toml. That's okay, we can use the `from-toml` command :
|
||||
|
||||
|
||||
```shell
|
||||
> open Cargo.lock | from-toml
|
||||
━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━
|
||||
metadata │ package
|
||||
────────────────┼───────────────────
|
||||
[table: 1 row] │ [table: 154 rows]
|
||||
━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━
|
||||
```
|
24
docs/commands/from-yaml.md
Normal file
@ -0,0 +1,24 @@
|
||||
# from-yaml
|
||||
|
||||
Parse text as `.yaml/.yml` and create table.
|
||||
|
||||
Syntax: `from-yaml`
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
> open command_from-yaml
|
||||
title: from-yaml
|
||||
type: command
|
||||
flags: false
|
||||
```
|
||||
|
||||
```shell
|
||||
> open command_from-yaml | from-yaml
|
||||
━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━
|
||||
title │ type │ flags
|
||||
───────────┼─────────┼───────
|
||||
from-yaml │ command │ No
|
||||
━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━
|
||||
|
||||
```
|
72
docs/commands/group-by.md
Normal file
@ -0,0 +1,72 @@
|
||||
# group-by
|
||||
|
||||
This command creates a new table with the data from the table rows grouped by the column given.
|
||||
|
||||
## Examples
|
||||
|
||||
Let's say we have this table of all countries in the world sorted by their population:
|
||||
|
||||
```shell
|
||||
> open countries_by_population.json | from-json | first 10
|
||||
━━━┯━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━
|
||||
# │ rank │ country or area │ UN continental region │ UN statistical region │ population 2018 │ population 2019 │ change
|
||||
───┼──────┼─────────────────┼───────────────────────┼───────────────────────┼─────────────────┼─────────────────┼────────
|
||||
0 │ 1 │ China │ Asia │ Eastern Asia │ 1,427,647,786 │ 1,433,783,686 │ +0.4%
|
||||
1 │ 2 │ India │ Asia │ Southern Asia │ 1,352,642,280 │ 1,366,417,754 │ +1.0%
|
||||
2 │ 3 │ United States │ Americas │ Northern America │ 327,096,265 │ 329,064,917 │ +0.6%
|
||||
3 │ 4 │ Indonesia │ Asia │ South-eastern Asia │ 267,670,543 │ 270,625,568 │ +1.1%
|
||||
4 │ 5 │ Pakistan │ Asia │ Southern Asia │ 212,228,286 │ 216,565,318 │ +2.0%
|
||||
5 │ 6 │ Brazil │ Americas │ South America │ 209,469,323 │ 211,049,527 │ +0.8%
|
||||
6 │ 7 │ Nigeria │ Africa │ Western Africa │ 195,874,683 │ 200,963,599 │ +2.6%
|
||||
7 │ 8 │ Bangladesh │ Asia │ Southern Asia │ 161,376,708 │ 163,046,161 │ +1.0%
|
||||
8 │ 9 │ Russia │ Europe │ Eastern Europe │ 145,734,038 │ 145,872,256 │ +0.1%
|
||||
9 │ 10 │ Mexico │ Americas │ Central America │ 126,190,788 │ 127,575,529 │ +1.1%
|
||||
━━━┷━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━
|
||||
```
|
||||
|
||||
Here we have listed only the first 10 rows. In total this table has 233 rows, which is too big to get information out of easily.
|
||||
|
||||
We can use the `group-by` command on 'UN continental region' to create a table per continental region.
|
||||
|
||||
```shell
|
||||
> open countries_by_population.json | from-json | group-by "UN continental region"
|
||||
━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━
|
||||
Asia │ Americas │ Africa │ Europe │ Oceania
|
||||
──────────────────┼──────────────────┼──────────────────┼──────────────────┼──────────────────
|
||||
[table: 51 rows] │ [table: 53 rows] │ [table: 58 rows] │ [table: 48 rows] │ [table: 23 rows]
|
||||
━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
Now we can already get some information, like "which continental regions are there" and "how many countries are in each region".
If we want to see only the countries in the continental region of Oceania, we can type:
|
||||
|
||||
```shell
|
||||
> open countries_by_population.json | from-json | group-by "UN continental region" | get Oceania
|
||||
━━━━┯━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━
|
||||
# │ rank │ country or area │ UN continental region │ UN statistical region │ population 2018 │ population 2019 │ change
|
||||
────┼──────┼────────────────────────────────┼───────────────────────┼───────────────────────────┼─────────────────┼─────────────────┼────────
|
||||
0 │ 55 │ Australia │ Oceania │ Australia and New Zealand │ 24,898,152 │ 25,203,198 │ +1.2%
|
||||
1 │ 98 │ Papua New Guinea │ Oceania │ Melanesia │ 8,606,323 │ 8,776,109 │ +2.0%
|
||||
2 │ 125 │ New Zealand │ Oceania │ Australia and New Zealand │ 4,743,131 │ 4,783,063 │ +0.8%
|
||||
3 │ 161 │ Fiji │ Oceania │ Melanesia │ 883,483 │ 889,953 │ +0.7%
|
||||
4 │ 166 │ Solomon Islands │ Oceania │ Melanesia │ 652,857 │ 669,823 │ +2.6%
|
||||
5 │ 181 │ Vanuatu │ Oceania │ Melanesia │ 292,680 │ 299,882 │ +2.5%
|
||||
6 │ 183 │ New Caledonia │ Oceania │ Melanesia │ 279,993 │ 282,750 │ +1.0%
|
||||
7 │ 185 │ French Polynesia │ Oceania │ Polynesia │ 277,679 │ 279,287 │ +0.6%
|
||||
8 │ 188 │ Samoa │ Oceania │ Polynesia │ 196,129 │ 197,097 │ +0.5%
|
||||
9 │ 191 │ Guam │ Oceania │ Micronesia │ 165,768 │ 167,294 │ +0.9%
|
||||
10 │ 193 │ Kiribati │ Oceania │ Micronesia │ 115,847 │ 117,606 │ +1.5%
|
||||
11 │ 194 │ Federated States of Micronesia │ Oceania │ Micronesia │ 112,640 │ 113,815 │ +1.0%
|
||||
12 │ 196 │ Tonga │ Oceania │ Polynesia │ 110,589 │ 110,940 │ +0.3%
|
||||
13 │ 207 │ Marshall Islands │ Oceania │ Micronesia │ 58,413 │ 58,791 │ +0.6%
|
||||
14 │ 209 │ Northern Mariana Islands │ Oceania │ Micronesia │ 56,882 │ 56,188 │ −1.2%
|
||||
15 │ 210 │ American Samoa │ Oceania │ Polynesia │ 55,465 │ 55,312 │ −0.3%
|
||||
16 │ 221 │ Palau │ Oceania │ Micronesia │ 17,907 │ 18,008 │ +0.6%
|
||||
17 │ 222 │ Cook Islands │ Oceania │ Polynesia │ 17,518 │ 17,548 │ +0.2%
|
||||
18 │ 224 │ Tuvalu │ Oceania │ Polynesia │ 11,508 │ 11,646 │ +1.2%
|
||||
19 │ 225 │ Wallis and Futuna │ Oceania │ Polynesia │ 11,661 │ 11,432 │ −2.0%
|
||||
20 │ 226 │ Nauru │ Oceania │ Micronesia │ 10,670 │ 10,756 │ +0.8%
|
||||
21 │ 231 │ Niue │ Oceania │ Polynesia │ 1,620 │ 1,615 │ −0.3%
|
||||
22 │ 232 │ Tokelau │ Oceania │ Polynesia │ 1,319 │ 1,340 │ +1.6%
|
||||
━━━━┷━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━
|
||||
```
|
80
docs/commands/histogram.md
Normal file
@ -0,0 +1,80 @@
|
||||
# histogram
|
||||
|
||||
Creates a new table with a histogram based on the column name passed in.
|
||||
|
||||
Syntax: `histogram <column_name> ...args`
|
||||
|
||||
### Parameters
|
||||
|
||||
* `<column-name>`: name of the column to graph by
|
||||
* `args`: column name to give the histogram's frequency column
|
||||
|
||||
## Examples
|
||||
|
||||
Let's say we have this file `random_numbers.csv`, which contains 50 random numbers.
|
||||
|
||||
**Note**: The input doesn't have to be numbers; it works on strings too. Try it out.
|
||||
|
||||
```shell
|
||||
> open random_numbers.csv
|
||||
|
||||
━━━━┯━━━━━━━━━━━━━━━━
|
||||
# │ random numbers
|
||||
────┼────────────────
|
||||
0 │ 0
|
||||
1 │ 5
|
||||
2 │ 5
|
||||
...
|
||||
47 │ 0
|
||||
48 │ 2
|
||||
49 │ 4
|
||||
━━━━┷━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
If we now want to see how often the different numbers were generated, we can use the `histogram` command:
|
||||
|
||||
```shell
|
||||
> open random_numbers2.csv | histogram "random numbers"
|
||||
━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
# │ random numbers │ frequency
|
||||
───┼────────────────┼──────────────────────────────────────────────────────────────────────────────────────────────────────
|
||||
0 │ 0 │ ****************************************************************************************************
|
||||
1 │ 1 │ ******************************
|
||||
2 │ 2 │ *************************************************************
|
||||
3 │ 3 │ *********************************************************************
|
||||
4 │ 4 │ *****************************************************
|
||||
5 │ 5 │ *********************************************************************
|
||||
━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
We can also set the name of the second column or sort the table:
|
||||
|
||||
```shell
|
||||
> open random_numbers2.csv | histogram "random numbers" probability
|
||||
━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
# │ random numbers │ probability
|
||||
───┼────────────────┼──────────────────────────────────────────────────────────────────────────────────────────────────────
|
||||
0 │ 0 │ ****************************************************************************************************
|
||||
1 │ 1 │ ******************************
|
||||
2 │ 2 │ *************************************************************
|
||||
3 │ 3 │ *********************************************************************
|
||||
4 │ 4 │ *****************************************************
|
||||
5 │ 5 │ *********************************************************************
|
||||
━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
|
||||
```
|
||||
|
||||
```shell
|
||||
> open random_numbers2.csv | histogram "random numbers" probability | sort-by probability
|
||||
━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
# │ random numbers │ probability
|
||||
───┼────────────────┼──────────────────────────────────────────────────────────────────────────────────────────────────────
|
||||
0 │ 1 │ ******************************
|
||||
1 │ 4 │ *****************************************************
|
||||
2 │ 2 │ *************************************************************
|
||||
3 │ 3 │ *********************************************************************
|
||||
4 │ 5 │ *********************************************************************
|
||||
5 │ 0 │ ****************************************************************************************************
|
||||
━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
|
||||
```
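
As the note above says, the input doesn't have to be numeric; a minimal sketch using the string `type` column from `ls` (output omitted):

```shell
> ls | histogram type
```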
|
17
docs/commands/history.md
Normal file
@ -0,0 +1,17 @@
|
||||
# history
|
||||
|
||||
Displays the last 100 commands.
|
||||
|
||||
## Example
|
||||
|
||||
```shell
|
||||
> history
|
||||
━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
# │ <value>
|
||||
────┼───────────────────────────────────────────────────────────────────────────
|
||||
...
|
||||
97 │ ls
|
||||
98 │ ls | where accessed < 1d
|
||||
99 │ cd
|
||||
━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
```
|
@ -6,17 +6,17 @@ This command increments the value of variable by one.
|
||||
|
||||
```shell
|
||||
> open rustfmt.toml
|
||||
---------
|
||||
━━━━━━━━━
|
||||
edition
|
||||
---------
|
||||
─────────
|
||||
2018
|
||||
---------
|
||||
━━━━━━━━━
|
||||
> open rustfmt.toml | inc edition
|
||||
---------
|
||||
━━━━━━━━━
|
||||
edition
|
||||
---------
|
||||
─────────
|
||||
2019
|
||||
---------
|
||||
━━━━━━━━━
|
||||
```
|
||||
|
||||
```shell
|
||||
|
@ -3,10 +3,13 @@
|
||||
This command returns the nth row of a table, starting from 0.
|
||||
If the number given is less than 0 or more than the number of rows, nothing is returned.
|
||||
|
||||
## Usage
|
||||
### Usage
|
||||
```shell
|
||||
> [input-command] | nth [row-number]
|
||||
> [input-command] | nth <row number> ...args
|
||||
```
|
||||
### Parameters:
|
||||
* `<row number>` the number of the row to return
|
||||
* `args`: Optionally return more rows
|
||||
|
||||
## Examples
|
||||
```shell
|
||||
@ -21,11 +24,19 @@ If the number given is less than 0 or more than the number of rows, nothing is r
|
||||
━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━
|
||||
|
||||
> ls | nth 0
|
||||
━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━
|
||||
name │ type │ readonly │ size │ accessed │ modified
|
||||
────────────┼──────┼──────────┼────────┼───────────────┼───────────────
|
||||
Cargo.toml │ File │ │ 239 B │ 2 minutes ago │ 2 minutes ago
|
||||
━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━
|
||||
━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━
|
||||
# │ name │ type │ readonly │ size │ accessed │ modified
|
||||
───┼────────────┼───────────┼──────────┼────────┼───────────────┼───────────────
|
||||
0 │ Cargo.toml │ File │ │ 239 B │ 2 minutes ago │ 2 minutes ago
|
||||
━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━
|
||||
|
||||
> ls | nth 0 2
|
||||
━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━
|
||||
# │ name │ type │ readonly │ size │ accessed │ modified
|
||||
───┼────────────┼───────────┼──────────┼────────┼───────────────┼───────────────
|
||||
0 │ Cargo.toml │ File │ │ 239 B │ 2 minutes ago │ 2 minutes ago
|
||||
2 │ .gitignore │ File │ │ 19 B │ 2 minutes ago │ 2 minutes ago
|
||||
━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━
|
||||
|
||||
> ls | nth 5
|
||||
```
|
53
docs/commands/pick.md
Normal file
@ -0,0 +1,53 @@
|
||||
# pick
|
||||
|
||||
This command displays only the columns whose names are passed to it.
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
> ls
|
||||
━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━
|
||||
# │ name │ type │ readonly │ size │ created │ accessed │ modified
|
||||
───┼────────────────────────────┼──────┼──────────┼────────┼─────────────┼─────────────┼─────────────
|
||||
0 │ zeusiscrazy.txt │ File │ │ 556 B │ a month ago │ a month ago │ a month ago
|
||||
1 │ coww.txt │ File │ │ 24 B │ a month ago │ a month ago │ a month ago
|
||||
2 │ randomweirdstuff.txt │ File │ │ 197 B │ a month ago │ a month ago │ a month ago
|
||||
3 │ abaracadabra.txt │ File │ │ 401 B │ a month ago │ a month ago │ a month ago
|
||||
4 │ youshouldeatmorecereal.txt │ File │ │ 768 B │ a month ago │ a month ago │ a month ago
|
||||
━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━
|
||||
> ls | pick name
|
||||
━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
# │ name
|
||||
───┼────────────────────────────
|
||||
0 │ zeusiscrazy.txt
|
||||
1 │ coww.txt
|
||||
2 │ randomweirdstuff.txt
|
||||
3 │ abaracadabra.txt
|
||||
4 │ youshouldeatmorecereal.txt
|
||||
━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
The order in which you put the column names matters:
|
||||
|
||||
```shell
|
||||
> ls | pick type name size
|
||||
━━━┯━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━
|
||||
# │ type │ name │ size
|
||||
───┼──────┼────────────────────────────┼────────
|
||||
0 │ File │ zeusiscrazy.txt │ 556 B
|
||||
1 │ File │ coww.txt │ 24 B
|
||||
2 │ File │ randomweirdstuff.txt │ 197 B
|
||||
3 │ File │ abaracadabra.txt │ 401 B
|
||||
4 │ File │ youshouldeatmorecereal.txt │ 768 B
|
||||
━━━┷━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━
|
||||
> ls | pick size type name
|
||||
━━━┯━━━━━━━━┯━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
# │ size │ type │ name
|
||||
───┼────────┼──────┼────────────────────────────
|
||||
0 │ 556 B │ File │ zeusiscrazy.txt
|
||||
1 │ 24 B │ File │ coww.txt
|
||||
2 │ 197 B │ File │ randomweirdstuff.txt
|
||||
3 │ 401 B │ File │ abaracadabra.txt
|
||||
4 │ 768 B │ File │ youshouldeatmorecereal.txt
|
||||
━━━┷━━━━━━━━┷━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
```
|
75
docs/commands/pivot.md
Normal file
@ -0,0 +1,75 @@
|
||||
# pivot
|
||||
|
||||
Pivots the table contents so rows become columns and columns become rows.
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
> ls docs
|
||||
━━━┯━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━
|
||||
# │ name │ type │ readonly │ size │ accessed │ modified
|
||||
───┼────────────────────┼───────────┼──────────┼────────┼─────────────┼─────────────
|
||||
0 │ docs/commands │ Directory │ │ 4.1 KB │ an hour ago │ an hour ago
|
||||
1 │ docs/docker.md │ File │ │ 7.0 KB │ an hour ago │ a day ago
|
||||
2 │ docs/philosophy.md │ File │ │ 896 B │ an hour ago │ a day ago
|
||||
━━━┷━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━
|
||||
|
||||
> ls docs | pivot
|
||||
━━━┯━━━━━━━━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━
|
||||
# │ Column0 │ Column1 │ Column2 │ Column3
|
||||
───┼──────────┼───────────────┼────────────────┼────────────────────
|
||||
0 │ name │ docs/commands │ docs/docker.md │ docs/philosophy.md
|
||||
1 │ type │ Directory │ File │ File
|
||||
2 │ readonly │ │ │
|
||||
3 │ size │ 4.1 KB │ 7.0 KB │ 896 B
|
||||
4 │ accessed │ an hour ago │ an hour ago │ an hour ago
|
||||
5 │ modified │ an hour ago │ a day ago │ a day ago
|
||||
━━━┷━━━━━━━━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
Use `--header-row` to treat the first row as column names:
|
||||
|
||||
```shell
|
||||
> ls docs | pivot --header-row
|
||||
━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━
|
||||
# │ docs/commands │ docs/docker.md │ docs/philosophy.md
|
||||
───┼───────────────┼────────────────┼────────────────────
|
||||
0 │ Directory │ File │ File
|
||||
1 │ │ │
|
||||
2 │ 4.1 KB │ 7.0 KB │ 896 B
|
||||
3 │ an hour ago │ an hour ago │ an hour ago
|
||||
4 │ an hour ago │ a day ago │ a day ago
|
||||
━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
Use `--ignore-titles` to prevent pivoting the column names into values:
|
||||
|
||||
```shell
|
||||
> ls docs | pivot --ignore-titles
|
||||
━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━
|
||||
# │ Column0 │ Column1 │ Column2
|
||||
───┼───────────────┼────────────────┼────────────────────
|
||||
0 │ docs/commands │ docs/docker.md │ docs/philosophy.md
|
||||
1 │ Directory │ File │ File
|
||||
2 │ │ │
|
||||
3 │ 4.1 KB │ 7.0 KB │ 896 B
|
||||
4 │ an hour ago │ an hour ago │ an hour ago
|
||||
5 │ an hour ago │ a day ago │ a day ago
|
||||
━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
Additional arguments are used as column names:
|
||||
|
||||
```shell
|
||||
> ls docs | pivot foo bar baz
|
||||
━━━┯━━━━━━━━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━
|
||||
# │ foo │ bar │ baz │ Column3
|
||||
───┼──────────┼───────────────┼────────────────┼────────────────────
|
||||
0 │ name │ docs/commands │ docs/docker.md │ docs/philosophy.md
|
||||
1 │ type │ Directory │ File │ File
|
||||
2 │ readonly │ │ │
|
||||
3 │ size │ 4.1 KB │ 7.0 KB │ 896 B
|
||||
4 │ accessed │ 2 hours ago │ 2 hours ago │ 2 hours ago
|
||||
5 │ modified │ 2 hours ago │ a day ago │ a day ago
|
||||
━━━┷━━━━━━━━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━
|
||||
```
|
56
docs/commands/prepend.md
Normal file
@ -0,0 +1,56 @@
|
||||
# prepend
|
||||
This command prepends the given row to the front of the table.
|
||||
|
||||
**Note**:
|
||||
- `prepend` does not change a file itself. If you want to save your changes, you need to run the `save` command
|
||||
- if you want to add something containing a whitespace character, you need to put it in quotation marks
|
||||
|
||||
## Examples
|
||||
|
||||
Let's complete this table with the missing continents:
|
||||
|
||||
```shell
|
||||
> open continents.txt | lines
|
||||
━━━┯━━━━━━━━━━━━━━━
|
||||
# │ <value>
|
||||
───┼───────────────
|
||||
0 │ Africa
|
||||
1 │ South America
|
||||
2 │ Australia
|
||||
3 │ Europe
|
||||
4 │ Antarctica
|
||||
━━━┷━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
You can add a new row at the top by using `prepend`:
|
||||
|
||||
```shell
|
||||
> open continents.txt | lines | prepend Asia
|
||||
━━━┯━━━━━━━━━━━━━━━
|
||||
# │ <value>
|
||||
───┼───────────────
|
||||
0 │ Asia
|
||||
1 │ Africa
|
||||
2 │ South America
|
||||
3 │ Australia
|
||||
4 │ Europe
|
||||
5 │ Antarctica
|
||||
━━━┷━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
It's not possible to add multiple rows at once, so you'll need to call `prepend` multiple times:
|
||||
|
||||
```shell
|
||||
> open continents.txt | lines | prepend Asia | prepend "North America"
|
||||
━━━┯━━━━━━━━━━━━━━━
|
||||
# │ <value>
|
||||
───┼───────────────
|
||||
0 │ North America
|
||||
1 │ Asia
|
||||
2 │ Africa
|
||||
3 │ South America
|
||||
4 │ Australia
|
||||
5 │ Europe
|
||||
6 │ Antarctica
|
||||
━━━┷━━━━━━━━━━━━━━━
|
||||
```
|
38
docs/commands/reject.md
Normal file
@ -0,0 +1,38 @@
|
||||
# reject
|
||||
|
||||
This command removes or rejects the columns passed to it.
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
> ls
|
||||
━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━
|
||||
# │ name │ type │ readonly │ size │ created │ accessed │ modified
|
||||
───┼────────────────────────────┼──────┼──────────┼────────┼─────────────┼─────────────┼─────────────
|
||||
0 │ zeusiscrazy.txt │ File │ │ 556 B │ a month ago │ a month ago │ a month ago
|
||||
1 │ coww.txt │ File │ │ 24 B │ a month ago │ a month ago │ a month ago
|
||||
2 │ randomweirdstuff.txt │ File │ │ 197 B │ a month ago │ a month ago │ a month ago
|
||||
3 │ abaracadabra.txt │ File │ │ 401 B │ a month ago │ a month ago │ a month ago
|
||||
4 │ youshouldeatmorecereal.txt │ File │ │ 768 B │ a month ago │ a month ago │ a month ago
|
||||
━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━
|
||||
> ls | reject readonly
|
||||
━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━
|
||||
# │ name │ type │ size │ created │ accessed │ modified
|
||||
───┼────────────────────────────┼──────┼────────┼─────────────┼─────────────┼─────────────
|
||||
0 │ zeusiscrazy.txt │ File │ 556 B │ a month ago │ a month ago │ a month ago
|
||||
1 │ coww.txt │ File │ 24 B │ a month ago │ a month ago │ a month ago
|
||||
2 │ randomweirdstuff.txt │ File │ 197 B │ a month ago │ a month ago │ a month ago
|
||||
3 │ abaracadabra.txt │ File │ 401 B │ a month ago │ a month ago │ a month ago
|
||||
4 │ youshouldeatmorecereal.txt │ File │ 768 B │ a month ago │ a month ago │ a month ago
|
||||
━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━
|
||||
> ls | reject readonly accessed
|
||||
━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━
|
||||
# │ name │ type │ size │ created │ modified
|
||||
───┼────────────────────────────┼──────┼────────┼─────────────┼─────────────
|
||||
0 │ zeusiscrazy.txt │ File │ 556 B │ a month ago │ a month ago
|
||||
1 │ coww.txt │ File │ 24 B │ a month ago │ a month ago
|
||||
2 │ randomweirdstuff.txt │ File │ 197 B │ a month ago │ a month ago
|
||||
3 │ abaracadabra.txt │ File │ 401 B │ a month ago │ a month ago
|
||||
4 │ youshouldeatmorecereal.txt │ File │ 768 B │ a month ago │ a month ago
|
||||
━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━
|
||||
```
|
@ -6,21 +6,21 @@ Lists all the active nu shells with a number/index, a name and the path. Also ma
|
||||
|
||||
```
|
||||
> shells
|
||||
---+---+------------+---------------
|
||||
# | | name | path
|
||||
---+---+------------+---------------
|
||||
0 | | filesystem | /usr
|
||||
1 | | filesystem | /home
|
||||
2 | X | filesystem | /home/username
|
||||
---+---+------------+---------------
|
||||
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
# │ │ name │ path
|
||||
───┼───┼────────────┼─────────────────────────────────────
|
||||
0 │ │ filesystem │ /home/jonathanturner/Source/nushell
|
||||
1 │ │ filesystem │ /usr
|
||||
2 │ X │ filesystem │ /home
|
||||
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
```
|
||||
/> shells
|
||||
---+---+-------------------------------------------------+------------------------------------
|
||||
# | | name | path
|
||||
---+---+-------------------------------------------------+------------------------------------
|
||||
0 | | filesystem | /Users/username/Code/nushell
|
||||
1 | X | {/Users/username/Code/nushell/Cargo.toml} | /
|
||||
---+---+-------------------------------------------------+------------------------------------
|
||||
━━━┯━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
# │ │ name │ path
|
||||
───┼───┼──────────────────────────────────────────────────┼─────────────────────────────────────
|
||||
0 │ │ filesystem │ /home/jonathanturner/Source/nushell
|
||||
1 │ X │ {/home/jonathanturner/Source/nushell/Cargo.toml} │ /
|
||||
━━━┷━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
20
docs/commands/size.md
Normal file
@ -0,0 +1,20 @@
|
||||
# size
|
||||
|
||||
This command gives word count statistics on any text.
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
> open lalala.txt | size
|
||||
━━━━━━━┯━━━━━━━┯━━━━━━━┯━━━━━━━━━━━━
|
||||
lines │ words │ chars │ max length
|
||||
───────┼───────┼───────┼────────────
|
||||
4 │ 10 │ 72 │ 72
|
||||
━━━━━━━┷━━━━━━━┷━━━━━━━┷━━━━━━━━━━━━
|
||||
> open the_mysterious_affair_at_styles.txt | size
|
||||
━━━━━━━┯━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━
|
||||
lines │ words │ chars │ max length
|
||||
───────┼───────┼────────┼────────────
|
||||
8935 │ 62352 │ 349459 │ 361771
|
||||
━━━━━━━┷━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━
|
||||
```
|
56
docs/commands/sort-by.md
Normal file
@ -0,0 +1,56 @@
|
||||
|
||||
# sort-by
|
||||
|
||||
The `sort-by` command sorts the table being displayed in the terminal by one or more chosen columns.
|
||||
|
||||
`sort-by` takes multiple arguments (the names of columns), sorting by each argument in order.
|
||||
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
/home/example> ls | sort-by size
|
||||
━━━┯━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━
|
||||
# │ name │ type │ readonly │ size │ accessed │ modified
|
||||
───┼──────┼──────┼──────────┼────────┼────────────────┼────────────────
|
||||
0 │ az │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago
|
||||
1 │ a │ File │ │ 18 B │ 4 minutes ago │ 38 minutes ago
|
||||
2 │ ad │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago
|
||||
3 │ ac │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago
|
||||
4 │ ab │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago
|
||||
5 │ c │ File │ │ 102 B │ 35 minutes ago │ 35 minutes ago
|
||||
6 │ d │ File │ │ 189 B │ 35 minutes ago │ 34 minutes ago
|
||||
7 │ b │ File │ │ 349 B │ 35 minutes ago │ 35 minutes ago
|
||||
━━━┷━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
```shell
|
||||
/home/example> ls | sort-by size name
|
||||
━━━┯━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━
|
||||
# │ name │ type │ readonly │ size │ accessed │ modified
|
||||
───┼──────┼──────┼──────────┼────────┼────────────────┼────────────────
|
||||
0 │ a │ File │ │ 18 B │ 4 minutes ago │ 39 minutes ago
|
||||
1 │ ab │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago
|
||||
2 │ ac │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago
|
||||
3 │ ad │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago
|
||||
4 │ az │ File │ │ 18 B │ 4 minutes ago │ 4 minutes ago
|
||||
5 │ c │ File │ │ 102 B │ 36 minutes ago │ 35 minutes ago
|
||||
6 │ d │ File │ │ 189 B │ 35 minutes ago │ 35 minutes ago
|
||||
7 │ b │ File │ │ 349 B │ 36 minutes ago │ 36 minutes ago
|
||||
```
|
||||
|
||||
```
|
||||
/home/example> ls | sort-by accessed
|
||||
━━━┯━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━
|
||||
# │ name │ type │ readonly │ size │ accessed │ modified
|
||||
───┼──────┼──────┼──────────┼────────┼────────────────┼────────────────
|
||||
0 │ b │ File │ │ 349 B │ 37 minutes ago │ 37 minutes ago
|
||||
1 │ c │ File │ │ 102 B │ 37 minutes ago │ 37 minutes ago
|
||||
2 │ d │ File │ │ 189 B │ 37 minutes ago │ 36 minutes ago
|
||||
3 │ a │ File │ │ 18 B │ 6 minutes ago │ 40 minutes ago
|
||||
4 │ ab │ File │ │ 18 B │ 6 minutes ago │ 6 minutes ago
|
||||
5 │ ac │ File │ │ 18 B │ 6 minutes ago │ 6 minutes ago
|
||||
6 │ ad │ File │ │ 18 B │ 5 minutes ago │ 5 minutes ago
|
||||
7 │ az │ File │ │ 18 B │ 5 minutes ago │ 5 minutes ago
|
||||
━━━┷━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━
|
||||
```
|
72
docs/commands/split-column.md
Normal file
@ -0,0 +1,72 @@
|
||||
# split-column
|
||||
|
||||
Split row contents across multiple columns via the separator.
|
||||
|
||||
Syntax: `split-column <separator> ...args {flags}`
|
||||
|
||||
### Parameters
|
||||
|
||||
* `<separator>`: string that denotes what separates columns
|
||||
* `args`: column names to give the new columns. If not specified they will be set to `Column1` `Column2` ...
|
||||
|
||||
### Flags
|
||||
|
||||
--collapse-empty
|
||||
Removes empty columns
|
||||
|
||||
## Examples
|
||||
|
||||
If we have a file structured like this:
|
||||
|
||||
```shell
|
||||
0.12643678160919541 | 0.6851851851851852 | 0.273972602739726
|
||||
0.28735632183908044 | 0.09259259259259259 | 0.6986301369863014
|
||||
0.8045977011494253 | 0.8148148148148148 | 0.7397260273972602
|
||||
0.28735632183908044 | 0.09259259259259259 | 0.547945205479452
|
||||
0.6896551724137931 | 0.7037037037037037 | 1.2465753424657535
|
||||
0.6896551724137931 | 0.8333333333333334 | 0.4657534246575342
|
||||
0.9080459770114943 | 1.3333333333333333 | 0.4931506849315068
|
||||
0.9310344827586207 | 1.1296296296296295 | 0.7123287671232876
|
||||
0.3448275862068966 | 0.018518518518518517 | 0.6575342465753424
|
||||
1.0459770114942528 | 1.0925925925925926 | 0.6164383561643836
|
||||
```
|
||||
|
||||
We can build a table from it using the `split-column` command:
|
||||
|
||||
```shell
|
||||
> open coordinates.txt | lines | split-column " | "
|
||||
━━━┯━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━
|
||||
# │ Column1 │ Column2 │ Column3
|
||||
───┼─────────────────────┼──────────────────────┼────────────────────
|
||||
0 │ 0.12643678160919541 │ 0.6851851851851852 │ 0.273972602739726
|
||||
1 │ 0.28735632183908044 │ 0.09259259259259259 │ 0.6986301369863014
|
||||
2 │ 0.8045977011494253 │ 0.8148148148148148 │ 0.7397260273972602
|
||||
3 │ 0.28735632183908044 │ 0.09259259259259259 │ 0.547945205479452
|
||||
4 │ 0.6896551724137931 │ 0.7037037037037037 │ 1.2465753424657535
|
||||
5 │ 0.6896551724137931 │ 0.8333333333333334 │ 0.4657534246575342
|
||||
6 │ 0.9080459770114943 │ 1.3333333333333333 │ 0.4931506849315068
|
||||
7 │ 0.9310344827586207 │ 1.1296296296296295 │ 0.7123287671232876
|
||||
8 │ 0.3448275862068966 │ 0.018518518518518517 │ 0.6575342465753424
|
||||
9 │ 1.0459770114942528 │ 1.0925925925925926 │ 0.6164383561643836
|
||||
━━━┷━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
And give names to the columns:
|
||||
|
||||
```shell
|
||||
> open coordinates.txt | lines | split-column " | " x y z
|
||||
━━━┯━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━
|
||||
# │ x │ y │ z
|
||||
───┼─────────────────────┼──────────────────────┼────────────────────
|
||||
0 │ 0.12643678160919541 │ 0.6851851851851852 │ 0.273972602739726
|
||||
1 │ 0.28735632183908044 │ 0.09259259259259259 │ 0.6986301369863014
|
||||
2 │ 0.8045977011494253 │ 0.8148148148148148 │ 0.7397260273972602
|
||||
3 │ 0.28735632183908044 │ 0.09259259259259259 │ 0.547945205479452
|
||||
4 │ 0.6896551724137931 │ 0.7037037037037037 │ 1.2465753424657535
|
||||
5 │ 0.6896551724137931 │ 0.8333333333333334 │ 0.4657534246575342
|
||||
6 │ 0.9080459770114943 │ 1.3333333333333333 │ 0.4931506849315068
|
||||
7 │ 0.9310344827586207 │ 1.1296296296296295 │ 0.7123287671232876
|
||||
8 │ 0.3448275862068966 │ 0.018518518518518517 │ 0.6575342465753424
|
||||
9 │ 1.0459770114942528 │ 1.0925925925925926 │ 0.6164383561643836
|
||||
━━━┷━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━
|
||||
```
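
The `--collapse-empty` flag listed above drops the empty columns produced when two separators sit next to each other. A minimal sketch, assuming a row with an empty middle field (output omitted):

```shell
> echo "a,,b" | split-column "," --collapse-empty
```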
|
34
docs/commands/split-row.md
Normal file
@ -0,0 +1,34 @@
|
||||
# split-row
|
||||
|
||||
Split row contents over multiple rows via the separator.
|
||||
|
||||
Syntax: `split-row <separator>`
|
||||
|
||||
### Parameters:
|
||||
* `<separator>` the character that denotes what separates rows
|
||||
|
||||
## Examples
|
||||
|
||||
We can build a table from a file that looks like this
|
||||
|
||||
```shell
|
||||
> open table.txt
|
||||
4, 0, 2, 0, 7, 8
|
||||
|
||||
```
|
||||
|
||||
using the `split-row` command.
|
||||
|
||||
```shell
|
||||
> open table.txt | split-row ", "
|
||||
━━━┯━━━━━━━━━
|
||||
# │ <value>
|
||||
───┼─────────
|
||||
0 │ 4
|
||||
1 │ 0
|
||||
2 │ 2
|
||||
3 │ 0
|
||||
4 │ 7
|
||||
5 │ 8
|
||||
━━━┷━━━━━━━━━
|
||||
```
|
50
docs/commands/str.md
Normal file
@ -0,0 +1,50 @@
|
||||
# str
|
||||
|
||||
Consumes either a single value or a table and converts the provided data to a string and optionally applies a change.
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
> shells
|
||||
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
# │ │ name │ path
|
||||
───┼───┼────────────┼────────────────────────────────
|
||||
0 │ X │ filesystem │ /home/TUX/stuff/expr/stuff
|
||||
1 │ │ filesystem │ /
|
||||
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
> shells | str path --upcase
|
||||
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
# │ │ name │ path
|
||||
───┼───┼────────────┼────────────────────────────────
|
||||
0 │ X │ filesystem │ /HOME/TUX/STUFF/EXPR/STUFF
|
||||
1 │ │ filesystem │ /
|
||||
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
> shells | str path --downcase
|
||||
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
# │ │ name │ path
|
||||
───┼───┼────────────┼────────────────────────────────
|
||||
0 │ X │ filesystem │ /home/tux/stuff/expr/stuff
|
||||
1 │ │ filesystem │ /
|
||||
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
> shells | str # --substring "21, 99"
|
||||
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
# │ │ name │ path
|
||||
───┼───┼────────────┼────────────────────────────────
|
||||
0 │ X │ filesystem │ stuff
|
||||
1 │ │ filesystem │
|
||||
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
> shells | str # --substring "6,"
|
||||
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
# │ │ name │ path
|
||||
───┼───┼────────────┼────────────────────────────────
|
||||
0 │ X │ filesystem │ TUX/stuff/expr/stuff
|
||||
1 │ │ filesystem │
|
||||
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
|
||||
> echo "1, 2, 3" | split-row "," | str --to-int | sum
|
||||
━━━━━━━━━
|
||||
<value>
|
||||
─────────
|
||||
6
|
||||
━━━━━━━━━
|
||||
```
|
@ -1,9 +1,7 @@
|
||||
# sum
|
||||
|
||||
This command allows you to calculate the sum of values in a column.
|
||||
|
||||
## Examples
|
||||
# sum
|
||||
This command allows you to calculate the sum of values in a column.
|
||||
|
||||
## Examples
|
||||
To get the sum of the file sizes in a directory, simply pipe the size column from the ls command to the sum command.
|
||||
|
||||
```shell
|
||||
@ -15,21 +13,32 @@ To get the sum of the file sizes in a directory, simply pipe the size column fro
|
||||
━━━━━━━━━
|
||||
```
|
||||
|
||||
Note that sum only works for integer and byte values at the moment, and if the shell doesn't recognize the values in a column as one of those types, it will return an error.
|
||||
To get the sum of the characters that make up your present working directory:
|
||||
```shell
|
||||
> pwd | split-row / | size | get chars | sum
|
||||
━━━━━━━━━
|
||||
<value>
|
||||
━━━━━━━━━
|
||||
21
|
||||
━━━━━━━━━
|
||||
```
|
||||
|
||||
Note that sum only works for integer and byte values. If the shell doesn't recognize the values in a column as one of those types, it will return an error.
|
||||
One way to solve this is to convert each row to an integer when possible and then pipe the result to `sum`
|
||||
|
||||
```shell
|
||||
> open example.csv
|
||||
━━━┯━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━
|
||||
# │ fruit │ amount │ quality
|
||||
───┼─────────┼────────┼──────────
|
||||
0 │ apples │ 1 │ fresh
|
||||
1 │ bananas │ 2 │ old
|
||||
2 │ oranges │ 7 │ fresh
|
||||
3 │ kiwis │ 25 │ rotten
|
||||
━━━┷━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━
|
||||
> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | sum
|
||||
error: Unrecognized type in stream: Primitive(String("2509000000"))
|
||||
- shell:1:0
|
||||
1 | open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | sum
|
||||
| ^^^^ source
|
||||
```
|
||||
|
||||
```shell
|
||||
> open example.csv | get amount | sum
|
||||
error: Unrecognized type in stream: Primitive(String("1"))
|
||||
> open tests/fixtures/formats/caco3_plastics.csv | get tariff_item | str --to-int | sum
|
||||
━━━━━━━━━━━━━
|
||||
<value>
|
||||
─────────────
|
||||
29154639996
|
||||
━━━━━━━━━━━━━
|
||||
```
|
||||
|
47
docs/commands/tags.md
Normal file
@ -0,0 +1,47 @@
|
||||
# tags
|
||||
|
||||
The tags command allows users to access the metadata of the previous value in
the pipeline. This command may be run on multiple values of input as well.
|
||||
|
||||
As of this writing, the metadata returned includes:
|
||||
|
||||
- `span`: the start and end indices of the previous value's substring location
|
||||
- `anchor`: the source where data was loaded from; this may not appear if the
|
||||
previous pipeline value didn't actually have a source (like trying to `open` a
|
||||
dir, or running `ls` on a dir)
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
> open README.md | tags
|
||||
━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
span │ anchor
|
||||
────────────────┼──────────────────────────────────────────────────
|
||||
[table: 1 row] │ /Users/danielh/Projects/github/nushell/README.md
|
||||
━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
```shell
|
||||
> open README.md | tags | get span
|
||||
━━━━━━━┯━━━━━
|
||||
start │ end
|
||||
───────┼─────
|
||||
5 │ 14
|
||||
━━━━━━━┷━━━━━
|
||||
```
|
||||
|
||||
```shell
|
||||
> ls | tags | first 3 | get span
|
||||
━━━┯━━━━━━━┯━━━━━
|
||||
# │ start │ end
|
||||
───┼───────┼─────
|
||||
0 │ 0 │ 2
|
||||
1 │ 0 │ 2
|
||||
2 │ 0 │ 2
|
||||
━━━┷━━━━━━━┷━━━━━
|
||||
```
|
||||
|
||||
## Reference
|
||||
|
||||
More useful information on the `tags` command can be found by referencing [The
|
||||
Nu Book's entry on Metadata](https://book.nushell.sh/en/metadata)
|
@ -7,11 +7,11 @@ Converts table data into csv text.
|
||||
```shell
|
||||
> shells
|
||||
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
# │ │ name │ path
|
||||
# │ │ name │ path
|
||||
───┼───┼────────────┼────────────────────────
|
||||
0 │ X │ filesystem │ /home/shaurya
|
||||
1 │ │ filesystem │ /home/shaurya/Pictures
|
||||
2 │ │ filesystem │ /home/shaurya/Desktop
|
||||
0 │ X │ filesystem │ /home/shaurya
|
||||
1 │ │ filesystem │ /home/shaurya/Pictures
|
||||
2 │ │ filesystem │ /home/shaurya/Desktop
|
||||
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
> shells | to-csv
|
||||
,name,path
|
||||
@ -23,48 +23,48 @@ X,filesystem,/home/shaurya
|
||||
```shell
|
||||
> open caco3_plastics.csv
|
||||
━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━━━━
|
||||
# │ importer │ shipper │ tariff_item │ name │ origin │ shipped_at │ arrived_at │ net_weight │ fob_price │ cif_price │ cif_per_net_
|
||||
│ │ │ │ │ │ │ │ │ │ │ weight
|
||||
# │ importer │ shipper │ tariff_item │ name │ origin │ shipped_at │ arrived_at │ net_weight │ fob_price │ cif_price │ cif_per_net_
|
||||
│ │ │ │ │ │ │ │ │ │ │ weight
|
||||
───┼──────────────┼──────────────┼─────────────┼──────────────┼──────────┼────────────┼────────────┼────────────┼───────────┼───────────┼──────────────
|
||||
0 │ PLASTICOS │ S A REVERTE │ 2509000000 │ CARBONATO DE │ SPAIN │ 18/03/2016 │ 17/04/2016 │ 81,000.00 │ 14,417.58 │ 18,252.34 │ 0.23
|
||||
│ RIVAL CIA │ │ │ CALCIO TIPO │ │ │ │ │ │ │
|
||||
│ LTDA │ │ │ CALCIPORE │ │ │ │ │ │ │
|
||||
│ │ │ │ 160 T AL │ │ │ │ │ │ │
|
||||
1 │ MEXICHEM │ OMYA ANDINA │ 2836500000 │ CARBONATO │ COLOMBIA │ 07/07/2016 │ 10/07/2016 │ 26,000.00 │ 7,072.00 │ 8,127.18 │ 0.31
|
||||
│ ECUADOR S.A. │ S A │ │ │ │ │ │ │ │ │
|
||||
2 │ PLASTIAZUAY │ SA REVERTE │ 2836500000 │ CARBONATO DE │ SPAIN │ 27/07/2016 │ 09/08/2016 │ 81,000.00 │ 8,100.00 │ 11,474.55 │ 0.14
|
||||
│ SA │ │ │ CALCIO │ │ │ │ │ │ │
|
||||
3 │ PLASTICOS │ AND │ 2836500000 │ CALCIUM │ TURKEY │ 04/10/2016 │ 11/11/2016 │ 100,000.00 │ 17,500.00 │ 22,533.75 │ 0.23
|
||||
│ RIVAL CIA │ ENDUSTRIYEL │ │ CARBONATE │ │ │ │ │ │ │
|
||||
│ LTDA │ HAMMADDELER │ │ ANADOLU │ │ │ │ │ │ │
|
||||
│ │ DIS TCARET │ │ ANDCARB CT-1 │ │ │ │ │ │ │
|
||||
│ │ LTD.STI. │ │ │ │ │ │ │ │ │
|
||||
4 │ QUIMICA │ SA REVERTE │ 2836500000 │ CARBONATO DE │ SPAIN │ 24/06/2016 │ 12/07/2016 │ 27,000.00 │ 3,258.90 │ 5,585.00 │ 0.21
|
||||
│ COMERCIAL │ │ │ CALCIO │ │ │ │ │ │ │
|
||||
│ QUIMICIAL │ │ │ │ │ │ │ │ │ │
|
||||
│ CIA. LTDA. │ │ │ │ │ │ │ │ │ │
|
||||
5 │ PICA │ OMYA ANDINA │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/01/1900 │ 18/01/2016 │ 66,500.00 │ 12,635.00 │ 18,670.52 │ 0.28
|
||||
│ PLASTICOS │ S.A │ │ CALCIO │ │ │ │ │ │ │
|
||||
│ INDUSTRIALES │ │ │ │ │ │ │ │ │ │
|
||||
│ C.A. │ │ │ │ │ │ │ │ │ │
|
||||
6 │ PLASTIQUIM │ OMYA ANDINA │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/01/1900 │ 25/10/2016 │ 33,000.00 │ 6,270.00 │ 9,999.00 │ 0.30
|
||||
│ S.A. │ S.A NIT │ │ CALCIO │ │ │ │ │ │ │
|
||||
│ │ 830.027.386- │ │ RECUBIERTO │ │ │ │ │ │ │
|
||||
│ │ 6 │ │ CON ACIDO │ │ │ │ │ │ │
|
||||
│ │ │ │ ESTEARICO │ │ │ │ │ │ │
|
||||
│ │ │ │ OMYA CARB 1T │ │ │ │ │ │ │
|
||||
│ │ │ │ CG BBS 1000 │ │ │ │ │ │ │
|
||||
7 │ QUIMICOS │ SIBELCO │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/11/2016 │ 03/11/2016 │ 52,000.00 │ 8,944.00 │ 13,039.05 │ 0.25
|
||||
│ ANDINOS │ COLOMBIA SAS │ │ CALCIO │ │ │ │ │ │ │
|
||||
│ QUIMANDI │ │ │ RECUBIERTO │ │ │ │ │ │ │
|
||||
│ S.A. │ │ │ │ │ │ │ │ │ │
|
||||
8 │ TIGRE │ OMYA ANDINA │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/01/1900 │ 28/10/2016 │ 66,000.00 │ 11,748.00 │ 18,216.00 │ 0.28
|
||||
│ ECUADOR S.A. │ S.A NIT │ │ CALCIO │ │ │ │ │ │ │
|
||||
│ ECUATIGRE │ 830.027.386- │ │ RECUBIERTO │ │ │ │ │ │ │
|
||||
│ │ 6 │ │ CON ACIDO │ │ │ │ │ │ │
|
||||
│ │ │ │ ESTEARICO │ │ │ │ │ │ │
|
||||
│ │ │ │ OMYACARB 1T │ │ │ │ │ │ │
|
||||
│ │ │ │ CG BPA 25 NO │ │ │ │ │ │ │
|
||||
0 │ PLASTICOS │ S A REVERTE │ 2509000000 │ CARBONATO DE │ SPAIN │ 18/03/2016 │ 17/04/2016 │ 81,000.00 │ 14,417.58 │ 18,252.34 │ 0.23
|
||||
│ RIVAL CIA │ │ │ CALCIO TIPO │ │ │ │ │ │ │
|
||||
│ LTDA │ │ │ CALCIPORE │ │ │ │ │ │ │
|
||||
│ │ │ │ 160 T AL │ │ │ │ │ │ │
|
||||
1 │ MEXICHEM │ OMYA ANDINA │ 2836500000 │ CARBONATO │ COLOMBIA │ 07/07/2016 │ 10/07/2016 │ 26,000.00 │ 7,072.00 │ 8,127.18 │ 0.31
|
||||
│ ECUADOR S.A. │ S A │ │ │ │ │ │ │ │ │
|
||||
2 │ PLASTIAZUAY │ SA REVERTE │ 2836500000 │ CARBONATO DE │ SPAIN │ 27/07/2016 │ 09/08/2016 │ 81,000.00 │ 8,100.00 │ 11,474.55 │ 0.14
|
||||
│ SA │ │ │ CALCIO │ │ │ │ │ │ │
|
||||
3 │ PLASTICOS │ AND │ 2836500000 │ CALCIUM │ TURKEY │ 04/10/2016 │ 11/11/2016 │ 100,000.00 │ 17,500.00 │ 22,533.75 │ 0.23
|
||||
│ RIVAL CIA │ ENDUSTRIYEL │ │ CARBONATE │ │ │ │ │ │ │
|
||||
│ LTDA │ HAMMADDELER │ │ ANADOLU │ │ │ │ │ │ │
|
||||
│ │ DIS TCARET │ │ ANDCARB CT-1 │ │ │ │ │ │ │
|
||||
│ │ LTD.STI. │ │ │ │ │ │ │ │ │
|
||||
4 │ QUIMICA │ SA REVERTE │ 2836500000 │ CARBONATO DE │ SPAIN │ 24/06/2016 │ 12/07/2016 │ 27,000.00 │ 3,258.90 │ 5,585.00 │ 0.21
|
||||
│ COMERCIAL │ │ │ CALCIO │ │ │ │ │ │ │
|
||||
│ QUIMICIAL │ │ │ │ │ │ │ │ │ │
|
||||
│ CIA. LTDA. │ │ │ │ │ │ │ │ │ │
|
||||
5 │ PICA │ OMYA ANDINA │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/01/1900 │ 18/01/2016 │ 66,500.00 │ 12,635.00 │ 18,670.52 │ 0.28
|
||||
│ PLASTICOS │ S.A │ │ CALCIO │ │ │ │ │ │ │
|
||||
│ INDUSTRIALES │ │ │ │ │ │ │ │ │ │
|
||||
│ C.A. │ │ │ │ │ │ │ │ │ │
|
||||
6 │ PLASTIQUIM │ OMYA ANDINA │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/01/1900 │ 25/10/2016 │ 33,000.00 │ 6,270.00 │ 9,999.00 │ 0.30
|
||||
│ S.A. │ S.A NIT │ │ CALCIO │ │ │ │ │ │ │
|
||||
│ │ 830.027.386- │ │ RECUBIERTO │ │ │ │ │ │ │
|
||||
│ │ 6 │ │ CON ACIDO │ │ │ │ │ │ │
|
||||
│ │ │ │ ESTEARICO │ │ │ │ │ │ │
|
||||
│ │ │ │ OMYA CARB 1T │ │ │ │ │ │ │
|
||||
│ │ │ │ CG BBS 1000 │ │ │ │ │ │ │
|
||||
7 │ QUIMICOS │ SIBELCO │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/11/2016 │ 03/11/2016 │ 52,000.00 │ 8,944.00 │ 13,039.05 │ 0.25
│ ANDINOS │ COLOMBIA SAS │ │ CALCIO │ │ │ │ │ │ │
│ QUIMANDI │ │ │ RECUBIERTO │ │ │ │ │ │ │
│ S.A. │ │ │ │ │ │ │ │ │ │
8 │ TIGRE │ OMYA ANDINA │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/01/1900 │ 28/10/2016 │ 66,000.00 │ 11,748.00 │ 18,216.00 │ 0.28
│ ECUADOR S.A. │ S.A NIT │ │ CALCIO │ │ │ │ │ │ │
│ ECUATIGRE │ 830.027.386- │ │ RECUBIERTO │ │ │ │ │ │ │
│ │ 6 │ │ CON ACIDO │ │ │ │ │ │ │
│ │ │ │ ESTEARICO │ │ │ │ │ │ │
│ │ │ │ OMYACARB 1T │ │ │ │ │ │ │
│ │ │ │ CG BPA 25 NO │ │ │ │ │ │ │
━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━━━━
> open caco3_plastics.csv | to-csv
importer,shipper,tariff_item,name,origin,shipped_at,arrived_at,net_weight,fob_price,cif_price,cif_per_net_weight
@ -78,3 +78,37 @@ PLASTIQUIM S.A.,OMYA ANDINA S.A NIT 830.027.386-6,3824909999,CARBONATO DE CALCIO
QUIMICOS ANDINOS QUIMANDI S.A.,SIBELCO COLOMBIA SAS,3824909999,CARBONATO DE CALCIO RECUBIERTO,COLOMBIA,01/11/2016,03/11/2016,"52,000.00","8,944.00","13,039.05",0.25
TIGRE ECUADOR S.A. ECUATIGRE,OMYA ANDINA S.A NIT 830.027.386-6,3824909999,CARBONATO DE CALCIO RECUBIERTO CON ACIDO ESTEARICO OMYACARB 1T CG BPA 25 NO,COLOMBIA,01/01/1900,28/10/2016,"66,000.00","11,748.00","18,216.00",0.28
```

To use a character other than ',' to separate records, use `--separator`:

```shell
> shells
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━
 # │   │ name       │ path
───┼───┼────────────┼────────────────────────
 0 │ X │ filesystem │ /home/shaurya
 1 │   │ filesystem │ /home/shaurya/Pictures
 2 │   │ filesystem │ /home/shaurya/Desktop
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━
> shells | to-csv --separator ';'
;name,path
X;filesystem;/home/shaurya
;filesystem;/home/shaurya/Pictures
;filesystem;/home/shaurya/Desktop
```
The string '\t' can be used to separate on tabs. Note that this is the same as using the `to-tsv` command.

Newlines '\n' are not acceptable separators.

Note that separators are currently provided as strings and need to be wrapped in quotes.

It is also considered an error to use a separator longer than one character:

```shell
> open pets.txt | from-csv --separator '123'
error: Expected a single separator char from --separator
- shell:1:37
1 | open pets.txt | from-csv --separator '123'
  |                                       ^^^^^ requires a single character string input
```
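As a quick sketch of the tab case (assuming both invocations serialize the same table; output omitted):

```shell
> shells | to-csv --separator '\t'
> shells | to-tsv
```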
@ -9,6 +9,6 @@ Outputs the nushell version.
━━━━━━━━━
 version
─────────
 0.3.0
 0.6.0
━━━━━━━━━
```
@ -2,6 +2,39 @@

This command filters the content of a table based on a condition passed as a parameter, which must be a boolean expression that makes use of any of the table columns. Other commands such as `ls` can feed `where` their output through pipelines.

Where has two general forms:
- `where <column_name> <comparison> <value>`
- `where <column_name>`

## Where with comparison

In the first form, `where` is passed a column name that the filter will run against. Next comes the operator used to compare this column to its value. The following operators are supported (a short example follows the lists below):

- `<` (less than)
- `<=` (less than or equal)
- `>` (greater than)
- `>=` (greater than or equal)
- `!=` (not equal)
- `==` (equal)

Strings have two additional operators:
- `=~` (fuzzy match to allow)
- `!~` (fuzzy match to not allow)
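As a quick illustration (a minimal sketch; the file names and sizes that `ls` reports will differ on your machine), a comparison condition looks like this:

```shell
> ls | where size >= 10kb
```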
Dates can also be compared using the duration types. For example, `where accessed > 2w` will check the date in `accessed` to see whether it is more than two weeks ago. Durations currently allow these abbreviations:

- `1s` (one second)
- `1m` (one minute)
- `1h` (one hour)
- `1d` (one day)
- `1w` (one week)
- `1M` (one month)
- `1y` (one year)
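For instance (a minimal sketch; output is omitted because it depends on the files present), a duration can be combined with any of the comparison operators above:

```shell
> ls | where modified <= 2w
```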
## Boolean check

Where with the form `| where readonly` is used to check boolean values. For example, the command `ls --full | where readonly` will list only those files that are read-only, as sketched below.
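A minimal sketch of this form, using the command mentioned above (output omitted):

```shell
> ls --full | where readonly
```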
## Usage
```shell
> [input-command] | where [condition]
@ -11,24 +44,47 @@ This command filters the content of a table based on a condition passed as a par
```shell
> ls | where size > 4kb
----+----------------+------+----------+----------+----------------+----------------
# | name | type | readonly | size | accessed | modified
----+----------------+------+----------+----------+----------------+----------------
0 | IMG_1291.jpg | File | | 115.5 KB | a month ago | 4 months ago
1 | README.md | File | | 11.1 KB | 2 days ago | 2 days ago
2 | IMG_1291.png | File | | 589.0 KB | a month ago | a month ago
3 | IMG_1381.jpg | File | | 81.0 KB | a month ago | 4 months ago
4 | butterfly.jpeg | File | | 4.2 KB | a month ago | a month ago
5 | Cargo.lock | File | | 199.6 KB | 22 minutes ago | 22 minutes ago
━━━┯━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━
# │ name │ type │ size │ created │ accessed │ modified
───┼────────────┼──────┼─────────┼─────────────┼─────────────┼─────────────
0 │ Cargo.lock │ File │ 87.2 KB │ 7 hours ago │ 7 hours ago │ 7 hours ago
1 │ README.md │ File │ 19.5 KB │ 7 hours ago │ 7 hours ago │ 7 hours ago
2 │ Cargo.toml │ File │ 4.7 KB │ 7 hours ago │ 7 hours ago │ 7 hours ago
━━━┷━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━
```
```shell
> ps | where cpu > 10
---+-------+----------+-------+-----------------------------
# | pid | status | cpu | name
---+-------+----------+-------+-----------------------------
0 | 1992 | Sleeping | 44.52 | /usr/bin/gnome-shell
1 | 1069 | Sleeping | 16.15 |
2 | 24116 | Sleeping | 13.70 | /opt/google/chrome/chrome
3 | 21976 | Sleeping | 12.67 | /usr/share/discord/Discord
> ps | where cpu > 0
━━━┯━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━
# │ pid │ name │ status │ cpu
───┼───────┼───────────────────────┼──────────┼───────────────────
0 │ 1546 │ Xorg │ Sleeping │ 10.65405000000000
1 │ 1769 │ gnome-shell │ Sleeping │ 5.271094000000000
2 │ 2153 │ gnome-terminal-server │ Sleeping │ 5.193664000000000
3 │ 13556 │ nu_plugin_ps │ Sleeping │ 40.70250000000000
━━━┷━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━
```
```shell
> ls | where accessed <= 1w
━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━
# │ name │ type │ size │ accessed │ modified
───┼───────────────┼───────────┼──────────┼────────────┼────────────
0 │ Cargo.toml │ File │ 4.7 KB │ 2 days ago │ 2 days ago
1 │ target │ Directory │ 4.1 KB │ 2 days ago │ 2 days ago
2 │ Makefile.toml │ File │ 449 B │ 4 days ago │ 4 days ago
3 │ README.md │ File │ 19.5 KB │ 2 days ago │ 2 days ago
4 │ Cargo.lock │ File │ 170.7 KB │ 2 days ago │ 2 days ago
5 │ crates │ Directory │ 4.1 KB │ 2 days ago │ 2 days ago
6 │ TODO.md │ File │ 1.3 KB │ 2 days ago │ 2 days ago
━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━
```
```shell
> ls | where name =~ "yml"
━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━
name │ type │ size │ accessed │ modified
─────────────┼──────┼───────┼────────────┼────────────
.gitpod.yml │ File │ 780 B │ a week ago │ a week ago
━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━
```
@ -10,4 +10,12 @@ reason = """
This is laying the groundwork for merging coloring and parsing. It also makes token_nodes.atomic() naturally
work with coloring, which is pretty useful on its own.
"""
enabled = false
enabled = false

[data_processing_primitives]

description = "Groundwork so tables can be data processed"
reason = """
These will allow take tables and be able to transform, process, and explore.
"""
enabled = false

@ -1 +0,0 @@
beta-2019-09-25
src/cli.rs
@ -1,29 +1,31 @@
|
||||
use crate::commands::classified::{
|
||||
ClassifiedCommand, ClassifiedInputStream, ClassifiedPipeline, ExternalCommand, InternalCommand,
|
||||
StreamNext,
|
||||
ClassifiedCommand, ClassifiedInputStream, ClassifiedPipeline, ExternalArg, ExternalArgs,
|
||||
ExternalCommand, InternalCommand, StreamNext,
|
||||
};
|
||||
use crate::commands::plugin::JsonRpc;
|
||||
use crate::commands::plugin::{PluginCommand, PluginSink};
|
||||
use crate::commands::whole_stream_command;
|
||||
use crate::context::Context;
|
||||
use crate::data::config;
|
||||
use crate::data::Value;
|
||||
use crate::data::{
|
||||
base::{UntaggedValue, Value},
|
||||
config,
|
||||
};
|
||||
pub(crate) use crate::errors::ShellError;
|
||||
use crate::fuzzysearch::{interactive_fuzzy_search, SelectionResult};
|
||||
#[cfg(not(feature = "starship-prompt"))]
|
||||
use crate::git::current_branch;
|
||||
use crate::parser::registry::Signature;
|
||||
use crate::parser::{
|
||||
hir,
|
||||
hir::syntax_shape::{expand_syntax, PipelineShape},
|
||||
hir::{expand_external_tokens::expand_external_tokens, tokens_iterator::TokensIterator},
|
||||
hir::syntax_shape::{expand_syntax, ExpandContext, PipelineShape},
|
||||
hir::{expand_external_tokens::ExternalTokensShape, tokens_iterator::TokensIterator},
|
||||
TokenNode,
|
||||
};
|
||||
use crate::prelude::*;
|
||||
use nu_source::{Spanned, Tagged};
|
||||
|
||||
use log::{debug, trace};
|
||||
use log::{debug, log_enabled, trace};
|
||||
use rustyline::error::ReadlineError;
|
||||
use rustyline::{self, config::Configurer, config::EditMode, ColorMode, Config, Editor};
|
||||
use std::env;
|
||||
use std::error::Error;
|
||||
use std::io::{BufRead, BufReader, Write};
|
||||
use std::iter::Iterator;
|
||||
@ -119,13 +121,6 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel
|
||||
fn search_paths() -> Vec<std::path::PathBuf> {
|
||||
let mut search_paths = Vec::new();
|
||||
|
||||
match env::var_os("PATH") {
|
||||
Some(paths) => {
|
||||
search_paths = env::split_paths(&paths).collect::<Vec<_>>();
|
||||
}
|
||||
None => println!("PATH is not defined in the environment."),
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
// Use our debug plugins in debug mode
|
||||
@ -140,6 +135,15 @@ fn search_paths() -> Vec<std::path::PathBuf> {
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
{
|
||||
use std::env;
|
||||
|
||||
match env::var_os("PATH") {
|
||||
Some(paths) => {
|
||||
search_paths = env::split_paths(&paths).collect::<Vec<_>>();
|
||||
}
|
||||
None => println!("PATH is not defined in the environment."),
|
||||
}
|
||||
|
||||
// Use our release plugins in release mode
|
||||
let mut path = std::path::PathBuf::from(".");
|
||||
path.push("target");
|
||||
@ -163,6 +167,8 @@ fn load_plugins(context: &mut Context) -> Result<(), ShellError> {
|
||||
require_literal_leading_dot: false,
|
||||
};
|
||||
|
||||
set_env_from_config();
|
||||
|
||||
for path in search_paths() {
|
||||
let mut pattern = path.to_path_buf();
|
||||
|
||||
@ -257,13 +263,14 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
whole_stream_command(Nth),
|
||||
whole_stream_command(Next),
|
||||
whole_stream_command(Previous),
|
||||
whole_stream_command(Debug),
|
||||
whole_stream_command(Shells),
|
||||
whole_stream_command(SplitColumn),
|
||||
whole_stream_command(SplitRow),
|
||||
whole_stream_command(Lines),
|
||||
whole_stream_command(Reject),
|
||||
whole_stream_command(Reverse),
|
||||
whole_stream_command(Append),
|
||||
whole_stream_command(Prepend),
|
||||
whole_stream_command(Trim),
|
||||
whole_stream_command(ToBSON),
|
||||
whole_stream_command(ToCSV),
|
||||
@ -275,6 +282,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
whole_stream_command(ToURL),
|
||||
whole_stream_command(ToYAML),
|
||||
whole_stream_command(SortBy),
|
||||
whole_stream_command(GroupBy),
|
||||
whole_stream_command(Tags),
|
||||
whole_stream_command(Count),
|
||||
whole_stream_command(First),
|
||||
@ -290,11 +298,13 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
whole_stream_command(FromSQLite),
|
||||
whole_stream_command(FromTOML),
|
||||
whole_stream_command(FromURL),
|
||||
whole_stream_command(FromXLSX),
|
||||
whole_stream_command(FromXML),
|
||||
whole_stream_command(FromYAML),
|
||||
whole_stream_command(FromYML),
|
||||
whole_stream_command(Pick),
|
||||
whole_stream_command(Get),
|
||||
whole_stream_command(Histogram),
|
||||
per_item_command(Remove),
|
||||
per_item_command(Fetch),
|
||||
per_item_command(Open),
|
||||
@ -302,9 +312,12 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
per_item_command(Where),
|
||||
per_item_command(Echo),
|
||||
whole_stream_command(Config),
|
||||
whole_stream_command(Compact),
|
||||
whole_stream_command(Default),
|
||||
whole_stream_command(SkipWhile),
|
||||
per_item_command(Enter),
|
||||
per_item_command(Help),
|
||||
per_item_command(History),
|
||||
whole_stream_command(Exit),
|
||||
whole_stream_command(Autoview),
|
||||
whole_stream_command(Pivot),
|
||||
@ -313,11 +326,25 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
per_item_command(Mkdir),
|
||||
per_item_command(Move),
|
||||
whole_stream_command(Save),
|
||||
whole_stream_command(SplitBy),
|
||||
whole_stream_command(Table),
|
||||
whole_stream_command(Version),
|
||||
whole_stream_command(What),
|
||||
whole_stream_command(Which),
|
||||
whole_stream_command(Debug),
|
||||
]);
|
||||
|
||||
cfg_if::cfg_if! {
|
||||
if #[cfg(data_processing_primitives)] {
|
||||
context.add_commands(vec![
|
||||
whole_stream_command(ReduceBy),
|
||||
whole_stream_command(EvaluateBy),
|
||||
whole_stream_command(TSortBy),
|
||||
whole_stream_command(MapMaxBy),
|
||||
]);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "clipboard")]
|
||||
{
|
||||
context.add_commands(vec![whole_stream_command(
|
||||
@ -357,7 +384,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
|
||||
let edit_mode = config::config(Tag::unknown())?
|
||||
.get("edit_mode")
|
||||
.map(|s| match s.as_string().unwrap().as_ref() {
|
||||
.map(|s| match s.value.expect_string() {
|
||||
"vi" => EditMode::Vi,
|
||||
"emacs" => EditMode::Emacs,
|
||||
_ => EditMode::Emacs,
|
||||
@ -366,58 +393,66 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
|
||||
rl.set_edit_mode(edit_mode);
|
||||
|
||||
// Register Ctrl-r for history fuzzy search
|
||||
// rustyline doesn't support custom commands, so we override Ctrl-D (EOF)
|
||||
// https://github.com/nushell/nushell/issues/689
|
||||
#[cfg(all(not(windows), feature = "crossterm"))]
|
||||
rl.bind_sequence(rustyline::KeyPress::Ctrl('R'), rustyline::Cmd::EndOfFile);
|
||||
// Redefine Ctrl-D to same command as Ctrl-C
|
||||
rl.bind_sequence(rustyline::KeyPress::Ctrl('D'), rustyline::Cmd::Interrupt);
|
||||
|
||||
let prompt = &format!(
|
||||
"{}{}> ",
|
||||
cwd,
|
||||
match current_branch() {
|
||||
Some(s) => format!("({})", s),
|
||||
None => "".to_string(),
|
||||
let colored_prompt = {
|
||||
#[cfg(feature = "starship-prompt")]
|
||||
{
|
||||
std::env::set_var("STARSHIP_SHELL", "");
|
||||
starship::print::get_prompt(starship::context::Context::new_with_dir(
|
||||
clap::ArgMatches::default(),
|
||||
cwd,
|
||||
))
|
||||
}
|
||||
);
|
||||
#[cfg(not(feature = "starship-prompt"))]
|
||||
{
|
||||
format!(
|
||||
"\x1b[32m{}{}\x1b[m> ",
|
||||
cwd,
|
||||
match current_branch() {
|
||||
Some(s) => format!("({})", s),
|
||||
None => "".to_string(),
|
||||
}
|
||||
)
|
||||
}
|
||||
};
|
||||
|
||||
let prompt = {
|
||||
let bytes = strip_ansi_escapes::strip(&colored_prompt).unwrap();
|
||||
|
||||
String::from_utf8_lossy(&bytes).to_string()
|
||||
};
|
||||
|
||||
rl.helper_mut().expect("No helper").colored_prompt = colored_prompt;
|
||||
let mut initial_command = Some(String::new());
|
||||
let mut readline = Err(ReadlineError::Eof);
|
||||
while let Some(ref cmd) = initial_command {
|
||||
readline = rl.readline_with_initial(prompt, (&cmd, ""));
|
||||
if let Err(ReadlineError::Eof) = &readline {
|
||||
// Fuzzy search in history
|
||||
let lines = rl.history().iter().rev().map(|s| s.as_str()).collect();
|
||||
let selection = interactive_fuzzy_search(&lines, 5); // Clears last line with prompt
|
||||
match selection {
|
||||
SelectionResult::Selected(line) => {
|
||||
println!("{}{}", &prompt, &line); // TODO: colorize prompt
|
||||
readline = Ok(line.clone());
|
||||
initial_command = None;
|
||||
}
|
||||
SelectionResult::Edit(line) => {
|
||||
initial_command = Some(line);
|
||||
}
|
||||
SelectionResult::NoSelection => {
|
||||
readline = Ok("".to_string());
|
||||
initial_command = None;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
initial_command = None;
|
||||
}
|
||||
readline = rl.readline_with_initial(&prompt, (&cmd, ""));
|
||||
initial_command = None;
|
||||
}
|
||||
|
||||
match process_line(readline, &mut context).await {
|
||||
let line = process_line(readline, &mut context).await;
|
||||
|
||||
match line {
|
||||
LineResult::Success(line) => {
|
||||
rl.add_history_entry(line.clone());
|
||||
let _ = rl.save_history(&History::path());
|
||||
context.maybe_print_errors(Text::from(line));
|
||||
}
|
||||
|
||||
LineResult::Error(line, err) => {
|
||||
rl.add_history_entry(line.clone());
|
||||
let _ = rl.save_history(&History::path());
|
||||
|
||||
context.with_host(|host| {
|
||||
print_err(err, host, &Text::from(line.clone()));
|
||||
});
|
||||
|
||||
context.maybe_print_errors(Text::from(line.clone()));
|
||||
}
|
||||
|
||||
LineResult::CtrlC => {
|
||||
let config_ctrlc_exit = config::config(Tag::unknown())?
|
||||
.get("ctrlc_exit")
|
||||
.map(|s| match s.as_string().unwrap().as_ref() {
|
||||
.map(|s| match s.value.expect_string() {
|
||||
"true" => true,
|
||||
_ => false,
|
||||
})
|
||||
@ -437,14 +472,6 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
}
|
||||
}
|
||||
|
||||
LineResult::Error(line, err) => {
|
||||
rl.add_history_entry(line.clone());
|
||||
|
||||
context.with_host(|host| {
|
||||
print_err(err, host, &Text::from(line));
|
||||
})
|
||||
}
|
||||
|
||||
LineResult::Break => {
|
||||
break;
|
||||
}
|
||||
@ -466,6 +493,70 @@ fn chomp_newline(s: &str) -> &str {
|
||||
}
|
||||
}
|
||||
|
||||
fn set_env_from_config() {
|
||||
let config = crate::data::config::read(Tag::unknown(), &None).unwrap();
|
||||
|
||||
if config.contains_key("env") {
|
||||
// Clear the existing vars, we're about to replace them
|
||||
for (key, _value) in std::env::vars() {
|
||||
std::env::remove_var(key);
|
||||
}
|
||||
|
||||
let value = config.get("env");
|
||||
|
||||
match value {
|
||||
Some(Value {
|
||||
value: UntaggedValue::Row(r),
|
||||
..
|
||||
}) => {
|
||||
for (k, v) in &r.entries {
|
||||
match v.as_string() {
|
||||
Ok(value_string) => {
|
||||
std::env::set_var(k, value_string);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
if config.contains_key("path") {
|
||||
// Override the path with what they give us from config
|
||||
let value = config.get("path");
|
||||
|
||||
match value {
|
||||
Some(value) => match value {
|
||||
Value {
|
||||
value: UntaggedValue::Table(table),
|
||||
..
|
||||
} => {
|
||||
let mut paths = vec![];
|
||||
for val in table {
|
||||
let path_str = val.as_string();
|
||||
match path_str {
|
||||
Err(_) => {}
|
||||
Ok(path_str) => {
|
||||
paths.push(PathBuf::from(path_str));
|
||||
}
|
||||
}
|
||||
}
|
||||
let path_os_string = std::env::join_paths(&paths);
|
||||
match path_os_string {
|
||||
Ok(path_os_string) => {
|
||||
std::env::set_var("PATH", path_os_string);
|
||||
}
|
||||
Err(_) => {}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
None => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum LineResult {
|
||||
Success(String),
|
||||
Error(String, ShellError),
|
||||
@ -496,10 +587,11 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||
};
|
||||
|
||||
match pipeline.commands.last() {
|
||||
match pipeline.commands.list.last() {
|
||||
Some(ClassifiedCommand::External(_)) => {}
|
||||
_ => pipeline
|
||||
.commands
|
||||
.list
|
||||
.push(ClassifiedCommand::Internal(InternalCommand {
|
||||
name: "autoview".to_string(),
|
||||
name_tag: Tag::unknown(),
|
||||
@ -507,14 +599,17 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
Box::new(hir::Expression::synthetic_string("autoview")),
|
||||
None,
|
||||
None,
|
||||
Span::unknown(),
|
||||
),
|
||||
})),
|
||||
}
|
||||
|
||||
let mut input = ClassifiedInputStream::new();
|
||||
let mut iter = pipeline.commands.list.into_iter().peekable();
|
||||
|
||||
let mut iter = pipeline.commands.into_iter().peekable();
|
||||
let mut is_first_command = true;
|
||||
// Check the config to see if we need to update the path
|
||||
// TODO: make sure config is cached so we don't path this load every call
|
||||
set_env_from_config();
|
||||
|
||||
loop {
|
||||
let item: Option<ClassifiedCommand> = iter.next();
|
||||
@ -548,28 +643,28 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
(
|
||||
Some(ClassifiedCommand::Internal(left)),
|
||||
Some(ClassifiedCommand::External(_)),
|
||||
) => match left.run(ctx, input, Text::from(line), is_first_command) {
|
||||
) => match left.run(ctx, input, Text::from(line)) {
|
||||
Ok(val) => ClassifiedInputStream::from_input_stream(val),
|
||||
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||
},
|
||||
|
||||
(Some(ClassifiedCommand::Internal(left)), Some(_)) => {
|
||||
match left.run(ctx, input, Text::from(line), is_first_command) {
|
||||
match left.run(ctx, input, Text::from(line)) {
|
||||
Ok(val) => ClassifiedInputStream::from_input_stream(val),
|
||||
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||
}
|
||||
}
|
||||
|
||||
(Some(ClassifiedCommand::Internal(left)), None) => {
|
||||
match left.run(ctx, input, Text::from(line), is_first_command) {
|
||||
match left.run(ctx, input, Text::from(line)) {
|
||||
Ok(val) => {
|
||||
use futures::stream::TryStreamExt;
|
||||
|
||||
let mut output_stream: OutputStream = val.into();
|
||||
loop {
|
||||
match output_stream.try_next().await {
|
||||
Ok(Some(ReturnSuccess::Value(Tagged {
|
||||
item: Value::Error(e),
|
||||
Ok(Some(ReturnSuccess::Value(Value {
|
||||
value: UntaggedValue::Error(e),
|
||||
..
|
||||
}))) => {
|
||||
return LineResult::Error(line.to_string(), e);
|
||||
@ -613,8 +708,6 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
is_first_command = false;
|
||||
}
|
||||
|
||||
LineResult::Success(line.to_string())
|
||||
@ -622,7 +715,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
Err(ReadlineError::Interrupted) => LineResult::CtrlC,
|
||||
Err(ReadlineError::Eof) => LineResult::Break,
|
||||
Err(err) => {
|
||||
println!("Error: {:?}", err);
|
||||
outln!("Error: {:?}", err);
|
||||
LineResult::Break
|
||||
}
|
||||
}
|
||||
@ -634,13 +727,22 @@ fn classify_pipeline(
|
||||
source: &Text,
|
||||
) -> Result<ClassifiedPipeline, ShellError> {
|
||||
let mut pipeline_list = vec![pipeline.clone()];
|
||||
let mut iterator = TokensIterator::all(&mut pipeline_list, pipeline.span());
|
||||
let mut iterator = TokensIterator::all(&mut pipeline_list, source.clone(), pipeline.span());
|
||||
|
||||
expand_syntax(
|
||||
let result = expand_syntax(
|
||||
&PipelineShape,
|
||||
&mut iterator,
|
||||
&context.expand_context(source, pipeline.span()),
|
||||
&context.expand_context(source),
|
||||
)
|
||||
.map_err(|err| err.into());
|
||||
|
||||
if log_enabled!(target: "nu::expand_syntax", log::Level::Debug) {
|
||||
outln!("");
|
||||
ptree::print_tree(&iterator.expand_tracer().print(source.clone())).unwrap();
|
||||
outln!("");
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
// Classify this command as an external command, which doesn't give special meaning
|
||||
@ -648,21 +750,24 @@ fn classify_pipeline(
|
||||
// strings.
|
||||
pub(crate) fn external_command(
|
||||
tokens: &mut TokensIterator,
|
||||
source: &Text,
|
||||
context: &ExpandContext,
|
||||
name: Tagged<&str>,
|
||||
) -> Result<ClassifiedCommand, ShellError> {
|
||||
let arg_list_strings = expand_external_tokens(tokens, source)?;
|
||||
) -> Result<ClassifiedCommand, ParseError> {
|
||||
let Spanned { item, span } = expand_syntax(&ExternalTokensShape, tokens, context)?.tokens;
|
||||
|
||||
Ok(ClassifiedCommand::External(ExternalCommand {
|
||||
name: name.to_string(),
|
||||
name_tag: name.tag(),
|
||||
args: arg_list_strings
|
||||
.iter()
|
||||
.map(|x| Tagged {
|
||||
tag: x.span.into(),
|
||||
item: x.item.clone(),
|
||||
})
|
||||
.collect(),
|
||||
args: ExternalArgs {
|
||||
list: item
|
||||
.iter()
|
||||
.map(|x| ExternalArg {
|
||||
tag: x.span.into(),
|
||||
arg: x.item.clone(),
|
||||
})
|
||||
.collect(),
|
||||
span,
|
||||
},
|
||||
}))
|
||||
}
|
||||
|
||||
|
@ -1,20 +1,28 @@
|
||||
#[macro_use]
|
||||
pub(crate) mod macros;
|
||||
|
||||
mod from_delimited_data;
|
||||
mod to_delimited_data;
|
||||
|
||||
pub(crate) mod append;
|
||||
pub(crate) mod args;
|
||||
pub(crate) mod autoview;
|
||||
pub(crate) mod cd;
|
||||
pub(crate) mod classified;
|
||||
pub(crate) mod clip;
|
||||
pub(crate) mod command;
|
||||
pub(crate) mod compact;
|
||||
pub(crate) mod config;
|
||||
pub(crate) mod count;
|
||||
pub(crate) mod cp;
|
||||
pub(crate) mod date;
|
||||
pub(crate) mod debug;
|
||||
pub(crate) mod default;
|
||||
pub(crate) mod echo;
|
||||
pub(crate) mod enter;
|
||||
pub(crate) mod env;
|
||||
#[allow(unused)]
|
||||
pub(crate) mod evaluate_by;
|
||||
pub(crate) mod exit;
|
||||
pub(crate) mod fetch;
|
||||
pub(crate) mod first;
|
||||
@ -27,13 +35,19 @@ pub(crate) mod from_ssv;
|
||||
pub(crate) mod from_toml;
|
||||
pub(crate) mod from_tsv;
|
||||
pub(crate) mod from_url;
|
||||
pub(crate) mod from_xlsx;
|
||||
pub(crate) mod from_xml;
|
||||
pub(crate) mod from_yaml;
|
||||
pub(crate) mod get;
|
||||
pub(crate) mod group_by;
|
||||
pub(crate) mod help;
|
||||
pub(crate) mod histogram;
|
||||
pub(crate) mod history;
|
||||
pub(crate) mod last;
|
||||
pub(crate) mod lines;
|
||||
pub(crate) mod ls;
|
||||
#[allow(unused)]
|
||||
pub(crate) mod map_max_by;
|
||||
pub(crate) mod mkdir;
|
||||
pub(crate) mod mv;
|
||||
pub(crate) mod next;
|
||||
@ -43,8 +57,11 @@ pub(crate) mod pick;
|
||||
pub(crate) mod pivot;
|
||||
pub(crate) mod plugin;
|
||||
pub(crate) mod post;
|
||||
pub(crate) mod prepend;
|
||||
pub(crate) mod prev;
|
||||
pub(crate) mod pwd;
|
||||
#[allow(unused)]
|
||||
pub(crate) mod reduce_by;
|
||||
pub(crate) mod reject;
|
||||
pub(crate) mod reverse;
|
||||
pub(crate) mod rm;
|
||||
@ -53,8 +70,11 @@ pub(crate) mod shells;
|
||||
pub(crate) mod size;
|
||||
pub(crate) mod skip_while;
|
||||
pub(crate) mod sort_by;
|
||||
pub(crate) mod split_by;
|
||||
pub(crate) mod split_column;
|
||||
pub(crate) mod split_row;
|
||||
#[allow(unused)]
|
||||
pub(crate) mod t_sort_by;
|
||||
pub(crate) mod table;
|
||||
pub(crate) mod tags;
|
||||
pub(crate) mod to_bson;
|
||||
@ -67,6 +87,7 @@ pub(crate) mod to_url;
|
||||
pub(crate) mod to_yaml;
|
||||
pub(crate) mod trim;
|
||||
pub(crate) mod version;
|
||||
pub(crate) mod what;
|
||||
pub(crate) mod where_;
|
||||
pub(crate) mod which_;
|
||||
|
||||
@ -77,15 +98,20 @@ pub(crate) use command::{
|
||||
UnevaluatedCallInfo, WholeStreamCommand,
|
||||
};
|
||||
|
||||
pub(crate) use append::Append;
|
||||
pub(crate) use classified::ClassifiedCommand;
|
||||
pub(crate) use compact::Compact;
|
||||
pub(crate) use config::Config;
|
||||
pub(crate) use count::Count;
|
||||
pub(crate) use cp::Cpy;
|
||||
pub(crate) use date::Date;
|
||||
pub(crate) use debug::Debug;
|
||||
pub(crate) use default::Default;
|
||||
pub(crate) use echo::Echo;
|
||||
pub(crate) use enter::Enter;
|
||||
pub(crate) use env::Env;
|
||||
#[allow(unused)]
|
||||
pub(crate) use evaluate_by::EvaluateBy;
|
||||
pub(crate) use exit::Exit;
|
||||
pub(crate) use fetch::Fetch;
|
||||
pub(crate) use first::First;
|
||||
@ -99,14 +125,20 @@ pub(crate) use from_ssv::FromSSV;
|
||||
pub(crate) use from_toml::FromTOML;
|
||||
pub(crate) use from_tsv::FromTSV;
|
||||
pub(crate) use from_url::FromURL;
|
||||
pub(crate) use from_xlsx::FromXLSX;
|
||||
pub(crate) use from_xml::FromXML;
|
||||
pub(crate) use from_yaml::FromYAML;
|
||||
pub(crate) use from_yaml::FromYML;
|
||||
pub(crate) use get::Get;
|
||||
pub(crate) use group_by::GroupBy;
|
||||
pub(crate) use help::Help;
|
||||
pub(crate) use histogram::Histogram;
|
||||
pub(crate) use history::History;
|
||||
pub(crate) use last::Last;
|
||||
pub(crate) use lines::Lines;
|
||||
pub(crate) use ls::LS;
|
||||
#[allow(unused)]
|
||||
pub(crate) use map_max_by::MapMaxBy;
|
||||
pub(crate) use mkdir::Mkdir;
|
||||
pub(crate) use mv::Move;
|
||||
pub(crate) use next::Next;
|
||||
@ -115,8 +147,11 @@ pub(crate) use open::Open;
|
||||
pub(crate) use pick::Pick;
|
||||
pub(crate) use pivot::Pivot;
|
||||
pub(crate) use post::Post;
|
||||
pub(crate) use prepend::Prepend;
|
||||
pub(crate) use prev::Previous;
|
||||
pub(crate) use pwd::PWD;
|
||||
#[allow(unused)]
|
||||
pub(crate) use reduce_by::ReduceBy;
|
||||
pub(crate) use reject::Reject;
|
||||
pub(crate) use reverse::Reverse;
|
||||
pub(crate) use rm::Remove;
|
||||
@ -125,8 +160,11 @@ pub(crate) use shells::Shells;
|
||||
pub(crate) use size::Size;
|
||||
pub(crate) use skip_while::SkipWhile;
|
||||
pub(crate) use sort_by::SortBy;
|
||||
pub(crate) use split_by::SplitBy;
|
||||
pub(crate) use split_column::SplitColumn;
|
||||
pub(crate) use split_row::SplitRow;
|
||||
#[allow(unused)]
|
||||
pub(crate) use t_sort_by::TSortBy;
|
||||
pub(crate) use table::Table;
|
||||
pub(crate) use tags::Tags;
|
||||
pub(crate) use to_bson::ToBSON;
|
||||
@ -140,5 +178,6 @@ pub(crate) use to_url::ToURL;
|
||||
pub(crate) use to_yaml::ToYAML;
|
||||
pub(crate) use trim::Trim;
|
||||
pub(crate) use version::Version;
|
||||
pub(crate) use what::What;
|
||||
pub(crate) use where_::Where;
|
||||
pub(crate) use which_::Which;
|
||||
|
src/commands/append.rs
@ -0,0 +1,47 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::errors::ShellError;
|
||||
use crate::parser::CommandRegistry;
|
||||
use crate::prelude::*;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct AppendArgs {
|
||||
row: Value,
|
||||
}
|
||||
|
||||
pub struct Append;
|
||||
|
||||
impl WholeStreamCommand for Append {
|
||||
fn name(&self) -> &str {
|
||||
"append"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("append").required(
|
||||
"row value",
|
||||
SyntaxShape::Any,
|
||||
"the value of the row to append to the table",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Append the given row to the table"
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
args.process(registry, append)?.run()
|
||||
}
|
||||
}
|
||||
|
||||
fn append(
|
||||
AppendArgs { row }: AppendArgs,
|
||||
RunnableContext { input, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let mut after: VecDeque<Value> = VecDeque::new();
|
||||
after.push_back(row);
|
||||
|
||||
Ok(OutputStream::from_input(input.values.chain(after)))
|
||||
}
|
@ -46,7 +46,9 @@ pub fn autoview(
|
||||
Ok(OutputStream::new(async_stream! {
|
||||
let mut output_stream: OutputStream = context.input.into();
|
||||
|
||||
match output_stream.try_next().await {
|
||||
let next = output_stream.try_next().await;
|
||||
|
||||
match next {
|
||||
Ok(Some(x)) => {
|
||||
match output_stream.try_next().await {
|
||||
Ok(Some(y)) => {
|
||||
@ -91,14 +93,33 @@ pub fn autoview(
|
||||
|
||||
let raw = raw.clone();
|
||||
|
||||
let mut command_args = raw.with_input(new_input.into());
|
||||
let input: Vec<Value> = new_input.into();
|
||||
|
||||
if input.len() > 0 && input.iter().all(|value| value.value.is_error()) {
|
||||
let first = &input[0];
|
||||
|
||||
let mut host = context.host.clone();
|
||||
let mut host = match host.lock() {
|
||||
Err(err) => {
|
||||
errln!("Unexpected error acquiring host lock: {:?}", err);
|
||||
return;
|
||||
}
|
||||
Ok(val) => val
|
||||
};
|
||||
|
||||
crate::cli::print_err(first.value.expect_error(), &*host, &context.source);
|
||||
return;
|
||||
}
|
||||
|
||||
let mut command_args = raw.with_input(input);
|
||||
let mut named_args = NamedArguments::new();
|
||||
named_args.insert_optional("start_number", Some(Expression::number(current_idx, Tag::unknown())));
|
||||
command_args.call_info.args.named = Some(named_args);
|
||||
|
||||
let result = table.run(command_args, &context.commands, false);
|
||||
let result = table.run(command_args, &context.commands);
|
||||
result.collect::<Vec<_>>().await;
|
||||
|
||||
|
||||
if finished {
|
||||
break;
|
||||
} else {
|
||||
@ -110,49 +131,49 @@ pub fn autoview(
|
||||
_ => {
|
||||
if let ReturnSuccess::Value(x) = x {
|
||||
match x {
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::String(ref s)),
|
||||
Value {
|
||||
value: UntaggedValue::Primitive(Primitive::String(ref s)),
|
||||
tag: Tag { anchor, span },
|
||||
} if anchor.is_some() => {
|
||||
if let Some(text) = text {
|
||||
let mut stream = VecDeque::new();
|
||||
stream.push_back(Value::string(s).tagged(Tag { anchor, span }));
|
||||
let result = text.run(raw.with_input(stream.into()), &context.commands, false);
|
||||
stream.push_back(UntaggedValue::string(s).into_value(Tag { anchor, span }));
|
||||
let result = text.run(raw.with_input(stream.into()), &context.commands);
|
||||
result.collect::<Vec<_>>().await;
|
||||
} else {
|
||||
println!("{}", s);
|
||||
outln!("{}", s);
|
||||
}
|
||||
}
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::String(s)),
|
||||
Value {
|
||||
value: UntaggedValue::Primitive(Primitive::String(s)),
|
||||
..
|
||||
} => {
|
||||
println!("{}", s);
|
||||
outln!("{}", s);
|
||||
}
|
||||
|
||||
Tagged { item: Value::Primitive(Primitive::Binary(ref b)), .. } => {
|
||||
Value { value: UntaggedValue::Primitive(Primitive::Binary(ref b)), .. } => {
|
||||
if let Some(binary) = binary {
|
||||
let mut stream = VecDeque::new();
|
||||
stream.push_back(x.clone());
|
||||
let result = binary.run(raw.with_input(stream.into()), &context.commands, false);
|
||||
stream.push_back(x);
|
||||
let result = binary.run(raw.with_input(stream.into()), &context.commands);
|
||||
result.collect::<Vec<_>>().await;
|
||||
} else {
|
||||
use pretty_hex::*;
|
||||
println!("{:?}", b.hex_dump());
|
||||
outln!("{:?}", b.hex_dump());
|
||||
}
|
||||
}
|
||||
|
||||
Tagged { item: Value::Error(e), .. } => {
|
||||
Value { value: UntaggedValue::Error(e), .. } => {
|
||||
yield Err(e);
|
||||
}
|
||||
Tagged { item: ref item, .. } => {
|
||||
Value { value: ref item, .. } => {
|
||||
if let Some(table) = table {
|
||||
let mut stream = VecDeque::new();
|
||||
stream.push_back(x.clone());
|
||||
let result = table.run(raw.with_input(stream.into()), &context.commands, false);
|
||||
stream.push_back(x);
|
||||
let result = table.run(raw.with_input(stream.into()), &context.commands);
|
||||
result.collect::<Vec<_>>().await;
|
||||
} else {
|
||||
println!("{:?}", item);
|
||||
outln!("{:?}", item);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -161,13 +182,13 @@ pub fn autoview(
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
//println!("<no results>");
|
||||
//outln!("<no results>");
|
||||
}
|
||||
}
|
||||
|
||||
// Needed for async_stream to type check
|
||||
if false {
|
||||
yield ReturnSuccess::value(Value::nothing().tagged_unknown());
|
||||
yield ReturnSuccess::value(UntaggedValue::nothing().into_untagged_value());
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
@ -10,7 +10,11 @@ impl WholeStreamCommand for CD {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("cd").optional("directory", SyntaxShape::Path)
|
||||
Signature::build("cd").optional(
|
||||
"directory",
|
||||
SyntaxShape::Path,
|
||||
"the directory to change to",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
|
src/commands/classified/dynamic.rs
@ -0,0 +1,7 @@
|
||||
use crate::parser::hir;
|
||||
use derive_new::new;
|
||||
|
||||
#[derive(new, Debug, Eq, PartialEq)]
|
||||
pub(crate) struct Command {
|
||||
pub(crate) args: hir::Call,
|
||||
}
|
@ -1,10 +1,9 @@
|
||||
use crate::parser::{hir, TokenNode};
|
||||
use super::ClassifiedInputStream;
|
||||
use crate::prelude::*;
|
||||
use bytes::{BufMut, BytesMut};
|
||||
use derive_new::new;
|
||||
use futures::stream::StreamExt;
|
||||
use futures_codec::{Decoder, Encoder, Framed};
|
||||
use log::{log_enabled, trace};
|
||||
use log::trace;
|
||||
use std::io::{Error, ErrorKind};
|
||||
use subprocess::Exec;
|
||||
|
||||
@ -44,169 +43,34 @@ impl Decoder for LinesCodec {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct ClassifiedInputStream {
|
||||
pub(crate) objects: InputStream,
|
||||
pub(crate) stdin: Option<std::fs::File>,
|
||||
}
|
||||
|
||||
impl ClassifiedInputStream {
|
||||
pub(crate) fn new() -> ClassifiedInputStream {
|
||||
ClassifiedInputStream {
|
||||
objects: VecDeque::new().into(),
|
||||
stdin: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn from_input_stream(stream: impl Into<InputStream>) -> ClassifiedInputStream {
|
||||
ClassifiedInputStream {
|
||||
objects: stream.into(),
|
||||
stdin: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn from_stdout(stdout: std::fs::File) -> ClassifiedInputStream {
|
||||
ClassifiedInputStream {
|
||||
objects: VecDeque::new().into(),
|
||||
stdin: Some(stdout),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct ClassifiedPipeline {
|
||||
pub(crate) commands: Vec<ClassifiedCommand>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Eq, PartialEq)]
|
||||
pub(crate) enum ClassifiedCommand {
|
||||
#[allow(unused)]
|
||||
Expr(TokenNode),
|
||||
Internal(InternalCommand),
|
||||
#[allow(unused)]
|
||||
Dynamic(hir::Call),
|
||||
External(ExternalCommand),
|
||||
}
|
||||
|
||||
#[derive(new, Debug, Eq, PartialEq)]
|
||||
pub(crate) struct InternalCommand {
|
||||
pub(crate) name: String,
|
||||
pub(crate) name_tag: Tag,
|
||||
pub(crate) args: hir::Call,
|
||||
}
|
||||
|
||||
#[derive(new, Debug, Eq, PartialEq)]
|
||||
pub(crate) struct DynamicCommand {
|
||||
pub(crate) args: hir::Call,
|
||||
}
|
||||
|
||||
impl InternalCommand {
|
||||
pub(crate) fn run(
|
||||
self,
|
||||
context: &mut Context,
|
||||
input: ClassifiedInputStream,
|
||||
source: Text,
|
||||
is_first_command: bool,
|
||||
) -> Result<InputStream, ShellError> {
|
||||
if log_enabled!(log::Level::Trace) {
|
||||
trace!(target: "nu::run::internal", "->");
|
||||
trace!(target: "nu::run::internal", "{}", self.name);
|
||||
trace!(target: "nu::run::internal", "{}", self.args.debug(&source));
|
||||
}
|
||||
|
||||
let objects: InputStream =
|
||||
trace_stream!(target: "nu::trace_stream::internal", "input" = input.objects);
|
||||
|
||||
let command = context.expect_command(&self.name);
|
||||
|
||||
let result = {
|
||||
context.run_command(
|
||||
command,
|
||||
self.name_tag.clone(),
|
||||
self.args,
|
||||
&source,
|
||||
objects,
|
||||
is_first_command,
|
||||
)
|
||||
};
|
||||
|
||||
let result = trace_out_stream!(target: "nu::trace_stream::internal", source: &source, "output" = result);
|
||||
let mut result = result.values;
|
||||
let mut context = context.clone();
|
||||
|
||||
let stream = async_stream! {
|
||||
while let Some(item) = result.next().await {
|
||||
match item {
|
||||
Ok(ReturnSuccess::Action(action)) => match action {
|
||||
CommandAction::ChangePath(path) => {
|
||||
context.shell_manager.set_path(path);
|
||||
}
|
||||
CommandAction::Exit => std::process::exit(0), // TODO: save history.txt
|
||||
CommandAction::EnterHelpShell(value) => {
|
||||
match value {
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::String(cmd)),
|
||||
tag,
|
||||
} => {
|
||||
context.shell_manager.insert_at_current(Box::new(
|
||||
HelpShell::for_command(
|
||||
Value::string(cmd).tagged(tag),
|
||||
&context.registry(),
|
||||
).unwrap(),
|
||||
));
|
||||
}
|
||||
_ => {
|
||||
context.shell_manager.insert_at_current(Box::new(
|
||||
HelpShell::index(&context.registry()).unwrap(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
CommandAction::EnterValueShell(value) => {
|
||||
context
|
||||
.shell_manager
|
||||
.insert_at_current(Box::new(ValueShell::new(value)));
|
||||
}
|
||||
CommandAction::EnterShell(location) => {
|
||||
context.shell_manager.insert_at_current(Box::new(
|
||||
FilesystemShell::with_location(location, context.registry().clone()).unwrap(),
|
||||
));
|
||||
}
|
||||
CommandAction::PreviousShell => {
|
||||
context.shell_manager.prev();
|
||||
}
|
||||
CommandAction::NextShell => {
|
||||
context.shell_manager.next();
|
||||
}
|
||||
CommandAction::LeaveShell => {
|
||||
context.shell_manager.remove_at_current();
|
||||
if context.shell_manager.is_empty() {
|
||||
std::process::exit(0); // TODO: save history.txt
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
Ok(ReturnSuccess::Value(v)) => {
|
||||
yield Ok(v);
|
||||
}
|
||||
|
||||
Err(x) => {
|
||||
yield Ok(Value::Error(x).tagged_unknown());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(stream.to_input_stream())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Eq, PartialEq)]
|
||||
pub(crate) struct ExternalCommand {
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct Command {
|
||||
pub(crate) name: String,
|
||||
|
||||
pub(crate) name_tag: Tag,
|
||||
pub(crate) args: Vec<Tagged<String>>,
|
||||
pub(crate) args: ExternalArgs,
|
||||
}
|
||||
|
||||
impl HasSpan for Command {
|
||||
fn span(&self) -> Span {
|
||||
self.name_tag.span.until(self.args.span)
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebug for Command {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
b::typed(
|
||||
"external command",
|
||||
b::description(&self.name)
|
||||
+ b::preceded(
|
||||
b::space(),
|
||||
b::intersperse(
|
||||
self.args.iter().map(|a| b::primitive(format!("{}", a.arg))),
|
||||
b::space(),
|
||||
),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@ -216,7 +80,7 @@ pub(crate) enum StreamNext {
|
||||
Internal,
|
||||
}
|
||||
|
||||
impl ExternalCommand {
|
||||
impl Command {
|
||||
pub(crate) async fn run(
|
||||
self,
|
||||
context: &mut Context,
|
||||
@ -224,16 +88,18 @@ impl ExternalCommand {
|
||||
stream_next: StreamNext,
|
||||
) -> Result<ClassifiedInputStream, ShellError> {
|
||||
let stdin = input.stdin;
|
||||
let inputs: Vec<Tagged<Value>> = input.objects.into_vec().await;
|
||||
let inputs: Vec<Value> = input.objects.into_vec().await;
|
||||
|
||||
trace!(target: "nu::run::external", "-> {}", self.name);
|
||||
trace!(target: "nu::run::external", "inputs = {:?}", inputs);
|
||||
|
||||
let mut arg_string = format!("{}", self.name);
|
||||
for arg in &self.args {
|
||||
for arg in &self.args.list {
|
||||
arg_string.push_str(&arg);
|
||||
}
|
||||
|
||||
let home_dir = dirs::home_dir();
|
||||
|
||||
trace!(target: "nu::run::external", "command = {:?}", self.name);
|
||||
|
||||
let mut process;
|
||||
@ -242,12 +108,12 @@ impl ExternalCommand {
|
||||
.iter()
|
||||
.map(|i| {
|
||||
i.as_string().map_err(|_| {
|
||||
let arg = self.args.iter().find(|arg| arg.item.contains("$it"));
|
||||
let arg = self.args.iter().find(|arg| arg.arg.contains("$it"));
|
||||
if let Some(arg) = arg {
|
||||
ShellError::labeled_error(
|
||||
"External $it needs string data",
|
||||
"given row instead of string data",
|
||||
arg.tag(),
|
||||
&arg.tag,
|
||||
)
|
||||
} else {
|
||||
ShellError::labeled_error(
|
||||
@ -265,6 +131,13 @@ impl ExternalCommand {
|
||||
if arg.chars().all(|c| c.is_whitespace()) {
|
||||
None
|
||||
} else {
|
||||
// Let's also replace ~ as we shell out
|
||||
let arg = if let Some(ref home_dir) = home_dir {
|
||||
arg.replace("~", home_dir.to_str().unwrap())
|
||||
} else {
|
||||
arg.replace("~", "~")
|
||||
};
|
||||
|
||||
Some(arg.replace("$it", &i))
|
||||
}
|
||||
});
|
||||
@ -275,7 +148,14 @@ impl ExternalCommand {
|
||||
process = Exec::shell(itertools::join(commands, " && "))
|
||||
} else {
|
||||
process = Exec::cmd(&self.name);
|
||||
for arg in &self.args {
|
||||
for arg in &self.args.list {
|
||||
// Let's also replace ~ as we shell out
|
||||
let arg = if let Some(ref home_dir) = home_dir {
|
||||
arg.replace("~", home_dir.to_str().unwrap())
|
||||
} else {
|
||||
arg.replace("~", "~")
|
||||
};
|
||||
|
||||
let arg_chars: Vec<_> = arg.chars().collect();
|
||||
if arg_chars.len() > 1
|
||||
&& arg_chars[0] == '"'
|
||||
@ -285,7 +165,7 @@ impl ExternalCommand {
|
||||
let new_arg: String = arg_chars[1..arg_chars.len() - 1].iter().collect();
|
||||
process = process.arg(new_arg);
|
||||
} else {
|
||||
process = process.arg(arg.item.clone());
|
||||
process = process.arg(arg.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -342,10 +222,11 @@ impl ExternalCommand {
|
||||
let stdout = popen.stdout.take().unwrap();
|
||||
let file = futures::io::AllowStdIo::new(stdout);
|
||||
let stream = Framed::new(file, LinesCodec {});
|
||||
let stream =
|
||||
stream.map(move |line| Value::string(line.unwrap()).tagged(&name_tag));
|
||||
let stream = stream.map(move |line| {
|
||||
UntaggedValue::string(line.unwrap()).into_value(&name_tag)
|
||||
});
|
||||
Ok(ClassifiedInputStream::from_input_stream(
|
||||
stream.boxed() as BoxStream<'static, Tagged<Value>>
|
||||
stream.boxed() as BoxStream<'static, Value>
|
||||
))
|
||||
}
|
||||
}
|
||||
@ -358,3 +239,37 @@ impl ExternalCommand {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct ExternalArg {
|
||||
pub arg: String,
|
||||
pub tag: Tag,
|
||||
}
|
||||
|
||||
impl std::ops::Deref for ExternalArg {
|
||||
type Target = str;
|
||||
|
||||
fn deref(&self) -> &str {
|
||||
&self.arg
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct ExternalArgs {
|
||||
pub list: Vec<ExternalArg>,
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
impl ExternalArgs {
|
||||
pub fn iter(&self) -> impl Iterator<Item = &ExternalArg> {
|
||||
self.list.iter()
|
||||
}
|
||||
}
|
||||
|
||||
impl std::ops::Deref for ExternalArgs {
|
||||
type Target = [ExternalArg];
|
||||
|
||||
fn deref(&self) -> &[ExternalArg] {
|
||||
&self.list
|
||||
}
|
||||
}
|
src/commands/classified/internal.rs
@ -0,0 +1,147 @@
|
||||
use crate::parser::hir;
|
||||
use crate::prelude::*;
|
||||
use derive_new::new;
|
||||
use log::{log_enabled, trace};
|
||||
|
||||
use super::ClassifiedInputStream;
|
||||
|
||||
#[derive(new, Debug, Clone, Eq, PartialEq)]
|
||||
pub struct Command {
|
||||
pub(crate) name: String,
|
||||
pub(crate) name_tag: Tag,
|
||||
pub(crate) args: hir::Call,
|
||||
}
|
||||
|
||||
impl HasSpan for Command {
|
||||
fn span(&self) -> Span {
|
||||
let start = self.name_tag.span;
|
||||
|
||||
start.until(self.args.span)
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for Command {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::typed(
|
||||
"internal command",
|
||||
b::description(&self.name) + b::space() + self.args.pretty_debug(source),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl Command {
|
||||
pub(crate) fn run(
|
||||
self,
|
||||
context: &mut Context,
|
||||
input: ClassifiedInputStream,
|
||||
source: Text,
|
||||
) -> Result<InputStream, ShellError> {
|
||||
if log_enabled!(log::Level::Trace) {
|
||||
trace!(target: "nu::run::internal", "->");
|
||||
trace!(target: "nu::run::internal", "{}", self.name);
|
||||
trace!(target: "nu::run::internal", "{}", self.args.debug(&source));
|
||||
}
|
||||
|
||||
let objects: InputStream =
|
||||
trace_stream!(target: "nu::trace_stream::internal", "input" = input.objects);
|
||||
|
||||
let command = context.expect_command(&self.name);
|
||||
|
||||
let result =
|
||||
{ context.run_command(command, self.name_tag.clone(), self.args, &source, objects) };
|
||||
|
||||
let result = trace_out_stream!(target: "nu::trace_stream::internal", "output" = result);
|
||||
let mut result = result.values;
|
||||
let mut context = context.clone();
|
||||
|
||||
let stream = async_stream! {
|
||||
let mut soft_errs: Vec<ShellError> = vec![];
|
||||
let mut yielded = false;
|
||||
|
||||
while let Some(item) = result.next().await {
|
||||
match item {
|
||||
Ok(ReturnSuccess::Action(action)) => match action {
|
||||
CommandAction::ChangePath(path) => {
|
||||
context.shell_manager.set_path(path);
|
||||
}
|
||||
CommandAction::Exit => std::process::exit(0), // TODO: save history.txt
|
||||
CommandAction::Error(err) => {
|
||||
context.error(err);
|
||||
break;
|
||||
}
|
||||
CommandAction::EnterHelpShell(value) => {
|
||||
match value {
|
||||
Value {
|
||||
value: UntaggedValue::Primitive(Primitive::String(cmd)),
|
||||
tag,
|
||||
} => {
|
||||
context.shell_manager.insert_at_current(Box::new(
|
||||
HelpShell::for_command(
|
||||
UntaggedValue::string(cmd).into_value(tag),
|
||||
&context.registry(),
|
||||
).unwrap(),
|
||||
));
|
||||
}
|
||||
_ => {
|
||||
context.shell_manager.insert_at_current(Box::new(
|
||||
HelpShell::index(&context.registry()).unwrap(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
CommandAction::EnterValueShell(value) => {
|
||||
context
|
||||
.shell_manager
|
||||
.insert_at_current(Box::new(ValueShell::new(value)));
|
||||
}
|
||||
CommandAction::EnterShell(location) => {
|
||||
context.shell_manager.insert_at_current(Box::new(
|
||||
FilesystemShell::with_location(location, context.registry().clone()).unwrap(),
|
||||
));
|
||||
}
|
||||
CommandAction::PreviousShell => {
|
||||
context.shell_manager.prev();
|
||||
}
|
||||
CommandAction::NextShell => {
|
||||
context.shell_manager.next();
|
||||
}
|
||||
CommandAction::LeaveShell => {
|
||||
context.shell_manager.remove_at_current();
|
||||
if context.shell_manager.is_empty() {
|
||||
std::process::exit(0); // TODO: save history.txt
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
Ok(ReturnSuccess::Value(v)) => {
|
||||
yielded = true;
|
||||
yield Ok(v);
|
||||
}
|
||||
|
||||
Ok(ReturnSuccess::DebugValue(v)) => {
|
||||
yielded = true;
|
||||
|
||||
let doc = PrettyDebug::pretty_doc(&v);
|
||||
let mut buffer = termcolor::Buffer::ansi();
|
||||
|
||||
doc.render_raw(
|
||||
context.with_host(|host| host.width() - 5),
|
||||
&mut crate::parser::debug::TermColored::new(&mut buffer),
|
||||
).unwrap();
|
||||
|
||||
let value = String::from_utf8_lossy(buffer.as_slice());
|
||||
|
||||
yield Ok(UntaggedValue::string(value).into_untagged_value())
|
||||
}
|
||||
|
||||
Err(err) => {
|
||||
context.error(err);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(stream.to_input_stream())
|
||||
}
|
||||
}
|
src/commands/classified/mod.rs
@ -0,0 +1,74 @@
|
||||
use crate::parser::{hir, TokenNode};
|
||||
use crate::prelude::*;
|
||||
|
||||
mod dynamic;
|
||||
mod external;
|
||||
mod internal;
|
||||
mod pipeline;
|
||||
|
||||
#[allow(unused_imports)]
|
||||
pub(crate) use dynamic::Command as DynamicCommand;
|
||||
#[allow(unused_imports)]
|
||||
pub(crate) use external::{Command as ExternalCommand, ExternalArg, ExternalArgs, StreamNext};
|
||||
pub(crate) use internal::Command as InternalCommand;
|
||||
pub(crate) use pipeline::Pipeline as ClassifiedPipeline;
|
||||
|
||||
pub(crate) struct ClassifiedInputStream {
|
||||
pub(crate) objects: InputStream,
|
||||
pub(crate) stdin: Option<std::fs::File>,
|
||||
}
|
||||
|
||||
impl ClassifiedInputStream {
|
||||
pub(crate) fn new() -> ClassifiedInputStream {
|
||||
ClassifiedInputStream {
|
||||
objects: vec![UntaggedValue::nothing().into_untagged_value()].into(),
|
||||
stdin: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn from_input_stream(stream: impl Into<InputStream>) -> ClassifiedInputStream {
|
||||
ClassifiedInputStream {
|
||||
objects: stream.into(),
|
||||
stdin: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn from_stdout(stdout: std::fs::File) -> ClassifiedInputStream {
|
||||
ClassifiedInputStream {
|
||||
objects: VecDeque::new().into(),
|
||||
stdin: Some(stdout),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub enum ClassifiedCommand {
|
||||
#[allow(unused)]
|
||||
Expr(TokenNode),
|
||||
#[allow(unused)]
|
||||
Dynamic(hir::Call),
|
||||
Internal(InternalCommand),
|
||||
External(ExternalCommand),
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for ClassifiedCommand {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
match self {
|
||||
ClassifiedCommand::Expr(token) => b::typed("command", token.pretty_debug(source)),
|
||||
ClassifiedCommand::Dynamic(call) => b::typed("command", call.pretty_debug(source)),
|
||||
ClassifiedCommand::Internal(internal) => internal.pretty_debug(source),
|
||||
ClassifiedCommand::External(external) => external.pretty_debug(source),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for ClassifiedCommand {
|
||||
fn span(&self) -> Span {
|
||||
match self {
|
||||
ClassifiedCommand::Expr(node) => node.span(),
|
||||
ClassifiedCommand::Internal(command) => command.span(),
|
||||
ClassifiedCommand::Dynamic(call) => call.span,
|
||||
ClassifiedCommand::External(command) => command.span(),
|
||||
}
|
||||
}
|
||||
}
|
src/commands/classified/pipeline.rs
@ -0,0 +1,40 @@
|
||||
use super::ClassifiedCommand;
|
||||
use crate::prelude::*;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct Pipeline {
|
||||
pub(crate) commands: ClassifiedCommands,
|
||||
}
|
||||
|
||||
impl Pipeline {
|
||||
pub fn commands(list: Vec<ClassifiedCommand>, span: impl Into<Span>) -> Pipeline {
|
||||
Pipeline {
|
||||
commands: ClassifiedCommands {
|
||||
list,
|
||||
span: span.into(),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ClassifiedCommands {
|
||||
pub list: Vec<ClassifiedCommand>,
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
impl HasSpan for Pipeline {
|
||||
fn span(&self) -> Span {
|
||||
self.commands.span
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for Pipeline {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::intersperse(
|
||||
self.commands.list.iter().map(|c| c.pretty_debug(source)),
|
||||
b::operator(" | "),
|
||||
)
|
||||
.or(b::delimit("<", b::description("empty pipeline"), ">"))
|
||||
}
|
||||
}
|
@ -40,7 +40,7 @@ pub mod clipboard {
|
||||
RunnableContext { input, name, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
|
||||
let mut clip_stream = inner_clip(values, name).await;
|
||||
while let Some(value) = clip_stream.next().await {
|
||||
@ -53,7 +53,7 @@ pub mod clipboard {
|
||||
Ok(OutputStream::from(stream))
|
||||
}
|
||||
|
||||
async fn inner_clip(input: Vec<Tagged<Value>>, name: Tag) -> OutputStream {
|
||||
async fn inner_clip(input: Vec<Value>, name: Tag) -> OutputStream {
|
||||
let mut clip_context: ClipboardContext = ClipboardProvider::new().unwrap();
|
||||
let mut new_copy_data = String::new();
|
||||
|
||||
|
@@ -7,7 +7,6 @@ use crate::prelude::*;
use derive_new::new;
use getset::Getters;
use serde::{Deserialize, Serialize};
use std::fmt;
use std::ops::Deref;
use std::path::PathBuf;
use std::sync::atomic::AtomicBool;
@@ -19,12 +18,6 @@ pub struct UnevaluatedCallInfo {
pub name_tag: Tag,
}

impl ToDebug for UnevaluatedCallInfo {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
self.args.fmt_debug(f, source)
}
}

impl UnevaluatedCallInfo {
pub fn evaluate(
self,
@@ -68,7 +61,7 @@ impl CallInfo {
#[derive(Getters)]
#[get = "pub(crate)"]
pub struct CommandArgs {
pub host: Arc<Mutex<dyn Host>>,
pub host: Arc<Mutex<Box<dyn Host>>>,
pub ctrl_c: Arc<AtomicBool>,
pub shell_manager: ShellManager,
pub call_info: UnevaluatedCallInfo,
@@ -78,14 +71,14 @@ pub struct CommandArgs {
#[derive(Getters, Clone)]
#[get = "pub(crate)"]
pub struct RawCommandArgs {
pub host: Arc<Mutex<dyn Host>>,
pub host: Arc<Mutex<Box<dyn Host>>>,
pub ctrl_c: Arc<AtomicBool>,
pub shell_manager: ShellManager,
pub call_info: UnevaluatedCallInfo,
}

impl RawCommandArgs {
pub fn with_input(self, input: Vec<Tagged<Value>>) -> CommandArgs {
pub fn with_input(self, input: Vec<Value>) -> CommandArgs {
CommandArgs {
host: self.host,
ctrl_c: self.ctrl_c,
@@ -94,11 +87,15 @@ impl RawCommandArgs {
input: input.into(),
}
}

pub fn source(&self) -> Text {
self.call_info.source.clone()
}
}

impl ToDebug for CommandArgs {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
self.call_info.fmt_debug(f, source)
impl std::fmt::Debug for CommandArgs {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.call_info.fmt(f)
}
}

@@ -122,13 +119,18 @@ impl CommandArgs {
))
}

pub fn process<'de, T: Deserialize<'de>>(
pub fn source(&self) -> Text {
self.call_info.source.clone()
}

pub fn process<'de, T: Deserialize<'de>, O: ToOutputStream>(
self,
registry: &CommandRegistry,
callback: fn(T, RunnableContext) -> Result<OutputStream, ShellError>,
) -> Result<RunnableArgs<T>, ShellError> {
callback: fn(T, RunnableContext) -> Result<O, ShellError>,
) -> Result<RunnableArgs<T, O>, ShellError> {
let shell_manager = self.shell_manager.clone();
let host = self.host.clone();
let source = self.source();
let ctrl_c = self.ctrl_c.clone();
let args = self.evaluate_once(registry)?;
let call_info = args.call_info.clone();
@@ -141,6 +143,7 @@ impl CommandArgs {
context: RunnableContext {
input,
commands: registry.clone(),
source,
shell_manager,
name: name_tag,
host,
@@ -164,6 +167,7 @@ impl CommandArgs {

let shell_manager = self.shell_manager.clone();
let host = self.host.clone();
let source = self.source();
let ctrl_c = self.ctrl_c.clone();
let args = self.evaluate_once(registry)?;
let call_info = args.call_info.clone();
@@ -177,6 +181,7 @@ impl CommandArgs {
context: RunnableContext {
input,
commands: registry.clone(),
source,
shell_manager,
name: name_tag,
host,
@@ -202,7 +207,8 @@ impl RunnablePerItemContext {
pub struct RunnableContext {
pub input: InputStream,
pub shell_manager: ShellManager,
pub host: Arc<Mutex<dyn Host>>,
pub host: Arc<Mutex<Box<dyn Host>>>,
pub source: Text,
pub ctrl_c: Arc<AtomicBool>,
pub commands: CommandRegistry,
pub name: Tag,
@@ -226,15 +232,15 @@ impl<T> RunnablePerItemArgs<T> {
}
}

pub struct RunnableArgs<T> {
pub struct RunnableArgs<T, O: ToOutputStream> {
args: T,
context: RunnableContext,
callback: fn(T, RunnableContext) -> Result<OutputStream, ShellError>,
callback: fn(T, RunnableContext) -> Result<O, ShellError>,
}

impl<T> RunnableArgs<T> {
impl<T, O: ToOutputStream> RunnableArgs<T, O> {
pub fn run(self) -> Result<OutputStream, ShellError> {
(self.callback)(self.args, self.context)
(self.callback)(self.args, self.context).map(|v| v.to_output_stream())
}
}

@@ -347,11 +353,11 @@ impl EvaluatedCommandArgs {
&self.call_info.args
}

pub fn nth(&self, pos: usize) -> Option<&Tagged<Value>> {
pub fn nth(&self, pos: usize) -> Option<&Value> {
self.call_info.args.nth(pos)
}

pub fn expect_nth(&self, pos: usize) -> Result<&Tagged<Value>, ShellError> {
pub fn expect_nth(&self, pos: usize) -> Result<&Value, ShellError> {
self.call_info.args.expect_nth(pos)
}

@@ -359,11 +365,11 @@ impl EvaluatedCommandArgs {
self.call_info.args.len()
}

pub fn get(&self, name: &str) -> Option<&Tagged<Value>> {
pub fn get(&self, name: &str) -> Option<&Value> {
self.call_info.args.get(name)
}

pub fn slice_from(&self, from: usize) -> Vec<Tagged<Value>> {
pub fn slice_from(&self, from: usize) -> Vec<Value> {
let positional = &self.call_info.args.positional;

match positional {
@@ -377,57 +383,56 @@ impl EvaluatedCommandArgs {
}
}

#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum CommandAction {
ChangePath(String),
Exit,
Error(ShellError),
EnterShell(String),
EnterValueShell(Tagged<Value>),
EnterHelpShell(Tagged<Value>),
EnterValueShell(Value),
EnterHelpShell(Value),
PreviousShell,
NextShell,
LeaveShell,
}

impl ToDebug for CommandAction {
fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result {
impl PrettyDebug for CommandAction {
fn pretty(&self) -> DebugDocBuilder {
match self {
CommandAction::ChangePath(s) => write!(f, "action:change-path={}", s),
CommandAction::Exit => write!(f, "action:exit"),
CommandAction::EnterShell(s) => write!(f, "action:enter-shell={}", s),
CommandAction::EnterValueShell(t) => {
write!(f, "action:enter-value-shell={:?}", t.debug())
}
CommandAction::EnterHelpShell(t) => {
write!(f, "action:enter-help-shell={:?}", t.debug())
}
CommandAction::PreviousShell => write!(f, "action:previous-shell"),
CommandAction::NextShell => write!(f, "action:next-shell"),
CommandAction::LeaveShell => write!(f, "action:leave-shell"),
CommandAction::ChangePath(path) => b::typed("change path", b::description(path)),
CommandAction::Exit => b::description("exit"),
CommandAction::Error(_) => b::error("error"),
CommandAction::EnterShell(s) => b::typed("enter shell", b::description(s)),
CommandAction::EnterValueShell(v) => b::typed("enter value shell", v.pretty()),
CommandAction::EnterHelpShell(v) => b::typed("enter help shell", v.pretty()),
CommandAction::PreviousShell => b::description("previous shell"),
CommandAction::NextShell => b::description("next shell"),
CommandAction::LeaveShell => b::description("leave shell"),
}
}
}

#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ReturnSuccess {
Value(Tagged<Value>),
Value(Value),
DebugValue(Value),
Action(CommandAction),
}

impl PrettyDebug for ReturnSuccess {
fn pretty(&self) -> DebugDocBuilder {
match self {
ReturnSuccess::Value(value) => b::typed("value", value.pretty()),
ReturnSuccess::DebugValue(value) => b::typed("debug value", value.pretty()),
ReturnSuccess::Action(action) => b::typed("action", action.pretty()),
}
}
}

pub type ReturnValue = Result<ReturnSuccess, ShellError>;

impl ToDebug for ReturnValue {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
match self {
Err(err) => write!(f, "{}", err.debug(source)),
Ok(ReturnSuccess::Value(v)) => write!(f, "{:?}", v.debug()),
Ok(ReturnSuccess::Action(a)) => write!(f, "{}", a.debug(source)),
}
}
}

impl From<Tagged<Value>> for ReturnValue {
fn from(input: Tagged<Value>) -> ReturnValue {
impl From<Value> for ReturnValue {
fn from(input: Value) -> ReturnValue {
Ok(ReturnSuccess::Value(input))
}
}
@@ -437,10 +442,14 @@ impl ReturnSuccess {
Ok(ReturnSuccess::Action(CommandAction::ChangePath(path)))
}

pub fn value(input: impl Into<Tagged<Value>>) -> ReturnValue {
pub fn value(input: impl Into<Value>) -> ReturnValue {
Ok(ReturnSuccess::Value(input.into()))
}

pub fn debug_value(input: impl Into<Value>) -> ReturnValue {
Ok(ReturnSuccess::DebugValue(input.into()))
}

pub fn action(input: CommandAction) -> ReturnValue {
Ok(ReturnSuccess::Action(input))
}
@@ -494,7 +503,7 @@ pub trait PerItemCommand: Send + Sync {
call_info: &CallInfo,
registry: &CommandRegistry,
raw_args: &RawCommandArgs,
input: Tagged<Value>,
input: Value,
) -> Result<OutputStream, ShellError>;

fn is_binary(&self) -> bool {
@@ -507,6 +516,29 @@ pub enum Command {
PerItem(Arc<dyn PerItemCommand>),
}

impl PrettyDebugWithSource for Command {
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
match self {
Command::WholeStream(command) => b::typed(
"whole stream command",
b::description(command.name())
+ b::space()
+ b::equals()
+ b::space()
+ command.signature().pretty_debug(source),
),
Command::PerItem(command) => b::typed(
"per item command",
b::description(command.name())
+ b::space()
+ b::equals()
+ b::space()
+ command.signature().pretty_debug(source),
),
}
}
}

impl std::fmt::Debug for Command {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
@@ -538,20 +570,13 @@ impl Command {
}
}

pub fn run(
&self,
args: CommandArgs,
registry: &registry::CommandRegistry,
is_first_command: bool,
) -> OutputStream {
pub fn run(&self, args: CommandArgs, registry: &registry::CommandRegistry) -> OutputStream {
match self {
Command::WholeStream(command) => match command.run(args, registry) {
Ok(stream) => stream,
Err(err) => OutputStream::one(Err(err)),
},
Command::PerItem(command) => {
self.run_helper(command.clone(), args, registry.clone(), is_first_command)
}
Command::PerItem(command) => self.run_helper(command.clone(), args, registry.clone()),
}
}

@@ -560,7 +585,6 @@ impl Command {
command: Arc<dyn PerItemCommand>,
args: CommandArgs,
registry: CommandRegistry,
is_first_command: bool,
) -> OutputStream {
let raw_args = RawCommandArgs {
host: args.host,
@@ -569,40 +593,23 @@ impl Command {
call_info: args.call_info,
};

if !is_first_command {
let out = args
.input
.values
.map(move |x| {
let call_info = raw_args
.clone()
.call_info
.evaluate(&registry, &Scope::it_value(x.clone()))
.unwrap();
match command.run(&call_info, &registry, &raw_args, x) {
Ok(o) => o,
Err(e) => VecDeque::from(vec![ReturnValue::Err(e)]).to_output_stream(),
}
})
.flatten();
let out = args
.input
.values
.map(move |x| {
let call_info = raw_args
.clone()
.call_info
.evaluate(&registry, &Scope::it_value(x.clone()))
.unwrap();
match command.run(&call_info, &registry, &raw_args, x) {
Ok(o) => o,
Err(e) => VecDeque::from(vec![ReturnValue::Err(e)]).to_output_stream(),
}
})
.flatten();

out.to_output_stream()
} else {
let nothing = Value::nothing().tagged(Tag::unknown());
let call_info = raw_args
.clone()
.call_info
.evaluate(&registry, &Scope::it_value(nothing.clone()))
.unwrap();

match command
.run(&call_info, &registry, &raw_args, nothing)
.into()
{
Ok(o) => o,
Err(e) => OutputStream::one(Err(e)),
}
}
out.to_output_stream()
}

pub fn is_binary(&self) -> bool {
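The `RunnableArgs<T, O>` change above lets a command callback return any type that can be turned into an output stream, with the conversion applied once in `run()` instead of at every call site. The following standalone sketch (not nushell code; the trait, struct, and function names here are illustrative stand-ins for `ToOutputStream`, `OutputStream`, and `RunnableArgs`) shows the shape of that pattern under those assumptions:

// --- illustrative sketch, not part of the diff ---
// `IntoOutput` stands in for ToOutputStream; Vec<String> stands in for OutputStream.
trait IntoOutput {
    fn into_output(self) -> Vec<String>;
}

impl IntoOutput for String {
    fn into_output(self) -> Vec<String> {
        vec![self]
    }
}

impl IntoOutput for Vec<String> {
    fn into_output(self) -> Vec<String> {
        self
    }
}

// The callback may return any O: IntoOutput; run() normalizes it to one output type.
struct Runnable<T, O: IntoOutput> {
    args: T,
    callback: fn(T) -> Result<O, String>,
}

impl<T, O: IntoOutput> Runnable<T, O> {
    fn run(self) -> Result<Vec<String>, String> {
        (self.callback)(self.args).map(|v| v.into_output())
    }
}

fn main() {
    // One callback returns a single value, the other a collection; both run() the same way.
    let single = Runnable {
        args: 3,
        callback: |n: i32| -> Result<String, String> { Ok(n.to_string()) },
    };
    let many = Runnable {
        args: 3,
        callback: |n: i32| -> Result<Vec<String>, String> { Ok(vec!["x".to_string(); n as usize]) },
    };
    println!("{:?} {:?}", single.run(), many.run());
}
// --- end sketch ---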
src/commands/compact.rs (new file, 61 lines)
@@ -0,0 +1,61 @@
use crate::commands::WholeStreamCommand;
use crate::data::base::UntaggedValue;
use crate::errors::ShellError;
use crate::parser::registry::{CommandRegistry, Signature};
use crate::prelude::*;
use futures::stream::StreamExt;
use nu_source::Tagged;

pub struct Compact;

#[derive(Deserialize)]
pub struct CompactArgs {
rest: Vec<Tagged<String>>,
}

impl WholeStreamCommand for Compact {
fn name(&self) -> &str {
"compact"
}

fn signature(&self) -> Signature {
Signature::build("compact").rest(SyntaxShape::Any, "the columns to compact from the table")
}

fn usage(&self) -> &str {
"Creates a table with non-empty rows"
}

fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, compact)?.run()
}
}

pub fn compact(
CompactArgs { rest: columns }: CompactArgs,
RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let objects = input.values.filter(move |item| {
let keep = if columns.is_empty() {
item.is_some()
} else {
match item {
Value {
value: UntaggedValue::Row(ref r),
..
} => columns
.iter()
.all(|field| r.get_data(field).borrow().is_some()),
_ => false,
}
};

futures::future::ready(keep)
});

Ok(objects.from_input_stream())
}
@@ -4,7 +4,7 @@ use crate::errors::ShellError;
use crate::parser::hir::SyntaxShape;
use crate::parser::registry::{self};
use crate::prelude::*;
use std::iter::FromIterator;
use nu_source::Tagged;
use std::path::PathBuf;

pub struct Config;
@@ -12,7 +12,8 @@ pub struct Config;
#[derive(Deserialize)]
pub struct ConfigArgs {
load: Option<Tagged<PathBuf>>,
set: Option<(Tagged<String>, Tagged<Value>)>,
set: Option<(Tagged<String>, Value)>,
set_into: Option<Tagged<String>>,
get: Option<Tagged<String>>,
clear: Tagged<bool>,
remove: Option<Tagged<String>>,
@@ -26,12 +27,25 @@ impl WholeStreamCommand for Config {

fn signature(&self) -> Signature {
Signature::build("config")
.named("load", SyntaxShape::Path)
.named("set", SyntaxShape::Any)
.named("get", SyntaxShape::Any)
.named("remove", SyntaxShape::Any)
.switch("clear")
.switch("path")
.named(
"load",
SyntaxShape::Path,
"load the config from the path give",
)
.named(
"set",
SyntaxShape::Any,
"set a value in the config, eg) --set [key value]",
)
.named(
"set_into",
SyntaxShape::Member,
"sets a variable from values in the pipeline",
)
.named("get", SyntaxShape::Any, "get a value from the config")
.named("remove", SyntaxShape::Any, "remove a value from the config")
.switch("clear", "clear the config")
.switch("path", "return the path to the config file")
}

fn usage(&self) -> &str {
@@ -51,85 +65,111 @@ pub fn config(
ConfigArgs {
load,
set,
set_into,
get,
clear,
remove,
path,
}: ConfigArgs,
RunnableContext { name, .. }: RunnableContext,
RunnableContext { name, input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let name_span = name.clone();

let configuration = if let Some(supplied) = load {
Some(supplied.item().clone())
} else {
None
let stream = async_stream! {
let configuration = if let Some(supplied) = load {
Some(supplied.item().clone())
} else {
None
};

let mut result = crate::data::config::read(name_span, &configuration)?;

if let Some(v) = get {
let key = v.to_string();
let value = result
.get(&key)
.ok_or_else(|| ShellError::labeled_error("Missing key in config", "key", v.tag()))?;

match value {
Value {
value: UntaggedValue::Table(list),
..
} => {
for l in list {
let value = l.clone();
yield ReturnSuccess::value(l.clone());
}
}
x => yield ReturnSuccess::value(x.clone()),
}
}
else if let Some((key, value)) = set {
result.insert(key.to_string(), value.clone());

config::write(&result, &configuration)?;

yield ReturnSuccess::value(UntaggedValue::Row(result.into()).into_value(&value.tag));
}
else if let Some(v) = set_into {
let rows: Vec<Value> = input.values.collect().await;
let key = v.to_string();

if rows.len() == 0 {
yield Err(ShellError::labeled_error("No values given for set_into", "needs value(s) from pipeline", v.tag()));
} else if rows.len() == 1 {
// A single value
let value = &rows[0];

result.insert(key.to_string(), value.clone());

config::write(&result, &configuration)?;

yield ReturnSuccess::value(UntaggedValue::Row(result.into()).into_value(name));
} else {
// Take in the pipeline as a table
let value = UntaggedValue::Table(rows).into_value(name.clone());

result.insert(key.to_string(), value.clone());

config::write(&result, &configuration)?;

yield ReturnSuccess::value(UntaggedValue::Row(result.into()).into_value(name));
}
}
else if let Tagged { item: true, tag } = clear {
result.clear();

config::write(&result, &configuration)?;

yield ReturnSuccess::value(UntaggedValue::Row(result.into()).into_value(tag));

return;
}
else if let Tagged { item: true, tag } = path {
let path = config::default_path_for(&configuration)?;

yield ReturnSuccess::value(UntaggedValue::Primitive(Primitive::Path(path)).into_value(tag));
}
else if let Some(v) = remove {
let key = v.to_string();

if result.contains_key(&key) {
result.swap_remove(&key);
config::write(&result, &configuration).unwrap();
} else {
yield Err(ShellError::labeled_error(
"Key does not exist in config",
"key",
v.tag(),
));
}

yield ReturnSuccess::value(UntaggedValue::Row(result.into()).into_value(v.tag()));
}
else {
yield ReturnSuccess::value(UntaggedValue::Row(result.into()).into_value(name));
}
};

let mut result = crate::data::config::read(name_span, &configuration)?;

if let Some(v) = get {
let key = v.to_string();
let value = result
.get(&key)
.ok_or_else(|| ShellError::labeled_error("Missing key in config", "key", v.tag()))?;

let mut results = VecDeque::new();

match value {
Tagged {
item: Value::Table(list),
..
} => {
for l in list {
results.push_back(ReturnSuccess::value(l.clone()));
}
}
x => results.push_back(ReturnSuccess::value(x.clone())),
}

return Ok(results.to_output_stream());
}

if let Some((key, value)) = set {
result.insert(key.to_string(), value.clone());

config::write(&result, &configuration)?;

return Ok(stream![Value::Row(result.into()).tagged(value.tag())].from_input_stream());
}

if let Tagged { item: true, tag } = clear {
result.clear();

config::write(&result, &configuration)?;

return Ok(stream![Value::Row(result.into()).tagged(tag)].from_input_stream());
}

if let Tagged { item: true, tag } = path {
let path = config::default_path_for(&configuration)?;

return Ok(stream![Value::Primitive(Primitive::Path(path)).tagged(tag)].from_input_stream());
}

if let Some(v) = remove {
let key = v.to_string();

if result.contains_key(&key) {
result.swap_remove(&key);
config::write(&result, &configuration)?;
} else {
return Err(ShellError::labeled_error(
"Key does not exist in config",
"key",
v.tag(),
));
}

let obj = VecDeque::from_iter(vec![Value::Row(result.into()).tagged(v.tag())]);
return Ok(obj.from_input_stream());
}

return Ok(vec![Value::Row(result.into()).tagged(name)].into());
Ok(stream.to_output_stream())
}
@@ -37,9 +37,9 @@ pub fn count(
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let rows: Vec<Tagged<Value>> = input.values.collect().await;
let rows: Vec<Value> = input.values.collect().await;

yield ReturnSuccess::value(Value::int(rows.len()).tagged(name))
yield ReturnSuccess::value(UntaggedValue::int(rows.len()).into_value(name))
};

Ok(stream.to_output_stream())
@@ -3,6 +3,7 @@ use crate::errors::ShellError;
use crate::parser::hir::SyntaxShape;
use crate::parser::registry::{CommandRegistry, Signature};
use crate::prelude::*;
use nu_source::Tagged;
use std::path::PathBuf;

pub struct Cpy;
@@ -21,10 +22,9 @@ impl PerItemCommand for Cpy {

fn signature(&self) -> Signature {
Signature::build("cp")
.required("src", SyntaxShape::Pattern)
.required("dst", SyntaxShape::Path)
.named("file", SyntaxShape::Any)
.switch("recursive")
.required("src", SyntaxShape::Pattern, "the place to copy from")
.required("dst", SyntaxShape::Path, "the place to copy to")
.switch("recursive", "copy recursively through subdirectories")
}

fn usage(&self) -> &str {
@@ -36,7 +36,7 @@ impl PerItemCommand for Cpy {
call_info: &CallInfo,
_registry: &CommandRegistry,
raw_args: &RawCommandArgs,
_input: Tagged<Value>,
_input: Value,
) -> Result<OutputStream, ShellError> {
call_info.process(&raw_args.shell_manager, cp)?.run()
}
@@ -17,7 +17,9 @@ impl WholeStreamCommand for Date {
}

fn signature(&self) -> Signature {
Signature::build("date").switch("utc").switch("local")
Signature::build("date")
.switch("utc", "use universal time (UTC)")
.switch("local", "use the local time")
}

fn usage(&self) -> &str {
@@ -33,26 +35,44 @@ impl WholeStreamCommand for Date {
}
}

pub fn date_to_value<T: TimeZone>(dt: DateTime<T>, tag: Tag) -> Tagged<Value>
pub fn date_to_value<T: TimeZone>(dt: DateTime<T>, tag: Tag) -> Value
where
T::Offset: Display,
{
let mut indexmap = IndexMap::new();

indexmap.insert("year".to_string(), Value::int(dt.year()).tagged(&tag));
indexmap.insert("month".to_string(), Value::int(dt.month()).tagged(&tag));
indexmap.insert("day".to_string(), Value::int(dt.day()).tagged(&tag));
indexmap.insert("hour".to_string(), Value::int(dt.hour()).tagged(&tag));
indexmap.insert("minute".to_string(), Value::int(dt.minute()).tagged(&tag));
indexmap.insert("second".to_string(), Value::int(dt.second()).tagged(&tag));
indexmap.insert(
"year".to_string(),
UntaggedValue::int(dt.year()).into_value(&tag),
);
indexmap.insert(
"month".to_string(),
UntaggedValue::int(dt.month()).into_value(&tag),
);
indexmap.insert(
"day".to_string(),
UntaggedValue::int(dt.day()).into_value(&tag),
);
indexmap.insert(
"hour".to_string(),
UntaggedValue::int(dt.hour()).into_value(&tag),
);
indexmap.insert(
"minute".to_string(),
UntaggedValue::int(dt.minute()).into_value(&tag),
);
indexmap.insert(
"second".to_string(),
UntaggedValue::int(dt.second()).into_value(&tag),
);

let tz = dt.offset();
indexmap.insert(
"timezone".to_string(),
Value::string(format!("{}", tz)).tagged(&tag),
UntaggedValue::string(format!("{}", tz)).into_value(&tag),
);

Value::Row(Dictionary::from(indexmap)).tagged(&tag)
UntaggedValue::Row(Dictionary::from(indexmap)).into_value(&tag)
}

pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
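The recurring edit in these hunks (here in `date_to_value`, and in most of the files below) replaces `Value::int(x).tagged(&tag)` with `UntaggedValue::int(x).into_value(&tag)`: instead of wrapping a value in a `Tagged<T>` layer from the outside, the tag is stored inside the value itself. A minimal standalone sketch of that ownership change follows, using simplified stand-in types rather than the real nushell `Tag`, `Tagged`, `UntaggedValue`, and `Value`:

// --- illustrative sketch, not part of the diff ---
#[derive(Debug, Clone)]
struct Tag(String); // simplified stand-in for nushell's Tag

// Old shape: the payload and the tag live in separate layers.
#[derive(Debug)]
struct Tagged<T> {
    item: T,
    tag: Tag,
}

// New shape: an untagged payload plus a constructor that bakes the tag in.
#[derive(Debug)]
enum UntaggedValue {
    Int(i64),
    String(String),
}

#[derive(Debug)]
struct Value {
    value: UntaggedValue,
    tag: Tag,
}

impl UntaggedValue {
    fn into_value(self, tag: &Tag) -> Value {
        Value { value: self, tag: tag.clone() }
    }
}

fn main() {
    let tag = Tag("source span".into());
    // Before: Tagged<Value>-style wrapping from the outside.
    let old = Tagged { item: UntaggedValue::Int(2019), tag: tag.clone() };
    // After: UntaggedValue::int(..).into_value(&tag)-style construction.
    let new = UntaggedValue::Int(2019).into_value(&tag);
    println!("{:?} vs {:?}", old, new);
}
// --- end sketch ---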
@@ -1,9 +1,11 @@
use crate::commands::WholeStreamCommand;
use crate::errors::ShellError;
use crate::prelude::*;

pub struct Debug;

#[derive(Deserialize)]
pub struct DebugArgs {}

impl WholeStreamCommand for Debug {
fn name(&self) -> &str {
"debug"
@@ -14,7 +16,7 @@ impl WholeStreamCommand for Debug {
}

fn usage(&self) -> &str {
"Debug input fed."
"Print the Rust debug representation of the values"
}

fn run(
@@ -22,18 +24,18 @@ impl WholeStreamCommand for Debug {
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
debug(args, registry)
args.process(registry, debug_value)?.run()
}
}

pub fn debug(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let input = args.input;

fn debug_value(
_args: DebugArgs,
RunnableContext { input, .. }: RunnableContext,
) -> Result<impl ToOutputStream, ShellError> {
Ok(input
.values
.map(|v| {
println!("{:?}", v);
ReturnSuccess::value(v)
ReturnSuccess::value(UntaggedValue::string(format!("{:?}", v)).into_untagged_value())
})
.to_output_stream())
}
src/commands/default.rs (new file, 73 lines)
@@ -0,0 +1,73 @@
use crate::commands::WholeStreamCommand;
use crate::errors::ShellError;
use crate::parser::CommandRegistry;
use crate::prelude::*;
use nu_source::Tagged;

#[derive(Deserialize)]
struct DefaultArgs {
column: Tagged<String>,
value: Value,
}

pub struct Default;

impl WholeStreamCommand for Default {
fn name(&self) -> &str {
"default"
}

fn signature(&self) -> Signature {
Signature::build("default")
.required("column name", SyntaxShape::String, "the name of the column")
.required(
"column value",
SyntaxShape::Any,
"the value of the column to default",
)
}

fn usage(&self) -> &str {
"Sets a default row's column if missing."
}

fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, default)?.run()
}
}

fn default(
DefaultArgs { column, value }: DefaultArgs,
RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = input
.values
.map(move |item| {
let mut result = VecDeque::new();

let should_add = match item {
Value {
value: UntaggedValue::Row(ref r),
..
} => r.get_data(&column.item).borrow().is_none(),
_ => false,
};

if should_add {
match item.insert_data_at_path(&column.item, value.clone()) {
Some(new_value) => result.push_back(ReturnSuccess::value(new_value)),
None => result.push_back(ReturnSuccess::value(item)),
}
} else {
result.push_back(ReturnSuccess::value(item));
}
result
})
.flatten();

Ok(stream.to_output_stream())
}
@@ -12,7 +12,7 @@ impl PerItemCommand for Echo {
}

fn signature(&self) -> Signature {
Signature::build("echo").rest(SyntaxShape::Any)
Signature::build("echo").rest(SyntaxShape::Any, "the values to echo")
}

fn usage(&self) -> &str {
@@ -24,7 +24,7 @@ impl PerItemCommand for Echo {
call_info: &CallInfo,
registry: &CommandRegistry,
raw_args: &RawCommandArgs,
_input: Tagged<Value>,
_input: Value,
) -> Result<OutputStream, ShellError> {
run(call_info, registry, raw_args)
}
@@ -35,37 +35,34 @@ fn run(
_registry: &CommandRegistry,
_raw_args: &RawCommandArgs,
) -> Result<OutputStream, ShellError> {
let name = call_info.name_tag.clone();

let mut output = String::new();

let mut first = true;
let mut output = vec![];

if let Some(ref positional) = call_info.args.positional {
for i in positional {
match i.as_string() {
Ok(s) => {
if !first {
output.push_str(" ");
} else {
first = false;
output.push(Ok(ReturnSuccess::Value(
UntaggedValue::string(s).into_value(i.tag.clone()),
)));
}
_ => match i {
Value {
value: UntaggedValue::Table(table),
..
} => {
for value in table {
output.push(Ok(ReturnSuccess::Value(value.clone())));
}
}

output.push_str(&s);
}
_ => {
return Err(ShellError::type_error(
"a string-compatible value",
i.tagged_type_name(),
))
}
_ => {
output.push(Ok(ReturnSuccess::Value(i.clone())));
}
},
}
}
}

let stream = VecDeque::from(vec![Ok(ReturnSuccess::Value(
Value::string(output).tagged(name),
))]);
let stream = VecDeque::from(output);

Ok(stream.to_output_stream())
}
@@ -1,7 +1,6 @@
use crate::commands::command::CommandAction;
use crate::commands::PerItemCommand;
use crate::commands::UnevaluatedCallInfo;
use crate::data::meta::Span;
use crate::errors::ShellError;
use crate::parser::registry;
use crate::prelude::*;
@@ -15,7 +14,11 @@ impl PerItemCommand for Enter {
}

fn signature(&self) -> registry::Signature {
Signature::build("enter").required("location", SyntaxShape::Path)
Signature::build("enter").required(
"location",
SyntaxShape::Path,
"the location to create a new shell from",
)
}

fn usage(&self) -> &str {
@@ -27,17 +30,19 @@ impl PerItemCommand for Enter {
call_info: &CallInfo,
registry: &registry::CommandRegistry,
raw_args: &RawCommandArgs,
_input: Tagged<Value>,
_input: Value,
) -> Result<OutputStream, ShellError> {
let registry = registry.clone();
let raw_args = raw_args.clone();
match call_info.args.expect_nth(0)? {
Tagged {
item: Value::Primitive(Primitive::Path(location)),
Value {
value: UntaggedValue::Primitive(Primitive::Path(location)),
tag,
..
} => {
let location_string = location.display().to_string();
let location_clone = location_string.clone();
let tag_clone = tag.clone();

if location.starts_with("help") {
let spec = location_string.split(":").collect::<Vec<&str>>();
@@ -46,12 +51,12 @@ impl PerItemCommand for Enter {

if registry.has(command) {
Ok(vec![Ok(ReturnSuccess::Action(CommandAction::EnterHelpShell(
Value::string(command).tagged(Tag::unknown()),
UntaggedValue::string(command).into_value(Tag::unknown()),
)))]
.into())
} else {
Ok(vec![Ok(ReturnSuccess::Action(CommandAction::EnterHelpShell(
Value::nothing().tagged(Tag::unknown()),
UntaggedValue::nothing().into_value(Tag::unknown()),
)))]
.into())
}
@@ -71,13 +76,12 @@ impl PerItemCommand for Enter {
crate::commands::open::fetch(
&full_path,
&location_clone,
Span::unknown(),
)
.await.unwrap();
tag_clone.span,
).await?;

match contents {
Value::Primitive(Primitive::String(_)) => {
let tagged_contents = contents.tagged(&contents_tag);
UntaggedValue::Primitive(Primitive::String(_)) => {
let tagged_contents = contents.into_value(&contents_tag);

if let Some(extension) = file_extension {
let command_name = format!("from-{}", extension);
@@ -93,6 +97,7 @@ impl PerItemCommand for Enter {
head: raw_args.call_info.args.head,
positional: None,
named: None,
span: Span::unknown()
},
source: raw_args.call_info.source,
name_tag: raw_args.call_info.name_tag,
@@ -101,19 +106,18 @@ impl PerItemCommand for Enter {
let mut result = converter.run(
new_args.with_input(vec![tagged_contents]),
&registry,
false
);
let result_vec: Vec<Result<ReturnSuccess, ShellError>> =
result.drain_vec().await;
for res in result_vec {
match res {
Ok(ReturnSuccess::Value(Tagged {
item,
Ok(ReturnSuccess::Value(Value {
value,
..
})) => {
yield Ok(ReturnSuccess::Action(CommandAction::EnterValueShell(
Tagged {
item,
Value {
value,
tag: contents_tag.clone(),
})));
}
@@ -128,7 +132,7 @@ impl PerItemCommand for Enter {
}
}
_ => {
let tagged_contents = contents.tagged(contents_tag);
let tagged_contents = contents.into_value(contents_tag);

yield Ok(ReturnSuccess::Action(CommandAction::EnterValueShell(tagged_contents)));
}
@@ -33,34 +33,49 @@ impl WholeStreamCommand for Env {
}
}

pub fn get_environment(tag: Tag) -> Result<Tagged<Value>, Box<dyn std::error::Error>> {
pub fn get_environment(tag: Tag) -> Result<Value, Box<dyn std::error::Error>> {
let mut indexmap = IndexMap::new();

let path = std::env::current_dir()?;
indexmap.insert("cwd".to_string(), Value::path(path).tagged(&tag));
indexmap.insert(
"cwd".to_string(),
UntaggedValue::path(path).into_value(&tag),
);

if let Some(home) = dirs::home_dir() {
indexmap.insert("home".to_string(), Value::path(home).tagged(&tag));
indexmap.insert(
"home".to_string(),
UntaggedValue::path(home).into_value(&tag),
);
}

let config = config::default_path()?;
indexmap.insert("config".to_string(), Value::path(config).tagged(&tag));
indexmap.insert(
"config".to_string(),
UntaggedValue::path(config).into_value(&tag),
);

let history = History::path();
indexmap.insert("history".to_string(), Value::path(history).tagged(&tag));
indexmap.insert(
"history".to_string(),
UntaggedValue::path(history).into_value(&tag),
);

let temp = std::env::temp_dir();
indexmap.insert("temp".to_string(), Value::path(temp).tagged(&tag));
indexmap.insert(
"temp".to_string(),
UntaggedValue::path(temp).into_value(&tag),
);

let mut dict = TaggedDictBuilder::new(&tag);
for v in std::env::vars() {
dict.insert(v.0, Value::string(v.1));
dict.insert_untagged(v.0, UntaggedValue::string(v.1));
}
if !dict.is_empty() {
indexmap.insert("vars".to_string(), dict.into_tagged_value());
indexmap.insert("vars".to_string(), dict.into_value());
}

Ok(Value::Row(Dictionary::from(indexmap)).tagged(&tag))
Ok(UntaggedValue::Row(Dictionary::from(indexmap)).into_value(&tag))
}

pub fn env(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
src/commands/evaluate_by.rs (new file, 254 lines)
@@ -0,0 +1,254 @@
use crate::commands::WholeStreamCommand;
use crate::parser::hir::SyntaxShape;
use crate::prelude::*;
use nu_source::{SpannedItem, Tagged};

pub struct EvaluateBy;

#[derive(Deserialize)]
pub struct EvaluateByArgs {
evaluate_with: Option<Tagged<String>>,
}

impl WholeStreamCommand for EvaluateBy {
fn name(&self) -> &str {
"evaluate-by"
}

fn signature(&self) -> Signature {
Signature::build("evaluate-by").named(
"evaluate_with",
SyntaxShape::String,
"the name of the column to evaluate by",
)
}

fn usage(&self) -> &str {
"Creates a new table with the data from the tables rows evaluated by the column given."
}

fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, evaluate_by)?.run()
}
}

pub fn evaluate_by(
EvaluateByArgs { evaluate_with }: EvaluateByArgs,
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let values: Vec<Value> = input.values.collect().await;

if values.is_empty() {
yield Err(ShellError::labeled_error(
"Expected table from pipeline",
"requires a table input",
name
))
} else {

let evaluate_with = if let Some(evaluator) = evaluate_with {
Some(evaluator.item().clone())
} else {
None
};

match evaluate(&values[0], evaluate_with, name) {
Ok(evaluated) => yield ReturnSuccess::value(evaluated),
Err(err) => yield Err(err)
}
}
};

Ok(stream.to_output_stream())
}

fn fetch(key: Option<String>) -> Box<dyn Fn(Value, Tag) -> Option<Value> + 'static> {
Box::new(move |value: Value, tag| match &key {
Some(key_given) => value.get_data_by_key(key_given[..].spanned(tag.span)),
None => Some(UntaggedValue::int(1).into_value(tag)),
})
}

pub fn evaluate(
values: &Value,
evaluator: Option<String>,
tag: impl Into<Tag>,
) -> Result<Value, ShellError> {
let tag = tag.into();

let evaluate_with = match evaluator {
Some(keyfn) => fetch(Some(keyfn)),
None => fetch(None),
};

let results: Value = match values {
Value {
value: UntaggedValue::Table(datasets),
..
} => {
let datasets: Vec<_> = datasets
.into_iter()
.map(|subsets| match subsets {
Value {
value: UntaggedValue::Table(subsets),
..
} => {
let subsets: Vec<_> = subsets
.clone()
.into_iter()
.map(|data| match data {
Value {
value: UntaggedValue::Table(data),
..
} => {
let data: Vec<_> = data
.into_iter()
.map(|x| evaluate_with(x.clone(), tag.clone()).unwrap())
.collect();
UntaggedValue::Table(data).into_value(&tag)
}
_ => UntaggedValue::Table(vec![]).into_value(&tag),
})
.collect();
UntaggedValue::Table(subsets).into_value(&tag)
}
_ => UntaggedValue::Table(vec![]).into_value(&tag),
})
.collect();

UntaggedValue::Table(datasets.clone()).into_value(&tag)
}
_ => UntaggedValue::Table(vec![]).into_value(&tag),
};

Ok(results)
}

#[cfg(test)]
mod tests {

use crate::commands::evaluate_by::{evaluate, fetch};
use crate::commands::group_by::group;
use crate::commands::t_sort_by::t_sort;
use crate::prelude::*;
use crate::Value;
use indexmap::IndexMap;
use nu_source::TaggedItem;

fn int(s: impl Into<BigInt>) -> Value {
UntaggedValue::int(s).into_untagged_value()
}

fn string(input: impl Into<String>) -> Value {
UntaggedValue::string(input.into()).into_untagged_value()
}

fn row(entries: IndexMap<String, Value>) -> Value {
UntaggedValue::row(entries).into_untagged_value()
}

fn table(list: &Vec<Value>) -> Value {
UntaggedValue::table(list).into_untagged_value()
}

fn nu_releases_sorted_by_date() -> Value {
let key = String::from("date");

t_sort(
Some(key),
None,
&nu_releases_grouped_by_date(),
Tag::unknown(),
)
.unwrap()
}

fn nu_releases_grouped_by_date() -> Value {
let key = String::from("date").tagged_unknown();
group(&key, nu_releases_commiters(), Tag::unknown()).unwrap()
}

fn nu_releases_commiters() -> Vec<Value> {
vec![
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")},
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")},
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")},
),
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("September 24-2019")},
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")},
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("September 24-2019")},
),
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")},
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("September 24-2019")},
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")},
),
]
}

#[test]
fn evaluator_fetches_by_column_if_supplied_a_column_name() {
let subject = row(indexmap! { "name".into() => string("andres") });

let evaluator = fetch(Some(String::from("name")));

assert_eq!(evaluator(subject, Tag::unknown()), Some(string("andres")));
}

#[test]
fn evaluator_returns_1_if_no_column_name_given() {
let subject = row(indexmap! { "name".into() => string("andres") });
let evaluator = fetch(None);

assert_eq!(
evaluator(subject, Tag::unknown()),
Some(UntaggedValue::int(1).into_untagged_value())
);
}

#[test]
fn evaluates_the_tables() {
assert_eq!(
evaluate(&nu_releases_sorted_by_date(), None, Tag::unknown()).unwrap(),
table(&vec![table(&vec![
table(&vec![int(1), int(1), int(1)]),
table(&vec![int(1), int(1), int(1)]),
table(&vec![int(1), int(1), int(1)]),
]),])
);
}

#[test]
fn evaluates_the_tables_with_custom_evaluator() {
let eval = String::from("name");

assert_eq!(
evaluate(&nu_releases_sorted_by_date(), Some(eval), Tag::unknown()).unwrap(),
table(&vec![table(&vec![
table(&vec![string("AR"), string("JT"), string("YK")]),
table(&vec![string("AR"), string("YK"), string("JT")]),
table(&vec![string("YK"), string("JT"), string("AR")]),
]),])
);
}
}
@@ -11,7 +11,7 @@ impl WholeStreamCommand for Exit {
}

fn signature(&self) -> Signature {
Signature::build("exit").switch("now")
Signature::build("exit").switch("now", "exit out of the shell immediately")
}

fn usage(&self) -> &str {
@@ -1,15 +1,15 @@
use crate::commands::UnevaluatedCallInfo;
use crate::context::AnchorLocation;
use crate::data::meta::Span;
use crate::data::Value;
use crate::data::base::Value;
use crate::errors::ShellError;
use crate::parser::hir::SyntaxShape;
use crate::parser::registry::Signature;
use crate::prelude::*;
use mime::Mime;
use nu_source::{AnchorLocation, Span};
use std::path::PathBuf;
use std::str::FromStr;
use surf::mime;

pub struct Fetch;

impl PerItemCommand for Fetch {
@@ -19,8 +19,12 @@ impl PerItemCommand for Fetch {

fn signature(&self) -> Signature {
Signature::build(self.name())
.required("path", SyntaxShape::Path)
.switch("raw")
.required(
"path",
SyntaxShape::Path,
"the URL to fetch the contents from",
)
.switch("raw", "fetch contents as text rather than a table")
}

fn usage(&self) -> &str {
@@ -32,7 +36,7 @@ impl PerItemCommand for Fetch {
call_info: &CallInfo,
registry: &CommandRegistry,
raw_args: &RawCommandArgs,
_input: Tagged<Value>,
_input: Value,
) -> Result<OutputStream, ShellError> {
run(call_info, registry, raw_args)
}
@@ -77,7 +81,7 @@ fn run(
file_extension.or(path_str.split('.').last().map(String::from))
};

let tagged_contents = contents.tagged(&contents_tag);
let tagged_contents = contents.retag(&contents_tag);

if let Some(extension) = file_extension {
let command_name = format!("from-{}", extension);
@@ -90,23 +94,24 @@ fn run(
args: crate::parser::hir::Call {
head: raw_args.call_info.args.head,
positional: None,
named: None
named: None,
span: Span::unknown()
},
source: raw_args.call_info.source,
name_tag: raw_args.call_info.name_tag,
}
};
let mut result = converter.run(new_args.with_input(vec![tagged_contents]), &registry, false);
let mut result = converter.run(new_args.with_input(vec![tagged_contents]), &registry);
let result_vec: Vec<Result<ReturnSuccess, ShellError>> = result.drain_vec().await;
for res in result_vec {
match res {
Ok(ReturnSuccess::Value(Tagged { item: Value::Table(list), ..})) => {
Ok(ReturnSuccess::Value(Value { value: UntaggedValue::Table(list), ..})) => {
for l in list {
yield Ok(ReturnSuccess::Value(l));
}
}
Ok(ReturnSuccess::Value(Tagged { item, .. })) => {
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() }));
Ok(ReturnSuccess::Value(Value { value, .. })) => {
yield Ok(ReturnSuccess::Value(value.into_value(contents_tag.clone())));
}
x => yield x,
}
@@ -122,7 +127,10 @@ fn run(
Ok(stream.to_output_stream())
}

pub async fn fetch(location: &str, span: Span) -> Result<(Option<String>, Value, Tag), ShellError> {
pub async fn fetch(
location: &str,
span: Span,
) -> Result<(Option<String>, UntaggedValue, Tag), ShellError> {
if let Err(_) = url::Url::parse(location) {
return Err(ShellError::labeled_error(
"Incomplete or incorrect url",
@@ -139,7 +147,7 @@ pub async fn fetch(location: &str, span: Span) -> Result<(Option<String>, Value,
match (content_type.type_(), content_type.subtype()) {
(mime::APPLICATION, mime::XML) => Ok((
Some("xml".to_string()),
Value::string(r.body_string().await.map_err(|_| {
UntaggedValue::string(r.body_string().await.map_err(|_| {
ShellError::labeled_error(
"Could not load text from remote url",
"could not load",
@@ -153,7 +161,7 @@ pub async fn fetch(location: &str, span: Span) -> Result<(Option<String>, Value,
)),
(mime::APPLICATION, mime::JSON) => Ok((
Some("json".to_string()),
Value::string(r.body_string().await.map_err(|_| {
UntaggedValue::string(r.body_string().await.map_err(|_| {
ShellError::labeled_error(
"Could not load text from remote url",
"could not load",
@@ -175,7 +183,7 @@ pub async fn fetch(location: &str, span: Span) -> Result<(Option<String>, Value,
})?;
Ok((
None,
Value::binary(buf),
UntaggedValue::binary(buf),
Tag {
span,
anchor: Some(AnchorLocation::Url(location.to_string())),
@@ -184,7 +192,7 @@ pub async fn fetch(location: &str, span: Span) -> Result<(Option<String>, Value,
}
(mime::IMAGE, mime::SVG) => Ok((
Some("svg".to_string()),
Value::string(r.body_string().await.map_err(|_| {
UntaggedValue::string(r.body_string().await.map_err(|_| {
ShellError::labeled_error(
"Could not load svg from remote url",
"could not load",
@@ -206,7 +214,7 @@ pub async fn fetch(location: &str, span: Span) -> Result<(Option<String>, Value,
})?;
Ok((
Some(image_ty.to_string()),
Value::binary(buf),
UntaggedValue::binary(buf),
Tag {
span,
anchor: Some(AnchorLocation::Url(location.to_string())),
@@ -215,7 +223,7 @@ pub async fn fetch(location: &str, span: Span) -> Result<(Option<String>, Value,
}
(mime::TEXT, mime::HTML) => Ok((
Some("html".to_string()),
Value::string(r.body_string().await.map_err(|_| {
UntaggedValue::string(r.body_string().await.map_err(|_| {
ShellError::labeled_error(
"Could not load text from remote url",
"could not load",
@@ -241,7 +249,7 @@ pub async fn fetch(location: &str, span: Span) -> Result<(Option<String>, Value,

Ok((
path_extension,
Value::string(r.body_string().await.map_err(|_| {
UntaggedValue::string(r.body_string().await.map_err(|_| {
ShellError::labeled_error(
"Could not load text from remote url",
"could not load",
@@ -256,7 +264,10 @@ pub async fn fetch(location: &str, span: Span) -> Result<(Option<String>, Value,
}
(ty, sub_ty) => Ok((
None,
Value::string(format!("Not yet supported MIME type: {} {}", ty, sub_ty)),
UntaggedValue::string(format!(
"Not yet supported MIME type: {} {}",
ty, sub_ty
)),
Tag {
span,
anchor: Some(AnchorLocation::Url(location.to_string())),
@@ -266,7 +277,7 @@ pub async fn fetch(location: &str, span: Span) -> Result<(Option<String>, Value,
}
None => Ok((
None,
Value::string(format!("No content type found")),
UntaggedValue::string(format!("No content type found")),
Tag {
span,
anchor: Some(AnchorLocation::Url(location.to_string())),
@@ -2,6 +2,7 @@ use crate::commands::WholeStreamCommand;
use crate::errors::ShellError;
use crate::parser::CommandRegistry;
use crate::prelude::*;
use nu_source::Tagged;

pub struct First;

@@ -16,7 +17,11 @@ impl WholeStreamCommand for First {
}

fn signature(&self) -> Signature {
Signature::build("first").optional("rows", SyntaxShape::Int)
Signature::build("first").optional(
"rows",
SyntaxShape::Int,
"starting from the front, the number of rows to return",
)
}

fn usage(&self) -> &str {
@@ -10,7 +10,7 @@ pub(crate) fn format(input: Vec<Value>, host: &mut dyn Host) {
crate::format::print_view(&view, &mut *host);

if last != i {
println!("");
outln!("");
}
}
}
@@ -1,8 +1,9 @@
use crate::commands::WholeStreamCommand;
use crate::data::{Primitive, TaggedDictBuilder, Value};
use crate::data::TaggedDictBuilder;
use crate::errors::ExpectedRange;
use crate::prelude::*;
use bson::{decode_document, spec::BinarySubtype, Bson};
use nu_source::SpannedItem;
use std::str::FromStr;

pub struct FromBSON;
@@ -29,7 +30,7 @@ impl WholeStreamCommand for FromBSON {
}
}

fn bson_array(input: &Vec<Bson>, tag: Tag) -> Result<Vec<Tagged<Value>>, ShellError> {
fn bson_array(input: &Vec<Bson>, tag: Tag) -> Result<Vec<Value>, ShellError> {
let mut out = vec![];

for value in input {
@@ -39,109 +40,114 @@ fn bson_array(input: &Vec<Bson>, tag: Tag) -> Result<Vec<Tagged<Value>>, ShellEr
Ok(out)
}

fn convert_bson_value_to_nu_value(
v: &Bson,
tag: impl Into<Tag>,
) -> Result<Tagged<Value>, ShellError> {
fn convert_bson_value_to_nu_value(v: &Bson, tag: impl Into<Tag>) -> Result<Value, ShellError> {
let tag = tag.into();
let span = tag.span;

Ok(match v {
Bson::FloatingPoint(n) => Value::Primitive(Primitive::from(*n)).tagged(&tag),
Bson::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(&tag),
Bson::Array(a) => Value::Table(bson_array(a, tag.clone())?).tagged(&tag),
Bson::FloatingPoint(n) => UntaggedValue::Primitive(Primitive::from(*n)).into_value(&tag),
Bson::String(s) => {
UntaggedValue::Primitive(Primitive::String(String::from(s))).into_value(&tag)
}
Bson::Array(a) => UntaggedValue::Table(bson_array(a, tag.clone())?).into_value(&tag),
Bson::Document(doc) => {
let mut collected = TaggedDictBuilder::new(tag.clone());
for (k, v) in doc.iter() {
collected.insert_tagged(k.clone(), convert_bson_value_to_nu_value(v, &tag)?);
collected.insert_value(k.clone(), convert_bson_value_to_nu_value(v, &tag)?);
}

collected.into_tagged_value()
collected.into_value()
}
Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(&tag),
Bson::Null => Value::Primitive(Primitive::Nothing).tagged(&tag),
Bson::Boolean(b) => UntaggedValue::Primitive(Primitive::Boolean(*b)).into_value(&tag),
Bson::Null => UntaggedValue::Primitive(Primitive::Nothing).into_value(&tag),
Bson::RegExp(r, opts) => {
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
collected.insert_value(
"$regex".to_string(),
Value::Primitive(Primitive::String(String::from(r))).tagged(&tag),
UntaggedValue::Primitive(Primitive::String(String::from(r))).into_value(&tag),
);
collected.insert_tagged(
collected.insert_value(
"$options".to_string(),
Value::Primitive(Primitive::String(String::from(opts))).tagged(&tag),
UntaggedValue::Primitive(Primitive::String(String::from(opts))).into_value(&tag),
);
collected.into_tagged_value()
collected.into_value()
}
Bson::I32(n) => Value::number(n).tagged(&tag),
Bson::I64(n) => Value::number(n).tagged(&tag),
Bson::I32(n) => UntaggedValue::number(n).into_value(&tag),
Bson::I64(n) => UntaggedValue::number(n).into_value(&tag),
Bson::Decimal128(n) => {
// TODO: this really isn't great, and we should update this to do a higher
// fidelity translation
let decimal = BigDecimal::from_str(&format!("{}", n)).map_err(|_| {
ShellError::range_error(
ExpectedRange::BigDecimal,
&n.tagged(&tag),
&n.spanned(span),
format!("converting BSON Decimal128 to BigDecimal"),
)
})?;
Value::Primitive(Primitive::Decimal(decimal)).tagged(&tag)
UntaggedValue::Primitive(Primitive::Decimal(decimal)).into_value(&tag)
}
Bson::JavaScriptCode(js) => {
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
collected.insert_value(
"$javascript".to_string(),
Value::Primitive(Primitive::String(String::from(js))).tagged(&tag),
UntaggedValue::Primitive(Primitive::String(String::from(js))).into_value(&tag),
);
collected.into_tagged_value()
collected.into_value()
}
Bson::JavaScriptCodeWithScope(js, doc) => {
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
collected.insert_value(
"$javascript".to_string(),
Value::Primitive(Primitive::String(String::from(js))).tagged(&tag),
UntaggedValue::Primitive(Primitive::String(String::from(js))).into_value(&tag),
);
collected.insert_tagged(
collected.insert_value(
"$scope".to_string(),
convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag.clone())?,
);
collected.into_tagged_value()
collected.into_value()
}
Bson::TimeStamp(ts) => {
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged("$timestamp".to_string(), Value::number(ts).tagged(&tag));
collected.into_tagged_value()
collected.insert_value(
"$timestamp".to_string(),
UntaggedValue::number(ts).into_value(&tag),
);
collected.into_value()
}
Bson::Binary(bst, bytes) => {
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
collected.insert_value(
"$binary_subtype".to_string(),
match bst {
BinarySubtype::UserDefined(u) => Value::number(u),
_ => Value::Primitive(Primitive::String(binary_subtype_to_string(*bst))),
BinarySubtype::UserDefined(u) => UntaggedValue::number(u),
_ => {
UntaggedValue::Primitive(Primitive::String(binary_subtype_to_string(*bst)))
}
}
.tagged(&tag),
.into_value(&tag),
);
collected.insert_tagged(
collected.insert_value(
"$binary".to_string(),
Value::Primitive(Primitive::Binary(bytes.to_owned())).tagged(&tag),
UntaggedValue::Primitive(Primitive::Binary(bytes.to_owned())).into_value(&tag),
);
collected.into_tagged_value()
collected.into_value()
}
Bson::ObjectId(obj_id) => {
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
collected.insert_value(
"$object_id".to_string(),
Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(&tag),
UntaggedValue::Primitive(Primitive::String(obj_id.to_hex())).into_value(&tag),
|
||||
);
|
||||
collected.into_tagged_value()
|
||||
collected.into_value()
|
||||
}
|
||||
Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(&tag),
|
||||
Bson::UtcDatetime(dt) => UntaggedValue::Primitive(Primitive::Date(*dt)).into_value(&tag),
|
||||
Bson::Symbol(s) => {
|
||||
let mut collected = TaggedDictBuilder::new(tag.clone());
|
||||
collected.insert_tagged(
|
||||
collected.insert_value(
|
||||
"$symbol".to_string(),
|
||||
Value::Primitive(Primitive::String(String::from(s))).tagged(&tag),
|
||||
UntaggedValue::Primitive(Primitive::String(String::from(s))).into_value(&tag),
|
||||
);
|
||||
collected.into_tagged_value()
|
||||
collected.into_value()
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -183,10 +189,7 @@ impl std::io::Read for BytesReader {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_bson_bytes_to_value(
|
||||
bytes: Vec<u8>,
|
||||
tag: impl Into<Tag>,
|
||||
) -> Result<Tagged<Value>, ShellError> {
|
||||
pub fn from_bson_bytes_to_value(bytes: Vec<u8>, tag: impl Into<Tag>) -> Result<Value, ShellError> {
|
||||
let mut docs = Vec::new();
|
||||
let mut b_reader = BytesReader::new(bytes);
|
||||
while let Ok(v) = decode_document(&mut b_reader) {
|
||||
@@ -202,12 +205,12 @@ fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
|
||||
for value in values {
|
||||
let value_tag = value.tag();
|
||||
match value.item {
|
||||
Value::Primitive(Primitive::Binary(vb)) =>
|
||||
let value_tag = &value.tag;
|
||||
match value.value {
|
||||
UntaggedValue::Primitive(Primitive::Binary(vb)) =>
|
||||
match from_bson_bytes_to_value(vb, tag.clone()) {
|
||||
Ok(x) => yield ReturnSuccess::value(x),
|
||||
Err(_) => {
|
||||
|
@@ -1,13 +1,14 @@
|
||||
use crate::commands::from_delimited_data::from_delimited_data;
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::data::{Primitive, TaggedDictBuilder, Value};
|
||||
use crate::data::{Primitive, Value};
|
||||
use crate::prelude::*;
|
||||
use csv::ReaderBuilder;
|
||||
|
||||
pub struct FromCSV;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct FromCSVArgs {
|
||||
headerless: bool,
|
||||
separator: Option<Value>,
|
||||
}
|
||||
|
||||
impl WholeStreamCommand for FromCSV {
|
||||
@@ -16,11 +17,17 @@ impl WholeStreamCommand for FromCSV {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("from-csv").switch("headerless")
|
||||
Signature::build("from-csv")
|
||||
.named(
|
||||
"separator",
|
||||
SyntaxShape::String,
|
||||
"a character to separate columns, defaults to ','",
|
||||
)
|
||||
.switch("headerless", "don't treat the first row as column names")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Parse text as .csv and create table"
|
||||
"Parse text as .csv and create table."
|
||||
}
|
||||
|
||||
fn run(
|
||||
@@ -32,107 +39,35 @@ impl WholeStreamCommand for FromCSV {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_csv_string_to_value(
|
||||
s: String,
|
||||
headerless: bool,
|
||||
tag: impl Into<Tag>,
|
||||
) -> Result<Tagged<Value>, csv::Error> {
|
||||
let mut reader = ReaderBuilder::new()
|
||||
.has_headers(false)
|
||||
.from_reader(s.as_bytes());
|
||||
let tag = tag.into();
|
||||
|
||||
let mut fields: VecDeque<String> = VecDeque::new();
|
||||
let mut iter = reader.records();
|
||||
let mut rows = vec![];
|
||||
|
||||
if let Some(result) = iter.next() {
|
||||
let line = result?;
|
||||
|
||||
for (idx, item) in line.iter().enumerate() {
|
||||
if headerless {
|
||||
fields.push_back(format!("Column{}", idx + 1));
|
||||
} else {
|
||||
fields.push_back(item.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
loop {
|
||||
if let Some(row_values) = iter.next() {
|
||||
let row_values = row_values?;
|
||||
|
||||
let mut row = TaggedDictBuilder::new(tag.clone());
|
||||
|
||||
for (idx, entry) in row_values.iter().enumerate() {
|
||||
row.insert_tagged(
|
||||
fields.get(idx).unwrap(),
|
||||
Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag),
|
||||
);
|
||||
}
|
||||
|
||||
rows.push(row.into_tagged_value());
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(Value::Table(rows).tagged(&tag))
|
||||
}
|
||||
|
||||
fn from_csv(
|
||||
FromCSVArgs {
|
||||
headerless: skip_headers,
|
||||
headerless,
|
||||
separator,
|
||||
}: FromCSVArgs,
|
||||
RunnableContext { input, name, .. }: RunnableContext,
|
||||
runnable_context: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let name_tag = name;
|
||||
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
|
||||
let mut concat_string = String::new();
|
||||
let mut latest_tag: Option<Tag> = None;
|
||||
|
||||
for value in values {
|
||||
let value_tag = value.tag();
|
||||
latest_tag = Some(value_tag.clone());
|
||||
match value.item {
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
concat_string.push_str(&s);
|
||||
concat_string.push_str("\n");
|
||||
}
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
name_tag.clone(),
|
||||
"value originates from here",
|
||||
value_tag.clone(),
|
||||
)),
|
||||
|
||||
let sep = match separator {
|
||||
Some(Value {
|
||||
value: UntaggedValue::Primitive(Primitive::String(s)),
|
||||
tag,
|
||||
..
|
||||
}) => {
|
||||
if s == r"\t" {
|
||||
'\t'
|
||||
} else {
|
||||
let vec_s: Vec<char> = s.chars().collect();
|
||||
if vec_s.len() != 1 {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Expected a single separator char from --separator",
|
||||
"requires a single character string input",
|
||||
tag,
|
||||
));
|
||||
};
|
||||
vec_s[0]
|
||||
}
|
||||
}
|
||||
|
||||
match from_csv_string_to_value(concat_string, skip_headers, name_tag.clone()) {
|
||||
Ok(x) => match x {
|
||||
Tagged { item: Value::Table(list), .. } => {
|
||||
for l in list {
|
||||
yield ReturnSuccess::value(l);
|
||||
}
|
||||
}
|
||||
x => yield ReturnSuccess::value(x),
|
||||
},
|
||||
Err(_) => if let Some(last_tag) = latest_tag {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Could not parse as CSV",
|
||||
"input cannot be parsed as CSV",
|
||||
name_tag.clone(),
|
||||
"value originates from here",
|
||||
last_tag.clone(),
|
||||
))
|
||||
} ,
|
||||
}
|
||||
_ => ',',
|
||||
};
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
from_delimited_data(headerless, sep, "CSV", runnable_context)
|
||||
}
|
||||
|
src/commands/from_delimited_data.rs (new file, 97 lines)
@@ -0,0 +1,97 @@
use crate::data::{Primitive, TaggedDictBuilder, Value};
use crate::prelude::*;
use csv::ReaderBuilder;

fn from_delimited_string_to_value(
    s: String,
    headerless: bool,
    separator: char,
    tag: impl Into<Tag>,
) -> Result<Value, csv::Error> {
    let mut reader = ReaderBuilder::new()
        .has_headers(!headerless)
        .delimiter(separator as u8)
        .from_reader(s.as_bytes());
    let tag = tag.into();

    let headers = if headerless {
        (1..=reader.headers()?.len())
            .map(|i| format!("Column{}", i))
            .collect::<Vec<String>>()
    } else {
        reader.headers()?.iter().map(String::from).collect()
    };

    let mut rows = vec![];
    for row in reader.records() {
        let mut tagged_row = TaggedDictBuilder::new(&tag);
        for (value, header) in row?.iter().zip(headers.iter()) {
            tagged_row.insert_value(
                header,
                UntaggedValue::Primitive(Primitive::String(String::from(value))).into_value(&tag),
            )
        }
        rows.push(tagged_row.into_value());
    }

    Ok(UntaggedValue::Table(rows).into_value(&tag))
}

pub fn from_delimited_data(
    headerless: bool,
    sep: char,
    format_name: &'static str,
    RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
    let name_tag = name;

    let stream = async_stream! {
        let values: Vec<Value> = input.values.collect().await;

        let mut concat_string = String::new();
        let mut latest_tag: Option<Tag> = None;

        for value in values {
            let value_tag = &value.tag;
            latest_tag = Some(value_tag.clone());
            match &value.value {
                UntaggedValue::Primitive(Primitive::String(s)) => {
                    concat_string.push_str(&s);
                    concat_string.push_str("\n");
                }
                _ => yield Err(ShellError::labeled_error_with_secondary(
                    "Expected a string from pipeline",
                    "requires string input",
                    name_tag.clone(),
                    "value originates from here",
                    value_tag.clone(),
                )),

            }
        }

        match from_delimited_string_to_value(concat_string, headerless, sep, name_tag.clone()) {
            Ok(x) => match x {
                Value { value: UntaggedValue::Table(list), .. } => {
                    for l in list {
                        yield ReturnSuccess::value(l);
                    }
                }
                x => yield ReturnSuccess::value(x),
            },
            Err(_) => if let Some(last_tag) = latest_tag {
                let line_one = format!("Could not parse as {}", format_name);
                let line_two = format!("input cannot be parsed as {}", format_name);
                yield Err(ShellError::labeled_error_with_secondary(
                    line_one,
                    line_two,
                    name_tag.clone(),
                    "value originates from here",
                    last_tag.clone(),
                ))
            },
        }
    };

    Ok(stream.to_output_stream())
}
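Note: the new from_delimited_data module is what lets from-csv and from-tsv share one parser and differ only in the delimiter they pass down; from-csv derives it from --separator (defaulting to ',') and from-tsv hard-codes '\t', as the surrounding hunks show. Below is a standalone sketch of the same csv-crate pattern; the function name and the plain BTreeMap output shape are my own stand-ins for nushell's tagged values.

// Sketch: parse delimited text the way from_delimited_string_to_value does,
// but into plain Rust maps instead of nushell's tagged values.
use std::collections::BTreeMap;

use csv::ReaderBuilder;

fn parse_delimited(
    s: &str,
    headerless: bool,
    separator: char,
) -> Result<Vec<BTreeMap<String, String>>, csv::Error> {
    let mut reader = ReaderBuilder::new()
        .has_headers(!headerless)
        .delimiter(separator as u8)
        .from_reader(s.as_bytes());

    // With --headerless we synthesize Column1..ColumnN; otherwise reuse the first row.
    let headers: Vec<String> = if headerless {
        (1..=reader.headers()?.len()).map(|i| format!("Column{}", i)).collect()
    } else {
        reader.headers()?.iter().map(String::from).collect()
    };

    let mut rows = Vec::new();
    for record in reader.records() {
        let row = headers
            .iter()
            .cloned()
            .zip(record?.iter().map(String::from))
            .collect();
        rows.push(row);
    }
    Ok(rows)
}

fn main() -> Result<(), csv::Error> {
    let rows = parse_delimited("a,b\n1,2\n3,4\n", false, ',')?;
    assert_eq!(rows[0]["a"], "1");
    println!("{:?}", rows);
    Ok(())
}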
@@ -27,40 +27,37 @@ impl WholeStreamCommand for FromINI {
|
||||
}
|
||||
}
|
||||
|
||||
fn convert_ini_second_to_nu_value(
|
||||
v: &HashMap<String, String>,
|
||||
tag: impl Into<Tag>,
|
||||
) -> Tagged<Value> {
|
||||
fn convert_ini_second_to_nu_value(v: &HashMap<String, String>, tag: impl Into<Tag>) -> Value {
|
||||
let mut second = TaggedDictBuilder::new(tag);
|
||||
|
||||
for (key, value) in v.into_iter() {
|
||||
second.insert(key.clone(), Primitive::String(value.clone()));
|
||||
second.insert_untagged(key.clone(), Primitive::String(value.clone()));
|
||||
}
|
||||
|
||||
second.into_tagged_value()
|
||||
second.into_value()
|
||||
}
|
||||
|
||||
fn convert_ini_top_to_nu_value(
|
||||
v: &HashMap<String, HashMap<String, String>>,
|
||||
tag: impl Into<Tag>,
|
||||
) -> Tagged<Value> {
|
||||
) -> Value {
|
||||
let tag = tag.into();
|
||||
let mut top_level = TaggedDictBuilder::new(tag.clone());
|
||||
|
||||
for (key, value) in v.iter() {
|
||||
top_level.insert_tagged(
|
||||
top_level.insert_value(
|
||||
key.clone(),
|
||||
convert_ini_second_to_nu_value(value, tag.clone()),
|
||||
);
|
||||
}
|
||||
|
||||
top_level.into_tagged_value()
|
||||
top_level.into_value()
|
||||
}
|
||||
|
||||
pub fn from_ini_string_to_value(
|
||||
s: String,
|
||||
tag: impl Into<Tag>,
|
||||
) -> Result<Tagged<Value>, serde_ini::de::Error> {
|
||||
) -> Result<Value, serde_ini::de::Error> {
|
||||
let v: HashMap<String, HashMap<String, String>> = serde_ini::from_str(&s)?;
|
||||
Ok(convert_ini_top_to_nu_value(&v, tag))
|
||||
}
|
||||
@@ -68,28 +65,29 @@ pub fn from_ini_string_to_value(
|
||||
fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let tag = args.name_tag();
|
||||
let span = tag.span;
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
|
||||
let mut concat_string = String::new();
|
||||
let mut latest_tag: Option<Tag> = None;
|
||||
|
||||
for value in values {
|
||||
let value_tag = value.tag();
|
||||
latest_tag = Some(value_tag.clone());
|
||||
match value.item {
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
latest_tag = Some(value.tag.clone());
|
||||
let value_span = value.tag.span;
|
||||
match &value.value {
|
||||
UntaggedValue::Primitive(Primitive::String(s)) => {
|
||||
concat_string.push_str(&s);
|
||||
concat_string.push_str("\n");
|
||||
}
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
&tag,
|
||||
span,
|
||||
"value originates from here",
|
||||
&value_tag,
|
||||
value_span,
|
||||
)),
|
||||
|
||||
}
|
||||
@@ -97,7 +95,7 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
|
||||
|
||||
match from_ini_string_to_value(concat_string, tag.clone()) {
|
||||
Ok(x) => match x {
|
||||
Tagged { item: Value::Table(list), .. } => {
|
||||
Value { value: UntaggedValue::Table(list), .. } => {
|
||||
for l in list {
|
||||
yield ReturnSuccess::value(l);
|
||||
}
|
||||
|
@@ -15,7 +15,7 @@ impl WholeStreamCommand for FromJSON {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("from-json").switch("objects")
|
||||
Signature::build("from-json").switch("objects", "treat each line as a separate value")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@@ -31,39 +31,36 @@ impl WholeStreamCommand for FromJSON {
|
||||
}
|
||||
}
|
||||
|
||||
fn convert_json_value_to_nu_value(v: &serde_hjson::Value, tag: impl Into<Tag>) -> Tagged<Value> {
|
||||
fn convert_json_value_to_nu_value(v: &serde_hjson::Value, tag: impl Into<Tag>) -> Value {
|
||||
let tag = tag.into();
|
||||
|
||||
match v {
|
||||
serde_hjson::Value::Null => Value::Primitive(Primitive::Nothing).tagged(&tag),
|
||||
serde_hjson::Value::Bool(b) => Value::boolean(*b).tagged(&tag),
|
||||
serde_hjson::Value::F64(n) => Value::number(n).tagged(&tag),
|
||||
serde_hjson::Value::U64(n) => Value::number(n).tagged(&tag),
|
||||
serde_hjson::Value::I64(n) => Value::number(n).tagged(&tag),
|
||||
serde_hjson::Value::Null => UntaggedValue::Primitive(Primitive::Nothing).into_value(&tag),
|
||||
serde_hjson::Value::Bool(b) => UntaggedValue::boolean(*b).into_value(&tag),
|
||||
serde_hjson::Value::F64(n) => UntaggedValue::number(n).into_value(&tag),
|
||||
serde_hjson::Value::U64(n) => UntaggedValue::number(n).into_value(&tag),
|
||||
serde_hjson::Value::I64(n) => UntaggedValue::number(n).into_value(&tag),
|
||||
serde_hjson::Value::String(s) => {
|
||||
Value::Primitive(Primitive::String(String::from(s))).tagged(&tag)
|
||||
UntaggedValue::Primitive(Primitive::String(String::from(s))).into_value(&tag)
|
||||
}
|
||||
serde_hjson::Value::Array(a) => Value::Table(
|
||||
serde_hjson::Value::Array(a) => UntaggedValue::Table(
|
||||
a.iter()
|
||||
.map(|x| convert_json_value_to_nu_value(x, &tag))
|
||||
.collect(),
|
||||
)
|
||||
.tagged(tag),
|
||||
.into_value(tag),
|
||||
serde_hjson::Value::Object(o) => {
|
||||
let mut collected = TaggedDictBuilder::new(&tag);
|
||||
for (k, v) in o.iter() {
|
||||
collected.insert_tagged(k.clone(), convert_json_value_to_nu_value(v, &tag));
|
||||
collected.insert_value(k.clone(), convert_json_value_to_nu_value(v, &tag));
|
||||
}
|
||||
|
||||
collected.into_tagged_value()
|
||||
collected.into_value()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_json_string_to_value(
|
||||
s: String,
|
||||
tag: impl Into<Tag>,
|
||||
) -> serde_hjson::Result<Tagged<Value>> {
|
||||
pub fn from_json_string_to_value(s: String, tag: impl Into<Tag>) -> serde_hjson::Result<Value> {
|
||||
let v: serde_hjson::Value = serde_hjson::from_str(&s)?;
|
||||
Ok(convert_json_value_to_nu_value(&v, tag))
|
||||
}
|
||||
@@ -72,28 +69,29 @@ fn from_json(
|
||||
FromJSONArgs { objects }: FromJSONArgs,
|
||||
RunnableContext { input, name, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let name_span = name.span;
|
||||
let name_tag = name;
|
||||
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
|
||||
let mut concat_string = String::new();
|
||||
let mut latest_tag: Option<Tag> = None;
|
||||
|
||||
for value in values {
|
||||
let value_tag = value.tag();
|
||||
latest_tag = Some(value_tag.clone());
|
||||
match value.item {
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
latest_tag = Some(value.tag.clone());
|
||||
let value_span = value.tag.span;
|
||||
match &value.value {
|
||||
UntaggedValue::Primitive(Primitive::String(s)) => {
|
||||
concat_string.push_str(&s);
|
||||
concat_string.push_str("\n");
|
||||
}
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
&name_tag,
|
||||
name_span,
|
||||
"value originates from here",
|
||||
&value_tag,
|
||||
value_span,
|
||||
)),
|
||||
|
||||
}
|
||||
@@ -125,7 +123,7 @@ fn from_json(
|
||||
match from_json_string_to_value(concat_string, name_tag.clone()) {
|
||||
Ok(x) =>
|
||||
match x {
|
||||
Tagged { item: Value::Table(list), .. } => {
|
||||
Value { value: UntaggedValue::Table(list), .. } => {
|
||||
for l in list {
|
||||
yield ReturnSuccess::value(l);
|
||||
}
|
||||
|
@@ -57,7 +57,7 @@ impl WholeStreamCommand for FromDB {
|
||||
pub fn convert_sqlite_file_to_nu_value(
|
||||
path: &Path,
|
||||
tag: impl Into<Tag> + Clone,
|
||||
) -> Result<Tagged<Value>, rusqlite::Error> {
|
||||
) -> Result<Value, rusqlite::Error> {
|
||||
let conn = Connection::open(path)?;
|
||||
|
||||
let mut meta_out = Vec::new();
|
||||
@@ -72,48 +72,54 @@ pub fn convert_sqlite_file_to_nu_value(
|
||||
while let Some(table_row) = table_rows.next()? {
|
||||
out.push(convert_sqlite_row_to_nu_value(table_row, tag.clone())?)
|
||||
}
|
||||
meta_dict.insert_tagged(
|
||||
meta_dict.insert_value(
|
||||
"table_name".to_string(),
|
||||
Value::Primitive(Primitive::String(table_name)).tagged(tag.clone()),
|
||||
UntaggedValue::Primitive(Primitive::String(table_name)).into_value(tag.clone()),
|
||||
);
|
||||
meta_dict.insert_tagged("table_values", Value::Table(out).tagged(tag.clone()));
|
||||
meta_out.push(meta_dict.into_tagged_value());
|
||||
meta_dict.insert_value(
|
||||
"table_values",
|
||||
UntaggedValue::Table(out).into_value(tag.clone()),
|
||||
);
|
||||
meta_out.push(meta_dict.into_value());
|
||||
}
|
||||
let tag = tag.into();
|
||||
Ok(Value::Table(meta_out).tagged(tag))
|
||||
Ok(UntaggedValue::Table(meta_out).into_value(tag))
|
||||
}
|
||||
|
||||
fn convert_sqlite_row_to_nu_value(
|
||||
row: &Row,
|
||||
tag: impl Into<Tag> + Clone,
|
||||
) -> Result<Tagged<Value>, rusqlite::Error> {
|
||||
) -> Result<Value, rusqlite::Error> {
|
||||
let mut collected = TaggedDictBuilder::new(tag.clone());
|
||||
for (i, c) in row.columns().iter().enumerate() {
|
||||
collected.insert_tagged(
|
||||
collected.insert_value(
|
||||
c.name().to_string(),
|
||||
convert_sqlite_value_to_nu_value(row.get_raw(i), tag.clone()),
|
||||
);
|
||||
}
|
||||
return Ok(collected.into_tagged_value());
|
||||
return Ok(collected.into_value());
|
||||
}
|
||||
|
||||
fn convert_sqlite_value_to_nu_value(value: ValueRef, tag: impl Into<Tag> + Clone) -> Tagged<Value> {
|
||||
fn convert_sqlite_value_to_nu_value(value: ValueRef, tag: impl Into<Tag> + Clone) -> Value {
|
||||
match value {
|
||||
ValueRef::Null => Value::Primitive(Primitive::String(String::from(""))).tagged(tag),
|
||||
ValueRef::Integer(i) => Value::number(i).tagged(tag),
|
||||
ValueRef::Real(f) => Value::number(f).tagged(tag),
|
||||
ValueRef::Null => {
|
||||
UntaggedValue::Primitive(Primitive::String(String::from(""))).into_value(tag)
|
||||
}
|
||||
ValueRef::Integer(i) => UntaggedValue::number(i).into_value(tag),
|
||||
ValueRef::Real(f) => UntaggedValue::number(f).into_value(tag),
|
||||
t @ ValueRef::Text(_) => {
|
||||
// this unwrap is safe because we know the ValueRef is Text.
|
||||
Value::Primitive(Primitive::String(t.as_str().unwrap().to_string())).tagged(tag)
|
||||
UntaggedValue::Primitive(Primitive::String(t.as_str().unwrap().to_string()))
|
||||
.into_value(tag)
|
||||
}
|
||||
ValueRef::Blob(u) => Value::binary(u.to_owned()).tagged(tag),
|
||||
ValueRef::Blob(u) => UntaggedValue::binary(u.to_owned()).into_value(tag),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_sqlite_bytes_to_value(
|
||||
mut bytes: Vec<u8>,
|
||||
tag: impl Into<Tag> + Clone,
|
||||
) -> Result<Tagged<Value>, std::io::Error> {
|
||||
) -> Result<Value, std::io::Error> {
|
||||
// FIXME: should probably write a sqlite virtual filesystem
|
||||
// that will allow us to use bytes as a file to avoid this
|
||||
// write out, but this will require C code. Might be
|
||||
@@ -132,15 +138,15 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
|
||||
for value in values {
|
||||
let value_tag = value.tag();
|
||||
match value.item {
|
||||
Value::Primitive(Primitive::Binary(vb)) =>
|
||||
let value_tag = &value.tag;
|
||||
match value.value {
|
||||
UntaggedValue::Primitive(Primitive::Binary(vb)) =>
|
||||
match from_sqlite_bytes_to_value(vb, tag.clone()) {
|
||||
Ok(x) => match x {
|
||||
Tagged { item: Value::Table(list), .. } => {
|
||||
Value { value: UntaggedValue::Table(list), .. } => {
|
||||
for l in list {
|
||||
yield ReturnSuccess::value(l);
|
||||
}
|
||||
@@ -158,8 +164,8 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
|
||||
}
|
||||
}
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
"Expected binary data from pipeline",
|
||||
"requires binary data input",
|
||||
&tag,
|
||||
"value originates from here",
|
||||
value_tag,
|
||||
|
@@ -1,12 +1,15 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::data::{Primitive, TaggedDictBuilder, Value};
|
||||
use crate::prelude::*;
|
||||
use nu_source::Tagged;
|
||||
|
||||
pub struct FromSSV;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct FromSSVArgs {
|
||||
headerless: bool,
|
||||
#[serde(rename(deserialize = "aligned-columns"))]
|
||||
aligned_columns: bool,
|
||||
#[serde(rename(deserialize = "minimum-spaces"))]
|
||||
minimum_spaces: Option<Tagged<usize>>,
|
||||
}
|
||||
@@ -21,8 +24,13 @@ impl WholeStreamCommand for FromSSV {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(STRING_REPRESENTATION)
|
||||
.switch("headerless")
|
||||
.named("minimum-spaces", SyntaxShape::Int)
|
||||
.switch("headerless", "don't treat the first row as column names")
|
||||
.switch("aligned-columns", "assume columns are aligned")
|
||||
.named(
|
||||
"minimum-spaces",
|
||||
SyntaxShape::Int,
|
||||
"the mininum spaces to separate columns",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@@ -38,80 +46,214 @@ impl WholeStreamCommand for FromSSV {
|
||||
}
|
||||
}
|
||||
|
||||
enum HeaderOptions<'a> {
|
||||
WithHeaders(&'a str),
|
||||
WithoutHeaders,
|
||||
}
|
||||
|
||||
fn parse_aligned_columns<'a>(
|
||||
lines: impl Iterator<Item = &'a str>,
|
||||
headers: HeaderOptions,
|
||||
separator: &str,
|
||||
) -> Vec<Vec<(String, String)>> {
|
||||
fn construct<'a>(
|
||||
lines: impl Iterator<Item = &'a str>,
|
||||
headers: Vec<(String, usize)>,
|
||||
) -> Vec<Vec<(String, String)>> {
|
||||
lines
|
||||
.map(|l| {
|
||||
headers
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(i, (header_name, start_position))| {
|
||||
let val = match headers.get(i + 1) {
|
||||
Some((_, end)) => {
|
||||
if *end < l.len() {
|
||||
l.get(*start_position..*end)
|
||||
} else {
|
||||
l.get(*start_position..)
|
||||
}
|
||||
}
|
||||
None => l.get(*start_position..),
|
||||
}
|
||||
.unwrap_or("")
|
||||
.trim()
|
||||
.into();
|
||||
(header_name.clone(), val)
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
let find_indices = |line: &str| {
|
||||
let values = line
|
||||
.split(&separator)
|
||||
.map(str::trim)
|
||||
.filter(|s| !s.is_empty());
|
||||
values
|
||||
.fold(
|
||||
(0, vec![]),
|
||||
|(current_pos, mut indices), value| match line[current_pos..].find(value) {
|
||||
None => (current_pos, indices),
|
||||
Some(index) => {
|
||||
let absolute_index = current_pos + index;
|
||||
indices.push(absolute_index);
|
||||
(absolute_index + value.len(), indices)
|
||||
}
|
||||
},
|
||||
)
|
||||
.1
|
||||
};
|
||||
|
||||
let parse_with_headers = |lines, headers_raw: &str| {
|
||||
let indices = find_indices(headers_raw);
|
||||
let headers = headers_raw
|
||||
.split(&separator)
|
||||
.map(str::trim)
|
||||
.filter(|s| !s.is_empty())
|
||||
.map(String::from)
|
||||
.zip(indices);
|
||||
|
||||
let columns = headers.collect::<Vec<(String, usize)>>();
|
||||
|
||||
construct(lines, columns)
|
||||
};
|
||||
|
||||
let parse_without_headers = |ls: Vec<&str>| {
|
||||
let mut indices = ls
|
||||
.iter()
|
||||
.flat_map(|s| find_indices(*s))
|
||||
.collect::<Vec<usize>>();
|
||||
|
||||
indices.sort();
|
||||
indices.dedup();
|
||||
|
||||
let headers: Vec<(String, usize)> = indices
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(i, position)| (format!("Column{}", i + 1), *position))
|
||||
.collect();
|
||||
|
||||
construct(ls.iter().map(|s| s.to_owned()), headers)
|
||||
};
|
||||
|
||||
match headers {
|
||||
HeaderOptions::WithHeaders(headers_raw) => parse_with_headers(lines, headers_raw),
|
||||
HeaderOptions::WithoutHeaders => parse_without_headers(lines.collect()),
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_separated_columns<'a>(
|
||||
lines: impl Iterator<Item = &'a str>,
|
||||
headers: HeaderOptions,
|
||||
separator: &str,
|
||||
) -> Vec<Vec<(String, String)>> {
|
||||
fn collect<'a>(
|
||||
headers: Vec<String>,
|
||||
rows: impl Iterator<Item = &'a str>,
|
||||
separator: &str,
|
||||
) -> Vec<Vec<(String, String)>> {
|
||||
rows.map(|r| {
|
||||
headers
|
||||
.iter()
|
||||
.zip(r.split(separator).map(str::trim).filter(|s| !s.is_empty()))
|
||||
.map(|(a, b)| (a.to_owned(), b.to_owned()))
|
||||
.collect()
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
let parse_with_headers = |lines, headers_raw: &str| {
|
||||
let headers = headers_raw
|
||||
.split(&separator)
|
||||
.map(str::trim)
|
||||
.map(|s| s.to_owned())
|
||||
.filter(|s| !s.is_empty())
|
||||
.collect();
|
||||
collect(headers, lines, separator)
|
||||
};
|
||||
|
||||
let parse_without_headers = |ls: Vec<&str>| {
|
||||
let num_columns = ls.iter().map(|r| r.len()).max().unwrap_or(0);
|
||||
|
||||
let headers = (1..=num_columns)
|
||||
.map(|i| format!("Column{}", i))
|
||||
.collect::<Vec<String>>();
|
||||
collect(headers, ls.iter().map(|s| s.as_ref()), separator)
|
||||
};
|
||||
|
||||
match headers {
|
||||
HeaderOptions::WithHeaders(headers_raw) => parse_with_headers(lines, headers_raw),
|
||||
HeaderOptions::WithoutHeaders => parse_without_headers(lines.collect()),
|
||||
}
|
||||
}
|
||||
|
||||
fn string_to_table(
|
||||
s: &str,
|
||||
headerless: bool,
|
||||
aligned_columns: bool,
|
||||
split_at: usize,
|
||||
) -> Option<Vec<Vec<(String, String)>>> {
|
||||
let mut lines = s.lines().filter(|l| !l.trim().is_empty());
|
||||
let separator = " ".repeat(std::cmp::max(split_at, 1));
|
||||
|
||||
let headers = lines
|
||||
.next()?
|
||||
.split(&separator)
|
||||
.map(|s| s.trim())
|
||||
.filter(|s| !s.is_empty())
|
||||
.map(|s| s.to_owned())
|
||||
.collect::<Vec<String>>();
|
||||
|
||||
let header_row = if headerless {
|
||||
(1..=headers.len())
|
||||
.map(|i| format!("Column{}", i))
|
||||
.collect::<Vec<String>>()
|
||||
let (ls, header_options) = if headerless {
|
||||
(lines, HeaderOptions::WithoutHeaders)
|
||||
} else {
|
||||
headers
|
||||
let headers = lines.next()?;
|
||||
(lines, HeaderOptions::WithHeaders(headers))
|
||||
};
|
||||
|
||||
Some(
|
||||
lines
|
||||
.map(|l| {
|
||||
header_row
|
||||
.iter()
|
||||
.zip(
|
||||
l.split(&separator)
|
||||
.map(|s| s.trim())
|
||||
.filter(|s| !s.is_empty()),
|
||||
)
|
||||
.map(|(a, b)| (String::from(a), String::from(b)))
|
||||
.collect()
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
let f = if aligned_columns {
|
||||
parse_aligned_columns
|
||||
} else {
|
||||
parse_separated_columns
|
||||
};
|
||||
|
||||
let parsed = f(ls, header_options, &separator);
|
||||
match parsed.len() {
|
||||
0 => None,
|
||||
_ => Some(parsed),
|
||||
}
|
||||
}
|
||||
|
||||
fn from_ssv_string_to_value(
|
||||
s: &str,
|
||||
headerless: bool,
|
||||
aligned_columns: bool,
|
||||
split_at: usize,
|
||||
tag: impl Into<Tag>,
|
||||
) -> Option<Tagged<Value>> {
|
||||
) -> Option<Value> {
|
||||
let tag = tag.into();
|
||||
let rows = string_to_table(s, headerless, split_at)?
|
||||
let rows = string_to_table(s, headerless, aligned_columns, split_at)?
|
||||
.iter()
|
||||
.map(|row| {
|
||||
let mut tagged_dict = TaggedDictBuilder::new(&tag);
|
||||
for (col, entry) in row {
|
||||
tagged_dict.insert_tagged(
|
||||
tagged_dict.insert_value(
|
||||
col,
|
||||
Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag),
|
||||
UntaggedValue::Primitive(Primitive::String(String::from(entry)))
|
||||
.into_value(&tag),
|
||||
)
|
||||
}
|
||||
tagged_dict.into_tagged_value()
|
||||
tagged_dict.into_value()
|
||||
})
|
||||
.collect();
|
||||
|
||||
Some(Value::Table(rows).tagged(&tag))
|
||||
Some(UntaggedValue::Table(rows).into_value(&tag))
|
||||
}
|
||||
|
||||
fn from_ssv(
|
||||
FromSSVArgs {
|
||||
headerless,
|
||||
aligned_columns,
|
||||
minimum_spaces,
|
||||
}: FromSSVArgs,
|
||||
RunnableContext { input, name, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
let mut concat_string = String::new();
|
||||
let mut latest_tag: Option<Tag> = None;
|
||||
let split_at = match minimum_spaces {
|
||||
@@ -120,10 +262,10 @@ fn from_ssv(
|
||||
};
|
||||
|
||||
for value in values {
|
||||
let value_tag = value.tag();
|
||||
let value_tag = value.tag.clone();
|
||||
latest_tag = Some(value_tag.clone());
|
||||
match value.item {
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
match &value.value {
|
||||
UntaggedValue::Primitive(Primitive::String(s)) => {
|
||||
concat_string.push_str(&s);
|
||||
}
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary (
|
||||
@@ -136,9 +278,9 @@ fn from_ssv(
|
||||
}
|
||||
}
|
||||
|
||||
match from_ssv_string_to_value(&concat_string, headerless, split_at, name.clone()) {
|
||||
match from_ssv_string_to_value(&concat_string, headerless, aligned_columns, split_at, name.clone()) {
|
||||
Some(x) => match x {
|
||||
Tagged { item: Value::Table(list), ..} => {
|
||||
Value { value: UntaggedValue::Table(list), ..} => {
|
||||
for l in list { yield ReturnSuccess::value(l) }
|
||||
}
|
||||
x => yield ReturnSuccess::value(x)
|
||||
@@ -171,11 +313,11 @@ mod tests {
|
||||
|
||||
a b
|
||||
|
||||
1 2
|
||||
1 2
|
||||
|
||||
3 4
|
||||
3 4
|
||||
"#;
|
||||
let result = string_to_table(input, false, 1);
|
||||
let result = string_to_table(input, false, true, 1);
|
||||
assert_eq!(
|
||||
result,
|
||||
Some(vec![
|
||||
@@ -186,16 +328,31 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_ignores_headers_when_headerless() {
|
||||
fn it_deals_with_single_column_input() {
|
||||
let input = r#"
|
||||
a
|
||||
1
|
||||
2
|
||||
"#;
|
||||
let result = string_to_table(input, false, true, 1);
|
||||
assert_eq!(
|
||||
result,
|
||||
Some(vec![vec![owned("a", "1")], vec![owned("a", "2")]])
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_uses_first_row_as_data_when_headerless() {
|
||||
let input = r#"
|
||||
a b
|
||||
1 2
|
||||
3 4
|
||||
"#;
|
||||
let result = string_to_table(input, true, 1);
|
||||
let result = string_to_table(input, true, true, 1);
|
||||
assert_eq!(
|
||||
result,
|
||||
Some(vec![
|
||||
vec![owned("Column1", "a"), owned("Column2", "b")],
|
||||
vec![owned("Column1", "1"), owned("Column2", "2")],
|
||||
vec![owned("Column1", "3"), owned("Column2", "4")]
|
||||
])
|
||||
@@ -205,19 +362,19 @@ mod tests {
|
||||
#[test]
|
||||
fn it_returns_none_given_an_empty_string() {
|
||||
let input = "";
|
||||
let result = string_to_table(input, true, 1);
|
||||
assert_eq!(result, None);
|
||||
let result = string_to_table(input, true, true, 1);
|
||||
assert!(result.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_allows_a_predefined_number_of_spaces() {
|
||||
let input = r#"
|
||||
column a column b
|
||||
entry 1 entry number 2
|
||||
3 four
|
||||
entry 1 entry number 2
|
||||
3 four
|
||||
"#;
|
||||
|
||||
let result = string_to_table(input, false, 3);
|
||||
let result = string_to_table(input, false, true, 3);
|
||||
assert_eq!(
|
||||
result,
|
||||
Some(vec![
|
||||
@@ -239,12 +396,111 @@ mod tests {
|
||||
|
||||
let trimmed = |s: &str| s.trim() == s;
|
||||
|
||||
let result = string_to_table(input, false, 2).unwrap();
|
||||
let result = string_to_table(input, false, true, 2).unwrap();
|
||||
assert!(result
|
||||
.iter()
|
||||
.all(|row| row.iter().all(|(a, b)| trimmed(a) && trimmed(b))))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_keeps_empty_columns() {
|
||||
let input = r#"
|
||||
colA col B col C
|
||||
val2 val3
|
||||
val4 val 5 val 6
|
||||
val7 val8
|
||||
"#;
|
||||
|
||||
let result = string_to_table(input, false, true, 2).unwrap();
|
||||
assert_eq!(
|
||||
true,
|
||||
result
|
||||
.iter()
|
||||
.all(|row| row.iter().all(|(a, b)| trimmed(a) && trimmed(b)))
|
||||
result,
|
||||
vec![
|
||||
vec![
|
||||
owned("colA", ""),
|
||||
owned("col B", "val2"),
|
||||
owned("col C", "val3")
|
||||
],
|
||||
vec![
|
||||
owned("colA", "val4"),
|
||||
owned("col B", "val 5"),
|
||||
owned("col C", "val 6")
|
||||
],
|
||||
vec![
|
||||
owned("colA", "val7"),
|
||||
owned("col B", ""),
|
||||
owned("col C", "val8")
|
||||
],
|
||||
]
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_uses_the_full_final_column() {
|
||||
let input = r#"
|
||||
colA col B
|
||||
val1 val2 trailing value that should be included
|
||||
"#;
|
||||
|
||||
let result = string_to_table(input, false, true, 2).unwrap();
|
||||
assert_eq!(
|
||||
result,
|
||||
vec![vec![
|
||||
owned("colA", "val1"),
|
||||
owned("col B", "val2 trailing value that should be included"),
|
||||
],]
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_handles_empty_values_when_headerless_and_aligned_columns() {
|
||||
let input = r#"
|
||||
a multi-word value b d
|
||||
1 3-3 4
|
||||
last
|
||||
"#;
|
||||
|
||||
let result = string_to_table(input, true, true, 2).unwrap();
|
||||
assert_eq!(
|
||||
result,
|
||||
vec![
|
||||
vec![
|
||||
owned("Column1", "a multi-word value"),
|
||||
owned("Column2", "b"),
|
||||
owned("Column3", ""),
|
||||
owned("Column4", "d"),
|
||||
owned("Column5", "")
|
||||
],
|
||||
vec![
|
||||
owned("Column1", "1"),
|
||||
owned("Column2", ""),
|
||||
owned("Column3", "3-3"),
|
||||
owned("Column4", "4"),
|
||||
owned("Column5", "")
|
||||
],
|
||||
vec![
|
||||
owned("Column1", ""),
|
||||
owned("Column2", ""),
|
||||
owned("Column3", ""),
|
||||
owned("Column4", ""),
|
||||
owned("Column5", "last")
|
||||
],
|
||||
]
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn input_is_parsed_correctly_if_either_option_works() {
|
||||
let input = r#"
|
||||
docker-registry docker-registry=default docker-registry=default 172.30.78.158 5000/TCP
|
||||
kubernetes component=apiserver,provider=kubernetes <none> 172.30.0.2 443/TCP
|
||||
kubernetes-ro component=apiserver,provider=kubernetes <none> 172.30.0.1 80/TCP
|
||||
"#;
|
||||
|
||||
let aligned_columns_headerless = string_to_table(input, true, true, 2).unwrap();
|
||||
let separator_headerless = string_to_table(input, true, false, 2).unwrap();
|
||||
let aligned_columns_with_headers = string_to_table(input, false, true, 2).unwrap();
|
||||
let separator_with_headers = string_to_table(input, false, false, 2).unwrap();
|
||||
assert_eq!(aligned_columns_headerless, separator_headerless);
|
||||
assert_eq!(aligned_columns_with_headers, separator_with_headers);
|
||||
}
|
||||
}
|
||||
|
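Note: the aligned-columns path added above derives a start offset for every column from the header row and then slices each data line at those offsets, which is what lets it keep empty cells (see the it_keeps_empty_columns test). Below is a stripped-down, standalone sketch of that idea using only std; the names and simplifications are mine, and it assumes ASCII input with single-word headers.

// Sketch: aligned-column parsing in the spirit of parse_aligned_columns.
// Column boundaries come from where each header starts; every data line is
// sliced at those offsets, so empty cells survive instead of collapsing.
fn parse_aligned(text: &str) -> Vec<Vec<(String, String)>> {
    let mut lines = text.lines().filter(|l| !l.trim().is_empty());
    let header_line = match lines.next() {
        Some(h) => h,
        None => return vec![],
    };

    // (column name, byte offset where the column starts in the header row)
    let mut columns: Vec<(String, usize)> = Vec::new();
    let mut pos = 0;
    for name in header_line.split_whitespace() {
        let start = pos + header_line[pos..].find(name).unwrap_or(0);
        columns.push((name.to_string(), start));
        pos = start + name.len();
    }

    lines
        .map(|line| {
            columns
                .iter()
                .enumerate()
                .map(|(i, (name, start))| {
                    // The column ends where the next one begins, or at end of line.
                    let end = columns.get(i + 1).map(|(_, e)| *e).unwrap_or(line.len());
                    let cell = line.get(*start..end.min(line.len())).unwrap_or("");
                    (name.clone(), cell.trim().to_string())
                })
                .collect()
        })
        .collect()
}

fn main() {
    let table = parse_aligned("NAME   AGE\nalice  31\nbob\n");
    // The short second row still yields an (empty) AGE cell.
    assert_eq!(table[1][1], ("AGE".to_string(), "".to_string()));
    println!("{:?}", table);
}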
@@ -26,39 +26,38 @@ impl WholeStreamCommand for FromTOML {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into<Tag>) -> Tagged<Value> {
|
||||
pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into<Tag>) -> Value {
|
||||
let tag = tag.into();
|
||||
|
||||
match v {
|
||||
toml::Value::Boolean(b) => Value::boolean(*b).tagged(tag),
|
||||
toml::Value::Integer(n) => Value::number(n).tagged(tag),
|
||||
toml::Value::Float(n) => Value::number(n).tagged(tag),
|
||||
toml::Value::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag),
|
||||
toml::Value::Array(a) => Value::Table(
|
||||
toml::Value::Boolean(b) => UntaggedValue::boolean(*b).into_value(tag),
|
||||
toml::Value::Integer(n) => UntaggedValue::number(n).into_value(tag),
|
||||
toml::Value::Float(n) => UntaggedValue::number(n).into_value(tag),
|
||||
toml::Value::String(s) => {
|
||||
UntaggedValue::Primitive(Primitive::String(String::from(s))).into_value(tag)
|
||||
}
|
||||
toml::Value::Array(a) => UntaggedValue::Table(
|
||||
a.iter()
|
||||
.map(|x| convert_toml_value_to_nu_value(x, &tag))
|
||||
.collect(),
|
||||
)
|
||||
.tagged(tag),
|
||||
.into_value(tag),
|
||||
toml::Value::Datetime(dt) => {
|
||||
Value::Primitive(Primitive::String(dt.to_string())).tagged(tag)
|
||||
UntaggedValue::Primitive(Primitive::String(dt.to_string())).into_value(tag)
|
||||
}
|
||||
toml::Value::Table(t) => {
|
||||
let mut collected = TaggedDictBuilder::new(&tag);
|
||||
|
||||
for (k, v) in t.iter() {
|
||||
collected.insert_tagged(k.clone(), convert_toml_value_to_nu_value(v, &tag));
|
||||
collected.insert_value(k.clone(), convert_toml_value_to_nu_value(v, &tag));
|
||||
}
|
||||
|
||||
collected.into_tagged_value()
|
||||
collected.into_value()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_toml_string_to_value(
|
||||
s: String,
|
||||
tag: impl Into<Tag>,
|
||||
) -> Result<Tagged<Value>, toml::de::Error> {
|
||||
pub fn from_toml_string_to_value(s: String, tag: impl Into<Tag>) -> Result<Value, toml::de::Error> {
|
||||
let v: toml::Value = s.parse::<toml::Value>()?;
|
||||
Ok(convert_toml_value_to_nu_value(&v, tag))
|
||||
}
|
||||
@@ -69,28 +68,29 @@ pub fn from_toml(
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let tag = args.name_tag();
|
||||
let name_span = tag.span;
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
|
||||
let mut concat_string = String::new();
|
||||
let mut latest_tag: Option<Tag> = None;
|
||||
|
||||
for value in values {
|
||||
let value_tag = value.tag();
|
||||
latest_tag = Some(value_tag.clone());
|
||||
match value.item {
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
latest_tag = Some(value.tag.clone());
|
||||
let value_span = value.tag.span;
|
||||
match value.value {
|
||||
UntaggedValue::Primitive(Primitive::String(s)) => {
|
||||
concat_string.push_str(&s);
|
||||
concat_string.push_str("\n");
|
||||
}
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
&tag,
|
||||
name_span,
|
||||
"value originates from here",
|
||||
&value_tag,
|
||||
value_span,
|
||||
)),
|
||||
|
||||
}
|
||||
@@ -98,7 +98,7 @@ pub fn from_toml(
|
||||
|
||||
match from_toml_string_to_value(concat_string, tag.clone()) {
|
||||
Ok(x) => match x {
|
||||
Tagged { item: Value::Table(list), .. } => {
|
||||
Value { value: UntaggedValue::Table(list), .. } => {
|
||||
for l in list {
|
||||
yield ReturnSuccess::value(l);
|
||||
}
|
||||
|
@@ -1,7 +1,6 @@
|
||||
use crate::commands::from_delimited_data::from_delimited_data;
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::data::{Primitive, TaggedDictBuilder, Value};
|
||||
use crate::prelude::*;
|
||||
use csv::ReaderBuilder;
|
||||
|
||||
pub struct FromTSV;
|
||||
|
||||
@@ -16,7 +15,8 @@ impl WholeStreamCommand for FromTSV {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("from-tsv").switch("headerless")
|
||||
Signature::build("from-tsv")
|
||||
.switch("headerless", "don't treat the first row as column names")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@@ -32,108 +32,9 @@ impl WholeStreamCommand for FromTSV {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_tsv_string_to_value(
|
||||
s: String,
|
||||
headerless: bool,
|
||||
tag: impl Into<Tag>,
|
||||
) -> Result<Tagged<Value>, csv::Error> {
|
||||
let mut reader = ReaderBuilder::new()
|
||||
.has_headers(false)
|
||||
.delimiter(b'\t')
|
||||
.from_reader(s.as_bytes());
|
||||
let tag = tag.into();
|
||||
|
||||
let mut fields: VecDeque<String> = VecDeque::new();
|
||||
let mut iter = reader.records();
|
||||
let mut rows = vec![];
|
||||
|
||||
if let Some(result) = iter.next() {
|
||||
let line = result?;
|
||||
|
||||
for (idx, item) in line.iter().enumerate() {
|
||||
if headerless {
|
||||
fields.push_back(format!("Column{}", idx + 1));
|
||||
} else {
|
||||
fields.push_back(item.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
loop {
|
||||
if let Some(row_values) = iter.next() {
|
||||
let row_values = row_values?;
|
||||
|
||||
let mut row = TaggedDictBuilder::new(&tag);
|
||||
|
||||
for (idx, entry) in row_values.iter().enumerate() {
|
||||
row.insert_tagged(
|
||||
fields.get(idx).unwrap(),
|
||||
Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag),
|
||||
);
|
||||
}
|
||||
|
||||
rows.push(row.into_tagged_value());
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(Value::Table(rows).tagged(&tag))
|
||||
}
|
||||
|
||||
fn from_tsv(
|
||||
FromTSVArgs {
|
||||
headerless: skip_headers,
|
||||
}: FromTSVArgs,
|
||||
RunnableContext { input, name, .. }: RunnableContext,
|
||||
FromTSVArgs { headerless }: FromTSVArgs,
|
||||
runnable_context: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let name_tag = name;
|
||||
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
|
||||
let mut concat_string = String::new();
|
||||
let mut latest_tag: Option<Tag> = None;
|
||||
|
||||
for value in values {
|
||||
let value_tag = value.tag();
|
||||
latest_tag = Some(value_tag.clone());
|
||||
match value.item {
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
concat_string.push_str(&s);
|
||||
concat_string.push_str("\n");
|
||||
}
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
&name_tag,
|
||||
"value originates from here",
|
||||
&value_tag,
|
||||
)),
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
match from_tsv_string_to_value(concat_string, skip_headers, name_tag.clone()) {
|
||||
Ok(x) => match x {
|
||||
Tagged { item: Value::Table(list), .. } => {
|
||||
for l in list {
|
||||
yield ReturnSuccess::value(l);
|
||||
}
|
||||
}
|
||||
x => yield ReturnSuccess::value(x),
|
||||
},
|
||||
Err(_) => if let Some(last_tag) = latest_tag {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Could not parse as TSV",
|
||||
"input cannot be parsed as TSV",
|
||||
&name_tag,
|
||||
"value originates from here",
|
||||
&last_tag,
|
||||
))
|
||||
} ,
|
||||
}
|
||||
};
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
from_delimited_data(headerless, '\t', "TSV", runnable_context)
|
||||
}
|
||||
|
@@ -29,27 +29,28 @@ impl WholeStreamCommand for FromURL {
|
||||
fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let tag = args.name_tag();
|
||||
let name_span = tag.span;
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
|
||||
let mut concat_string = String::new();
|
||||
let mut latest_tag: Option<Tag> = None;
|
||||
|
||||
for value in values {
|
||||
let value_tag = value.tag();
|
||||
latest_tag = Some(value_tag.clone());
|
||||
match value.item {
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
latest_tag = Some(value.tag.clone());
|
||||
let value_span = value.tag.span;
|
||||
match value.value {
|
||||
UntaggedValue::Primitive(Primitive::String(s)) => {
|
||||
concat_string.push_str(&s);
|
||||
}
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
&tag,
|
||||
name_span,
|
||||
"value originates from here",
|
||||
&value_tag,
|
||||
value_span,
|
||||
)),
|
||||
|
||||
}
|
||||
@@ -62,10 +63,10 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
|
||||
let mut row = TaggedDictBuilder::new(tag);
|
||||
|
||||
for (k,v) in result {
|
||||
row.insert(k, Value::string(v));
|
||||
row.insert_untagged(k, UntaggedValue::string(v));
|
||||
}
|
||||
|
||||
yield ReturnSuccess::value(row.into_tagged_value());
|
||||
yield ReturnSuccess::value(row.into_value());
|
||||
}
|
||||
_ => {
|
||||
if let Some(last_tag) = latest_tag {
|
||||
|
src/commands/from_xlsx.rs (new file, 104 lines)
@@ -0,0 +1,104 @@
use crate::commands::WholeStreamCommand;
use crate::data::{Primitive, Value};
use crate::prelude::*;
use crate::{TaggedDictBuilder, TaggedListBuilder};
use calamine::*;
use std::io::Cursor;

pub struct FromXLSX;

#[derive(Deserialize)]
pub struct FromXLSXArgs {
    headerless: bool,
}

impl WholeStreamCommand for FromXLSX {
    fn name(&self) -> &str {
        "from-xlsx"
    }

    fn signature(&self) -> Signature {
        Signature::build("from-xlsx")
            .switch("headerless", "don't treat the first row as column names")
    }

    fn usage(&self) -> &str {
        "Parse binary Excel(.xlsx) data and create table."
    }

    fn run(
        &self,
        args: CommandArgs,
        registry: &CommandRegistry,
    ) -> Result<OutputStream, ShellError> {
        args.process(registry, from_xlsx)?.run()
    }
}

fn from_xlsx(
    FromXLSXArgs {
        headerless: _headerless,
    }: FromXLSXArgs,
    runnable_context: RunnableContext,
) -> Result<OutputStream, ShellError> {
    let input = runnable_context.input;
    let tag = runnable_context.name;

    let stream = async_stream! {
        let values: Vec<Value> = input.values.collect().await;

        for value in values {
            let value_span = value.tag.span;
            let value_tag = value.tag.clone();

            match value.value {
                UntaggedValue::Primitive(Primitive::Binary(vb)) => {
                    let mut buf: Cursor<Vec<u8>> = Cursor::new(vb);
                    let mut xls = Xlsx::<_>::new(buf).unwrap();

                    let mut dict = TaggedDictBuilder::new(&tag);

                    let sheet_names = xls.sheet_names().to_owned();

                    for sheet_name in &sheet_names {
                        let mut sheet_output = TaggedListBuilder::new(&tag);

                        let current_sheet = xls.worksheet_range(sheet_name).unwrap().unwrap();

                        for row in current_sheet.rows() {
                            let mut row_output = TaggedDictBuilder::new(&tag);
                            for (i, cell) in row.iter().enumerate() {
                                let value = match cell {
                                    DataType::Empty => UntaggedValue::nothing(),
                                    DataType::String(s) => UntaggedValue::string(s),
                                    DataType::Float(f) => UntaggedValue::decimal(*f),
                                    DataType::Int(i) => UntaggedValue::int(*i),
                                    DataType::Bool(b) => UntaggedValue::boolean(*b),
                                    _ => UntaggedValue::nothing(),
                                };

                                row_output.insert_untagged(&format!("Column{}", i), value);
                            }

                            sheet_output.push_untagged(row_output.into_untagged_value());
                        }

                        dict.insert_untagged(sheet_name, sheet_output.into_untagged_value());
                    }

                    yield ReturnSuccess::value(dict.into_value());
                }
                _ => yield Err(ShellError::labeled_error_with_secondary(
                    "Expected binary data from pipeline",
                    "requires binary data input",
                    &tag,
                    "value originates from here",
                    value_tag,
                )),

            }
        }
    };

    Ok(stream.to_output_stream())
}
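Note: the new from-xlsx command drives calamine directly. Below is a hedged standalone sketch that reuses only the calamine calls that appear in the file above (Xlsx::new over a Cursor, sheet_names, worksheet_range, matching DataType variants); the function name and the plain-string output shape are my own, and errors are unwrapped just as the command does.

// Sketch: walk every sheet of an .xlsx held in memory, mirroring the calamine
// calls used by from_xlsx, but collecting plain strings instead of nushell values.
use std::io::Cursor;

use calamine::{DataType, Reader, Xlsx};

fn sheets_as_strings(bytes: Vec<u8>) -> Vec<(String, Vec<Vec<String>>)> {
    let buf: Cursor<Vec<u8>> = Cursor::new(bytes);
    let mut workbook = Xlsx::<_>::new(buf).unwrap();
    let sheet_names = workbook.sheet_names().to_owned();

    let mut sheets = Vec::new();
    for name in sheet_names {
        // worksheet_range yields Option<Result<Range<DataType>, _>>; unwrap both, as above.
        let range = workbook.worksheet_range(&name).unwrap().unwrap();
        let rows = range
            .rows()
            .map(|row| {
                row.iter()
                    .map(|cell| match cell {
                        DataType::Empty => String::new(),
                        DataType::String(s) => s.clone(),
                        DataType::Float(f) => f.to_string(),
                        DataType::Int(i) => i.to_string(),
                        DataType::Bool(b) => b.to_string(),
                        _ => String::new(),
                    })
                    .collect()
            })
            .collect();
        sheets.push((name, rows));
    }
    sheets
}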
@@ -1,5 +1,6 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::data::{Primitive, TaggedDictBuilder, Value};
|
||||
use crate::data::base::{Primitive, UntaggedValue, Value};
|
||||
use crate::data::TaggedDictBuilder;
|
||||
use crate::prelude::*;
|
||||
|
||||
pub struct FromXML;
|
||||
@@ -26,7 +27,7 @@ impl WholeStreamCommand for FromXML {
|
||||
}
|
||||
}
|
||||
|
||||
fn from_node_to_value<'a, 'd>(n: &roxmltree::Node<'a, 'd>, tag: impl Into<Tag>) -> Tagged<Value> {
|
||||
fn from_node_to_value<'a, 'd>(n: &roxmltree::Node<'a, 'd>, tag: impl Into<Tag>) -> Value {
|
||||
let tag = tag.into();
|
||||
|
||||
if n.is_element() {
|
||||
@@ -37,11 +38,11 @@ fn from_node_to_value<'a, 'd>(n: &roxmltree::Node<'a, 'd>, tag: impl Into<Tag>)
|
||||
children_values.push(from_node_to_value(&c, &tag));
|
||||
}
|
||||
|
||||
let children_values: Vec<Tagged<Value>> = children_values
|
||||
let children_values: Vec<Value> = children_values
|
||||
.into_iter()
|
||||
.filter(|x| match x {
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::String(f)),
|
||||
Value {
|
||||
value: UntaggedValue::Primitive(Primitive::String(f)),
|
||||
..
|
||||
} => {
|
||||
if f.trim() == "" {
|
||||
@@ -55,28 +56,25 @@ fn from_node_to_value<'a, 'd>(n: &roxmltree::Node<'a, 'd>, tag: impl Into<Tag>)
|
||||
.collect();
|
||||
|
||||
let mut collected = TaggedDictBuilder::new(tag);
|
||||
collected.insert(name.clone(), Value::Table(children_values));
|
||||
collected.insert_untagged(name.clone(), UntaggedValue::Table(children_values));
|
||||
|
||||
collected.into_tagged_value()
|
||||
collected.into_value()
|
||||
} else if n.is_comment() {
|
||||
Value::string("<comment>").tagged(tag)
|
||||
UntaggedValue::string("<comment>").into_value(tag)
|
||||
} else if n.is_pi() {
|
||||
Value::string("<processing_instruction>").tagged(tag)
|
||||
UntaggedValue::string("<processing_instruction>").into_value(tag)
|
||||
} else if n.is_text() {
|
||||
Value::string(n.text().unwrap()).tagged(tag)
|
||||
UntaggedValue::string(n.text().unwrap()).into_value(tag)
|
||||
} else {
|
||||
Value::string("<unknown>").tagged(tag)
|
||||
UntaggedValue::string("<unknown>").into_value(tag)
|
||||
}
|
||||
}
|
||||
|
||||
fn from_document_to_value(d: &roxmltree::Document, tag: impl Into<Tag>) -> Tagged<Value> {
|
||||
fn from_document_to_value(d: &roxmltree::Document, tag: impl Into<Tag>) -> Value {
|
||||
from_node_to_value(&d.root_element(), tag)
|
||||
}
|
||||
|
||||
pub fn from_xml_string_to_value(
|
||||
s: String,
|
||||
tag: impl Into<Tag>,
|
||||
) -> Result<Tagged<Value>, roxmltree::Error> {
|
||||
pub fn from_xml_string_to_value(s: String, tag: impl Into<Tag>) -> Result<Value, roxmltree::Error> {
|
||||
let parsed = roxmltree::Document::parse(&s)?;
|
||||
Ok(from_document_to_value(&parsed, tag))
|
||||
}
|
||||
@@ -84,28 +82,30 @@ pub fn from_xml_string_to_value(
|
||||
fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let tag = args.name_tag();
|
||||
let name_span = tag.span;
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
|
||||
let mut concat_string = String::new();
|
||||
let mut latest_tag: Option<Tag> = None;
|
||||
|
||||
for value in values {
|
||||
let value_tag = value.tag();
|
||||
latest_tag = Some(value_tag.clone());
|
||||
match value.item {
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
latest_tag = Some(value.tag.clone());
|
||||
let value_span = value.tag.span;
|
||||
|
||||
match value.value {
|
||||
UntaggedValue::Primitive(Primitive::String(s)) => {
|
||||
concat_string.push_str(&s);
|
||||
concat_string.push_str("\n");
|
||||
}
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
&tag,
|
||||
name_span,
|
||||
"value originates from here",
|
||||
&value_tag,
|
||||
value_span,
|
||||
)),
|
||||
|
||||
}
|
||||
@@ -113,7 +113,7 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
|
||||
|
||||
match from_xml_string_to_value(concat_string, tag.clone()) {
|
||||
Ok(x) => match x {
|
||||
Tagged { item: Value::Table(list), .. } => {
|
||||
Value { value: UntaggedValue::Table(list), .. } => {
|
||||
for l in list {
|
||||
yield ReturnSuccess::value(l);
|
||||
}
|
||||
@ -134,3 +134,73 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {

    use crate::commands::from_xml;
    use crate::data::base::{UntaggedValue, Value};
    use indexmap::IndexMap;
    use nu_source::*;

    fn string(input: impl Into<String>) -> Value {
        UntaggedValue::string(input.into()).into_untagged_value()
    }

    fn row(entries: IndexMap<String, Value>) -> Value {
        UntaggedValue::row(entries).into_untagged_value()
    }

    fn table(list: &Vec<Value>) -> Value {
        UntaggedValue::table(list).into_untagged_value()
    }

    fn parse(xml: &str) -> Value {
        from_xml::from_xml_string_to_value(xml.to_string(), Tag::unknown()).unwrap()
    }

    #[test]
    fn parses_empty_element() {
        let source = "<nu></nu>";

        assert_eq!(
            parse(source),
            row(indexmap! {
                "nu".into() => table(&vec![])
            })
        );
    }

    #[test]
    fn parses_element_with_text() {
        let source = "<nu>La era de los tres caballeros</nu>";

        assert_eq!(
            parse(source),
            row(indexmap! {
                "nu".into() => table(&vec![string("La era de los tres caballeros")])
            })
        );
    }

    #[test]
    fn parses_element_with_elements() {
        let source = "\
<nu>
    <dev>Andrés</dev>
    <dev>Jonathan</dev>
    <dev>Yehuda</dev>
</nu>";

        assert_eq!(
            parse(source),
            row(indexmap! {
                "nu".into() => table(&vec![
                    row(indexmap! {"dev".into() => table(&vec![string("Andrés")])}),
                    row(indexmap! {"dev".into() => table(&vec![string("Jonathan")])}),
                    row(indexmap! {"dev".into() => table(&vec![string("Yehuda")])})
                ])
            })
        );
    }
}
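Not part of the diff: a minimal, self-contained sketch of the element-to-row walk these tests exercise, written against roxmltree directly with a made-up Simple enum standing in for nushell's Value so it compiles on its own.

// Illustrative only: elements become single-key rows whose value is the list of
// their non-blank children, mirroring from_node_to_value in shape.
use std::collections::BTreeMap;

#[derive(Debug, PartialEq)]
enum Simple {
    Str(String),
    List(Vec<Simple>),
    Row(BTreeMap<String, Simple>),
}

fn node_to_simple(n: &roxmltree::Node<'_, '_>) -> Simple {
    if n.is_element() {
        let name = n.tag_name().name().trim().to_string();
        let children: Vec<Simple> = n
            .children()
            .map(|c| node_to_simple(&c))
            // drop whitespace-only text nodes, as the real conversion does
            .filter(|v| !matches!(v, Simple::Str(s) if s.trim().is_empty()))
            .collect();
        let mut row = BTreeMap::new();
        row.insert(name, Simple::List(children));
        Simple::Row(row)
    } else if n.is_text() {
        Simple::Str(n.text().unwrap_or("").to_string())
    } else {
        // comments and processing instructions become placeholder strings
        Simple::Str("<other>".to_string())
    }
}

fn main() -> Result<(), roxmltree::Error> {
    let doc = roxmltree::Document::parse("<nu><dev>Andrés</dev></nu>")?;
    println!("{:?}", node_to_simple(&doc.root_element()));
    Ok(())
}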
|
||||
|
@ -50,47 +50,44 @@ impl WholeStreamCommand for FromYML {
|
||||
}
|
||||
}
|
||||
|
||||
fn convert_yaml_value_to_nu_value(v: &serde_yaml::Value, tag: impl Into<Tag>) -> Tagged<Value> {
|
||||
fn convert_yaml_value_to_nu_value(v: &serde_yaml::Value, tag: impl Into<Tag>) -> Value {
|
||||
let tag = tag.into();
|
||||
|
||||
match v {
|
||||
serde_yaml::Value::Bool(b) => Value::boolean(*b).tagged(tag),
|
||||
serde_yaml::Value::Bool(b) => UntaggedValue::boolean(*b).into_value(tag),
|
||||
serde_yaml::Value::Number(n) if n.is_i64() => {
|
||||
Value::number(n.as_i64().unwrap()).tagged(tag)
|
||||
UntaggedValue::number(n.as_i64().unwrap()).into_value(tag)
|
||||
}
|
||||
serde_yaml::Value::Number(n) if n.is_f64() => {
|
||||
Value::Primitive(Primitive::from(n.as_f64().unwrap())).tagged(tag)
|
||||
UntaggedValue::Primitive(Primitive::from(n.as_f64().unwrap())).into_value(tag)
|
||||
}
|
||||
serde_yaml::Value::String(s) => Value::string(s).tagged(tag),
|
||||
serde_yaml::Value::Sequence(a) => Value::Table(
|
||||
serde_yaml::Value::String(s) => UntaggedValue::string(s).into_value(tag),
|
||||
serde_yaml::Value::Sequence(a) => UntaggedValue::Table(
|
||||
a.iter()
|
||||
.map(|x| convert_yaml_value_to_nu_value(x, &tag))
|
||||
.collect(),
|
||||
)
|
||||
.tagged(tag),
|
||||
.into_value(tag),
|
||||
serde_yaml::Value::Mapping(t) => {
|
||||
let mut collected = TaggedDictBuilder::new(&tag);
|
||||
|
||||
for (k, v) in t.iter() {
|
||||
match k {
|
||||
serde_yaml::Value::String(k) => {
|
||||
collected.insert_tagged(k.clone(), convert_yaml_value_to_nu_value(v, &tag));
|
||||
collected.insert_value(k.clone(), convert_yaml_value_to_nu_value(v, &tag));
|
||||
}
|
||||
_ => unimplemented!("Unknown key type"),
|
||||
}
|
||||
}
|
||||
|
||||
collected.into_tagged_value()
|
||||
collected.into_value()
|
||||
}
|
||||
serde_yaml::Value::Null => Value::Primitive(Primitive::Nothing).tagged(tag),
|
||||
serde_yaml::Value::Null => UntaggedValue::Primitive(Primitive::Nothing).into_value(tag),
|
||||
x => unimplemented!("Unsupported yaml case: {:?}", x),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_yaml_string_to_value(
|
||||
s: String,
|
||||
tag: impl Into<Tag>,
|
||||
) -> serde_yaml::Result<Tagged<Value>> {
|
||||
pub fn from_yaml_string_to_value(s: String, tag: impl Into<Tag>) -> serde_yaml::Result<Value> {
|
||||
let v: serde_yaml::Value = serde_yaml::from_str(&s)?;
|
||||
Ok(convert_yaml_value_to_nu_value(&v, tag))
|
||||
}
|
||||
@ -98,28 +95,30 @@ pub fn from_yaml_string_to_value(
|
||||
fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let tag = args.name_tag();
|
||||
let name_span = tag.span;
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
|
||||
let mut concat_string = String::new();
|
||||
let mut latest_tag: Option<Tag> = None;
|
||||
|
||||
for value in values {
|
||||
let value_tag = value.tag();
|
||||
latest_tag = Some(value_tag.clone());
|
||||
match value.item {
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
latest_tag = Some(value.tag.clone());
|
||||
let value_span = value.tag.span;
|
||||
|
||||
match &value.value {
|
||||
UntaggedValue::Primitive(Primitive::String(s)) => {
|
||||
concat_string.push_str(&s);
|
||||
concat_string.push_str("\n");
|
||||
}
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
&tag,
|
||||
name_span,
|
||||
"value originates from here",
|
||||
&value_tag,
|
||||
value_span,
|
||||
)),
|
||||
|
||||
}
|
||||
@ -127,7 +126,7 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
|
||||
|
||||
match from_yaml_string_to_value(concat_string, tag.clone()) {
|
||||
Ok(x) => match x {
|
||||
Tagged { item: Value::Table(list), .. } => {
|
||||
Value { value: UntaggedValue::Table(list), .. } => {
|
||||
for l in list {
|
||||
yield ReturnSuccess::value(l);
|
||||
}
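For reference, not in the commits: the same recursive walk convert_yaml_value_to_nu_value performs, sketched with serde_yaml and a stand-in Simple enum rather than UntaggedValue; the enum and function names here are illustrative only.

// Illustrative only: map each serde_yaml::Value variant onto a plain Rust type.
use std::collections::BTreeMap;

#[derive(Debug)]
enum Simple {
    Nothing,
    Bool(bool),
    Int(i64),
    Float(f64),
    Str(String),
    List(Vec<Simple>),
    Row(BTreeMap<String, Simple>),
}

fn yaml_to_simple(v: &serde_yaml::Value) -> Simple {
    match v {
        serde_yaml::Value::Bool(b) => Simple::Bool(*b),
        serde_yaml::Value::Number(n) if n.is_i64() => Simple::Int(n.as_i64().unwrap()),
        serde_yaml::Value::Number(n) => Simple::Float(n.as_f64().unwrap_or(0.0)),
        serde_yaml::Value::String(s) => Simple::Str(s.clone()),
        serde_yaml::Value::Sequence(a) => Simple::List(a.iter().map(yaml_to_simple).collect()),
        serde_yaml::Value::Mapping(m) => Simple::Row(
            m.iter()
                .filter_map(|(k, v)| k.as_str().map(|k| (k.to_string(), yaml_to_simple(v))))
                .collect(),
        ),
        _ => Simple::Nothing, // Null and anything this sketch does not cover
    }
}

fn main() -> serde_yaml::Result<()> {
    let v: serde_yaml::Value = serde_yaml::from_str("name: nu\nversion: 0.5")?;
    println!("{:?}", yaml_to_simple(&v));
    Ok(())
}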
|
||||
|
@ -1,15 +1,18 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::data::meta::tag_for_tagged_list;
|
||||
use crate::data::base::shape::Shapes;
|
||||
use crate::data::Value;
|
||||
use crate::errors::ShellError;
|
||||
use crate::prelude::*;
|
||||
use crate::utils::did_you_mean;
|
||||
use crate::ColumnPath;
|
||||
use futures_util::pin_mut;
|
||||
use log::trace;
|
||||
use nu_source::{span_for_spanned_list, PrettyDebug};
|
||||
|
||||
pub struct Get;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct GetArgs {
|
||||
member: ColumnPath,
|
||||
rest: Vec<ColumnPath>,
|
||||
}
|
||||
|
||||
@ -19,9 +22,10 @@ impl WholeStreamCommand for Get {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("get")
|
||||
.required("member", SyntaxShape::ColumnPath)
|
||||
.rest(SyntaxShape::ColumnPath)
|
||||
Signature::build("get").rest(
|
||||
SyntaxShape::ColumnPath,
|
||||
"optionally return additional data by path",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -37,103 +41,123 @@ impl WholeStreamCommand for Get {
|
||||
}
|
||||
}
|
||||
|
||||
pub type ColumnPath = Vec<Tagged<String>>;
|
||||
pub fn get_column_path(path: &ColumnPath, obj: &Value) -> Result<Value, ShellError> {
|
||||
let fields = path.clone();
|
||||
|
||||
pub fn get_column_path(
|
||||
path: &ColumnPath,
|
||||
obj: &Tagged<Value>,
|
||||
) -> Result<Tagged<Value>, ShellError> {
|
||||
let mut current = Some(obj);
|
||||
for p in path.iter() {
|
||||
if let Some(obj) = current {
|
||||
current = match obj.get_data_by_key(&p) {
|
||||
Some(v) => Some(v),
|
||||
None =>
|
||||
// Before we give up, see if they gave us a path that matches a field name by itself
|
||||
{
|
||||
let possibilities = obj.data_descriptors();
|
||||
|
||||
let mut possible_matches: Vec<_> = possibilities
|
||||
obj.get_data_by_column_path(
|
||||
path,
|
||||
Box::new(move |(obj_source, column_path_tried, error)| {
|
||||
match &obj_source.value {
|
||||
UntaggedValue::Table(rows) => {
|
||||
let total = rows.len();
|
||||
let end_tag = match fields
|
||||
.members()
|
||||
.iter()
|
||||
.map(|x| (natural::distance::levenshtein_distance(x, &p), x))
|
||||
.collect();
|
||||
.nth_back(if fields.members().len() > 2 { 1 } else { 0 })
|
||||
{
|
||||
Some(last_field) => last_field.span,
|
||||
None => column_path_tried.span,
|
||||
};
|
||||
|
||||
possible_matches.sort();
|
||||
|
||||
if possible_matches.len() > 0 {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Unknown column",
|
||||
format!("did you mean '{}'?", possible_matches[0].1),
|
||||
tag_for_tagged_list(path.iter().map(|p| p.tag())),
|
||||
));
|
||||
} else {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Unknown column",
|
||||
"row does not contain this column",
|
||||
tag_for_tagged_list(path.iter().map(|p| p.tag())),
|
||||
));
|
||||
}
|
||||
return ShellError::labeled_error_with_secondary(
|
||||
"Row not found",
|
||||
format!(
|
||||
"There isn't a row indexed at {}",
|
||||
column_path_tried.display()
|
||||
),
|
||||
column_path_tried.span,
|
||||
if total == 1 {
|
||||
format!("The table only has 1 row")
|
||||
} else {
|
||||
format!("The table only has {} rows (0 to {})", total, total - 1)
|
||||
},
|
||||
end_tag,
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
match current {
|
||||
Some(v) => Ok(v.clone()),
|
||||
None => match obj {
|
||||
// If its None check for certain values.
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::String(_)),
|
||||
..
|
||||
} => Ok(obj.clone()),
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::Path(_)),
|
||||
..
|
||||
} => Ok(obj.clone()),
|
||||
_ => Ok(Value::nothing().tagged(&obj.tag)),
|
||||
},
|
||||
}
|
||||
match did_you_mean(&obj_source, column_path_tried) {
|
||||
Some(suggestions) => {
|
||||
return ShellError::labeled_error(
|
||||
"Unknown column",
|
||||
format!("did you mean '{}'?", suggestions[0].1),
|
||||
span_for_spanned_list(fields.members().iter().map(|p| p.span)),
|
||||
)
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
|
||||
return error;
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn get(
|
||||
GetArgs {
|
||||
member,
|
||||
rest: fields,
|
||||
}: GetArgs,
|
||||
GetArgs { rest: mut fields }: GetArgs,
|
||||
RunnableContext { input, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
trace!("get {:?} {:?}", member, fields);
|
||||
if fields.len() == 0 {
|
||||
let stream = async_stream! {
|
||||
let values = input.values;
|
||||
pin_mut!(values);
|
||||
|
||||
let stream = input
|
||||
.values
|
||||
.map(move |item| {
|
||||
let mut result = VecDeque::new();
|
||||
let mut shapes = Shapes::new();
|
||||
let mut index = 0;
|
||||
|
||||
let member = vec![member.clone()];
|
||||
|
||||
let fields = vec![&member, &fields]
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect::<Vec<&ColumnPath>>();
|
||||
|
||||
for column_path in &fields {
|
||||
match get_column_path(column_path, &item) {
|
||||
Ok(Tagged {
|
||||
item: Value::Table(l),
|
||||
..
|
||||
}) => {
|
||||
for item in l {
|
||||
result.push_back(ReturnSuccess::value(item.clone()));
|
||||
}
|
||||
}
|
||||
Ok(x) => result.push_back(ReturnSuccess::value(x.clone())),
|
||||
Err(x) => result.push_back(Err(x)),
|
||||
}
|
||||
while let Some(row) = values.next().await {
|
||||
shapes.add(&row, index);
|
||||
index += 1;
|
||||
}
|
||||
|
||||
result
|
||||
})
|
||||
.flatten();
|
||||
for row in shapes.to_values() {
|
||||
yield ReturnSuccess::value(row);
|
||||
}
|
||||
};
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
let stream: BoxStream<'static, ReturnValue> = stream.boxed();
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
} else {
|
||||
let member = fields.remove(0);
|
||||
trace!("get {:?} {:?}", member, fields);
|
||||
let stream = input
|
||||
.values
|
||||
.map(move |item| {
|
||||
let mut result = VecDeque::new();
|
||||
|
||||
let member = vec![member.clone()];
|
||||
|
||||
let column_paths = vec![&member, &fields]
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect::<Vec<&ColumnPath>>();
|
||||
|
||||
for path in column_paths {
|
||||
let res = get_column_path(&path, &item);
|
||||
|
||||
match res {
|
||||
Ok(got) => match got {
|
||||
Value {
|
||||
value: UntaggedValue::Table(rows),
|
||||
..
|
||||
} => {
|
||||
for item in rows {
|
||||
result.push_back(ReturnSuccess::value(item.clone()));
|
||||
}
|
||||
}
|
||||
other => result.push_back(ReturnSuccess::value(other.clone())),
|
||||
},
|
||||
Err(reason) => result.push_back(ReturnSuccess::value(
|
||||
UntaggedValue::Error(reason).into_untagged_value(),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
result
|
||||
})
|
||||
.flatten();
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
||||
}
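Not part of the diff: the idea behind the "Unknown column ... did you mean '...'?" suggestions kept in this file is to rank the known column names by edit distance and offer the closest one. The sketch below is self-contained with its own small Levenshtein function; it is not the natural crate's API or nushell's did_you_mean helper.

// Illustrative only: rank candidate column names by edit distance to the typo.
fn levenshtein(a: &str, b: &str) -> usize {
    let (a, b): (Vec<char>, Vec<char>) = (a.chars().collect(), b.chars().collect());
    let mut prev: Vec<usize> = (0..=b.len()).collect();
    for (i, ca) in a.iter().enumerate() {
        let mut cur = vec![i + 1];
        for (j, cb) in b.iter().enumerate() {
            let cost = if ca == cb { 0 } else { 1 };
            cur.push((prev[j] + cost).min(prev[j + 1] + 1).min(cur[j] + 1));
        }
        prev = cur;
    }
    prev[b.len()]
}

fn did_you_mean<'a>(tried: &str, columns: &'a [String]) -> Option<&'a str> {
    let mut ranked: Vec<_> = columns
        .iter()
        .map(|c| (levenshtein(tried, c), c))
        .collect();
    ranked.sort();
    ranked.into_iter().next().map(|(_, c)| c.as_str())
}

fn main() {
    let columns = vec!["name".to_string(), "country".to_string(), "date".to_string()];
    assert_eq!(did_you_mean("contry", &columns), Some("country"));
}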
|
||||
|
218 src/commands/group_by.rs (new file)
@ -0,0 +1,218 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::data::base::UntaggedValue;
|
||||
use crate::data::TaggedDictBuilder;
|
||||
use crate::errors::ShellError;
|
||||
use crate::prelude::*;
|
||||
use nu_source::Tagged;
|
||||
|
||||
pub struct GroupBy;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct GroupByArgs {
|
||||
column_name: Tagged<String>,
|
||||
}
|
||||
|
||||
impl WholeStreamCommand for GroupBy {
|
||||
fn name(&self) -> &str {
|
||||
"group-by"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("group-by").required(
|
||||
"column_name",
|
||||
SyntaxShape::String,
|
||||
"the name of the column to group by",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Creates a new table with the data from the table rows grouped by the column given."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
args.process(registry, group_by)?.run()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn group_by(
|
||||
GroupByArgs { column_name }: GroupByArgs,
|
||||
RunnableContext { input, name, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
|
||||
if values.is_empty() {
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Expected table from pipeline",
|
||||
"requires a table input",
|
||||
column_name.span()
|
||||
))
|
||||
} else {
|
||||
match group(&column_name, values, name) {
|
||||
Ok(grouped) => yield ReturnSuccess::value(grouped),
|
||||
Err(err) => yield Err(err)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
||||
|
||||
pub fn group(
|
||||
column_name: &Tagged<String>,
|
||||
values: Vec<Value>,
|
||||
tag: impl Into<Tag>,
|
||||
) -> Result<Value, ShellError> {
|
||||
let tag = tag.into();
|
||||
|
||||
let mut groups = indexmap::IndexMap::new();
|
||||
|
||||
for value in values {
|
||||
let group_key = value.get_data_by_key(column_name.borrow_spanned());
|
||||
|
||||
if group_key.is_none() {
|
||||
let possibilities = value.data_descriptors();
|
||||
|
||||
let mut possible_matches: Vec<_> = possibilities
|
||||
.iter()
|
||||
.map(|x| (natural::distance::levenshtein_distance(x, column_name), x))
|
||||
.collect();
|
||||
|
||||
possible_matches.sort();
|
||||
|
||||
if possible_matches.len() > 0 {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Unknown column",
|
||||
format!("did you mean '{}'?", possible_matches[0].1),
|
||||
column_name.tag(),
|
||||
));
|
||||
} else {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Unknown column",
|
||||
"row does not contain this column",
|
||||
column_name.tag(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
let group_key = group_key.unwrap().as_string()?;
|
||||
let group = groups.entry(group_key).or_insert(vec![]);
|
||||
group.push(value);
|
||||
}
|
||||
|
||||
let mut out = TaggedDictBuilder::new(&tag);
|
||||
|
||||
for (k, v) in groups.iter() {
|
||||
out.insert_untagged(k, UntaggedValue::table(v));
|
||||
}
|
||||
|
||||
Ok(out.into_value())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::commands::group_by::group;
|
||||
use crate::data::base::{UntaggedValue, Value};
|
||||
use indexmap::IndexMap;
|
||||
use nu_source::*;
|
||||
|
||||
fn string(input: impl Into<String>) -> Value {
|
||||
UntaggedValue::string(input.into()).into_untagged_value()
|
||||
}
|
||||
|
||||
fn row(entries: IndexMap<String, Value>) -> Value {
|
||||
UntaggedValue::row(entries).into_untagged_value()
|
||||
}
|
||||
|
||||
fn table(list: &Vec<Value>) -> Value {
|
||||
UntaggedValue::table(list).into_untagged_value()
|
||||
}
|
||||
|
||||
fn nu_releases_commiters() -> Vec<Value> {
|
||||
vec![
|
||||
row(
|
||||
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")},
|
||||
),
|
||||
]
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn groups_table_by_date_column() {
|
||||
let for_key = String::from("date").tagged_unknown();
|
||||
|
||||
assert_eq!(
|
||||
group(&for_key, nu_releases_commiters(), Tag::unknown()).unwrap(),
|
||||
row(indexmap! {
|
||||
"August 23-2019".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}),
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}),
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")})
|
||||
]),
|
||||
"October 10-2019".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}),
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}),
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")})
|
||||
]),
|
||||
"Sept 24-2019".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}),
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}),
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")})
|
||||
]),
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn groups_table_by_country_column() {
|
||||
let for_key = String::from("country").tagged_unknown();
|
||||
|
||||
assert_eq!(
|
||||
group(&for_key, nu_releases_commiters(), Tag::unknown()).unwrap(),
|
||||
row(indexmap! {
|
||||
"EC".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}),
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}),
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")})
|
||||
]),
|
||||
"NZ".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}),
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}),
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")})
|
||||
]),
|
||||
"US".into() => table(&vec![
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}),
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}),
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}),
|
||||
]),
|
||||
})
|
||||
);
|
||||
}
|
||||
}
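Not part of the diff: the heart of group() in standalone form — bucket rows by one column's value, preserving first-seen order with IndexMap just as the new file does. The Row alias is a stand-in for nushell rows.

// Illustrative only: group rows by a key column; error if the column is missing.
use indexmap::IndexMap;

type Row = IndexMap<String, String>;

fn group_rows(column: &str, rows: Vec<Row>) -> Result<IndexMap<String, Vec<Row>>, String> {
    let mut groups: IndexMap<String, Vec<Row>> = IndexMap::new();
    for row in rows {
        let key = row
            .get(column)
            .cloned()
            .ok_or_else(|| format!("row does not contain column '{}'", column))?;
        groups.entry(key).or_insert_with(Vec::new).push(row);
    }
    Ok(groups)
}

fn main() {
    let mut a = Row::new();
    a.insert("name".into(), "AR".into());
    a.insert("country".into(), "EC".into());
    let mut b = Row::new();
    b.insert("name".into(), "JT".into());
    b.insert("country".into(), "NZ".into());

    let grouped = group_rows("country", vec![a, b]).unwrap();
    assert_eq!(grouped["EC"].len(), 1);
    assert_eq!(grouped["NZ"].len(), 1);
}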
|
@ -3,6 +3,7 @@ use crate::data::{command_dict, TaggedDictBuilder};
|
||||
use crate::errors::ShellError;
|
||||
use crate::parser::registry::{self, NamedType, PositionalType};
|
||||
use crate::prelude::*;
|
||||
use nu_source::SpannedItem;
|
||||
|
||||
pub struct Help;
|
||||
|
||||
@ -12,7 +13,7 @@ impl PerItemCommand for Help {
|
||||
}
|
||||
|
||||
fn signature(&self) -> registry::Signature {
|
||||
Signature::build("help").rest(SyntaxShape::Any)
|
||||
Signature::build("help").rest(SyntaxShape::Any, "the name of command(s) to get help on")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -24,13 +25,13 @@ impl PerItemCommand for Help {
|
||||
call_info: &CallInfo,
|
||||
registry: &CommandRegistry,
|
||||
_raw_args: &RawCommandArgs,
|
||||
_input: Tagged<Value>,
|
||||
_input: Value,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let tag = &call_info.name_tag;
|
||||
|
||||
match call_info.args.nth(0) {
|
||||
Some(Tagged {
|
||||
item: Value::Primitive(Primitive::String(document)),
|
||||
Some(Value {
|
||||
value: UntaggedValue::Primitive(Primitive::String(document)),
|
||||
tag,
|
||||
}) => {
|
||||
let mut help = VecDeque::new();
|
||||
@ -41,13 +42,17 @@ impl PerItemCommand for Help {
|
||||
let mut short_desc = TaggedDictBuilder::new(tag.clone());
|
||||
let value = command_dict(registry.get_command(&cmd).unwrap(), tag.clone());
|
||||
|
||||
short_desc.insert("name", cmd);
|
||||
short_desc.insert(
|
||||
short_desc.insert_untagged("name", cmd);
|
||||
short_desc.insert_untagged(
|
||||
"description",
|
||||
value.get_data_by_key("usage").unwrap().as_string().unwrap(),
|
||||
value
|
||||
.get_data_by_key("usage".spanned_unknown())
|
||||
.unwrap()
|
||||
.as_string()
|
||||
.unwrap(),
|
||||
);
|
||||
|
||||
help.push_back(ReturnSuccess::value(short_desc.into_tagged_value()));
|
||||
help.push_back(ReturnSuccess::value(short_desc.into_value()));
|
||||
}
|
||||
} else {
|
||||
if let Some(command) = registry.get_command(document) {
|
||||
@ -61,12 +66,9 @@ impl PerItemCommand for Help {
|
||||
let mut one_liner = String::new();
|
||||
one_liner.push_str(&signature.name);
|
||||
one_liner.push_str(" ");
|
||||
if signature.named.len() > 0 {
|
||||
one_liner.push_str("{flags} ");
|
||||
}
|
||||
|
||||
for positional in signature.positional {
|
||||
match positional {
|
||||
for positional in &signature.positional {
|
||||
match &positional.0 {
|
||||
PositionalType::Mandatory(name, _m) => {
|
||||
one_liner.push_str(&format!("<{}> ", name));
|
||||
}
|
||||
@ -77,32 +79,77 @@ impl PerItemCommand for Help {
|
||||
}
|
||||
|
||||
if signature.rest_positional.is_some() {
|
||||
one_liner.push_str(" ...args");
|
||||
one_liner.push_str(&format!(" ...args",));
|
||||
}
|
||||
|
||||
if signature.named.len() > 0 {
|
||||
one_liner.push_str("{flags} ");
|
||||
}
|
||||
|
||||
long_desc.push_str(&format!("\nUsage:\n > {}\n", one_liner));
|
||||
|
||||
if signature.positional.len() > 0 || signature.rest_positional.is_some() {
|
||||
long_desc.push_str("\nparameters:\n");
|
||||
for positional in signature.positional {
|
||||
match positional.0 {
|
||||
PositionalType::Mandatory(name, _m) => {
|
||||
long_desc
|
||||
.push_str(&format!(" <{}> {}\n", name, positional.1));
|
||||
}
|
||||
PositionalType::Optional(name, _o) => {
|
||||
long_desc
|
||||
.push_str(&format!(" ({}) {}\n", name, positional.1));
|
||||
}
|
||||
}
|
||||
}
|
||||
if signature.rest_positional.is_some() {
|
||||
long_desc.push_str(&format!(
|
||||
" ...args{} {}\n",
|
||||
if signature.rest_positional.is_some() {
|
||||
":"
|
||||
} else {
|
||||
""
|
||||
},
|
||||
signature.rest_positional.unwrap().1
|
||||
));
|
||||
}
|
||||
}
|
||||
if signature.named.len() > 0 {
|
||||
long_desc.push_str("\nflags:\n");
|
||||
for (flag, ty) in signature.named {
|
||||
match ty {
|
||||
match ty.0 {
|
||||
NamedType::Switch => {
|
||||
long_desc.push_str(&format!(" --{}\n", flag));
|
||||
long_desc.push_str(&format!(
|
||||
" --{}{} {}\n",
|
||||
flag,
|
||||
if ty.1.len() > 0 { ":" } else { "" },
|
||||
ty.1
|
||||
));
|
||||
}
|
||||
NamedType::Mandatory(m) => {
|
||||
long_desc.push_str(&format!(
|
||||
" --{} <{}> (required parameter)\n",
|
||||
flag, m
|
||||
" --{} <{}> (required parameter){} {}\n",
|
||||
flag,
|
||||
m.display(),
|
||||
if ty.1.len() > 0 { ":" } else { "" },
|
||||
ty.1
|
||||
));
|
||||
}
|
||||
NamedType::Optional(o) => {
|
||||
long_desc.push_str(&format!(" --{} <{}>\n", flag, o));
|
||||
long_desc.push_str(&format!(
|
||||
" --{} <{}>{} {}\n",
|
||||
flag,
|
||||
o.display(),
|
||||
if ty.1.len() > 0 { ":" } else { "" },
|
||||
ty.1
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
help.push_back(ReturnSuccess::value(
|
||||
Value::string(long_desc).tagged(tag.clone()),
|
||||
UntaggedValue::string(long_desc).into_value(tag.clone()),
|
||||
));
|
||||
}
|
||||
}
|
||||
@ -120,7 +167,9 @@ You can also learn more at https://book.nushell.sh"#;
|
||||
|
||||
let mut output_stream = VecDeque::new();
|
||||
|
||||
output_stream.push_back(ReturnSuccess::value(Value::string(msg).tagged(tag)));
|
||||
output_stream.push_back(ReturnSuccess::value(
|
||||
UntaggedValue::string(msg).into_value(tag),
|
||||
));
|
||||
|
||||
Ok(output_stream.to_output_stream())
|
||||
}
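For reference, not in the commits: a small sketch of the flag formatting the reworked help output produces — switch, mandatory and optional flags each get a shape and the new per-flag description. The Flag enum here is illustrative, not nushell's NamedType.

// Illustrative only: one help line per flag, with ":" only when a description exists.
enum Flag {
    Switch,
    Mandatory(&'static str),
    Optional(&'static str),
}

fn format_flag(name: &str, flag: &Flag, description: &str) -> String {
    let sep = if description.is_empty() { "" } else { ":" };
    match flag {
        Flag::Switch => format!("  --{}{} {}\n", name, sep, description),
        Flag::Mandatory(shape) => format!(
            "  --{} <{}> (required parameter){} {}\n",
            name, shape, sep, description
        ),
        Flag::Optional(shape) => format!("  --{} <{}>{} {}\n", name, shape, sep, description),
    }
}

fn main() {
    print!("{}", format_flag("full", &Flag::Switch, "list all available columns"));
    print!("{}", format_flag("column_name", &Flag::Mandatory("String"), "the column to use"));
}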
|
||||
|
165 src/commands/histogram.rs (new file)
@ -0,0 +1,165 @@
|
||||
use crate::commands::evaluate_by::evaluate;
|
||||
use crate::commands::group_by::group;
|
||||
use crate::commands::map_max_by::map_max;
|
||||
use crate::commands::reduce_by::reduce;
|
||||
use crate::commands::t_sort_by::columns_sorted;
|
||||
use crate::commands::t_sort_by::t_sort;
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::data::TaggedDictBuilder;
|
||||
use crate::errors::ShellError;
|
||||
use crate::prelude::*;
|
||||
use nu_source::Tagged;
|
||||
use num_traits::cast::ToPrimitive;
|
||||
|
||||
pub struct Histogram;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct HistogramArgs {
|
||||
column_name: Tagged<String>,
|
||||
rest: Vec<Tagged<String>>,
|
||||
}
|
||||
|
||||
impl WholeStreamCommand for Histogram {
|
||||
fn name(&self) -> &str {
|
||||
"histogram"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("histogram")
|
||||
.required(
|
||||
"column_name",
|
||||
SyntaxShape::String,
|
||||
"the name of the column to graph by",
|
||||
)
|
||||
.rest(
|
||||
SyntaxShape::Member,
|
||||
"column name to give the histogram's frequency column",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Creates a new table with a histogram based on the column name passed in."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
args.process(registry, histogram)?.run()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn histogram(
|
||||
HistogramArgs { column_name, rest }: HistogramArgs,
|
||||
RunnableContext { input, name, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
|
||||
let Tagged { item: group_by, .. } = column_name.clone();
|
||||
|
||||
let groups = group(&column_name, values, &name)?;
|
||||
let group_labels = columns_sorted(Some(group_by.clone()), &groups, &name);
|
||||
let sorted = t_sort(Some(group_by.clone()), None, &groups, &name)?;
|
||||
let evaled = evaluate(&sorted, None, &name)?;
|
||||
let reduced = reduce(&evaled, None, &name)?;
|
||||
let maxima = map_max(&reduced, None, &name)?;
|
||||
let percents = percentages(&reduced, maxima, &name)?;
|
||||
|
||||
match percents {
|
||||
Value {
|
||||
value: UntaggedValue::Table(datasets),
|
||||
..
|
||||
} => {
|
||||
|
||||
let mut idx = 0;
|
||||
|
||||
let column_names_supplied: Vec<_> = rest.iter().map(|f| f.item.clone()).collect();
|
||||
|
||||
let frequency_column_name = if column_names_supplied.is_empty() {
|
||||
"frequency".to_string()
|
||||
} else {
|
||||
column_names_supplied[0].clone()
|
||||
};
|
||||
|
||||
let column = (*column_name).clone();
|
||||
|
||||
if let Value { value: UntaggedValue::Table(start), .. } = datasets.get(0).unwrap() {
|
||||
for percentage in start.into_iter() {
|
||||
|
||||
let mut fact = TaggedDictBuilder::new(&name);
|
||||
let value: Tagged<String> = group_labels.get(idx).unwrap().clone();
|
||||
fact.insert_value(&column, UntaggedValue::string(value.item).into_value(value.tag));
|
||||
|
||||
if let Value { value: UntaggedValue::Primitive(Primitive::Int(ref num)), .. } = percentage.clone() {
|
||||
let string = std::iter::repeat("*").take(num.to_i32().unwrap() as usize).collect::<String>();
|
||||
fact.insert_untagged(&frequency_column_name, UntaggedValue::string(string));
|
||||
}
|
||||
|
||||
idx = idx + 1;
|
||||
|
||||
yield ReturnSuccess::value(fact.into_value());
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
||||
|
||||
fn percentages(values: &Value, max: Value, tag: impl Into<Tag>) -> Result<Value, ShellError> {
|
||||
let tag = tag.into();
|
||||
|
||||
let results: Value = match values {
|
||||
Value {
|
||||
value: UntaggedValue::Table(datasets),
|
||||
..
|
||||
} => {
|
||||
let datasets: Vec<_> = datasets
|
||||
.into_iter()
|
||||
.map(|subsets| match subsets {
|
||||
Value {
|
||||
value: UntaggedValue::Table(data),
|
||||
..
|
||||
} => {
|
||||
let data =
|
||||
data.into_iter()
|
||||
.map(|d| match d {
|
||||
Value {
|
||||
value: UntaggedValue::Primitive(Primitive::Int(n)),
|
||||
..
|
||||
} => {
|
||||
let max = match max {
|
||||
Value {
|
||||
value:
|
||||
UntaggedValue::Primitive(Primitive::Int(
|
||||
ref maxima,
|
||||
)),
|
||||
..
|
||||
} => maxima.to_i32().unwrap(),
|
||||
_ => 0,
|
||||
};
|
||||
|
||||
let n = { n.to_i32().unwrap() * 100 / max };
|
||||
|
||||
UntaggedValue::number(n).into_value(&tag)
|
||||
}
|
||||
_ => UntaggedValue::number(0).into_value(&tag),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
UntaggedValue::Table(data).into_value(&tag)
|
||||
}
|
||||
_ => UntaggedValue::Table(vec![]).into_value(&tag),
|
||||
})
|
||||
.collect();
|
||||
|
||||
UntaggedValue::Table(datasets).into_value(&tag)
|
||||
}
|
||||
other => other.clone(),
|
||||
};
|
||||
|
||||
Ok(results)
|
||||
}
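Not part of the diff: the bar arithmetic histogram relies on, in standalone form — each group's count is scaled to a percentage of the maximum and drawn as that many '*' characters.

// Illustrative only: counts are assumed non-negative; the largest group gets 100 stars.
fn bars(counts: &[u32]) -> Vec<String> {
    let max = counts.iter().copied().max().unwrap_or(0);
    counts
        .iter()
        .map(|&n| {
            let percent = if max == 0 { 0 } else { n * 100 / max };
            std::iter::repeat("*").take(percent as usize).collect::<String>()
        })
        .collect()
}

fn main() {
    // Three groups with 2, 4 and 1 rows -> bars of 50, 100 and 25 stars.
    for bar in bars(&[2, 4, 1]) {
        println!("{}", bar);
    }
}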
|
49 src/commands/history.rs (new file)
@@ -0,0 +1,49 @@
use crate::cli::History as HistoryFile;
use crate::commands::PerItemCommand;
use crate::errors::ShellError;
use crate::parser::registry::{self};
use crate::prelude::*;
use std::fs::File;
use std::io::{BufRead, BufReader};

pub struct History;

impl PerItemCommand for History {
    fn name(&self) -> &str {
        "history"
    }

    fn signature(&self) -> registry::Signature {
        Signature::build("history")
    }

    fn usage(&self) -> &str {
        "Display command history."
    }

    fn run(
        &self,
        call_info: &CallInfo,
        _registry: &CommandRegistry,
        _raw_args: &RawCommandArgs,
        _input: Value,
    ) -> Result<OutputStream, ShellError> {
        let tag = call_info.name_tag.clone();

        let stream = async_stream! {
            let history_path = HistoryFile::path();
            let file = File::open(history_path);
            if let Ok(file) = file {
                let reader = BufReader::new(file);
                for line in reader.lines() {
                    if let Ok(line) = line {
                        yield ReturnSuccess::value(UntaggedValue::string(line).into_value(tag.clone()));
                    }
                }
            } else {
                yield Err(ShellError::labeled_error("Could not open history", "history file could not be opened", tag.clone()));
            }
        };
        Ok(stream.to_output_stream())
    }
}
|
@ -2,6 +2,7 @@ use crate::commands::WholeStreamCommand;
|
||||
use crate::errors::ShellError;
|
||||
use crate::parser::CommandRegistry;
|
||||
use crate::prelude::*;
|
||||
use nu_source::Tagged;
|
||||
|
||||
pub struct Last;
|
||||
|
||||
@ -16,7 +17,11 @@ impl WholeStreamCommand for Last {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("last").optional("rows", SyntaxShape::Number)
|
||||
Signature::build("last").optional(
|
||||
"rows",
|
||||
SyntaxShape::Number,
|
||||
"starting from the back, the number of rows to return",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -46,7 +51,7 @@ fn last(LastArgs { rows }: LastArgs, context: RunnableContext) -> Result<OutputS
|
||||
if count < v.len() {
|
||||
let k = v.len() - count;
|
||||
for x in v[k..].iter() {
|
||||
let y: Tagged<Value> = x.clone();
|
||||
let y: Value = x.clone();
|
||||
yield ReturnSuccess::value(y)
|
||||
}
|
||||
}
|
||||
|
@ -1,5 +1,5 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::data::{Primitive, Value};
|
||||
use crate::data::Primitive;
|
||||
use crate::errors::ShellError;
|
||||
use crate::prelude::*;
|
||||
use log::trace;
|
||||
@ -33,14 +33,13 @@ impl WholeStreamCommand for Lines {
|
||||
fn lines(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let tag = args.name_tag();
|
||||
let name_span = tag.span;
|
||||
let input = args.input;
|
||||
|
||||
let input: InputStream = trace_stream!(target: "nu::trace_stream::lines", "input" = input);
|
||||
|
||||
let stream = input
|
||||
.values
|
||||
.map(move |v| match v.item {
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
.map(move |v| match v.value {
|
||||
UntaggedValue::Primitive(Primitive::String(s)) => {
|
||||
let split_result: Vec<_> = s.lines().filter(|s| s.trim() != "").collect();
|
||||
|
||||
trace!("split result = {:?}", split_result);
|
||||
@ -48,19 +47,21 @@ fn lines(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
|
||||
let mut result = VecDeque::new();
|
||||
for s in split_result {
|
||||
result.push_back(ReturnSuccess::value(
|
||||
Value::Primitive(Primitive::String(s.into())).tagged_unknown(),
|
||||
UntaggedValue::Primitive(Primitive::String(s.into())).into_untagged_value(),
|
||||
));
|
||||
}
|
||||
result
|
||||
}
|
||||
_ => {
|
||||
let mut result = VecDeque::new();
|
||||
let value_span = v.tag.span;
|
||||
|
||||
result.push_back(Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
&tag,
|
||||
name_span,
|
||||
"value originates from here",
|
||||
v.tag(),
|
||||
value_span,
|
||||
)));
|
||||
result
|
||||
}
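Not part of the diff: the string handling at the core of the updated lines command, as a tiny standalone sketch — split on line breaks and drop blank lines before emitting each one.

// Illustrative only.
fn split_lines(input: &str) -> Vec<&str> {
    input.lines().filter(|l| !l.trim().is_empty()).collect()
}

fn main() {
    assert_eq!(split_lines("a\n\n b \n"), vec!["a", " b "]);
}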
|
||||
|
@ -1,6 +1,7 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::errors::ShellError;
|
||||
use crate::prelude::*;
|
||||
use nu_source::Tagged;
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub struct LS;
|
||||
@ -8,6 +9,7 @@ pub struct LS;
|
||||
#[derive(Deserialize)]
|
||||
pub struct LsArgs {
|
||||
path: Option<Tagged<PathBuf>>,
|
||||
full: bool,
|
||||
}
|
||||
|
||||
impl WholeStreamCommand for LS {
|
||||
@ -16,7 +18,13 @@ impl WholeStreamCommand for LS {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("ls").optional("path", SyntaxShape::Pattern)
|
||||
Signature::build("ls")
|
||||
.optional(
|
||||
"path",
|
||||
SyntaxShape::Pattern,
|
||||
"a path to get the directory contents from",
|
||||
)
|
||||
.switch("full", "list all available columns for each entry")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -33,6 +41,6 @@ impl WholeStreamCommand for LS {
|
||||
}
|
||||
}
|
||||
|
||||
fn ls(LsArgs { path }: LsArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
|
||||
context.shell_manager.ls(path, &context)
|
||||
fn ls(LsArgs { path, full }: LsArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
|
||||
context.shell_manager.ls(path, &context, full)
|
||||
}
|
||||
|
227 src/commands/map_max_by.rs (new file)
@ -0,0 +1,227 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::parser::hir::SyntaxShape;
|
||||
use crate::prelude::*;
|
||||
use nu_source::Tagged;
|
||||
use num_traits::cast::ToPrimitive;
|
||||
|
||||
pub struct MapMaxBy;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct MapMaxByArgs {
|
||||
column_name: Option<Tagged<String>>,
|
||||
}
|
||||
|
||||
impl WholeStreamCommand for MapMaxBy {
|
||||
fn name(&self) -> &str {
|
||||
"map-max-by"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("map-max-by").named(
|
||||
"column_name",
|
||||
SyntaxShape::String,
|
||||
"the name of the column to map-max the table's rows",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Creates a new table with the data from the tables rows maxed by the column given."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
args.process(registry, map_max_by)?.run()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn map_max_by(
|
||||
MapMaxByArgs { column_name }: MapMaxByArgs,
|
||||
RunnableContext { input, name, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
|
||||
|
||||
if values.is_empty() {
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Expected table from pipeline",
|
||||
"requires a table input",
|
||||
name
|
||||
))
|
||||
} else {
|
||||
|
||||
let map_by_column = if let Some(column_to_map) = column_name {
|
||||
Some(column_to_map.item().clone())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
match map_max(&values[0], map_by_column, name) {
|
||||
Ok(table_maxed) => yield ReturnSuccess::value(table_maxed),
|
||||
Err(err) => yield Err(err)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
||||
|
||||
pub fn map_max(
|
||||
values: &Value,
|
||||
_map_by_column_name: Option<String>,
|
||||
tag: impl Into<Tag>,
|
||||
) -> Result<Value, ShellError> {
|
||||
let tag = tag.into();
|
||||
|
||||
let results: Value = match values {
|
||||
Value {
|
||||
value: UntaggedValue::Table(datasets),
|
||||
..
|
||||
} => {
|
||||
let datasets: Vec<_> = datasets
|
||||
.into_iter()
|
||||
.map(|subsets| match subsets {
|
||||
Value {
|
||||
value: UntaggedValue::Table(data),
|
||||
..
|
||||
} => {
|
||||
let data = data.into_iter().fold(0, |acc, value| match value {
|
||||
Value {
|
||||
value: UntaggedValue::Primitive(Primitive::Int(n)),
|
||||
..
|
||||
} => {
|
||||
if n.to_i32().unwrap() > acc {
|
||||
n.to_i32().unwrap()
|
||||
} else {
|
||||
acc
|
||||
}
|
||||
}
|
||||
_ => acc,
|
||||
});
|
||||
UntaggedValue::number(data).into_value(&tag)
|
||||
}
|
||||
_ => UntaggedValue::number(0).into_value(&tag),
|
||||
})
|
||||
.collect();
|
||||
|
||||
let datasets = datasets.iter().fold(0, |max, value| match value {
|
||||
Value {
|
||||
value: UntaggedValue::Primitive(Primitive::Int(n)),
|
||||
..
|
||||
} => {
|
||||
if n.to_i32().unwrap() > max {
|
||||
n.to_i32().unwrap()
|
||||
} else {
|
||||
max
|
||||
}
|
||||
}
|
||||
_ => max,
|
||||
});
|
||||
UntaggedValue::number(datasets).into_value(&tag)
|
||||
}
|
||||
_ => UntaggedValue::number(-1).into_value(&tag),
|
||||
};
|
||||
|
||||
Ok(results)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use crate::commands::evaluate_by::evaluate;
|
||||
use crate::commands::group_by::group;
|
||||
use crate::commands::map_max_by::map_max;
|
||||
use crate::commands::reduce_by::reduce;
|
||||
use crate::commands::t_sort_by::t_sort;
|
||||
use crate::prelude::*;
|
||||
use crate::Value;
|
||||
use indexmap::IndexMap;
|
||||
use nu_source::*;
|
||||
|
||||
fn int(s: impl Into<BigInt>) -> Value {
|
||||
UntaggedValue::int(s).into_untagged_value()
|
||||
}
|
||||
|
||||
fn string(input: impl Into<String>) -> Value {
|
||||
UntaggedValue::string(input.into()).into_untagged_value()
|
||||
}
|
||||
|
||||
fn row(entries: IndexMap<String, Value>) -> Value {
|
||||
UntaggedValue::row(entries).into_untagged_value()
|
||||
}
|
||||
|
||||
fn nu_releases_evaluated_by_default_one() -> Value {
|
||||
evaluate(&nu_releases_sorted_by_date(), None, Tag::unknown()).unwrap()
|
||||
}
|
||||
|
||||
fn nu_releases_reduced_by_sum() -> Value {
|
||||
reduce(
|
||||
&nu_releases_evaluated_by_default_one(),
|
||||
Some(String::from("sum")),
|
||||
Tag::unknown(),
|
||||
)
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
fn nu_releases_sorted_by_date() -> Value {
|
||||
let key = String::from("date");
|
||||
|
||||
t_sort(
|
||||
Some(key),
|
||||
None,
|
||||
&nu_releases_grouped_by_date(),
|
||||
Tag::unknown(),
|
||||
)
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
fn nu_releases_grouped_by_date() -> Value {
|
||||
let key = String::from("date").tagged_unknown();
|
||||
group(&key, nu_releases_commiters(), Tag::unknown()).unwrap()
|
||||
}
|
||||
|
||||
fn nu_releases_commiters() -> Vec<Value> {
|
||||
vec![
|
||||
row(
|
||||
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("September 24-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("September 24-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("September 24-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")},
|
||||
),
|
||||
row(
|
||||
indexmap! {"name".into() => string("JK"), "country".into() => string("US"), "date".into() => string("August 23-2019")},
|
||||
),
|
||||
]
|
||||
}
|
||||
#[test]
|
||||
fn maps_and_gets_max_value() {
|
||||
assert_eq!(
|
||||
map_max(&nu_releases_reduced_by_sum(), None, Tag::unknown()).unwrap(),
|
||||
int(4)
|
||||
);
|
||||
}
|
||||
}
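For reference, not in the commits: the double fold map_max performs, sketched over plain integers — take the maximum inside each dataset, then the maximum across datasets. As in the diff, each fold starts at 0.

// Illustrative only: nested max over a table of tables.
fn map_max(datasets: &[Vec<i32>]) -> i32 {
    datasets
        .iter()
        .map(|data| data.iter().copied().fold(0, i32::max))
        .fold(0, i32::max)
}

fn main() {
    // Per-group sums like the reduced release table: the overall maximum is 4.
    assert_eq!(map_max(&[vec![2, 1], vec![4], vec![3, 2]]), 4);
}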
|
@ -2,6 +2,7 @@ use crate::commands::command::RunnablePerItemContext;
|
||||
use crate::errors::ShellError;
|
||||
use crate::parser::registry::{CommandRegistry, Signature};
|
||||
use crate::prelude::*;
|
||||
use nu_source::Tagged;
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub struct Mkdir;
|
||||
@ -17,7 +18,7 @@ impl PerItemCommand for Mkdir {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("mkdir").rest(SyntaxShape::Path)
|
||||
Signature::build("mkdir").rest(SyntaxShape::Path, "the name(s) of the path(s) to create")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -29,7 +30,7 @@ impl PerItemCommand for Mkdir {
|
||||
call_info: &CallInfo,
|
||||
_registry: &CommandRegistry,
|
||||
raw_args: &RawCommandArgs,
|
||||
_input: Tagged<Value>,
|
||||
_input: Value,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
call_info.process(&raw_args.shell_manager, mkdir)?.run()
|
||||
}
|
||||
|