Mirror of https://github.com/nushell/nushell.git (synced 2025-06-30 22:50:14 +02:00)

Compare commits (244 commits)
| SHA1 | Author | Date |
|---|---|---|
791f7dd9c3 | |||
a4c1b092ba | |||
6e71c1008d | |||
906d0b920f | |||
efbf4f48c6 | |||
2ddab3e8ce | |||
35dc7438a5 | |||
2a54ee0c54 | |||
cad2741e9e | |||
ae5f3c8210 | |||
a5e97ca549 | |||
06f87cfbe8 | |||
d4e78c6f47 | |||
3653400ebc | |||
81a48d6d0e | |||
f030ab3f12 | |||
0dc0c6a10a | |||
53c8185af3 | |||
36b5d063c1 | |||
a7ec00a037 | |||
918822ae0d | |||
ab5e24a0e7 | |||
b5ea522f0e | |||
afa963fd50 | |||
1e343ff00c | |||
21a543a901 | |||
390deb4ff7 | |||
1c4cb30d64 | |||
1ec2ec72b5 | |||
0d244a9701 | |||
b36d21e76f | |||
d8c4565413 | |||
22ba4c2a2f | |||
8d19b21b9f | |||
45a3afdc79 | |||
2d078849cb | |||
b6363f3ce1 | |||
5ca9e12b7f | |||
5b0b2f1ddd | |||
3afb53b8ce | |||
b40d16310c | |||
d3718d00db | |||
f716f61fc1 | |||
b2ce669791 | |||
cd155f63e1 | |||
9eaa6877f3 | |||
a6b6afbca9 | |||
62666bebc9 | |||
d1fcce0cd3 | |||
a2443fbe02 | |||
db16b56fe1 | |||
54bf671a50 | |||
755d0e648b | |||
e440d8c939 | |||
01dd358a18 | |||
50fb97f6b7 | |||
ebf139f5e5 | |||
8925ca5da3 | |||
287652573b | |||
db24ad8f36 | |||
f88674f353 | |||
59cb0ba381 | |||
c4cfab5e16 | |||
b2c5af457e | |||
c731a5b628 | |||
f97f9d4af3 | |||
ed7d3fed66 | |||
7304d06c0b | |||
ca615d9389 | |||
6d096206b6 | |||
2a8cb24309 | |||
8d38743e27 | |||
eabfa2de54 | |||
a86a0abb90 | |||
adcda450d5 | |||
147b9d4436 | |||
c43a58d9d6 | |||
e38442782e | |||
b98f893217 | |||
bd6556eee1 | |||
18d988d4c8 | |||
0f7c723672 | |||
afce2fd0f9 | |||
4fd9974204 | |||
71615f77a7 | |||
9bc5022c9c | |||
552848b8b9 | |||
8ae8ebd107 | |||
473e9f9422 | |||
96985aa692 | |||
0961da406d | |||
84927d52b5 | |||
73312b506f | |||
c1bec3b443 | |||
c0be02a434 | |||
2ab8d035e6 | |||
24094acee9 | |||
0b2be52bb5 | |||
6a371802b4 | |||
29ccb9f5cd | |||
20ab125861 | |||
fb532f3f4e | |||
a29d52158e | |||
dc50e61f26 | |||
a2668e3327 | |||
e606407d79 | |||
5f4fae5b06 | |||
3687603799 | |||
643b532537 | |||
ed86b1fbe8 | |||
44a114111e | |||
812a76d588 | |||
e3be849c2a | |||
ba1b67c072 | |||
fa910b95b7 | |||
427bde83f7 | |||
7a0bc6bc46 | |||
c6da56949c | |||
5b398d2ed2 | |||
dcdfa2a866 | |||
9474fa1ea5 | |||
49a1385543 | |||
6427ea2331 | |||
3610baa227 | |||
4e201d20ca | |||
1fa21ff056 | |||
0bbd12e37f | |||
7df8fdfb28 | |||
6a39cd8546 | |||
dc3370b103 | |||
ac5ad45783 | |||
8ef5c47515 | |||
5b19bebe7d | |||
2c529cd849 | |||
407f36af29 | |||
763fcbc137 | |||
7061af712e | |||
9b4ba09c95 | |||
9ec6d0c90e | |||
f20a4a42e8 | |||
caa6830184 | |||
f8be1becf2 | |||
af51a0e6f0 | |||
23d11d5e84 | |||
6da9e2aced | |||
32dfb32741 | |||
d48f99cb0e | |||
35359cbc22 | |||
b52dbcc8ef | |||
4429a75e17 | |||
583f27dc41 | |||
83db5c34c3 | |||
cdbfdf282f | |||
a5e1372bc2 | |||
798a24eda5 | |||
a2bb23d78c | |||
d38a63473b | |||
2b37ae3e81 | |||
bc5a969562 | |||
fe4ad5f77e | |||
07191754bf | |||
66bd331ba9 | |||
762c798670 | |||
3c01526869 | |||
7efb31a4e4 | |||
c8dd7838a8 | |||
3b57ee5dda | |||
fb977ab941 | |||
e059c74a06 | |||
47d987d37f | |||
3abfefc025 | |||
a5c5b4e711 | |||
ba9cb753d5 | |||
ba7a1752db | |||
29431e73c2 | |||
d29fe6f6de | |||
e2e9abab0a | |||
2956b0b087 | |||
b32eceffb3 | |||
3adf52b1c4 | |||
78a644da2b | |||
98028433ad | |||
2ab5803f00 | |||
65980c7beb | |||
29fd8b55fb | |||
2f039b3abc | |||
d3dae05714 | |||
5fd3191d91 | |||
0dcd90cb8f | |||
02d0a4107e | |||
63885c4ee6 | |||
147bfefd7e | |||
60043df917 | |||
6d3a30772d | |||
347f91ab53 | |||
5692a08e7f | |||
515a3b33f8 | |||
c3e466e464 | |||
00c0327031 | |||
7451414b9e | |||
41ebc6b42d | |||
b574dc6365 | |||
4af9e1de41 | |||
77d856fd53 | |||
6dceabf389 | |||
5919c6c433 | |||
339a2de0eb | |||
3e3cb15f3d | |||
5e31851070 | |||
0f626dd076 | |||
aa577bf9bf | |||
25298d35e4 | |||
78016446dc | |||
b304de8199 | |||
72838cc083 | |||
8093612cac | |||
f37f29b441 | |||
dba82ac530 | |||
0615adac94 | |||
21e508009f | |||
a9317d939f | |||
65d843c2a1 | |||
f6c62bf121 | |||
b4bc5fe9af | |||
10368d7060 | |||
68a314b5cb | |||
3c7633ae9f | |||
dba347ad00 | |||
bfba2c57f8 | |||
c69bf9f46f | |||
7ce1ddc6fd | |||
e7ce6f2fcd | |||
0c786bb890 | |||
8d31c32bda | |||
e7fb15be59 | |||
be7550822c | |||
0ce216eec4 | |||
1fe85cb91e | |||
8cadc5a4ac | |||
f9da7f7d58 | |||
367f11a62e | |||
8a45ca9cc3 | |||
e336930fd8 | |||
172ccc910e |
CI pipeline configuration:

@ -4,25 +4,25 @@ trigger:
strategy:
matrix:
linux-stable:
image: ubuntu-16.04
image: ubuntu-18.04
style: 'unflagged'
macos-stable:
image: macos-10.14
style: 'unflagged'
windows-stable:
image: vs2017-win2016
image: windows-2019
style: 'unflagged'
linux-nightly-canary:
image: ubuntu-16.04
image: ubuntu-18.04
style: 'canary'
macos-nightly-canary:
image: macos-10.14
style: 'canary'
windows-nightly-canary:
image: vs2017-win2016
image: windows-2019
style: 'canary'
fmt:
image: ubuntu-16.04
image: ubuntu-18.04
style: 'fmt'

pool:
@ -35,19 +35,27 @@ steps:
then
sudo apt-get -y install libxcb-composite0-dev libx11-dev
fi
curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "stable"
export PATH=$HOME/.cargo/bin:$PATH
if [ "$(uname)" == "Darwin" ]; then
curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "stable"
export PATH=$HOME/.cargo/bin:$PATH
fi
rustup update
rustc -Vv
echo "##vso[task.prependpath]$HOME/.cargo/bin"
rustup component add rustfmt --toolchain "stable"
rustup component add rustfmt
displayName: Install Rust
- bash: RUSTFLAGS="-D warnings" cargo test --all --features=stable
- bash: RUSTFLAGS="-D warnings" cargo test --all --features stable,test-bins
condition: eq(variables['style'], 'unflagged')
displayName: Run tests
- bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all --features=stable
- bash: RUSTFLAGS="-D warnings" cargo clippy --all --features=stable -- -D clippy::result_unwrap_used -D clippy::option_unwrap_used
condition: eq(variables['style'], 'unflagged')
displayName: Check clippy lints
- bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all --features stable,test-bins
condition: eq(variables['style'], 'canary')
displayName: Run tests
- bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo clippy --all --features=stable -- -D clippy::result_unwrap_used -D clippy::option_unwrap_used
condition: eq(variables['style'], 'canary')
displayName: Check clippy lints
- bash: cargo fmt --all -- --check
condition: eq(variables['style'], 'fmt')
displayName: Lint
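The clippy steps above deny `clippy::result_unwrap_used` and `clippy::option_unwrap_used`. As a minimal illustration (not code from this repository), the pattern those lints push toward is propagating errors with `?` rather than panicking with `.unwrap()`:

```rust
use std::env;
use std::error::Error;

// The style the CI lints reject: this panics at runtime if HOME is unset.
// fn home_dir_unchecked() -> String { env::var("HOME").unwrap() }

// The style they allow: propagate the error to the caller with `?`.
fn home_dir() -> Result<String, Box<dyn Error>> {
    let home = env::var("HOME")?;
    Ok(home)
}

fn main() -> Result<(), Box<dyn Error>> {
    println!("HOME = {}", home_dir()?);
    Ok(())
}
```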
Cargo build configuration:

@ -1,3 +1,3 @@
[build]

#rustflags = ["--cfg", "coloring_in_tokens"]
#rustflags = ["--cfg", "data_processing_primitives"]
Docker image build workflow:

@ -38,7 +38,7 @@ workflows:
extra_build_args: --cache-from=quay.io/nushell/nu-base:devel
filters:
branches:
ignore:
ignore:
- master
before_build:
- pull_cache
.dockerignore (new file, 1 line)

@ -0,0 +1 @@
target
.github/workflows/docker-publish.yml (vendored, 18 lines changed)

@ -52,15 +52,15 @@ jobs:
- glibc
- musl
include:
- { tag: alpine, base-image: alpine, arch: x86_64-unknown-linux-musl, plugin: true }
- { tag: slim, base-image: 'debian:stable-slim', arch: x86_64-unknown-linux-gnu, plugin: true }
- { tag: debian, base-image: debian, arch: x86_64-unknown-linux-gnu, plugin: true }
- { tag: glibc-busybox, base-image: 'busybox:glibc', arch: x86_64-unknown-linux-gnu, use-patch: true }
- { tag: musl-busybox, base-image: 'busybox:musl', arch: x86_64-unknown-linux-musl, }
- { tag: musl-distroless, base-image: 'gcr.io/distroless/static', arch: x86_64-unknown-linux-musl, }
- { tag: glibc-distroless, base-image: 'gcr.io/distroless/cc', arch: x86_64-unknown-linux-gnu, use-patch: true }
- { tag: glibc, base-image: scratch, arch: x86_64-unknown-linux-gnu, }
- { tag: musl, base-image: scratch, arch: x86_64-unknown-linux-musl, }
- { tag: alpine, base-image: alpine, arch: x86_64-unknown-linux-musl, plugin: true, use-patch: false}
- { tag: slim, base-image: 'debian:stable-slim', arch: x86_64-unknown-linux-gnu, plugin: true, use-patch: false}
- { tag: debian, base-image: debian, arch: x86_64-unknown-linux-gnu, plugin: true, use-patch: false}
- { tag: glibc-busybox, base-image: 'busybox:glibc', arch: x86_64-unknown-linux-gnu, plugin: false, use-patch: true }
- { tag: musl-busybox, base-image: 'busybox:musl', arch: x86_64-unknown-linux-musl, plugin: false, use-patch: false}
- { tag: musl-distroless, base-image: 'gcr.io/distroless/static', arch: x86_64-unknown-linux-musl, plugin: false, use-patch: false}
- { tag: glibc-distroless, base-image: 'gcr.io/distroless/cc', arch: x86_64-unknown-linux-gnu, plugin: false, use-patch: true }
- { tag: glibc, base-image: scratch, arch: x86_64-unknown-linux-gnu, plugin: false, use-patch: false}
- { tag: musl, base-image: scratch, arch: x86_64-unknown-linux-musl, plugin: false, use-patch: false}
steps:
- uses: actions/checkout@v1
- uses: actions/download-artifact@master
.gitignore (vendored, 1 line changed)

@ -3,6 +3,7 @@
**/*.rs.bk
history.txt
tests/fixtures/nuplayground
crates/*/target

# Debian/Ubuntu
debian/.debhelper/
.gitpod.Dockerfile (vendored, 14 lines changed)

@ -1,7 +1,9 @@
FROM gitpod/workspace-full
USER root
RUN apt-get update && apt-get install -y libssl-dev \
libxcb-composite0-dev \
pkg-config \
curl \
rustc

USER gitpod

RUN sudo apt-get update && \
sudo apt-get install -y \
libssl-dev \
libxcb-composite0-dev \
pkg-config
.gitpod.yml (10 lines changed)

@ -1,7 +1,7 @@
image:
file: .gitpod.Dockerfile
tasks:
- init: cargo install nu --features=stable
- init: cargo install --path . --force --features=stable
command: nu
github:
prebuilds:
@ -19,3 +19,11 @@ github:
addBadge: false
# add a label once the prebuild is ready to pull requests (defaults to false)
addLabel: prebuilt-in-gitpod
vscode:
extensions:
- hbenl.vscode-test-explorer@2.15.0:koqDUMWDPJzELp/hdS/lWw==
- Swellaby.vscode-rust-test-adapter@0.11.0:Xg+YeZZQiVpVUsIkH+uiiw==
- serayuzgur.crates@0.4.7:HMkoguLcXp9M3ud7ac3eIw==
- belfz.search-crates-io@1.2.1:kSLnyrOhXtYPjQpKnMr4eQ==
- vadimcn.vscode-lldb@1.4.5:lwHCNwtm0kmOBXeQUIPGMQ==
- bungcip.better-toml@0.3.2:3QfgGxxYtGHfJKQU7H0nEw==
Cargo.lock (generated, 1740 lines changed): diff suppressed because it is too large.
Cargo.toml (265 lines changed)
@ -1,8 +1,8 @@
[package]
name = "nu"
version = "0.7.0"
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
description = "A shell for the GitHub era"
version = "0.12.0"
authors = ["The Nu Project Contributors"]
description = "A new kind of shell"
license = "MIT"
edition = "2018"
readme = "README.md"
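The rest of this manifest diff (below) bumps every workspace crate to 0.12.0 and keeps the plugins as optional dependencies wired to Cargo features such as `inc = ["semver", "nu_plugin_inc"]`. As a hedged sketch of how such a feature gate typically shows up in code (illustrative only, with an assumed `inc` feature and optional `semver` dependency, not nushell's actual plugin source):

```rust
// Illustrative only: using an optional dependency behind a Cargo feature.
#[cfg(feature = "inc")]
fn parse_version(v: &str) -> Option<(u64, u64, u64)> {
    // `semver` is only compiled in when the `inc` feature is enabled.
    let v = semver::Version::parse(v).ok()?;
    Some((v.major, v.minor, v.patch))
}

#[cfg(not(feature = "inc"))]
fn parse_version(_v: &str) -> Option<(u64, u64, u64)> {
    None // without the feature, the dependency never gets built
}

fn main() {
    println!("{:?}", parse_version("0.12.0"));
}
```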
@ -10,164 +10,106 @@ default-run = "nu"
|
||||
repository = "https://github.com/nushell/nushell"
|
||||
homepage = "https://www.nushell.sh"
|
||||
documentation = "https://www.nushell.sh/book/"
|
||||
exclude = ["images"]
|
||||
|
||||
[workspace]
|
||||
|
||||
members = [
|
||||
"crates/nu-macros",
|
||||
"crates/nu-errors",
|
||||
"crates/nu-source",
|
||||
"crates/nu_plugin_average",
|
||||
"crates/nu_plugin_binaryview",
|
||||
"crates/nu_plugin_fetch",
|
||||
"crates/nu_plugin_inc",
|
||||
"crates/nu_plugin_match",
|
||||
"crates/nu_plugin_post",
|
||||
"crates/nu_plugin_ps",
|
||||
"crates/nu_plugin_str",
|
||||
"crates/nu_plugin_sum",
|
||||
"crates/nu_plugin_sys",
|
||||
"crates/nu_plugin_textview",
|
||||
"crates/nu_plugin_tree",
|
||||
"crates/nu-protocol",
|
||||
"crates/nu-parser",
|
||||
"crates/nu-value-ext",
|
||||
"crates/nu-build"
|
||||
]
|
||||
members = ["crates/*/"]
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
nu-source = { version = "0.7.0", path = "./crates/nu-source" }
|
||||
nu-protocol = { version = "0.7.0", path = "./crates/nu-protocol" }
|
||||
nu-errors = { version = "0.7.0", path = "./crates/nu-errors" }
|
||||
nu-parser = { version = "0.7.0", path = "./crates/nu-parser" }
|
||||
nu-value-ext = { version = "0.7.0", path = "./crates/nu-value-ext" }
|
||||
nu_plugin_average = {version = "0.7.0", path = "./crates/nu_plugin_average", optional=true}
|
||||
nu_plugin_binaryview = {version = "0.7.0", path = "./crates/nu_plugin_binaryview", optional=true}
|
||||
nu_plugin_fetch = {version = "0.7.0", path = "./crates/nu_plugin_fetch", optional=true}
|
||||
nu_plugin_inc = {version = "0.7.0", path = "./crates/nu_plugin_inc", optional=true}
|
||||
nu_plugin_match = {version = "0.7.0", path = "./crates/nu_plugin_match", optional=true}
|
||||
nu_plugin_post = {version = "0.7.0", path = "./crates/nu_plugin_post", optional=true}
|
||||
nu_plugin_ps = {version = "0.7.0", path = "./crates/nu_plugin_ps", optional=true}
|
||||
nu_plugin_str = {version = "0.7.0", path = "./crates/nu_plugin_str", optional=true}
|
||||
nu_plugin_sum = {version = "0.7.0", path = "./crates/nu_plugin_sum", optional=true}
|
||||
nu_plugin_sys = {version = "0.7.0", path = "./crates/nu_plugin_sys", optional=true}
|
||||
nu_plugin_textview = {version = "0.7.0", path = "./crates/nu_plugin_textview", optional=true}
|
||||
nu_plugin_tree = {version = "0.7.0", path = "./crates/nu_plugin_tree", optional=true}
|
||||
nu-macros = { version = "0.7.0", path = "./crates/nu-macros" }
|
||||
nu-cli = { version = "0.12.0", path = "./crates/nu-cli" }
|
||||
nu-source = { version = "0.12.0", path = "./crates/nu-source" }
|
||||
nu-plugin = { version = "0.12.0", path = "./crates/nu-plugin" }
|
||||
nu-protocol = { version = "0.12.0", path = "./crates/nu-protocol" }
|
||||
nu-errors = { version = "0.12.0", path = "./crates/nu-errors" }
|
||||
nu-parser = { version = "0.12.0", path = "./crates/nu-parser" }
|
||||
nu-value-ext = { version = "0.12.0", path = "./crates/nu-value-ext" }
|
||||
nu_plugin_average = { version = "0.12.0", path = "./crates/nu_plugin_average", optional=true }
|
||||
nu_plugin_binaryview = { version = "0.12.0", path = "./crates/nu_plugin_binaryview", optional=true }
|
||||
nu_plugin_fetch = { version = "0.12.0", path = "./crates/nu_plugin_fetch", optional=true }
|
||||
nu_plugin_inc = { version = "0.12.0", path = "./crates/nu_plugin_inc", optional=true }
|
||||
nu_plugin_match = { version = "0.12.0", path = "./crates/nu_plugin_match", optional=true }
|
||||
nu_plugin_post = { version = "0.12.0", path = "./crates/nu_plugin_post", optional=true }
|
||||
nu_plugin_ps = { version = "0.12.0", path = "./crates/nu_plugin_ps", optional=true }
|
||||
nu_plugin_str = { version = "0.12.0", path = "./crates/nu_plugin_str", optional=true }
|
||||
nu_plugin_sys = { version = "0.12.0", path = "./crates/nu_plugin_sys", optional=true }
|
||||
nu_plugin_textview = { version = "0.12.0", path = "./crates/nu_plugin_textview", optional=true }
|
||||
nu_plugin_tree = { version = "0.12.0", path = "./crates/nu_plugin_tree", optional=true }
|
||||
nu-macros = { version = "0.12.0", path = "./crates/nu-macros" }
|
||||
|
||||
crossterm = { version = "0.16.0", optional = true }
|
||||
onig_sys = { version = "=69.1.0", optional = true }
|
||||
semver = { version = "0.9.0", optional = true }
|
||||
syntect = { version = "3.2.0", optional = true }
|
||||
url = { version = "2.1.1", optional = true }
|
||||
|
||||
query_interface = "0.3.5"
|
||||
typetag = "0.1.4"
|
||||
rustyline = "5.0.4"
|
||||
chrono = { version = "0.4.10", features = ["serde"] }
|
||||
derive-new = "0.5.8"
|
||||
prettytable-rs = "0.8.0"
|
||||
itertools = "0.8.2"
|
||||
ansi_term = "0.12.1"
|
||||
nom = "5.0.1"
|
||||
dunce = "1.0.0"
|
||||
indexmap = { version = "1.3.0", features = ["serde-1"] }
|
||||
chrono-humanize = "0.0.11"
|
||||
byte-unit = "3.0.3"
|
||||
base64 = "0.11"
|
||||
futures-preview = { version = "=0.3.0-alpha.19", features = ["compat", "io-compat"] }
|
||||
async-stream = "0.1.2"
|
||||
futures_codec = "0.2.5"
|
||||
num-traits = "0.2.10"
|
||||
term = "0.5.2"
|
||||
bytes = "0.4.12"
|
||||
log = "0.4.8"
|
||||
pretty_env_logger = "0.3.1"
|
||||
serde = { version = "1.0.103", features = ["derive"] }
|
||||
bson = { version = "0.14.0", features = ["decimal128"] }
|
||||
serde_json = "1.0.44"
|
||||
serde-hjson = "0.9.1"
|
||||
serde_yaml = "0.8"
|
||||
serde_bytes = "0.11.3"
|
||||
getset = "0.0.9"
|
||||
language-reporting = "0.4.0"
|
||||
app_dirs = "1.2.1"
|
||||
csv = "1.1"
|
||||
toml = "0.5.5"
|
||||
clap = "2.33.0"
|
||||
git2 = { version = "0.10.2", default_features = false }
|
||||
dirs = "2.0.2"
|
||||
glob = "0.3.0"
|
||||
ctrlc = "3.1.3"
|
||||
roxmltree = "0.7.3"
|
||||
nom_locate = "1.0.0"
|
||||
nom-tracable = "0.4.1"
|
||||
unicode-xid = "0.2.0"
|
||||
serde_ini = "0.2.0"
|
||||
subprocess = "0.1.18"
|
||||
pretty-hex = "0.1.1"
|
||||
hex = "0.4"
|
||||
tempfile = "3.1.0"
|
||||
which = "3.1"
|
||||
textwrap = {version = "0.11.0", features = ["term_size"]}
|
||||
shellexpand = "1.0.0"
|
||||
pin-utils = "0.1.0-alpha.4"
|
||||
num-bigint = { version = "0.2.3", features = ["serde"] }
|
||||
bigdecimal = { version = "0.1.0", features = ["serde"] }
|
||||
serde_urlencoded = "0.6.1"
|
||||
trash = "1.0.0"
|
||||
regex = "1"
|
||||
cfg-if = "0.1"
|
||||
strip-ansi-escapes = "0.1.0"
|
||||
calamine = "0.16"
|
||||
umask = "0.1"
|
||||
futures-util = "0.3.1"
|
||||
termcolor = "1.0.5"
|
||||
natural = "0.3.0"
|
||||
|
||||
clipboard = {version = "0.5", optional = true }
|
||||
ptree = {version = "0.2" }
|
||||
starship = { version = "0.28", optional = true}
|
||||
heim = {version = "0.0.9", optional = true}
|
||||
battery = {version = "0.7.5", optional = true}
|
||||
syntect = {version = "3.2.0", optional = true }
|
||||
onig_sys = {version = "=69.1.0", optional = true }
|
||||
crossterm = {version = "0.10.2", optional = true}
|
||||
futures-timer = {version = "1.0.2", optional = true}
|
||||
url = {version = "2.1.0", optional = true}
|
||||
semver = {version = "0.9.0", optional = true}
|
||||
|
||||
[features]
|
||||
default = ["sys", "ps", "textview", "inc", "str"]
|
||||
stable = ["sys", "ps", "starship-prompt", "textview", "binaryview", "match", "tree", "average", "sum"]
|
||||
|
||||
sys = ["heim", "battery"]
|
||||
ps = ["heim", "futures-timer"]
|
||||
textview = ["crossterm", "syntect", "onig_sys", "url"]
|
||||
str = []
|
||||
|
||||
inc = ["semver"]
|
||||
starship-prompt = ["starship"]
|
||||
binaryview = ["nu_plugin_binaryview"]
|
||||
match = ["nu_plugin_match"]
|
||||
tree = ["nu_plugin_tree"]
|
||||
average = ["nu_plugin_average"]
|
||||
sum = ["nu_plugin_sum"]
|
||||
trace = ["nu-parser/trace"]
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "0.20.0"
|
||||
features = ["bundled", "blob"]
|
||||
ctrlc = "3.1.4"
|
||||
dunce = "1.0.0"
|
||||
futures = { version = "0.3", features = ["compat", "io-compat"] }
|
||||
log = "0.4.8"
|
||||
pretty_env_logger = "0.4.0"
|
||||
|
||||
[dev-dependencies]
|
||||
pretty_assertions = "0.6.1"
|
||||
nu-test-support = { version = "0.7.0", path = "./crates/nu-test-support" }
|
||||
nu-test-support = { version = "0.12.0", path = "./crates/nu-test-support" }
|
||||
|
||||
[build-dependencies]
|
||||
toml = "0.5.5"
|
||||
serde = { version = "1.0.103", features = ["derive"] }
|
||||
nu-build = { version = "0.7.0", path = "./crates/nu-build" }
|
||||
toml = "0.5.6"
|
||||
serde = { version = "1.0.105", features = ["derive"] }
|
||||
nu-build = { version = "0.12.0", path = "./crates/nu-build" }
|
||||
|
||||
[lib]
|
||||
name = "nu"
|
||||
path = "src/lib.rs"
|
||||
[features]
|
||||
# Test executables
|
||||
test-bins = []
|
||||
|
||||
default = ["sys", "ps", "textview", "inc", "str"]
|
||||
stable = ["default", "starship-prompt", "binaryview", "match", "tree", "average", "post", "fetch", "clipboard-cli"]
|
||||
|
||||
# Default
|
||||
textview = ["crossterm", "syntect", "onig_sys", "url", "nu_plugin_textview"]
|
||||
sys = ["nu_plugin_sys"]
|
||||
ps = ["nu_plugin_ps"]
|
||||
inc = ["semver", "nu_plugin_inc"]
|
||||
str = ["nu_plugin_str"]
|
||||
|
||||
# Stable
|
||||
average = ["nu_plugin_average"]
|
||||
binaryview = ["nu_plugin_binaryview"]
|
||||
fetch = ["nu_plugin_fetch"]
|
||||
match = ["nu_plugin_match"]
|
||||
post = ["nu_plugin_post"]
|
||||
trace = ["nu-parser/trace"]
|
||||
tree = ["nu_plugin_tree"]
|
||||
|
||||
clipboard-cli = ["nu-cli/clipboard-cli"]
|
||||
starship-prompt = ["nu-cli/starship-prompt"]
|
||||
|
||||
[[bin]]
|
||||
name = "fail"
|
||||
path = "crates/nu-test-support/src/bins/fail.rs"
|
||||
required-features = ["test-bins"]
|
||||
|
||||
[[bin]]
|
||||
name = "chop"
|
||||
path = "crates/nu-test-support/src/bins/chop.rs"
|
||||
required-features = ["test-bins"]
|
||||
|
||||
[[bin]]
|
||||
name = "cococo"
|
||||
path = "crates/nu-test-support/src/bins/cococo.rs"
|
||||
required-features = ["test-bins"]
|
||||
|
||||
[[bin]]
|
||||
name = "nonu"
|
||||
path = "crates/nu-test-support/src/bins/nonu.rs"
|
||||
required-features = ["test-bins"]
|
||||
|
||||
[[bin]]
|
||||
name = "iecho"
|
||||
path = "crates/nu-test-support/src/bins/iecho.rs"
|
||||
required-features = ["test-bins"]
|
||||
|
||||
# Core plugins that ship with `cargo install nu` by default
|
||||
# Currently, Cargo limits us to installing only one binary
|
||||
@ -197,6 +139,37 @@ name = "nu_plugin_core_sys"
|
||||
path = "src/plugins/nu_plugin_core_sys.rs"
|
||||
required-features = ["sys"]
|
||||
|
||||
# Stable plugins
|
||||
[[bin]]
|
||||
name = "nu_plugin_stable_average"
|
||||
path = "src/plugins/nu_plugin_stable_average.rs"
|
||||
required-features = ["average"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_stable_fetch"
|
||||
path = "src/plugins/nu_plugin_stable_fetch.rs"
|
||||
required-features = ["fetch"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_stable_binaryview"
|
||||
path = "src/plugins/nu_plugin_stable_binaryview.rs"
|
||||
required-features = ["binaryview"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_stable_match"
|
||||
path = "src/plugins/nu_plugin_stable_match.rs"
|
||||
required-features = ["match"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_stable_post"
|
||||
path = "src/plugins/nu_plugin_stable_post.rs"
|
||||
required-features = ["post"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_stable_tree"
|
||||
path = "src/plugins/nu_plugin_stable_tree.rs"
|
||||
required-features = ["tree"]
|
||||
|
||||
# Main nu binary
|
||||
[[bin]]
|
||||
name = "nu"
|
||||
|
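The `[[bin]]` entries above gate a handful of small helper executables (`fail`, `chop`, `cococo`, `nonu`, `iecho`) behind the new `test-bins` feature so they are only built for the test suite. Purely as an illustration of the shape of such a helper (the real sources live under `crates/nu-test-support/src/bins/` and may differ):

```rust
// Illustrative sketch of a test-support binary gated behind `test-bins`.
use std::env;

fn main() {
    // Echo the arguments back, space separated, so integration tests can
    // exercise external-command handling against a predictable executable.
    let args: Vec<String> = env::args().skip(1).collect();
    println!("{}", args.join(" "));
}
```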
LICENSE (2 lines changed)

@ -1,6 +1,6 @@
MIT License

Copyright (c) 2019 Yehuda Katz, Jonathan Turner
Copyright (c) 2019 - 2020 Yehuda Katz, Jonathan Turner

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
README.md (209 lines changed)
@ -1,3 +1,4 @@
|
||||
[](https://gitpod.io/#https://github.com/nushell/nushell)
|
||||
[](https://crates.io/crates/nu)
|
||||
[](https://dev.azure.com/nushell/nushell/_build/latest?definitionId=2&branchName=master)
|
||||
[](https://discord.gg/NtAbbGn)
|
||||
@ -6,24 +7,33 @@
|
||||
|
||||
# Nu Shell
|
||||
|
||||
A modern shell for the GitHub era.
|
||||
A new type of shell.
|
||||
|
||||

|
||||
|
||||
# Status
|
||||
|
||||
This project has reached a minimum-viable product level of quality. While contributors dogfood it as their daily driver, it may be unstable for some commands. Future releases will work to fill out missing features and improve stability. Its design is also subject to change as it matures.
|
||||
This project has reached a minimum-viable product level of quality.
|
||||
While contributors dogfood it as their daily driver, it may be unstable for some commands.
|
||||
Future releases will work to fill out missing features and improve stability.
|
||||
Its design is also subject to change as it matures.
|
||||
|
||||
Nu comes with a set of built-in commands (listed below). If a command is unknown, the command will shell-out and execute it (using cmd on Windows or bash on Linux and macOS), correctly passing through stdin, stdout, and stderr, so things like your daily git workflows and even `vim` will work just fine.
|
||||
Nu comes with a set of built-in commands (listed below).
|
||||
If a command is unknown, the command will shell-out and execute it (using cmd on Windows or bash on Linux and macOS), correctly passing through stdin, stdout, and stderr, so things like your daily git workflows and even `vim` will work just fine.
|
||||
|
||||
# Learning more
|
||||
|
||||
There are a few good resources to learn about Nu. There is a [book](https://www.nushell.sh/book/) about Nu that is currently in progress. The book focuses on using Nu and its core concepts.
|
||||
There are a few good resources to learn about Nu.
|
||||
There is a [book](https://www.nushell.sh/book/) about Nu that is currently in progress.
|
||||
The book focuses on using Nu and its core concepts.
|
||||
|
||||
If you're a developer who would like to contribute to Nu, we're also working on a [book for developers](https://www.nushell.sh/contributor-book/) to help you get started. There are also [good first issues](https://github.com/nushell/nushell/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) to help you dive in.
|
||||
If you're a developer who would like to contribute to Nu, we're also working on a [book for developers](https://www.nushell.sh/contributor-book/) to help you get started.
|
||||
There are also [good first issues](https://github.com/nushell/nushell/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) to help you dive in.
|
||||
|
||||
We also have an active [Discord](https://discord.gg/NtAbbGn) and [Twitter](https://twitter.com/nu_shell) if you'd like to come and chat with us.
|
||||
|
||||
You can also find more learning resources in our [documentation](https://www.nushell.sh/documentation.html) site.
|
||||
|
||||
Try it in Gitpod.
|
||||
|
||||
[](https://gitpod.io/#https://github.com/nushell/nushell)
|
||||
@ -52,14 +62,24 @@ To install Nu via cargo (make sure you have installed [rustup](https://rustup.rs
|
||||
cargo install nu
|
||||
```
|
||||
|
||||
You can also install Nu with all the bells and whistles (be sure to have installed the [dependencies](https://www.nushell.sh/book/en/installation.html#dependencies) for your platform):
|
||||
You can also build Nu yourself with all the bells and whistles (be sure to have installed the [dependencies](https://www.nushell.sh/book/en/installation.html#dependencies) for your platform), once you have checked out this repo with git:
|
||||
|
||||
```
|
||||
cargo install nu --features=stable
|
||||
cargo build --workspace --features=stable
|
||||
```
|
||||
|
||||
## Docker
|
||||
|
||||
### Quickstart
|
||||
|
||||
Want to try Nu right away? Execute the following to get started.
|
||||
|
||||
```bash
|
||||
docker run -it quay.io/nushell/nu:latest
|
||||
```
|
||||
|
||||
### Guide
|
||||
|
||||
If you want to pull a pre-built container, you can browse tags for the [nushell organization](https://quay.io/organization/nushell)
|
||||
on Quay.io. Pulling a container would come down to:
|
||||
|
||||
@ -104,11 +124,18 @@ The second container is a bit smaller if the size is important to you.
|
||||
|
||||
# Philosophy
|
||||
|
||||
Nu draws inspiration from projects like PowerShell, functional programming languages, and modern CLI tools. Rather than thinking of files and services as raw streams of text, Nu looks at each input as something with structure. For example, when you list the contents of a directory, what you get back is a table of rows, where each row represents an item in that directory. These values can be piped through a series of steps, in a series of commands called a 'pipeline'.
|
||||
Nu draws inspiration from projects like PowerShell, functional programming languages, and modern CLI tools.
|
||||
Rather than thinking of files and services as raw streams of text, Nu looks at each input as something with structure.
|
||||
For example, when you list the contents of a directory, what you get back is a table of rows, where each row represents an item in that directory.
|
||||
These values can be piped through a series of steps, in a series of commands called a 'pipeline'.
|
||||
|
||||
## Pipelines
|
||||
|
||||
In Unix, it's common to pipe between commands to split up a sophisticated command over multiple steps. Nu takes this a step further and builds heavily on the idea of _pipelines_. Just as the Unix philosophy, Nu allows commands to output from stdout and read from stdin. Additionally, commands can output structured data (you can think of this as a third kind of stream). Commands that work in the pipeline fit into one of three categories:
|
||||
In Unix, it's common to pipe between commands to split up a sophisticated command over multiple steps.
|
||||
Nu takes this a step further and builds heavily on the idea of _pipelines_.
|
||||
Just as the Unix philosophy, Nu allows commands to output from stdout and read from stdin.
|
||||
Additionally, commands can output structured data (you can think of this as a third kind of stream).
|
||||
Commands that work in the pipeline fit into one of three categories:
|
||||
|
||||
* Commands that produce a stream (eg, `ls`)
|
||||
* Commands that filter a stream (eg, `where type == "Directory"`)
|
||||
@ -118,7 +145,7 @@ Commands are separated by the pipe symbol (`|`) to denote a pipeline flowing lef
|
||||
|
||||
```
|
||||
/home/jonathan/Source/nushell(master)> ls | where type == "Directory" | autoview
|
||||
━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━
|
||||
────┬───────────┬───────────┬──────────┬────────┬──────────────┬────────────────
|
||||
# │ name │ type │ readonly │ size │ accessed │ modified
|
||||
────┼───────────┼───────────┼──────────┼────────┼──────────────┼────────────────
|
||||
0 │ .azure │ Directory │ │ 4.1 KB │ 2 months ago │ a day ago
|
||||
@ -129,59 +156,61 @@ Commands are separated by the pipe symbol (`|`) to denote a pipeline flowing lef
|
||||
5 │ src │ Directory │ │ 4.1 KB │ 2 months ago │ 37 minutes ago
|
||||
6 │ assets │ Directory │ │ 4.1 KB │ a month ago │ a month ago
|
||||
7 │ docs │ Directory │ │ 4.1 KB │ 2 months ago │ 2 months ago
|
||||
━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━
|
||||
────┴───────────┴───────────┴──────────┴────────┴──────────────┴────────────────
|
||||
```
|
||||
|
||||
Because most of the time you'll want to see the output of a pipeline, `autoview` is assumed. We could have also written the above:
|
||||
Because most of the time you'll want to see the output of a pipeline, `autoview` is assumed.
|
||||
We could have also written the above:
|
||||
|
||||
```
|
||||
/home/jonathan/Source/nushell(master)> ls | where type == Directory
|
||||
```
|
||||
|
||||
Being able to use the same commands and compose them differently is an important philosophy in Nu. For example, we could use the built-in `ps` command as well to get a list of the running processes, using the same `where` as above.
|
||||
Being able to use the same commands and compose them differently is an important philosophy in Nu.
|
||||
For example, we could use the built-in `ps` command as well to get a list of the running processes, using the same `where` as above.
|
||||
|
||||
```text
|
||||
/home/jonathan/Source/nushell(master)> ps | where cpu > 0
|
||||
━━━┯━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━━
|
||||
───┬───────┬─────────────────┬──────────┬──────────
|
||||
# │ pid │ name │ status │ cpu
|
||||
───┼───────┼─────────────────┼──────────┼──────────
|
||||
0 │ 992 │ chrome │ Sleeping │ 6.988768
|
||||
1 │ 4240 │ chrome │ Sleeping │ 5.645982
|
||||
2 │ 13973 │ qemu-system-x86 │ Sleeping │ 4.996551
|
||||
3 │ 15746 │ nu │ Sleeping │ 84.59905
|
||||
━━━┷━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━━━
|
||||
|
||||
───┴───────┴─────────────────┴──────────┴──────────
|
||||
```
|
||||
|
||||
## Opening files
|
||||
|
||||
Nu can load file and URL contents as raw text or as structured data (if it recognizes the format). For example, you can load a .toml file as structured data and explore it:
|
||||
Nu can load file and URL contents as raw text or as structured data (if it recognizes the format).
|
||||
For example, you can load a .toml file as structured data and explore it:
|
||||
|
||||
```
|
||||
/home/jonathan/Source/nushell(master)> open Cargo.toml
|
||||
━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━
|
||||
──────────────────┬────────────────┬──────────────────
|
||||
bin │ dependencies │ dev-dependencies
|
||||
──────────────────┼────────────────┼──────────────────
|
||||
[table: 12 rows] │ [table: 1 row] │ [table: 1 row]
|
||||
━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━
|
||||
──────────────────┴────────────────┴──────────────────
|
||||
```
|
||||
|
||||
We can pipeline this into a command that gets the contents of one of the columns:
|
||||
|
||||
```
|
||||
/home/jonathan/Source/nushell(master)> open Cargo.toml | get package
|
||||
━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━┯━━━━━━┯━━━━━━━━━
|
||||
─────────────────┬────────────────────────────┬─────────┬─────────┬──────┬─────────
|
||||
authors │ description │ edition │ license │ name │ version
|
||||
─────────────────┼────────────────────────────┼─────────┼─────────┼──────┼─────────
|
||||
[table: 3 rows] │ A shell for the GitHub era │ 2018 │ MIT │ nu │ 0.6.1
|
||||
━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━┷━━━━━━┷━━━━━━━━━
|
||||
[table: 3 rows] │ A shell for the GitHub era │ 2018 │ MIT │ nu │ 0.9.0
|
||||
─────────────────┴────────────────────────────┴─────────┴─────────┴──────┴─────────
|
||||
```
|
||||
|
||||
Finally, we can use commands outside of Nu once we have the data we want:
|
||||
|
||||
```
|
||||
/home/jonathan/Source/nushell(master)> open Cargo.toml | get package.version | echo $it
|
||||
0.6.1
|
||||
0.9.0
|
||||
```
|
||||
|
||||
Here we use the variable `$it` to refer to the value being piped to the external command.
|
||||
@ -190,36 +219,44 @@ Here we use the variable `$it` to refer to the value being piped to the external
|
||||
|
||||
Nu has early support for configuring the shell. It currently supports the following settings:
|
||||
|
||||
| Variable | Type | Description |
|
||||
| ------------- | ------------- | ----- |
|
||||
| path | table of strings | PATH to use to find binaries |
|
||||
| env | row | the environment variables to pass to external commands |
|
||||
| ctrlc_exit | boolean | whether or not to exit Nu after multiple ctrl-c presses |
|
||||
| table_mode | "light" or other | enable lightweight or normal tables |
|
||||
| edit_mode | "vi" or "emacs" | changes line editing to "vi" or "emacs" mode |
|
||||
| Variable | Type | Description |
|
||||
| --------------- | -------------------- | -------------------------------------------------------------- |
|
||||
| path | table of strings | PATH to use to find binaries |
|
||||
| env | row | the environment variables to pass to external commands |
|
||||
| ctrlc_exit | boolean | whether or not to exit Nu after multiple ctrl-c presses |
|
||||
| table_mode | "light" or other | enable lightweight or normal tables |
|
||||
| edit_mode | "vi" or "emacs" | changes line editing to "vi" or "emacs" mode |
|
||||
| completion_mode | "circular" or "list" | changes completion type to "circular" (default) or "list" mode |
|
||||
|
||||
To set one of these variables, you can use `config --set`. For example:
|
||||
|
||||
```
|
||||
> config --set [edit_mode "vi"]
|
||||
> config --set [path $nu:path]
|
||||
> config --set [path $nu.path]
|
||||
```
|
||||
|
||||
## Shells
|
||||
|
||||
Nu will work inside of a single directory and allow you to navigate around your filesystem by default. Nu also offers a way of adding additional working directories that you can jump between, allowing you to work in multiple directories at the same time.
|
||||
Nu will work inside of a single directory and allow you to navigate around your filesystem by default.
|
||||
Nu also offers a way of adding additional working directories that you can jump between, allowing you to work in multiple directories at the same time.
|
||||
|
||||
To do so, use the `enter` command, which will allow you create a new "shell" and enter it at the specified path. You can toggle between this new shell and the original shell with the `p` (for previous) and `n` (for next), allowing you to navigate around a ring buffer of shells. Once you're done with a shell, you can `exit` it and remove it from the ring buffer.
|
||||
To do so, use the `enter` command, which will allow you create a new "shell" and enter it at the specified path.
|
||||
You can toggle between this new shell and the original shell with the `p` (for previous) and `n` (for next), allowing you to navigate around a ring buffer of shells.
|
||||
Once you're done with a shell, you can `exit` it and remove it from the ring buffer.
|
||||
|
||||
Finally, to get a list of all the current shells, you can use the `shells` command.
|
||||
|
||||
## Plugins
|
||||
|
||||
Nu supports plugins that offer additional functionality to the shell and follow the same structured data model that built-in commands use. This allows you to extend nu for your needs.
|
||||
Nu supports plugins that offer additional functionality to the shell and follow the same structured data model that built-in commands use.
|
||||
This allows you to extend nu for your needs.
|
||||
|
||||
There are a few examples in the `plugins` directory.
|
||||
|
||||
Plugins are binaries that are available in your path and follow a `nu_plugin_*` naming convention. These binaries interact with nu via a simple JSON-RPC protocol where the command identifies itself and passes along its configuration, which then makes it available for use. If the plugin is a filter, data streams to it one element at a time, and it can stream data back in return via stdin/stdout. If the plugin is a sink, it is given the full vector of final data and is given free reign over stdin/stdout to use as it pleases.
|
||||
Plugins are binaries that are available in your path and follow a `nu_plugin_*` naming convention.
|
||||
These binaries interact with nu via a simple JSON-RPC protocol where the command identifies itself and passes along its configuration, which then makes it available for use.
|
||||
If the plugin is a filter, data streams to it one element at a time, and it can stream data back in return via stdin/stdout.
|
||||
If the plugin is a sink, it is given the full vector of final data and is given free reign over stdin/stdout to use as it pleases.
|
||||
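As a rough sketch of the stdin/stdout loop such a `nu_plugin_*` binary runs: the method names and payload shapes below are assumptions for illustration only, and real plugins build on the `nu-plugin` and `nu-protocol` crates rather than hand-rolled JSON.

```rust
// Minimal, illustrative plugin loop: read JSON-RPC requests line by line,
// answer on stdout. Requires the `serde_json` crate.
use std::io::{self, BufRead, Write};

fn main() -> io::Result<()> {
    let stdin = io::stdin();
    let stdout = io::stdout();
    let mut out = stdout.lock();

    for line in stdin.lock().lines() {
        let request: serde_json::Value = match serde_json::from_str(&line?) {
            Ok(v) => v,
            Err(_) => continue, // ignore malformed input in this sketch
        };
        // Dispatch on the JSON-RPC method the shell sent us.
        match request["method"].as_str() {
            Some("config") => writeln!(out, r#"{{"jsonrpc":"2.0","params":{{"name":"example"}}}}"#)?,
            Some("quit") | None => break,
            Some(_) => writeln!(out, r#"{{"jsonrpc":"2.0","params":[]}}"#)?,
        }
    }
    Ok(())
}
```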
|
||||
# Goals
|
||||
|
||||
@ -236,108 +273,10 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat
|
||||
* Finally, Nu views data functionally. Rather than using mutation, pipelines act as a means to load, change, and save data without mutable state.
|
||||
|
||||
# Commands
|
||||
## Initial commands
|
||||
| command | description |
|
||||
| ------------- | ------------- |
|
||||
| cd path | Change to a new path |
|
||||
| cp source path | Copy files |
|
||||
| date (--utc) | Get the current datetime |
|
||||
| fetch url | Fetch contents from a url and retrieve data as a table if possible |
|
||||
| help | Display help information about commands |
|
||||
| ls (path) | View the contents of the current or given path |
|
||||
| mkdir path | Make directories, creates intermediary directories as required. |
|
||||
| mv source target | Move files or directories. |
|
||||
| open filename | Load a file into a cell, convert to table if possible (avoid by appending '--raw') |
|
||||
| post url body (--user <user>) (--password <password>) | Post content to a url and retrieve data as a table if possible |
|
||||
| ps | View current processes |
|
||||
| sys | View information about the current system |
|
||||
| which filename | Finds a program file. |
|
||||
| rm {file or directory} | Remove a file, (for removing directory append '--recursive') |
|
||||
| version | Display Nu version |
|
||||
|
||||
## Shell commands
|
||||
| command | description |
|
||||
| ------- | ----------- |
|
||||
| exit (--now) | Exit the current shell (or all shells) |
|
||||
| enter (path) | Create a new shell and begin at this path |
|
||||
| p | Go to previous shell |
|
||||
| n | Go to next shell |
|
||||
| shells | Display the list of current shells |
|
||||
|
||||
## Filters on tables (structured data)
|
||||
| command | description |
|
||||
| ------------- | ------------- |
|
||||
| append row-data | Append a row to the end of the table |
|
||||
| compact ...columns | Remove rows where given columns are empty |
|
||||
| count | Show the total number of rows |
|
||||
| default column row-data | Sets a default row's column if missing |
|
||||
| edit column-or-column-path value | Edit an existing column to have a new value |
|
||||
| embed column | Creates a new table of one column with the given name, and places the current table inside of it |
|
||||
| first amount | Show only the first number of rows |
|
||||
| format pattern | Format table row data as a string following the given pattern |
|
||||
| get column-or-column-path | Open column and get data from the corresponding cells |
|
||||
| group-by column | Creates a new table with the data from the table rows grouped by the column given |
|
||||
| histogram column ...column-names | Creates a new table with a histogram based on the column name passed in, optionally give the frequency column name
|
||||
| inc (column-or-column-path) | Increment a value or version. Optionally use the column of a table |
|
||||
| insert column-or-column-path value | Insert a new column to the table |
|
||||
| last amount | Show only the last number of rows |
|
||||
| nth ...row-numbers | Return only the selected rows |
|
||||
| pick ...columns | Down-select table to only these columns |
|
||||
| pivot --header-row <headers> | Pivot the tables, making columns into rows and vice versa |
|
||||
| prepend row-data | Prepend a row to the beginning of the table |
|
||||
| reject ...columns | Remove the given columns from the table |
|
||||
| reverse | Reverses the table. |
|
||||
| skip amount | Skip a number of rows |
|
||||
| skip-while condition | Skips rows while the condition matches |
|
||||
| split-by column | Creates a new table with the data from the inner tables splitted by the column given |
|
||||
| sort-by ...columns | Sort by the given columns |
|
||||
| str (column) | Apply string function. Optionally use the column of a table |
|
||||
| sum | Sum a column of values |
|
||||
| tags | Read the tags (metadata) for values |
|
||||
| to-bson | Convert table into .bson binary data |
|
||||
| to-csv | Convert table into .csv text |
|
||||
| to-json | Convert table into .json text |
|
||||
| to-sqlite | Convert table to sqlite .db binary data |
|
||||
| to-toml | Convert table into .toml text |
|
||||
| to-tsv | Convert table into .tsv text |
|
||||
| to-url | Convert table to a urlencoded string |
|
||||
| to-yaml | Convert table into .yaml text |
|
||||
| where condition | Filter table to match the condition |
|
||||
|
||||
## Filters on text (unstructured data)
|
||||
| command | description |
|
||||
| ------------- | ------------- |
|
||||
| from-bson | Parse binary data as .bson and create table |
|
||||
| from-csv | Parse text as .csv and create table |
|
||||
| from-ini | Parse text as .ini and create table |
|
||||
| from-json | Parse text as .json and create table |
|
||||
| from-sqlite | Parse binary data as sqlite .db and create table |
|
||||
| from-ssv --minimum-spaces <minimum number of spaces to count as a separator> | Parse text as space-separated values and create table |
|
||||
| from-toml | Parse text as .toml and create table |
|
||||
| from-tsv | Parse text as .tsv and create table |
|
||||
| from-url | Parse urlencoded string and create a table |
|
||||
| from-xml | Parse text as .xml and create a table |
|
||||
| from-yaml | Parse text as a .yaml/.yml and create a table |
|
||||
| lines | Split single string into rows, one per line |
|
||||
| parse pattern | Convert text to a table by matching the given pattern |
|
||||
| size | Gather word count statistics on the text |
|
||||
| split-column sep ...column-names | Split row contents across multiple columns via the separator, optionally give the columns names |
|
||||
| split-row sep | Split row contents over multiple rows via the separator |
|
||||
| trim | Trim leading and following whitespace from text data |
|
||||
| {external-command} $it | Run external command with given arguments, replacing $it with each row text |
|
||||
|
||||
## Consuming commands
|
||||
| command | description |
|
||||
| ------------- | ------------- |
|
||||
| autoview | View the contents of the pipeline as a table or list |
|
||||
| binaryview | Autoview of binary data (optional feature) |
|
||||
| clip | Copy the contents of the pipeline to the copy/paste buffer (optional feature) |
|
||||
| save filename | Save the contents of the pipeline to a file |
|
||||
| table | View the contents of the pipeline as a table |
|
||||
| textview | Autoview of text data |
|
||||
| tree | View the contents of the pipeline as a tree (optional feature) |
|
||||
You can find a list of Nu commands, complete with documentation, in [quick command references](https://www.nushell.sh/documentation.html#quick-command-references).
|
||||
|
||||
# License
|
||||
|
||||
The project is made available under the MIT license. See "LICENSE" for more information.
|
||||
The project is made available under the MIT license. See the `LICENSE` file for more information.
|
||||
|
||||
|
TODO.md (8 lines changed)

@ -50,3 +50,11 @@ textview in own crate
Combine atomic and atomic_parse in parser

at_end_possible_ws needs to be comment and separator sensitive

Eliminate unnecessary `nodes` parser

#[derive(HasSpan)]

Figure out a solution for the duplication in stuff like NumberShape vs. NumberExpressionShape

use `struct Expander` from signature.rs
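One of the notes above asks for a `#[derive(HasSpan)]`. To make the intent concrete, here is a self-contained sketch of the boilerplate such a derive would generate; `Span` and `HasSpan` are simplified stand-ins for the nu-source types, not the real definitions.

```rust
// Simplified stand-ins for the nu-source span types.
#[derive(Clone, Copy, Debug)]
struct Span {
    start: usize,
    end: usize,
}

trait HasSpan {
    fn span(&self) -> Span;
}

struct NumberExpression {
    span: Span,
    value: i64,
}

// The hand-written impl a derive macro would generate from the `span` field.
impl HasSpan for NumberExpression {
    fn span(&self) -> Span {
        self.span
    }
}

fn main() {
    let expr = NumberExpression { span: Span { start: 3, end: 7 }, value: 42 };
    println!("value {} spans {:?}", expr.value, expr.span());
}
```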
nu-build crate manifest:

@ -1,12 +1,13 @@
[package]
name = "nu-build"
version = "0.7.0"
version = "0.12.0"
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
edition = "2018"
description = "Core build system for nushell"
license = "MIT"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[lib]
doctest = false

[dependencies]
serde = { version = "1.0.103", features = ["derive"] }
nu-build source (build-flag handling):

@ -13,10 +13,10 @@ lazy_static! {

// got from https://github.com/mitsuhiko/insta/blob/b113499249584cb650150d2d01ed96ee66db6b30/src/runtime.rs#L67-L88

fn get_cargo_workspace(manifest_dir: &str) -> Option<&Path> {
let mut workspaces = WORKSPACES.lock().unwrap();
fn get_cargo_workspace(manifest_dir: &str) -> Result<Option<&Path>, Box<dyn std::error::Error>> {
let mut workspaces = WORKSPACES.lock()?;
if let Some(rv) = workspaces.get(manifest_dir) {
Some(rv)
Ok(Some(rv))
} else {
#[derive(Deserialize)]
struct Manifest {
@ -26,12 +26,11 @@ fn get_cargo_workspace(manifest_dir: &str) -> Option<&Path> {
.arg("metadata")
.arg("--format-version=1")
.current_dir(manifest_dir)
.output()
.unwrap();
let manifest: Manifest = serde_json::from_slice(&output.stdout).unwrap();
.output()?;
let manifest: Manifest = serde_json::from_slice(&output.stdout)?;
let path = Box::leak(Box::new(PathBuf::from(manifest.workspace_root)));
workspaces.insert(manifest_dir.to_string(), path.as_path());
workspaces.get(manifest_dir).map(|w| *w)
Ok(workspaces.get(manifest_dir).cloned())
}
}

@ -43,11 +42,11 @@ struct Feature {
}

pub fn build() -> Result<(), Box<dyn std::error::Error>> {
let input = env::var("CARGO_MANIFEST_DIR").unwrap();
let input = env::var("CARGO_MANIFEST_DIR")?;

let all_on = env::var("NUSHELL_ENABLE_ALL_FLAGS").is_ok();
let flags: HashSet<String> = env::var("NUSHELL_ENABLE_FLAGS")
.map(|s| s.split(",").map(|s| s.to_string()).collect())
.map(|s| s.split(',').map(|s| s.to_string()).collect())
.unwrap_or_else(|_| HashSet::new());

if all_on && !flags.is_empty() {
@ -56,7 +55,7 @@ pub fn build() -> Result<(), Box<dyn std::error::Error>> {
);
}

let workspace = match get_cargo_workspace(&input) {
let workspace = match get_cargo_workspace(&input)? {
// If the crate is being downloaded from crates.io, it won't have a workspace root, and that's ok
None => return Ok(()),
Some(workspace) => workspace,
@ -72,7 +71,7 @@ pub fn build() -> Result<(), Box<dyn std::error::Error>> {
let toml: HashMap<String, Feature> = toml::from_str(&std::fs::read_to_string(path)?)?;

for (key, value) in toml.iter() {
if value.enabled == true || all_on || flags.contains(key) {
if value.enabled || all_on || flags.contains(key) {
println!("cargo:rustc-cfg={}", key);
}
}
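Given the `[build-dependencies]` entries elsewhere in this diff, the usual way a workspace crate consumes `build()` is from its build script; a minimal sketch (the real build.rs files may differ):

```rust
// build.rs (sketch). nu_build::build() reads a feature-flag TOML file at the
// workspace root (the toml::from_str call above) plus the
// NUSHELL_ENABLE_ALL_FLAGS / NUSHELL_ENABLE_FLAGS environment variables, and
// prints `cargo:rustc-cfg=<flag>` lines. That is what lets source code gate
// experimental paths with attributes like `#[cfg(coloring_in_tokens)]`.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    nu_build::build()
}
```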
crates/nu-cli/Cargo.toml (new file, 110 lines)
@ -0,0 +1,110 @@
|
||||
[package]
|
||||
name = "nu-cli"
|
||||
version = "0.12.0"
|
||||
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
||||
description = "CLI for nushell"
|
||||
edition = "2018"
|
||||
license = "MIT"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
nu-source = { version = "0.12.0", path = "../nu-source" }
|
||||
nu-plugin = { version = "0.12.0", path = "../nu-plugin" }
|
||||
nu-protocol = { version = "0.12.0", path = "../nu-protocol" }
|
||||
nu-errors = { version = "0.12.0", path = "../nu-errors" }
|
||||
nu-parser = { version = "0.12.0", path = "../nu-parser" }
|
||||
nu-value-ext = { version = "0.12.0", path = "../nu-value-ext" }
|
||||
nu-macros = { version = "0.12.0", path = "../nu-macros" }
|
||||
nu-test-support = { version = "0.12.0", path = "../nu-test-support" }
|
||||
|
||||
ansi_term = "0.12.1"
|
||||
app_dirs = "1.2.1"
|
||||
async-stream = "0.2"
|
||||
base64 = "0.12.0"
|
||||
bigdecimal = { version = "0.1.0", features = ["serde"] }
|
||||
bson = { version = "0.14.1", features = ["decimal128"] }
|
||||
byte-unit = "3.0.3"
|
||||
bytes = "0.5.4"
|
||||
calamine = "0.16"
|
||||
cfg-if = "0.1"
|
||||
chrono = { version = "0.4.11", features = ["serde"] }
|
||||
clap = "2.33.0"
|
||||
csv = "1.1"
|
||||
ctrlc = "3.1.4"
|
||||
derive-new = "0.5.8"
|
||||
dirs = "2.0.2"
|
||||
dunce = "1.0.0"
|
||||
filesize = "0.1.0"
|
||||
futures = { version = "0.3", features = ["compat", "io-compat"] }
|
||||
futures-util = "0.3.4"
|
||||
futures_codec = "0.4"
|
||||
getset = "0.1.0"
|
||||
git2 = { version = "0.13.0", default_features = false }
|
||||
glob = "0.3.0"
|
||||
hex = "0.4"
|
||||
htmlescape = "0.3.1"
|
||||
ical = "0.6.*"
|
||||
ichwh = "0.3"
|
||||
indexmap = { version = "1.3.2", features = ["serde-1"] }
|
||||
itertools = "0.9.0"
|
||||
language-reporting = "0.4.0"
|
||||
log = "0.4.8"
|
||||
meval = "0.2"
|
||||
natural = "0.5.0"
|
||||
nom = "5.0.1"
|
||||
nom-tracable = "0.4.1"
|
||||
nom_locate = "1.0.0"
|
||||
num-bigint = { version = "0.2.6", features = ["serde"] }
|
||||
num-traits = "0.2.11"
|
||||
parking_lot = "0.10.0"
|
||||
pin-utils = "0.1.0-alpha.4"
|
||||
pretty-hex = "0.1.1"
|
||||
pretty_env_logger = "0.4.0"
|
||||
prettytable-rs = "0.8.0"
|
||||
ptree = {version = "0.2" }
|
||||
query_interface = "0.3.5"
|
||||
rand = "0.7"
|
||||
regex = "1"
|
||||
roxmltree = "0.10.0"
|
||||
rustyline = "6.0.0"
|
||||
serde = { version = "1.0.105", features = ["derive"] }
|
||||
serde-hjson = "0.9.1"
|
||||
serde_bytes = "0.11.3"
|
||||
serde_ini = "0.2.0"
|
||||
serde_json = "1.0.48"
|
||||
serde_urlencoded = "0.6.1"
|
||||
serde_yaml = "0.8"
|
||||
shellexpand = "2.0.0"
|
||||
strip-ansi-escapes = "0.1.0"
|
||||
tempfile = "3.1.0"
|
||||
term = "0.5.2"
|
||||
termcolor = "1.1.0"
|
||||
textwrap = {version = "0.11.0", features = ["term_size"]}
|
||||
toml = "0.5.6"
|
||||
trash = "1.0.0"
|
||||
typetag = "0.1.4"
|
||||
umask = "0.1"
|
||||
unicode-xid = "0.2.0"
|
||||
|
||||
clipboard = { version = "0.5", optional = true }
|
||||
starship = { version = "0.38.0", optional = true }
|
||||
|
||||
[target.'cfg(unix)'.dependencies]
|
||||
users = "0.9"
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "0.21.0"
|
||||
features = ["bundled", "blob"]
|
||||
|
||||
[dev-dependencies]
|
||||
pretty_assertions = "0.6.1"
|
||||
|
||||
[build-dependencies]
|
||||
nu-build = { version = "0.12.0", path = "../nu-build" }
|
||||
|
||||
[features]
|
||||
stable = []
|
||||
starship-prompt = ["starship"]
|
||||
clipboard-cli = ["clipboard"]
|
@ -1,23 +1,27 @@
|
||||
use crate::commands::classified::external::{MaybeTextCodec, StringOrBinary};
|
||||
use crate::commands::classified::pipeline::run_pipeline;
|
||||
use crate::commands::classified::ClassifiedInputStream;
|
||||
use crate::commands::plugin::JsonRpc;
|
||||
use crate::commands::plugin::{PluginCommand, PluginSink};
|
||||
use crate::commands::whole_stream_command;
|
||||
use crate::context::Context;
|
||||
use crate::data::config;
|
||||
#[cfg(not(feature = "starship-prompt"))]
|
||||
use crate::git::current_branch;
|
||||
use crate::prelude::*;
|
||||
use futures_codec::FramedRead;
|
||||
|
||||
use nu_errors::ShellError;
|
||||
use nu_parser::{
|
||||
expand_syntax, hir, ClassifiedCommand, ClassifiedPipeline, InternalCommand, PipelineShape,
|
||||
TokenNode, TokensIterator,
|
||||
ClassifiedCommand, ClassifiedPipeline, ExternalCommand, PipelineShape, SpannedToken,
|
||||
TokensIterator,
|
||||
};
|
||||
use nu_protocol::{Signature, UntaggedValue, Value};
|
||||
use nu_protocol::{Primitive, ReturnSuccess, Signature, UntaggedValue, Value};
|
||||
|
||||
use log::{debug, log_enabled, trace};
|
||||
use rustyline::error::ReadlineError;
|
||||
use rustyline::{self, config::Configurer, config::EditMode, ColorMode, Config, Editor};
|
||||
use rustyline::{
|
||||
self, config::Configurer, config::EditMode, At, Cmd, ColorMode, CompletionType, Config, Editor,
|
||||
KeyPress, Movement, Word,
|
||||
};
|
||||
use std::error::Error;
|
||||
use std::io::{BufRead, BufReader, Write};
|
||||
use std::iter::Iterator;
|
||||
@ -94,60 +98,41 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel
|
||||
}
|
||||
|
||||
fn search_paths() -> Vec<std::path::PathBuf> {
|
||||
use std::env;
|
||||
|
||||
let mut search_paths = Vec::new();
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
{
|
||||
// Use our debug plugins in debug mode
|
||||
let mut path = std::path::PathBuf::from(".");
|
||||
path.push("target");
|
||||
path.push("debug");
|
||||
|
||||
if path.exists() {
|
||||
search_paths.push(path);
|
||||
// Automatically add path `nu` is in as a search path
|
||||
if let Ok(exe_path) = env::current_exe() {
|
||||
if let Some(exe_dir) = exe_path.parent() {
|
||||
search_paths.push(exe_dir.to_path_buf());
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(debug_assertions))]
|
||||
{
|
||||
use std::env;
|
||||
|
||||
match env::var_os("PATH") {
|
||||
Some(paths) => {
|
||||
search_paths = env::split_paths(&paths).collect::<Vec<_>>();
|
||||
search_paths.extend(env::split_paths(&paths).collect::<Vec<_>>());
|
||||
}
|
||||
None => println!("PATH is not defined in the environment."),
|
||||
}
|
||||
|
||||
// Use our release plugins in release mode
|
||||
let mut path = std::path::PathBuf::from(".");
|
||||
path.push("target");
|
||||
path.push("release");
|
||||
|
||||
if path.exists() {
|
||||
search_paths.push(path);
|
||||
}
|
||||
}
|
||||
|
||||
// permit Nu finding and picking up development plugins
|
||||
// if there are any first.
|
||||
search_paths.reverse();
|
||||
search_paths
|
||||
}
|
||||
|
||||
fn load_plugins(context: &mut Context) -> Result<(), ShellError> {
|
||||
pub fn load_plugins(context: &mut Context) -> Result<(), ShellError> {
|
||||
let opts = glob::MatchOptions {
|
||||
case_sensitive: false,
|
||||
require_literal_separator: false,
|
||||
require_literal_leading_dot: false,
|
||||
};
|
||||
|
||||
set_env_from_config();
|
||||
|
||||
for path in search_paths() {
|
||||
let mut pattern = path.to_path_buf();
|
||||
|
||||
pattern.push(std::path::Path::new("nu_plugin_[a-z]*"));
|
||||
pattern.push(std::path::Path::new("nu_plugin_[a-z0-9][a-z0-9]*"));
|
||||
|
||||
match glob::glob_with(&pattern.to_string_lossy(), opts) {
|
||||
Err(_) => {}
|
||||
@ -173,14 +158,14 @@ fn load_plugins(context: &mut Context) -> Result<(), ShellError> {
|
||||
{
|
||||
bin_name
|
||||
.chars()
|
||||
.all(|c| c.is_ascii_alphabetic() || c == '_' || c == '.')
|
||||
.all(|c| c.is_ascii_alphanumeric() || c == '_' || c == '.')
|
||||
}
|
||||
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
bin_name
|
||||
.chars()
|
||||
.all(|c| c.is_ascii_alphabetic() || c == '_')
|
||||
.all(|c| c.is_ascii_alphanumeric() || c == '_')
|
||||
}
|
||||
};
|
||||
|
||||
@ -224,29 +209,55 @@ impl History {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
#[allow(dead_code)]
|
||||
fn create_default_starship_config() -> Option<toml::Value> {
|
||||
let mut map = toml::value::Table::new();
|
||||
map.insert("add_newline".into(), toml::Value::Boolean(false));
|
||||
|
||||
let mut git_branch = toml::value::Table::new();
|
||||
git_branch.insert("symbol".into(), toml::Value::String("📙 ".into()));
|
||||
map.insert("git_branch".into(), toml::Value::Table(git_branch));
|
||||
|
||||
let mut git_status = toml::value::Table::new();
|
||||
git_status.insert("disabled".into(), toml::Value::Boolean(true));
|
||||
map.insert("git_status".into(), toml::Value::Table(git_status));
|
||||
|
||||
Some(toml::Value::Table(map))
|
||||
}
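
// Illustrative sketch (not part of this change): a unit-test-style check of the default
// Starship table built by create_default_starship_config above. Only `toml::Value`
// accessors from the `toml` crate are used; the module and test names are hypothetical.
#[cfg(test)]
mod default_starship_config_tests {
    use super::create_default_starship_config;

    #[test]
    fn disables_git_status_and_extra_newline() {
        let config = create_default_starship_config().expect("expected a TOML table");
        let table = config.as_table().expect("top level should be a table");

        // add_newline is turned off so the prompt stays on a single line.
        assert_eq!(
            table.get("add_newline").and_then(|v| v.as_bool()),
            Some(false)
        );

        // git_status is disabled by default; git_branch only customizes its symbol.
        let git_status_disabled = table
            .get("git_status")
            .and_then(|v| v.as_table())
            .and_then(|t| t.get("disabled"))
            .and_then(|v| v.as_bool());
        assert_eq!(git_status_disabled, Some(true));
    }
}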
|
||||
|
||||
pub fn create_default_context(
|
||||
syncer: &mut crate::env::environment_syncer::EnvironmentSyncer,
|
||||
) -> Result<Context, Box<dyn Error>> {
|
||||
syncer.load_environment();
|
||||
|
||||
let mut context = Context::basic()?;
|
||||
syncer.sync_env_vars(&mut context);
|
||||
syncer.sync_path_vars(&mut context);
|
||||
|
||||
{
|
||||
use crate::commands::*;
|
||||
|
||||
context.add_commands(vec![
|
||||
// System/file operations
|
||||
whole_stream_command(PWD),
|
||||
whole_stream_command(LS),
|
||||
whole_stream_command(CD),
|
||||
whole_stream_command(Env),
|
||||
whole_stream_command(Pwd),
|
||||
per_item_command(Ls),
|
||||
per_item_command(Du),
|
||||
whole_stream_command(Cd),
|
||||
per_item_command(Remove),
|
||||
per_item_command(Open),
|
||||
whole_stream_command(Config),
|
||||
per_item_command(Help),
|
||||
per_item_command(History),
|
||||
whole_stream_command(Save),
|
||||
per_item_command(Touch),
|
||||
per_item_command(Cpy),
|
||||
whole_stream_command(Date),
|
||||
per_item_command(Calc),
|
||||
per_item_command(Mkdir),
|
||||
per_item_command(Move),
|
||||
per_item_command(Kill),
|
||||
whole_stream_command(Version),
|
||||
whole_stream_command(Clear),
|
||||
whole_stream_command(What),
|
||||
whole_stream_command(Which),
|
||||
whole_stream_command(Debug),
|
||||
@ -294,17 +305,24 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
whole_stream_command(Default),
|
||||
whole_stream_command(SkipWhile),
|
||||
whole_stream_command(Range),
|
||||
whole_stream_command(Rename),
|
||||
whole_stream_command(Uniq),
|
||||
// Table manipulation
|
||||
whole_stream_command(Shuffle),
|
||||
whole_stream_command(Wrap),
|
||||
whole_stream_command(Pivot),
|
||||
whole_stream_command(Headers),
|
||||
// Data processing
|
||||
whole_stream_command(Histogram),
|
||||
whole_stream_command(Sum),
|
||||
// File format output
|
||||
whole_stream_command(ToBSON),
|
||||
whole_stream_command(ToCSV),
|
||||
whole_stream_command(ToHTML),
|
||||
whole_stream_command(ToJSON),
|
||||
whole_stream_command(ToSQLite),
|
||||
whole_stream_command(ToDB),
|
||||
whole_stream_command(ToMarkdown),
|
||||
whole_stream_command(ToTOML),
|
||||
whole_stream_command(ToTSV),
|
||||
whole_stream_command(ToURL),
|
||||
@ -316,6 +334,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
whole_stream_command(FromINI),
|
||||
whole_stream_command(FromBSON),
|
||||
whole_stream_command(FromJSON),
|
||||
whole_stream_command(FromODS),
|
||||
whole_stream_command(FromDB),
|
||||
whole_stream_command(FromSQLite),
|
||||
whole_stream_command(FromTOML),
|
||||
@ -324,6 +343,8 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
whole_stream_command(FromXML),
|
||||
whole_stream_command(FromYAML),
|
||||
whole_stream_command(FromYML),
|
||||
whole_stream_command(FromIcs),
|
||||
whole_stream_command(FromVcf),
|
||||
]);
|
||||
|
||||
cfg_if::cfg_if! {
|
||||
@ -345,11 +366,70 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
}
|
||||
}
|
||||
|
||||
Ok(context)
|
||||
}
|
||||
|
||||
pub async fn run_pipeline_standalone(
|
||||
pipeline: String,
|
||||
redirect_stdin: bool,
|
||||
context: &mut Context,
|
||||
) -> Result<(), Box<dyn Error>> {
|
||||
let line = process_line(Ok(pipeline), context, redirect_stdin, false).await;
|
||||
|
||||
match line {
|
||||
LineResult::Success(line) => {
|
||||
let error_code = {
|
||||
let errors = context.current_errors.clone();
|
||||
let errors = errors.lock();
|
||||
|
||||
if errors.len() > 0 {
|
||||
1
|
||||
} else {
|
||||
0
|
||||
}
|
||||
};
|
||||
|
||||
context.maybe_print_errors(Text::from(line));
|
||||
if error_code != 0 {
|
||||
std::process::exit(error_code);
|
||||
}
|
||||
}
|
||||
|
||||
LineResult::Error(line, err) => {
|
||||
context.with_host(|host| {
|
||||
print_err(err, host, &Text::from(line.clone()));
|
||||
});
|
||||
|
||||
context.maybe_print_errors(Text::from(line));
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
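
// Hedged usage sketch (illustrative only): how a one-off invocation, for example a
// `nu -c "<pipeline>"` style caller, might drive `run_pipeline_standalone` above. The
// pipeline text is arbitrary and the syncer/context setup mirrors the start of `cli()` below.
#[allow(dead_code)]
async fn run_single_pipeline() -> Result<(), Box<dyn Error>> {
    let mut syncer = crate::env::environment_syncer::EnvironmentSyncer::new();
    let mut context = create_default_context(&mut syncer)?;
    let _ = load_plugins(&mut context);

    // redirect_stdin = false: the pipeline keeps the terminal's stdin instead of
    // reading values piped into the nu process.
    run_pipeline_standalone("ls | first 3".to_string(), false, &mut context).await
}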
|
||||
|
||||
/// The entry point for the CLI. Will register all known internal commands, load experimental commands, load plugins, then prepare the prompt and line reader for input.
|
||||
pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
let mut syncer = crate::env::environment_syncer::EnvironmentSyncer::new();
|
||||
let mut context = create_default_context(&mut syncer)?;
|
||||
|
||||
let _ = load_plugins(&mut context);
|
||||
|
||||
let config = Config::builder().color_mode(ColorMode::Forced).build();
|
||||
let mut rl: Editor<_> = Editor::with_config(config);
|
||||
|
||||
// add key bindings to move over a whole word with Ctrl+ArrowLeft and Ctrl+ArrowRight
|
||||
rl.bind_sequence(
|
||||
KeyPress::ControlLeft,
|
||||
Cmd::Move(Movement::BackwardWord(1, Word::Vi)),
|
||||
);
|
||||
rl.bind_sequence(
|
||||
KeyPress::ControlRight,
|
||||
Cmd::Move(Movement::ForwardWord(1, At::AfterEnd, Word::Vi)),
|
||||
);
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
let _ = ansi_term::enable_ansi_support();
|
||||
@ -385,14 +465,34 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
|
||||
rl.set_edit_mode(edit_mode);
|
||||
|
||||
let completion_mode = config::config(Tag::unknown())?
|
||||
.get("completion_mode")
|
||||
.map(|s| match s.value.expect_string() {
|
||||
"list" => CompletionType::List,
|
||||
"circular" => CompletionType::Circular,
|
||||
_ => CompletionType::Circular,
|
||||
})
|
||||
.unwrap_or(CompletionType::Circular);
|
||||
|
||||
rl.set_completion_type(completion_mode);
|
||||
|
||||
let colored_prompt = {
|
||||
#[cfg(feature = "starship-prompt")]
|
||||
{
|
||||
std::env::set_var("STARSHIP_SHELL", "");
|
||||
starship::print::get_prompt(starship::context::Context::new_with_dir(
|
||||
clap::ArgMatches::default(),
|
||||
cwd,
|
||||
))
|
||||
let mut starship_context =
|
||||
starship::context::Context::new_with_dir(clap::ArgMatches::default(), cwd);
|
||||
|
||||
match starship_context.config.config {
|
||||
None => {
|
||||
starship_context.config.config = create_default_starship_config();
|
||||
}
|
||||
Some(toml::Value::Table(t)) if t.is_empty() => {
|
||||
starship_context.config.config = create_default_starship_config();
|
||||
}
|
||||
_ => {}
|
||||
};
|
||||
starship::print::get_prompt(starship_context)
|
||||
}
|
||||
#[cfg(not(feature = "starship-prompt"))]
|
||||
{
|
||||
@ -408,9 +508,11 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
};
|
||||
|
||||
let prompt = {
|
||||
let bytes = strip_ansi_escapes::strip(&colored_prompt).unwrap();
|
||||
|
||||
String::from_utf8_lossy(&bytes).to_string()
|
||||
if let Ok(bytes) = strip_ansi_escapes::strip(&colored_prompt) {
|
||||
String::from_utf8_lossy(&bytes).to_string()
|
||||
} else {
|
||||
"> ".to_string()
|
||||
}
|
||||
};
|
||||
|
||||
rl.helper_mut().expect("No helper").colored_prompt = colored_prompt;
|
||||
@ -421,7 +523,14 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
initial_command = None;
|
||||
}
|
||||
|
||||
let line = process_line(readline, &mut context).await;
|
||||
let line = process_line(readline, &mut context, false, true).await;
|
||||
|
||||
// Check the config to see if we need to update the path
|
||||
// TODO: make sure config is cached so we don't pay for this load on every call
|
||||
// FIXME: we probably want to be a bit more graceful if we can't set the environment
|
||||
syncer.reload();
|
||||
syncer.sync_env_vars(&mut context);
|
||||
syncer.sync_path_vars(&mut context);
|
||||
|
||||
match line {
|
||||
LineResult::Success(line) => {
|
||||
@ -485,57 +594,6 @@ fn chomp_newline(s: &str) -> &str {
|
||||
}
|
||||
}
|
||||
|
||||
fn set_env_from_config() {
|
||||
let config = crate::data::config::read(Tag::unknown(), &None).unwrap();
|
||||
|
||||
if config.contains_key("env") {
|
||||
// Clear the existing vars, we're about to replace them
|
||||
for (key, _value) in std::env::vars() {
|
||||
std::env::remove_var(key);
|
||||
}
|
||||
|
||||
let value = config.get("env");
|
||||
|
||||
if let Some(Value {
|
||||
value: UntaggedValue::Row(r),
|
||||
..
|
||||
}) = value
|
||||
{
|
||||
for (k, v) in &r.entries {
|
||||
if let Ok(value_string) = v.as_string() {
|
||||
std::env::set_var(k, value_string);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if config.contains_key("path") {
|
||||
// Override the path with what they give us from config
|
||||
let value = config.get("path");
|
||||
|
||||
if let Some(Value {
|
||||
value: UntaggedValue::Table(table),
|
||||
..
|
||||
}) = value
|
||||
{
|
||||
let mut paths = vec![];
|
||||
|
||||
for val in table {
|
||||
let path_str = val.as_string();
|
||||
|
||||
if let Ok(path_str) = path_str {
|
||||
paths.push(PathBuf::from(path_str));
|
||||
}
|
||||
}
|
||||
|
||||
let path_os_string = std::env::join_paths(&paths);
|
||||
if let Ok(path_os_string) = path_os_string {
|
||||
std::env::set_var("PATH", path_os_string);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum LineResult {
|
||||
Success(String),
|
||||
Error(String, ShellError),
|
||||
@ -543,7 +601,13 @@ enum LineResult {
|
||||
Break,
|
||||
}
|
||||
|
||||
async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context) -> LineResult {
|
||||
/// Process the line by parsing the text to turn it into commands, classify those commands so that we understand what is being called in the pipeline, and then run this pipeline
|
||||
async fn process_line(
|
||||
readline: Result<String, ReadlineError>,
|
||||
ctx: &mut Context,
|
||||
redirect_stdin: bool,
|
||||
cli_mode: bool,
|
||||
) -> LineResult {
|
||||
match &readline {
|
||||
Ok(line) if line.trim() == "" => LineResult::Success(line.clone()),
|
||||
|
||||
@ -561,36 +625,131 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
debug!("=== Parsed ===");
|
||||
debug!("{:#?}", result);
|
||||
|
||||
let mut pipeline = match classify_pipeline(&result, ctx, &Text::from(line)) {
|
||||
Ok(pipeline) => pipeline,
|
||||
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||
};
|
||||
let pipeline = classify_pipeline(&result, &ctx, &Text::from(line));
|
||||
|
||||
match pipeline.commands.list.last() {
|
||||
Some(ClassifiedCommand::External(_)) => {}
|
||||
_ => pipeline
|
||||
.commands
|
||||
.list
|
||||
.push(ClassifiedCommand::Internal(InternalCommand {
|
||||
name: "autoview".to_string(),
|
||||
name_tag: Tag::unknown(),
|
||||
args: hir::Call::new(
|
||||
Box::new(hir::Expression::synthetic_string("autoview")),
|
||||
None,
|
||||
None,
|
||||
Span::unknown(),
|
||||
),
|
||||
})),
|
||||
if let Some(failure) = pipeline.failed {
|
||||
return LineResult::Error(line.to_string(), failure.into());
|
||||
}
|
||||
|
||||
// Check the config to see if we need to update the path
|
||||
// TODO: make sure config is cached so we don't pay for this load on every call
|
||||
set_env_from_config();
|
||||
// There's a special case to check before we process the pipeline:
|
||||
// If we're giving a path by itself
|
||||
// ...and it's not a command in the path
|
||||
// ...and it doesn't have any arguments
|
||||
// ...and we're in the CLI
|
||||
// ...then change to this directory
|
||||
if cli_mode && pipeline.commands.list.len() == 1 {
|
||||
if let ClassifiedCommand::External(ExternalCommand {
|
||||
ref name, ref args, ..
|
||||
}) = pipeline.commands.list[0]
|
||||
{
|
||||
if dunce::canonicalize(name).is_ok()
|
||||
&& PathBuf::from(name).is_dir()
|
||||
&& ichwh::which(name).await.unwrap_or(None).is_none()
|
||||
&& args.list.is_empty()
|
||||
{
|
||||
// Here we work differently if we're in Windows because of the expected Windows behavior
|
||||
#[cfg(windows)]
|
||||
{
|
||||
if name.ends_with(':') {
|
||||
// This looks like a drive shortcut. We need to a) switch drives and b) go back to the previous directory we were viewing on that drive
|
||||
// But first, we need to save where we are now
|
||||
let current_path = ctx.shell_manager.path();
|
||||
|
||||
let input = ClassifiedInputStream::new();
|
||||
let split_path: Vec<_> = current_path.split(':').collect();
|
||||
if split_path.len() > 1 {
|
||||
ctx.windows_drives_previous_cwd
|
||||
.lock()
|
||||
.insert(split_path[0].to_string(), current_path);
|
||||
}
|
||||
|
||||
match run_pipeline(pipeline, ctx, input, line).await {
|
||||
Ok(_) => LineResult::Success(line.to_string()),
|
||||
let name = name.to_uppercase();
|
||||
let new_drive: Vec<_> = name.split(':').collect();
|
||||
|
||||
if let Some(val) =
|
||||
ctx.windows_drives_previous_cwd.lock().get(new_drive[0])
|
||||
{
|
||||
ctx.shell_manager.set_path(val.to_string());
|
||||
return LineResult::Success(line.to_string());
|
||||
} else {
|
||||
ctx.shell_manager.set_path(name.to_string());
|
||||
return LineResult::Success(line.to_string());
|
||||
}
|
||||
} else {
|
||||
ctx.shell_manager.set_path(name.to_string());
|
||||
return LineResult::Success(line.to_string());
|
||||
}
|
||||
}
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
ctx.shell_manager.set_path(name.to_string());
|
||||
return LineResult::Success(line.to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let input_stream = if redirect_stdin {
|
||||
let file = futures::io::AllowStdIo::new(std::io::stdin());
|
||||
let stream = FramedRead::new(file, MaybeTextCodec).map(|line| {
|
||||
if let Ok(line) = line {
|
||||
match line {
|
||||
StringOrBinary::String(s) => Ok(Value {
|
||||
value: UntaggedValue::Primitive(Primitive::String(s)),
|
||||
tag: Tag::unknown(),
|
||||
}),
|
||||
StringOrBinary::Binary(b) => Ok(Value {
|
||||
value: UntaggedValue::Primitive(Primitive::Binary(
|
||||
b.into_iter().collect(),
|
||||
)),
|
||||
tag: Tag::unknown(),
|
||||
}),
|
||||
}
|
||||
} else {
|
||||
panic!("Internal error: could not read lines of text from stdin")
|
||||
}
|
||||
});
|
||||
Some(stream.to_input_stream())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
match run_pipeline(pipeline, ctx, input_stream, line).await {
|
||||
Ok(Some(input)) => {
|
||||
// Running a pipeline gives us back a stream that we can then
|
||||
// work through. At the top level, we just want to pull on the
|
||||
// values to compute them.
|
||||
use futures::stream::TryStreamExt;
|
||||
|
||||
let context = RunnableContext {
|
||||
input,
|
||||
shell_manager: ctx.shell_manager.clone(),
|
||||
host: ctx.host.clone(),
|
||||
ctrl_c: ctx.ctrl_c.clone(),
|
||||
commands: ctx.registry.clone(),
|
||||
name: Tag::unknown(),
|
||||
source: Text::from(String::new()),
|
||||
};
|
||||
|
||||
if let Ok(mut output_stream) = crate::commands::autoview::autoview(context) {
|
||||
loop {
|
||||
match output_stream.try_next().await {
|
||||
Ok(Some(ReturnSuccess::Value(Value {
|
||||
value: UntaggedValue::Error(e),
|
||||
..
|
||||
}))) => return LineResult::Error(line.to_string(), e),
|
||||
Ok(Some(_item)) => {
|
||||
if ctx.ctrl_c.load(Ordering::SeqCst) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
Ok(None) => break,
|
||||
Err(e) => return LineResult::Error(line.to_string(), e),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
LineResult::Success(line.to_string())
|
||||
}
|
||||
Ok(None) => LineResult::Success(line.to_string()),
|
||||
Err(err) => LineResult::Error(line.to_string(), err),
|
||||
}
|
||||
}
|
||||
@ -604,23 +763,19 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
}
|
||||
|
||||
pub fn classify_pipeline(
|
||||
pipeline: &TokenNode,
|
||||
pipeline: &SpannedToken,
|
||||
context: &Context,
|
||||
source: &Text,
|
||||
) -> Result<ClassifiedPipeline, ShellError> {
|
||||
) -> ClassifiedPipeline {
|
||||
let pipeline_list = vec![pipeline.clone()];
|
||||
let mut iterator = TokensIterator::all(&pipeline_list, source.clone(), pipeline.span());
|
||||
let expand_context = context.expand_context(source);
|
||||
let mut iterator = TokensIterator::new(&pipeline_list, expand_context, pipeline.span());
|
||||
|
||||
let result = expand_syntax(
|
||||
&PipelineShape,
|
||||
&mut iterator,
|
||||
&context.expand_context(source),
|
||||
)
|
||||
.map_err(|err| err.into());
|
||||
let result = iterator.expand_infallible(PipelineShape);
|
||||
|
||||
if log_enabled!(target: "nu::expand_syntax", log::Level::Debug) {
|
||||
outln!("");
|
||||
ptree::print_tree(&iterator.expand_tracer().print(source.clone())).unwrap();
|
||||
let _ = ptree::print_tree(&iterator.expand_tracer().print(source.clone()));
|
||||
outln!("");
|
||||
}
|
||||
|
||||
@ -628,7 +783,7 @@ pub fn classify_pipeline(
|
||||
}
|
||||
|
||||
pub fn print_err(err: ShellError, host: &dyn Host, source: &Text) {
|
||||
let diag = err.to_diagnostic();
|
||||
let diag = err.into_diagnostic();
|
||||
|
||||
let writer = host.err_termcolor();
|
||||
let mut source = source.to_string();
|
@ -7,6 +7,7 @@ mod to_delimited_data;
|
||||
pub(crate) mod append;
|
||||
pub(crate) mod args;
|
||||
pub(crate) mod autoview;
|
||||
pub(crate) mod calc;
|
||||
pub(crate) mod cd;
|
||||
pub(crate) mod classified;
|
||||
pub(crate) mod clip;
|
||||
@ -18,10 +19,10 @@ pub(crate) mod cp;
|
||||
pub(crate) mod date;
|
||||
pub(crate) mod debug;
|
||||
pub(crate) mod default;
|
||||
pub(crate) mod du;
|
||||
pub(crate) mod echo;
|
||||
pub(crate) mod edit;
|
||||
pub(crate) mod enter;
|
||||
pub(crate) mod env;
|
||||
#[allow(unused)]
|
||||
pub(crate) mod evaluate_by;
|
||||
pub(crate) mod exit;
|
||||
@ -29,18 +30,22 @@ pub(crate) mod first;
|
||||
pub(crate) mod format;
|
||||
pub(crate) mod from_bson;
|
||||
pub(crate) mod from_csv;
|
||||
pub(crate) mod from_ics;
|
||||
pub(crate) mod from_ini;
|
||||
pub(crate) mod from_json;
|
||||
pub(crate) mod from_ods;
|
||||
pub(crate) mod from_sqlite;
|
||||
pub(crate) mod from_ssv;
|
||||
pub(crate) mod from_toml;
|
||||
pub(crate) mod from_tsv;
|
||||
pub(crate) mod from_url;
|
||||
pub(crate) mod from_vcf;
|
||||
pub(crate) mod from_xlsx;
|
||||
pub(crate) mod from_xml;
|
||||
pub(crate) mod from_yaml;
|
||||
pub(crate) mod get;
|
||||
pub(crate) mod group_by;
|
||||
pub(crate) mod headers;
|
||||
pub(crate) mod help;
|
||||
pub(crate) mod histogram;
|
||||
pub(crate) mod history;
|
||||
@ -66,10 +71,12 @@ pub(crate) mod range;
|
||||
#[allow(unused)]
|
||||
pub(crate) mod reduce_by;
|
||||
pub(crate) mod reject;
|
||||
pub(crate) mod rename;
|
||||
pub(crate) mod reverse;
|
||||
pub(crate) mod rm;
|
||||
pub(crate) mod save;
|
||||
pub(crate) mod shells;
|
||||
pub(crate) mod shuffle;
|
||||
pub(crate) mod size;
|
||||
pub(crate) mod skip;
|
||||
pub(crate) mod skip_while;
|
||||
@ -77,19 +84,23 @@ pub(crate) mod sort_by;
|
||||
pub(crate) mod split_by;
|
||||
pub(crate) mod split_column;
|
||||
pub(crate) mod split_row;
|
||||
pub(crate) mod sum;
|
||||
#[allow(unused)]
|
||||
pub(crate) mod t_sort_by;
|
||||
pub(crate) mod table;
|
||||
pub(crate) mod tags;
|
||||
pub(crate) mod to_bson;
|
||||
pub(crate) mod to_csv;
|
||||
pub(crate) mod to_html;
|
||||
pub(crate) mod to_json;
|
||||
pub(crate) mod to_md;
|
||||
pub(crate) mod to_sqlite;
|
||||
pub(crate) mod to_toml;
|
||||
pub(crate) mod to_tsv;
|
||||
pub(crate) mod to_url;
|
||||
pub(crate) mod to_yaml;
|
||||
pub(crate) mod trim;
|
||||
pub(crate) mod uniq;
|
||||
pub(crate) mod version;
|
||||
pub(crate) mod what;
|
||||
pub(crate) mod where_;
|
||||
@ -97,13 +108,14 @@ pub(crate) mod which_;
|
||||
pub(crate) mod wrap;
|
||||
|
||||
pub(crate) use autoview::Autoview;
|
||||
pub(crate) use cd::CD;
|
||||
pub(crate) use cd::Cd;
|
||||
pub(crate) use command::{
|
||||
per_item_command, whole_stream_command, Command, PerItemCommand, RawCommandArgs,
|
||||
UnevaluatedCallInfo, WholeStreamCommand,
|
||||
per_item_command, whole_stream_command, Command, PerItemCommand, UnevaluatedCallInfo,
|
||||
WholeStreamCommand,
|
||||
};
|
||||
|
||||
pub(crate) use append::Append;
|
||||
pub(crate) use calc::Calc;
|
||||
pub(crate) use compact::Compact;
|
||||
pub(crate) use config::Config;
|
||||
pub(crate) use count::Count;
|
||||
@ -111,39 +123,48 @@ pub(crate) use cp::Cpy;
|
||||
pub(crate) use date::Date;
|
||||
pub(crate) use debug::Debug;
|
||||
pub(crate) use default::Default;
|
||||
pub(crate) use du::Du;
|
||||
pub(crate) use echo::Echo;
|
||||
pub(crate) use edit::Edit;
|
||||
pub(crate) mod kill;
|
||||
pub(crate) use kill::Kill;
|
||||
pub(crate) mod clear;
|
||||
pub(crate) use clear::Clear;
|
||||
pub(crate) mod touch;
|
||||
pub(crate) use enter::Enter;
|
||||
pub(crate) use env::Env;
|
||||
#[allow(unused)]
|
||||
#[allow(unused_imports)]
|
||||
pub(crate) use evaluate_by::EvaluateBy;
|
||||
pub(crate) use exit::Exit;
|
||||
pub(crate) use first::First;
|
||||
pub(crate) use format::Format;
|
||||
pub(crate) use from_bson::FromBSON;
|
||||
pub(crate) use from_csv::FromCSV;
|
||||
pub(crate) use from_ics::FromIcs;
|
||||
pub(crate) use from_ini::FromINI;
|
||||
pub(crate) use from_json::FromJSON;
|
||||
pub(crate) use from_ods::FromODS;
|
||||
pub(crate) use from_sqlite::FromDB;
|
||||
pub(crate) use from_sqlite::FromSQLite;
|
||||
pub(crate) use from_ssv::FromSSV;
|
||||
pub(crate) use from_toml::FromTOML;
|
||||
pub(crate) use from_tsv::FromTSV;
|
||||
pub(crate) use from_url::FromURL;
|
||||
pub(crate) use from_vcf::FromVcf;
|
||||
pub(crate) use from_xlsx::FromXLSX;
|
||||
pub(crate) use from_xml::FromXML;
|
||||
pub(crate) use from_yaml::FromYAML;
|
||||
pub(crate) use from_yaml::FromYML;
|
||||
pub(crate) use get::Get;
|
||||
pub(crate) use group_by::GroupBy;
|
||||
pub(crate) use headers::Headers;
|
||||
pub(crate) use help::Help;
|
||||
pub(crate) use histogram::Histogram;
|
||||
pub(crate) use history::History;
|
||||
pub(crate) use insert::Insert;
|
||||
pub(crate) use last::Last;
|
||||
pub(crate) use lines::Lines;
|
||||
pub(crate) use ls::LS;
|
||||
#[allow(unused)]
|
||||
pub(crate) use ls::Ls;
|
||||
#[allow(unused_imports)]
|
||||
pub(crate) use map_max_by::MapMaxBy;
|
||||
pub(crate) use mkdir::Mkdir;
|
||||
pub(crate) use mv::Move;
|
||||
@ -155,15 +176,17 @@ pub(crate) use pick::Pick;
|
||||
pub(crate) use pivot::Pivot;
|
||||
pub(crate) use prepend::Prepend;
|
||||
pub(crate) use prev::Previous;
|
||||
pub(crate) use pwd::PWD;
|
||||
pub(crate) use pwd::Pwd;
|
||||
pub(crate) use range::Range;
|
||||
#[allow(unused)]
|
||||
#[allow(unused_imports)]
|
||||
pub(crate) use reduce_by::ReduceBy;
|
||||
pub(crate) use reject::Reject;
|
||||
pub(crate) use rename::Rename;
|
||||
pub(crate) use reverse::Reverse;
|
||||
pub(crate) use rm::Remove;
|
||||
pub(crate) use save::Save;
|
||||
pub(crate) use shells::Shells;
|
||||
pub(crate) use shuffle::Shuffle;
|
||||
pub(crate) use size::Size;
|
||||
pub(crate) use skip::Skip;
|
||||
pub(crate) use skip_while::SkipWhile;
|
||||
@ -171,20 +194,25 @@ pub(crate) use sort_by::SortBy;
|
||||
pub(crate) use split_by::SplitBy;
|
||||
pub(crate) use split_column::SplitColumn;
|
||||
pub(crate) use split_row::SplitRow;
|
||||
#[allow(unused)]
|
||||
pub(crate) use sum::Sum;
|
||||
#[allow(unused_imports)]
|
||||
pub(crate) use t_sort_by::TSortBy;
|
||||
pub(crate) use table::Table;
|
||||
pub(crate) use tags::Tags;
|
||||
pub(crate) use to_bson::ToBSON;
|
||||
pub(crate) use to_csv::ToCSV;
|
||||
pub(crate) use to_html::ToHTML;
|
||||
pub(crate) use to_json::ToJSON;
|
||||
pub(crate) use to_md::ToMarkdown;
|
||||
pub(crate) use to_sqlite::ToDB;
|
||||
pub(crate) use to_sqlite::ToSQLite;
|
||||
pub(crate) use to_toml::ToTOML;
|
||||
pub(crate) use to_tsv::ToTSV;
|
||||
pub(crate) use to_url::ToURL;
|
||||
pub(crate) use to_yaml::ToYAML;
|
||||
pub(crate) use touch::Touch;
|
||||
pub(crate) use trim::Trim;
|
||||
pub(crate) use uniq::Uniq;
|
||||
pub(crate) use version::Version;
|
||||
pub(crate) use what::What;
|
||||
pub(crate) use where_::Where;
|
@ -43,6 +43,7 @@ fn append(
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let mut after: VecDeque<Value> = VecDeque::new();
|
||||
after.push_back(row);
|
||||
let after = futures::stream::iter(after);
|
||||
|
||||
Ok(OutputStream::from_input(input.values.chain(after)))
|
||||
}
|
296
crates/nu-cli/src/commands/autoview.rs
Normal file
@@ -0,0 +1,296 @@
|
||||
use crate::commands::UnevaluatedCallInfo;
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use nu_errors::ShellError;
|
||||
use nu_parser::{hir, hir::Expression, hir::Literal, hir::SpannedExpression};
|
||||
use nu_protocol::{Primitive, ReturnSuccess, Signature, UntaggedValue, Value};
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::atomic::Ordering;
|
||||
|
||||
pub struct Autoview;
|
||||
|
||||
impl WholeStreamCommand for Autoview {
|
||||
fn name(&self) -> &str {
|
||||
"autoview"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("autoview")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"View the contents of the pipeline as a table or list."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
autoview(RunnableContext {
|
||||
input: args.input,
|
||||
commands: registry.clone(),
|
||||
shell_manager: args.shell_manager,
|
||||
host: args.host,
|
||||
source: args.call_info.source,
|
||||
ctrl_c: args.ctrl_c,
|
||||
name: args.call_info.name_tag,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct RunnableContextWithoutInput {
|
||||
pub shell_manager: ShellManager,
|
||||
pub host: Arc<parking_lot::Mutex<Box<dyn Host>>>,
|
||||
pub source: Text,
|
||||
pub ctrl_c: Arc<AtomicBool>,
|
||||
pub commands: CommandRegistry,
|
||||
pub name: Tag,
|
||||
}
|
||||
|
||||
impl RunnableContextWithoutInput {
|
||||
pub fn convert(context: RunnableContext) -> (InputStream, RunnableContextWithoutInput) {
|
||||
let new_context = RunnableContextWithoutInput {
|
||||
shell_manager: context.shell_manager,
|
||||
host: context.host,
|
||||
source: context.source,
|
||||
ctrl_c: context.ctrl_c,
|
||||
commands: context.commands,
|
||||
name: context.name,
|
||||
};
|
||||
(context.input, new_context)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn autoview(context: RunnableContext) -> Result<OutputStream, ShellError> {
|
||||
let binary = context.get_command("binaryview");
|
||||
let text = context.get_command("textview");
|
||||
let table = context.get_command("table");
|
||||
|
||||
Ok(OutputStream::new(async_stream! {
|
||||
let (mut input_stream, context) = RunnableContextWithoutInput::convert(context);
|
||||
|
||||
match input_stream.next().await {
|
||||
Some(x) => {
|
||||
match input_stream.next().await {
|
||||
Some(y) => {
|
||||
let ctrl_c = context.ctrl_c.clone();
|
||||
let stream = async_stream! {
|
||||
yield Ok(x);
|
||||
yield Ok(y);
|
||||
|
||||
loop {
|
||||
match input_stream.next().await {
|
||||
Some(z) => {
|
||||
if ctrl_c.load(Ordering::SeqCst) {
|
||||
break;
|
||||
}
|
||||
yield Ok(z);
|
||||
}
|
||||
_ => break,
|
||||
}
|
||||
}
|
||||
};
|
||||
let stream = stream.to_input_stream();
|
||||
|
||||
if let Some(table) = table {
|
||||
let command_args = create_default_command_args(&context).with_input(stream);
|
||||
let result = table.run(command_args, &context.commands);
|
||||
result.collect::<Vec<_>>().await;
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
match x {
|
||||
Value {
|
||||
value: UntaggedValue::Primitive(Primitive::String(ref s)),
|
||||
tag: Tag { anchor, span },
|
||||
} if anchor.is_some() => {
|
||||
if let Some(text) = text {
|
||||
let mut stream = VecDeque::new();
|
||||
stream.push_back(UntaggedValue::string(s).into_value(Tag { anchor, span }));
|
||||
let command_args = create_default_command_args(&context).with_input(stream);
|
||||
let result = text.run(command_args, &context.commands);
|
||||
result.collect::<Vec<_>>().await;
|
||||
} else {
|
||||
out!("{}", s);
|
||||
}
|
||||
}
|
||||
Value {
|
||||
value: UntaggedValue::Primitive(Primitive::String(s)),
|
||||
..
|
||||
} => {
|
||||
out!("{}", s);
|
||||
}
|
||||
Value {
|
||||
value: UntaggedValue::Primitive(Primitive::Line(ref s)),
|
||||
tag: Tag { anchor, span },
|
||||
} if anchor.is_some() => {
|
||||
if let Some(text) = text {
|
||||
let mut stream = VecDeque::new();
|
||||
stream.push_back(UntaggedValue::string(s).into_value(Tag { anchor, span }));
|
||||
let command_args = create_default_command_args(&context).with_input(stream);
|
||||
let result = text.run(command_args, &context.commands);
|
||||
result.collect::<Vec<_>>().await;
|
||||
} else {
|
||||
out!("{}\n", s);
|
||||
}
|
||||
}
|
||||
Value {
|
||||
value: UntaggedValue::Primitive(Primitive::Line(s)),
|
||||
..
|
||||
} => {
|
||||
out!("{}\n", s);
|
||||
}
|
||||
Value {
|
||||
value: UntaggedValue::Primitive(Primitive::Path(s)),
|
||||
..
|
||||
} => {
|
||||
out!("{}", s.display());
|
||||
}
|
||||
Value {
|
||||
value: UntaggedValue::Primitive(Primitive::Int(n)),
|
||||
..
|
||||
} => {
|
||||
out!("{}", n);
|
||||
}
|
||||
Value {
|
||||
value: UntaggedValue::Primitive(Primitive::Decimal(n)),
|
||||
..
|
||||
} => {
|
||||
out!("{}", n);
|
||||
}
|
||||
|
||||
Value { value: UntaggedValue::Primitive(Primitive::Binary(ref b)), .. } => {
|
||||
if let Some(binary) = binary {
|
||||
let mut stream = VecDeque::new();
|
||||
stream.push_back(x);
|
||||
let command_args = create_default_command_args(&context).with_input(stream);
|
||||
let result = binary.run(command_args, &context.commands);
|
||||
result.collect::<Vec<_>>().await;
|
||||
} else {
|
||||
use pretty_hex::*;
|
||||
out!("{:?}", b.hex_dump());
|
||||
}
|
||||
}
|
||||
|
||||
Value { value: UntaggedValue::Error(e), .. } => {
|
||||
yield Err(e);
|
||||
}
|
||||
|
||||
Value { value: UntaggedValue::Row(row), ..} => {
|
||||
use prettytable::format::{FormatBuilder, LinePosition, LineSeparator};
|
||||
use prettytable::{color, Attr, Cell, Row, Table};
|
||||
use crate::data::value::{format_leaf, style_leaf};
|
||||
use textwrap::fill;
|
||||
|
||||
let termwidth = std::cmp::max(textwrap::termwidth(), 20);
|
||||
|
||||
enum TableMode {
|
||||
Light,
|
||||
Normal,
|
||||
}
|
||||
|
||||
let mut table = Table::new();
|
||||
let table_mode = crate::data::config::config(Tag::unknown());
|
||||
|
||||
let table_mode = if let Some(s) = table_mode?.get("table_mode") {
|
||||
match s.as_string() {
|
||||
Ok(typ) if typ == "light" => TableMode::Light,
|
||||
_ => TableMode::Normal,
|
||||
}
|
||||
} else {
|
||||
TableMode::Normal
|
||||
};
|
||||
|
||||
match table_mode {
|
||||
TableMode::Light => {
|
||||
table.set_format(
|
||||
FormatBuilder::new()
|
||||
.separator(LinePosition::Title, LineSeparator::new('─', '─', ' ', ' '))
|
||||
.padding(1, 1)
|
||||
.build(),
|
||||
);
|
||||
}
|
||||
_ => {
|
||||
table.set_format(
|
||||
FormatBuilder::new()
|
||||
.column_separator('│')
|
||||
.separator(LinePosition::Top, LineSeparator::new('─', '┬', ' ', ' '))
|
||||
.separator(LinePosition::Title, LineSeparator::new('─', '┼', ' ', ' '))
|
||||
.separator(LinePosition::Bottom, LineSeparator::new('─', '┴', ' ', ' '))
|
||||
.padding(1, 1)
|
||||
.build(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let mut max_key_len = 0;
|
||||
for (key, _) in row.entries.iter() {
|
||||
max_key_len = std::cmp::max(max_key_len, key.chars().count());
|
||||
}
|
||||
|
||||
if max_key_len > (termwidth/2 - 1) {
|
||||
max_key_len = termwidth/2 - 1;
|
||||
}
|
||||
|
||||
let max_val_len = termwidth - max_key_len - 5;
|
||||
|
||||
for (key, value) in row.entries.iter() {
|
||||
table.add_row(Row::new(vec![Cell::new(&fill(&key, max_key_len)).with_style(Attr::ForegroundColor(color::GREEN)).with_style(Attr::Bold),
|
||||
Cell::new(&fill(&format_leaf(value).plain_string(100_000), max_val_len))]));
|
||||
}
|
||||
|
||||
table.printstd();
|
||||
|
||||
// table.print_term(&mut *context.host.lock().out_terminal().ok_or_else(|| ShellError::untagged_runtime_error("Could not open terminal for output"))?)
|
||||
// .map_err(|_| ShellError::untagged_runtime_error("Internal error: could not print to terminal (for unix systems check to make sure TERM is set)"))?;
|
||||
}
|
||||
|
||||
Value { value: ref item, .. } => {
|
||||
if let Some(table) = table {
|
||||
let mut stream = VecDeque::new();
|
||||
stream.push_back(x);
|
||||
let command_args = create_default_command_args(&context).with_input(stream);
|
||||
let result = table.run(command_args, &context.commands);
|
||||
result.collect::<Vec<_>>().await;
|
||||
} else {
|
||||
out!("{:?}", item);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
//out!("<no results>");
|
||||
}
|
||||
}
|
||||
|
||||
// Needed for async_stream to type check
|
||||
if false {
|
||||
yield ReturnSuccess::value(UntaggedValue::nothing().into_untagged_value());
|
||||
}
|
||||
}))
|
||||
}
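
// Worked example (illustrative only) of the row-rendering width budget used inside
// `autoview` above: the key column is capped at `termwidth / 2 - 1` and the value column
// gets the remaining `termwidth - max_key_len - 5` characters (the constant 5 presumably
// covers the separator and padding). With an 80-column terminal and a longest key of
// 50 characters, the key column gets 39 characters and the value column gets 36.
#[allow(dead_code)]
fn row_column_budget(termwidth: usize, longest_key: usize) -> (usize, usize) {
    let max_key_len = std::cmp::min(longest_key, termwidth / 2 - 1);
    let max_val_len = termwidth - max_key_len - 5;
    (max_key_len, max_val_len)
}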
|
||||
|
||||
fn create_default_command_args(context: &RunnableContextWithoutInput) -> RawCommandArgs {
|
||||
let span = context.name.span;
|
||||
RawCommandArgs {
|
||||
host: context.host.clone(),
|
||||
ctrl_c: context.ctrl_c.clone(),
|
||||
shell_manager: context.shell_manager.clone(),
|
||||
call_info: UnevaluatedCallInfo {
|
||||
args: hir::Call {
|
||||
head: Box::new(SpannedExpression::new(
|
||||
Expression::Literal(Literal::String(span)),
|
||||
span,
|
||||
)),
|
||||
positional: None,
|
||||
named: None,
|
||||
span,
|
||||
},
|
||||
source: context.source.clone(),
|
||||
name_tag: context.name.clone(),
|
||||
},
|
||||
}
|
||||
}
|
63
crates/nu-cli/src/commands/calc.rs
Normal file
@@ -0,0 +1,63 @@
use crate::commands::PerItemCommand;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::{CallInfo, Primitive, ReturnSuccess, UntaggedValue, Value};

pub struct Calc;

impl PerItemCommand for Calc {
    fn name(&self) -> &str {
        "calc"
    }

    fn usage(&self) -> &str {
        "Parse a math expression into a number"
    }

    fn run(
        &self,
        _call_info: &CallInfo,
        _registry: &CommandRegistry,
        raw_args: &RawCommandArgs,
        input: Value,
    ) -> Result<OutputStream, ShellError> {
        calc(input, raw_args)
    }
}

fn calc(input: Value, args: &RawCommandArgs) -> Result<OutputStream, ShellError> {
    let name_span = &args.call_info.name_tag.span;

    let output = if let Ok(string) = input.as_string() {
        match parse(&string, &input.tag) {
            Ok(value) => ReturnSuccess::value(value),
            Err(err) => Err(ShellError::labeled_error(
                "Calculation error",
                err,
                &input.tag.span,
            )),
        }
    } else {
        Err(ShellError::labeled_error(
            "Expected a string from pipeline",
            "requires string input",
            name_span,
        ))
    };

    Ok(vec![output].into())
}

pub fn parse(math_expression: &str, tag: impl Into<Tag>) -> Result<Value, String> {
    use std::f64;
    let num = meval::eval_str(math_expression);
    match num {
        Ok(num) => {
            if num == f64::INFINITY || num == f64::NEG_INFINITY {
                return Err(String::from("cannot represent result"));
            }
            Ok(UntaggedValue::from(Primitive::from(num)).into_value(tag))
        }
        Err(error) => Err(error.to_string()),
    }
}
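
// Hedged usage sketch (not part of the new file): calling the meval-backed `parse`
// helper above directly. `Tag::unknown()` comes in via the prelude; the demo function
// itself is hypothetical.
#[allow(dead_code)]
fn demo_calc_parse() -> Result<(), String> {
    // meval evaluates the expression as an f64, so the result is a decimal primitive.
    let value = parse("2 + 3 * 4", Tag::unknown())?;
    println!("2 + 3 * 4 evaluated to {:?}", value.value);
    Ok(())
}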
@@ -4,9 +4,9 @@ use nu_errors::ShellError;
use nu_macros::signature;
use nu_protocol::{Signature, SyntaxShape};

pub struct CD;
pub struct Cd;

impl WholeStreamCommand for CD {
impl WholeStreamCommand for Cd {
    fn name(&self) -> &str {
        "cd"
    }
853
crates/nu-cli/src/commands/classified/external.rs
Normal file
@@ -0,0 +1,853 @@
|
||||
use crate::futures::ThreadedReceiver;
|
||||
use crate::prelude::*;
|
||||
use bytes::{BufMut, Bytes, BytesMut};
|
||||
use futures::executor::block_on_stream;
|
||||
use futures::stream::StreamExt;
|
||||
use futures_codec::FramedRead;
|
||||
use log::trace;
|
||||
use nu_errors::ShellError;
|
||||
use nu_parser::commands::classified::external::ExternalArg;
|
||||
use nu_parser::ExternalCommand;
|
||||
use nu_protocol::{ColumnPath, Primitive, ShellTypeName, UntaggedValue, Value};
|
||||
use nu_source::{Tag, Tagged};
|
||||
use nu_value_ext::as_column_path;
|
||||
use std::io::Write;
|
||||
use std::ops::Deref;
|
||||
use std::process::{Command, Stdio};
|
||||
use std::sync::mpsc;
|
||||
|
||||
pub enum StringOrBinary {
|
||||
String(String),
|
||||
Binary(Vec<u8>),
|
||||
}
|
||||
pub struct MaybeTextCodec;
|
||||
|
||||
impl futures_codec::Encoder for MaybeTextCodec {
|
||||
type Item = StringOrBinary;
|
||||
type Error = std::io::Error;
|
||||
|
||||
fn encode(&mut self, item: Self::Item, dst: &mut BytesMut) -> Result<(), Self::Error> {
|
||||
match item {
|
||||
StringOrBinary::String(s) => {
|
||||
dst.reserve(s.len());
|
||||
dst.put(s.as_bytes());
|
||||
Ok(())
|
||||
}
|
||||
StringOrBinary::Binary(b) => {
|
||||
dst.reserve(b.len());
|
||||
dst.put(Bytes::from(b));
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl futures_codec::Decoder for MaybeTextCodec {
|
||||
type Item = StringOrBinary;
|
||||
type Error = std::io::Error;
|
||||
|
||||
fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {
|
||||
let v: Vec<u8> = src.to_vec();
|
||||
match String::from_utf8(v) {
|
||||
Ok(s) => {
|
||||
src.clear();
|
||||
if s.is_empty() {
|
||||
Ok(None)
|
||||
} else {
|
||||
Ok(Some(StringOrBinary::String(s)))
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
// Note: the longest UTF-8 character per Unicode spec is currently 6 bytes. If we fail somewhere earlier than the last 6 bytes,
|
||||
// we know that we're failing to understand the string encoding and not just seeing a partial character. When this happens, let's
|
||||
// fall back to assuming it's a binary buffer.
|
||||
if src.is_empty() {
|
||||
Ok(None)
|
||||
} else if src.len() > 6 && (src.len() - err.utf8_error().valid_up_to() > 6) {
|
||||
// Fall back to assuming binary
|
||||
let buf = src.to_vec();
|
||||
src.clear();
|
||||
Ok(Some(StringOrBinary::Binary(buf)))
|
||||
} else {
|
||||
// Looks like a utf-8 string, so let's assume that
|
||||
let buf = src.split_to(err.utf8_error().valid_up_to() + 1);
|
||||
String::from_utf8(buf.to_vec())
|
||||
.map(|x| Some(StringOrBinary::String(x)))
|
||||
.map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidData, e))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
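
// Minimal sketch (assumes an async context and a readable stdin) of driving the codec
// above with FramedRead, mirroring the redirect-stdin path in cli.rs earlier in this
// diff. Names other than MaybeTextCodec and StringOrBinary are hypothetical.
#[allow(dead_code)]
async fn read_stdin_chunks() {
    use futures::StreamExt;

    let stdin = futures::io::AllowStdIo::new(std::io::stdin());
    let mut frames = FramedRead::new(stdin, MaybeTextCodec);

    while let Some(frame) = frames.next().await {
        match frame {
            // Valid UTF-8 is surfaced as text...
            Ok(StringOrBinary::String(s)) => eprintln!("text chunk: {} bytes", s.len()),
            // ...input that fails to decode well before the end of the chunk falls back to binary.
            Ok(StringOrBinary::Binary(b)) => eprintln!("binary chunk: {} bytes", b.len()),
            Err(e) => eprintln!("decode error: {}", e),
        }
    }
}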
|
||||
|
||||
pub fn nu_value_to_string(command: &ExternalCommand, from: &Value) -> Result<String, ShellError> {
|
||||
match &from.value {
|
||||
UntaggedValue::Primitive(Primitive::Int(i)) => Ok(i.to_string()),
|
||||
UntaggedValue::Primitive(Primitive::String(s))
|
||||
| UntaggedValue::Primitive(Primitive::Line(s)) => Ok(s.clone()),
|
||||
UntaggedValue::Primitive(Primitive::Path(p)) => Ok(p.to_string_lossy().to_string()),
|
||||
unsupported => Err(ShellError::labeled_error(
|
||||
format!("needs string data (given: {})", unsupported.type_name()),
|
||||
"expected a string",
|
||||
&command.name_tag,
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn run_external_command(
|
||||
command: ExternalCommand,
|
||||
context: &mut Context,
|
||||
input: Option<InputStream>,
|
||||
is_last: bool,
|
||||
) -> Result<Option<InputStream>, ShellError> {
|
||||
trace!(target: "nu::run::external", "-> {}", command.name);
|
||||
|
||||
if !did_find_command(&command.name).await {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Command not found",
|
||||
"command not found",
|
||||
&command.name_tag,
|
||||
));
|
||||
}
|
||||
|
||||
if command.has_it_argument() || command.has_nu_argument() {
|
||||
run_with_iterator_arg(command, context, input, is_last)
|
||||
} else {
|
||||
run_with_stdin(command, context, input, is_last)
|
||||
}
|
||||
}
|
||||
|
||||
fn prepare_column_path_for_fetching_it_variable(
|
||||
argument: &ExternalArg,
|
||||
) -> Result<Tagged<ColumnPath>, ShellError> {
|
||||
// We have "$it.[contents of interest]"
|
||||
// and start slicing from "$it.[member+]"
|
||||
// ^ here.
|
||||
let key = nu_source::Text::from(argument.deref()).slice(4..argument.len());
|
||||
|
||||
to_column_path(&key, &argument.tag)
|
||||
}
|
||||
|
||||
fn prepare_column_path_for_fetching_nu_variable(
|
||||
argument: &ExternalArg,
|
||||
) -> Result<Tagged<ColumnPath>, ShellError> {
|
||||
// We have "$nu.[contents of interest]"
|
||||
// and start slicing from "$nu.[member+]"
|
||||
// ^ here.
|
||||
let key = nu_source::Text::from(argument.deref()).slice(4..argument.len());
|
||||
|
||||
to_column_path(&key, &argument.tag)
|
||||
}
|
||||
|
||||
fn to_column_path(
|
||||
path_members: &str,
|
||||
tag: impl Into<Tag>,
|
||||
) -> Result<Tagged<ColumnPath>, ShellError> {
|
||||
let tag = tag.into();
|
||||
|
||||
as_column_path(
|
||||
&UntaggedValue::Table(
|
||||
path_members
|
||||
.split('.')
|
||||
.map(|x| {
|
||||
let member = match x.parse::<u64>() {
|
||||
Ok(v) => UntaggedValue::int(v),
|
||||
Err(_) => UntaggedValue::string(x),
|
||||
};
|
||||
|
||||
member.into_value(&tag)
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
.into_value(&tag),
|
||||
)
|
||||
}
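
// Illustrative sketch only: `to_column_path` above splits a dotted member string on '.',
// turning numeric segments into integer path members and everything else into string
// members. For an argument like "$it.3.name", the helpers above slice off the "$it."
// prefix and hand "3.name" in here (row 3, then column "name").
#[allow(dead_code)]
fn demo_column_path() -> Result<(), ShellError> {
    let column_path = to_column_path("3.name", Tag::unknown())?;
    // The `Tagged` wrapper keeps the span used for error reporting alongside the path.
    let _ = column_path;
    Ok(())
}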
|
||||
|
||||
fn run_with_iterator_arg(
|
||||
command: ExternalCommand,
|
||||
context: &mut Context,
|
||||
input: Option<InputStream>,
|
||||
is_last: bool,
|
||||
) -> Result<Option<InputStream>, ShellError> {
|
||||
let path = context.shell_manager.path();
|
||||
|
||||
let mut inputs: InputStream = if let Some(input) = input {
|
||||
trace_stream!(target: "nu::trace_stream::external::it", "input" = input)
|
||||
} else {
|
||||
InputStream::empty()
|
||||
};
|
||||
|
||||
let stream = async_stream! {
|
||||
while let Some(value) = inputs.next().await {
|
||||
let name = command.name.clone();
|
||||
let name_tag = command.name_tag.clone();
|
||||
let home_dir = dirs::home_dir();
|
||||
let path = &path;
|
||||
let args = command.args.clone();
|
||||
|
||||
let it_replacement = {
|
||||
if command.has_it_argument() {
|
||||
let empty_arg = ExternalArg {
|
||||
arg: "".to_string(),
|
||||
tag: name_tag.clone()
|
||||
};
|
||||
|
||||
let key = args.iter()
|
||||
.find(|arg| arg.looks_like_it())
|
||||
.unwrap_or_else(|| &empty_arg);
|
||||
|
||||
if args.iter().all(|arg| !arg.is_it()) {
|
||||
let key = match prepare_column_path_for_fetching_it_variable(&key) {
|
||||
Ok(keypath) => keypath,
|
||||
Err(reason) => {
|
||||
yield Ok(Value {
|
||||
value: UntaggedValue::Error(reason),
|
||||
tag: name_tag
|
||||
});
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
match crate::commands::get::get_column_path(&key, &value) {
|
||||
Ok(field) => {
|
||||
match nu_value_to_string(&command, &field) {
|
||||
Ok(val) => Some(val),
|
||||
Err(reason) => {
|
||||
yield Ok(Value {
|
||||
value: UntaggedValue::Error(reason),
|
||||
tag: name_tag
|
||||
});
|
||||
return;
|
||||
},
|
||||
}
|
||||
},
|
||||
Err(reason) => {
|
||||
yield Ok(Value {
|
||||
value: UntaggedValue::Error(reason),
|
||||
tag: name_tag
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
match nu_value_to_string(&command, &value) {
|
||||
Ok(val) => Some(val),
|
||||
Err(reason) => {
|
||||
yield Ok(Value {
|
||||
value: UntaggedValue::Error(reason),
|
||||
tag: name_tag
|
||||
});
|
||||
return;
|
||||
},
|
||||
}
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
};
|
||||
|
||||
let nu_replacement = {
|
||||
if command.has_nu_argument() {
|
||||
let empty_arg = ExternalArg {
|
||||
arg: "".to_string(),
|
||||
tag: name_tag.clone()
|
||||
};
|
||||
|
||||
let key = args.iter()
|
||||
.find(|arg| arg.looks_like_nu())
|
||||
.unwrap_or_else(|| &empty_arg);
|
||||
|
||||
let nu_var = match crate::evaluate::variables::nu(&name_tag) {
|
||||
Ok(variables) => variables,
|
||||
Err(reason) => {
|
||||
yield Ok(Value {
|
||||
value: UntaggedValue::Error(reason),
|
||||
tag: name_tag
|
||||
});
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
if args.iter().all(|arg| !arg.is_nu()) {
|
||||
let key = match prepare_column_path_for_fetching_nu_variable(&key) {
|
||||
Ok(keypath) => keypath,
|
||||
Err(reason) => {
|
||||
yield Ok(Value {
|
||||
value: UntaggedValue::Error(reason),
|
||||
tag: name_tag
|
||||
});
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
match crate::commands::get::get_column_path(&key, &nu_var) {
|
||||
Ok(field) => {
|
||||
match nu_value_to_string(&command, &field) {
|
||||
Ok(val) => Some(val),
|
||||
Err(reason) => {
|
||||
yield Ok(Value {
|
||||
value: UntaggedValue::Error(reason),
|
||||
tag: name_tag
|
||||
});
|
||||
return;
|
||||
},
|
||||
}
|
||||
},
|
||||
Err(reason) => {
|
||||
yield Ok(Value {
|
||||
value: UntaggedValue::Error(reason),
|
||||
tag: name_tag
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
match nu_value_to_string(&command, &nu_var) {
|
||||
Ok(val) => Some(val),
|
||||
Err(reason) => {
|
||||
yield Ok(Value {
|
||||
value: UntaggedValue::Error(reason),
|
||||
tag: name_tag
|
||||
});
|
||||
return;
|
||||
},
|
||||
}
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
};
|
||||
|
||||
let process_args = args.iter().filter_map(|arg| {
|
||||
if arg.chars().all(|c| c.is_whitespace()) {
|
||||
None
|
||||
} else {
|
||||
let arg = if arg.looks_like_it() {
|
||||
if let Some(mut value) = it_replacement.to_owned() {
|
||||
let mut value = expand_tilde(&value, || home_dir.as_ref()).as_ref().to_string();
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
value = {
|
||||
if argument_contains_whitespace(&value) && !argument_is_quoted(&value) {
|
||||
add_quotes(&value)
|
||||
} else {
|
||||
value
|
||||
}
|
||||
};
|
||||
}
|
||||
Some(value)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else if arg.looks_like_nu() {
|
||||
if let Some(mut value) = nu_replacement.to_owned() {
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
value = {
|
||||
if argument_contains_whitespace(&value) && !argument_is_quoted(&value) {
|
||||
add_quotes(&value)
|
||||
} else {
|
||||
value
|
||||
}
|
||||
};
|
||||
}
|
||||
Some(value)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
Some(arg.to_string())
|
||||
};
|
||||
|
||||
arg
|
||||
}
|
||||
}).collect::<Vec<String>>();
|
||||
|
||||
match spawn(&command, &path, &process_args[..], None, is_last) {
|
||||
Ok(res) => {
|
||||
if let Some(mut res) = res {
|
||||
while let Some(item) = res.next().await {
|
||||
yield Ok(item)
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(reason) => {
|
||||
yield Ok(Value {
|
||||
value: UntaggedValue::Error(reason),
|
||||
tag: name_tag
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(Some(stream.to_input_stream()))
|
||||
}
|
||||
|
||||
fn run_with_stdin(
|
||||
command: ExternalCommand,
|
||||
context: &mut Context,
|
||||
input: Option<InputStream>,
|
||||
is_last: bool,
|
||||
) -> Result<Option<InputStream>, ShellError> {
|
||||
let path = context.shell_manager.path();
|
||||
|
||||
let input = input
|
||||
.map(|input| trace_stream!(target: "nu::trace_stream::external::stdin", "input" = input));
|
||||
|
||||
let process_args = command
|
||||
.args
|
||||
.iter()
|
||||
.map(|arg| {
|
||||
let arg = expand_tilde(arg.deref(), dirs::home_dir);
|
||||
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
if argument_contains_whitespace(&arg) && argument_is_quoted(&arg) {
|
||||
if let Some(unquoted) = remove_quotes(&arg) {
|
||||
format!(r#""{}""#, unquoted)
|
||||
} else {
|
||||
arg.as_ref().to_string()
|
||||
}
|
||||
} else {
|
||||
arg.as_ref().to_string()
|
||||
}
|
||||
}
|
||||
#[cfg(windows)]
|
||||
{
|
||||
if let Some(unquoted) = remove_quotes(&arg) {
|
||||
unquoted.to_string()
|
||||
} else {
|
||||
arg.as_ref().to_string()
|
||||
}
|
||||
}
|
||||
})
|
||||
.collect::<Vec<String>>();
|
||||
|
||||
spawn(&command, &path, &process_args[..], input, is_last)
|
||||
}
|
||||
|
||||
fn spawn(
|
||||
command: &ExternalCommand,
|
||||
path: &str,
|
||||
args: &[String],
|
||||
input: Option<InputStream>,
|
||||
is_last: bool,
|
||||
) -> Result<Option<InputStream>, ShellError> {
|
||||
let command = command.clone();
|
||||
|
||||
let mut process = {
|
||||
#[cfg(windows)]
|
||||
{
|
||||
let mut process = Command::new("cmd");
|
||||
process.arg("/c");
|
||||
process.arg(&command.name);
|
||||
for arg in args {
|
||||
process.arg(&arg);
|
||||
}
|
||||
process
|
||||
}
|
||||
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
let cmd_with_args = vec![command.name.clone(), args.join(" ")].join(" ");
|
||||
let mut process = Command::new("sh");
|
||||
process.arg("-c").arg(cmd_with_args);
|
||||
process
|
||||
}
|
||||
};
|
||||
|
||||
process.current_dir(path);
|
||||
trace!(target: "nu::run::external", "cwd = {:?}", &path);
|
||||
|
||||
// We want stdout regardless of what
|
||||
// we are doing ($it case or pipe stdin)
|
||||
if !is_last {
|
||||
process.stdout(Stdio::piped());
|
||||
trace!(target: "nu::run::external", "set up stdout pipe");
|
||||
}
|
||||
|
||||
// open since we have some contents for stdin
|
||||
if input.is_some() {
|
||||
process.stdin(Stdio::piped());
|
||||
trace!(target: "nu::run::external", "set up stdin pipe");
|
||||
}
|
||||
|
||||
trace!(target: "nu::run::external", "built command {:?}", process);
|
||||
|
||||
// TODO Switch to async_std::process once it's stabilized
|
||||
if let Ok(mut child) = process.spawn() {
|
||||
let (tx, rx) = mpsc::sync_channel(0);
|
||||
|
||||
let mut stdin = child.stdin.take();
|
||||
|
||||
let stdin_write_tx = tx.clone();
|
||||
let stdout_read_tx = tx;
|
||||
let stdin_name_tag = command.name_tag.clone();
|
||||
let stdout_name_tag = command.name_tag;
|
||||
|
||||
std::thread::spawn(move || {
|
||||
if let Some(input) = input {
|
||||
let mut stdin_write = stdin
|
||||
.take()
|
||||
.expect("Internal error: could not get stdin pipe for external command");
|
||||
|
||||
for value in block_on_stream(input) {
|
||||
match &value.value {
|
||||
UntaggedValue::Primitive(Primitive::Nothing) => continue,
|
||||
UntaggedValue::Primitive(Primitive::String(s))
|
||||
| UntaggedValue::Primitive(Primitive::Line(s)) => {
|
||||
if let Err(e) = stdin_write.write(s.as_bytes()) {
|
||||
let message = format!("Unable to write to stdin (error = {})", e);
|
||||
|
||||
let _ = stdin_write_tx.send(Ok(Value {
|
||||
value: UntaggedValue::Error(ShellError::labeled_error(
|
||||
message,
|
||||
"application may have closed before completing pipeline",
|
||||
&stdin_name_tag,
|
||||
)),
|
||||
tag: stdin_name_tag,
|
||||
}));
|
||||
return Err(());
|
||||
}
|
||||
}
|
||||
UntaggedValue::Primitive(Primitive::Binary(b)) => {
|
||||
if let Err(e) = stdin_write.write(b) {
|
||||
let message = format!("Unable to write to stdin (error = {})", e);
|
||||
|
||||
let _ = stdin_write_tx.send(Ok(Value {
|
||||
value: UntaggedValue::Error(ShellError::labeled_error(
|
||||
message,
|
||||
"application may have closed before completing pipeline",
|
||||
&stdin_name_tag,
|
||||
)),
|
||||
tag: stdin_name_tag,
|
||||
}));
|
||||
return Err(());
|
||||
}
|
||||
}
|
||||
unsupported => {
|
||||
let _ = stdin_write_tx.send(Ok(Value {
|
||||
value: UntaggedValue::Error(ShellError::labeled_error(
|
||||
format!(
|
||||
"Received unexpected type from pipeline ({})",
|
||||
unsupported.type_name()
|
||||
),
|
||||
"expected a string",
|
||||
stdin_name_tag.clone(),
|
||||
)),
|
||||
tag: stdin_name_tag,
|
||||
}));
|
||||
return Err(());
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
});
|
||||
|
||||
std::thread::spawn(move || {
|
||||
if !is_last {
|
||||
let stdout = if let Some(stdout) = child.stdout.take() {
|
||||
stdout
|
||||
} else {
|
||||
let _ = stdout_read_tx.send(Ok(Value {
|
||||
value: UntaggedValue::Error(ShellError::labeled_error(
|
||||
"Can't redirect the stdout for external command",
|
||||
"can't redirect stdout",
|
||||
&stdout_name_tag,
|
||||
)),
|
||||
tag: stdout_name_tag,
|
||||
}));
|
||||
return Err(());
|
||||
};
|
||||
|
||||
let file = futures::io::AllowStdIo::new(stdout);
|
||||
let stream = FramedRead::new(file, MaybeTextCodec);
|
||||
|
||||
for line in block_on_stream(stream) {
|
||||
match line {
|
||||
Ok(line) => match line {
|
||||
StringOrBinary::String(s) => {
|
||||
let result = stdout_read_tx.send(Ok(Value {
|
||||
value: UntaggedValue::Primitive(Primitive::String(s.clone())),
|
||||
tag: stdout_name_tag.clone(),
|
||||
}));
|
||||
|
||||
if result.is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
StringOrBinary::Binary(b) => {
|
||||
let result = stdout_read_tx.send(Ok(Value {
|
||||
value: UntaggedValue::Primitive(Primitive::Binary(
|
||||
b.into_iter().collect(),
|
||||
)),
|
||||
tag: stdout_name_tag.clone(),
|
||||
}));
|
||||
|
||||
if result.is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
},
|
||||
Err(_) => {
|
||||
let _ = stdout_read_tx.send(Ok(Value {
|
||||
value: UntaggedValue::Error(ShellError::labeled_error(
|
||||
"Unable to read from stdout.",
|
||||
"unable to read from stdout",
|
||||
&stdout_name_tag,
|
||||
)),
|
||||
tag: stdout_name_tag.clone(),
|
||||
}));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// We can give an error when we see a non-zero exit code, but this is different
|
||||
// than what other shells will do.
|
||||
if child.wait().is_err() {
|
||||
let cfg = crate::data::config::config(Tag::unknown());
|
||||
if let Ok(cfg) = cfg {
|
||||
if cfg.contains_key("nonzero_exit_errors") {
|
||||
let _ = stdout_read_tx.send(Ok(Value {
|
||||
value: UntaggedValue::Error(ShellError::labeled_error(
|
||||
"External command failed",
|
||||
"command failed",
|
||||
&stdout_name_tag,
|
||||
)),
|
||||
tag: stdout_name_tag,
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
});
|
||||
|
||||
let stream = ThreadedReceiver::new(rx);
|
||||
Ok(Some(stream.to_input_stream()))
|
||||
} else {
|
||||
Err(ShellError::labeled_error(
|
||||
"Failed to spawn process",
|
||||
"failed to spawn",
|
||||
&command.name_tag,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
async fn did_find_command(name: &str) -> bool {
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
ichwh::which(name).await.unwrap_or(None).is_some()
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
{
|
||||
if ichwh::which(name).await.unwrap_or(None).is_some() {
|
||||
true
|
||||
} else {
|
||||
let cmd_builtins = [
|
||||
"call", "cls", "color", "date", "dir", "echo", "find", "hostname", "pause",
|
||||
"start", "time", "title", "ver", "copy", "mkdir", "rename", "rd", "rmdir", "type",
|
||||
];
|
||||
|
||||
cmd_builtins.contains(&name)
|
||||
}
|
||||
}
|
||||
}

fn expand_tilde<SI: ?Sized, P, HD>(input: &SI, home_dir: HD) -> std::borrow::Cow<str>
where
    SI: AsRef<str>,
    P: AsRef<std::path::Path>,
    HD: FnOnce() -> Option<P>,
{
    shellexpand::tilde_with_context(input, home_dir)
}

#[allow(unused)]
pub fn argument_contains_whitespace(argument: &str) -> bool {
    argument.chars().any(|c| c.is_whitespace())
}

fn argument_is_quoted(argument: &str) -> bool {
    if argument.len() < 2 {
        return false;
    }

    (argument.starts_with('"') && argument.ends_with('"'))
        || (argument.starts_with('\'') && argument.ends_with('\''))
}

#[allow(unused)]
fn add_quotes(argument: &str) -> String {
    format!("\"{}\"", argument)
}

fn remove_quotes(argument: &str) -> Option<&str> {
    if !argument_is_quoted(argument) {
        return None;
    }

    let size = argument.len();

    Some(&argument[1..size - 1])
}
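
// Round-trip sketch of the quoting helpers above (not part of the diff):
//     let quoted = add_quotes("hello world");              // => "\"hello world\""
//     assert!(argument_is_quoted(&quoted));
//     assert_eq!(remove_quotes(&quoted), Some("hello world"));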

#[allow(unused)]
fn shell_os_paths() -> Vec<std::path::PathBuf> {
    let mut original_paths = vec![];

    if let Some(paths) = std::env::var_os("PATH") {
        original_paths = std::env::split_paths(&paths).collect::<Vec<_>>();
    }

    original_paths
}

#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{
|
||||
add_quotes, argument_contains_whitespace, argument_is_quoted, expand_tilde, remove_quotes,
|
||||
run_external_command, Context,
|
||||
};
|
||||
use futures::executor::block_on;
|
||||
use nu_errors::ShellError;
|
||||
use nu_test_support::commands::ExternalBuilder;
|
||||
|
||||
// async fn read(mut stream: OutputStream) -> Option<Value> {
|
||||
// match stream.try_next().await {
|
||||
// Ok(val) => {
|
||||
// if let Some(val) = val {
|
||||
// val.raw_value()
|
||||
// } else {
|
||||
// None
|
||||
// }
|
||||
// }
|
||||
// Err(_) => None,
|
||||
// }
|
||||
// }
|
||||
|
||||
async fn non_existent_run() -> Result<(), ShellError> {
|
||||
let cmd = ExternalBuilder::for_name("i_dont_exist.exe").build();
|
||||
|
||||
let mut ctx = Context::basic().expect("There was a problem creating a basic context.");
|
||||
|
||||
assert!(run_external_command(cmd, &mut ctx, None, false)
|
||||
.await
|
||||
.is_err());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// async fn failure_run() -> Result<(), ShellError> {
|
||||
// let cmd = ExternalBuilder::for_name("fail").build();
|
||||
|
||||
// let mut ctx = Context::basic().expect("There was a problem creating a basic context.");
|
||||
// let stream = run_external_command(cmd, &mut ctx, None, false)
|
||||
// .await?
|
||||
// .expect("There was a problem running the external command.");
|
||||
|
||||
// match read(stream.into()).await {
|
||||
// Some(Value {
|
||||
// value: UntaggedValue::Error(_),
|
||||
// ..
|
||||
// }) => {}
|
||||
// None | _ => panic!("Command didn't fail."),
|
||||
// }
|
||||
|
||||
// Ok(())
|
||||
// }
|
||||
|
||||
// #[test]
|
||||
// fn identifies_command_failed() -> Result<(), ShellError> {
|
||||
// block_on(failure_run())
|
||||
// }
|
||||
|
||||
#[test]
|
||||
fn identifies_command_not_found() -> Result<(), ShellError> {
|
||||
block_on(non_existent_run())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn checks_contains_whitespace_from_argument_to_be_passed_in() {
|
||||
assert_eq!(argument_contains_whitespace("andrés"), false);
|
||||
assert_eq!(argument_contains_whitespace("and rés"), true);
|
||||
assert_eq!(argument_contains_whitespace(r#"and\ rés"#), true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn checks_quotes_from_argument_to_be_passed_in() {
|
||||
assert_eq!(argument_is_quoted(""), false);
|
||||
|
||||
assert_eq!(argument_is_quoted("'"), false);
|
||||
assert_eq!(argument_is_quoted("'a"), false);
|
||||
assert_eq!(argument_is_quoted("a"), false);
|
||||
assert_eq!(argument_is_quoted("a'"), false);
|
||||
assert_eq!(argument_is_quoted("''"), true);
|
||||
|
||||
assert_eq!(argument_is_quoted(r#"""#), false);
|
||||
assert_eq!(argument_is_quoted(r#""a"#), false);
|
||||
assert_eq!(argument_is_quoted(r#"a"#), false);
|
||||
assert_eq!(argument_is_quoted(r#"a""#), false);
|
||||
assert_eq!(argument_is_quoted(r#""""#), true);
|
||||
|
||||
assert_eq!(argument_is_quoted("'andrés"), false);
|
||||
assert_eq!(argument_is_quoted("andrés'"), false);
|
||||
assert_eq!(argument_is_quoted(r#""andrés"#), false);
|
||||
assert_eq!(argument_is_quoted(r#"andrés""#), false);
|
||||
assert_eq!(argument_is_quoted("'andrés'"), true);
|
||||
assert_eq!(argument_is_quoted(r#""andrés""#), true);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn adds_quotes_to_argument_to_be_passed_in() {
|
||||
assert_eq!(add_quotes("andrés"), "\"andrés\"");
|
||||
//assert_eq!(add_quotes("\"andrés\""), "\"andrés\"");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn strips_quotes_from_argument_to_be_passed_in() {
|
||||
assert_eq!(remove_quotes(""), None);
|
||||
|
||||
assert_eq!(remove_quotes("'"), None);
|
||||
assert_eq!(remove_quotes("'a"), None);
|
||||
assert_eq!(remove_quotes("a"), None);
|
||||
assert_eq!(remove_quotes("a'"), None);
|
||||
assert_eq!(remove_quotes("''"), Some(""));
|
||||
|
||||
assert_eq!(remove_quotes(r#"""#), None);
|
||||
assert_eq!(remove_quotes(r#""a"#), None);
|
||||
assert_eq!(remove_quotes(r#"a"#), None);
|
||||
assert_eq!(remove_quotes(r#"a""#), None);
|
||||
assert_eq!(remove_quotes(r#""""#), Some(""));
|
||||
|
||||
assert_eq!(remove_quotes("'andrés"), None);
|
||||
assert_eq!(remove_quotes("andrés'"), None);
|
||||
assert_eq!(remove_quotes(r#""andrés"#), None);
|
||||
assert_eq!(remove_quotes(r#"andrés""#), None);
|
||||
assert_eq!(remove_quotes("'andrés'"), Some("andrés"));
|
||||
assert_eq!(remove_quotes(r#""andrés""#), Some("andrés"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn expands_tilde_if_starts_with_tilde_character() {
|
||||
assert_eq!(
|
||||
expand_tilde("~", || Some(std::path::Path::new("the_path_to_nu_light"))),
|
||||
"the_path_to_nu_light"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn does_not_expand_tilde_if_tilde_is_not_first_character() {
|
||||
assert_eq!(
|
||||
expand_tilde("1~1", || Some(std::path::Path::new("the_path_to_nu_light"))),
|
||||
"1~1"
|
||||
);
|
||||
}
|
||||
}
|
@ -5,28 +5,29 @@ use nu_errors::ShellError;
|
||||
use nu_parser::InternalCommand;
|
||||
use nu_protocol::{CommandAction, Primitive, ReturnSuccess, UntaggedValue, Value};
|
||||
|
||||
use super::ClassifiedInputStream;
|
||||
|
||||
pub(crate) async fn run_internal_command(
|
||||
pub(crate) fn run_internal_command(
|
||||
command: InternalCommand,
|
||||
context: &mut Context,
|
||||
input: ClassifiedInputStream,
|
||||
input: Option<InputStream>,
|
||||
source: Text,
|
||||
) -> Result<InputStream, ShellError> {
|
||||
) -> Result<Option<InputStream>, ShellError> {
|
||||
if log_enabled!(log::Level::Trace) {
|
||||
trace!(target: "nu::run::internal", "->");
|
||||
trace!(target: "nu::run::internal", "{}", command.name);
|
||||
trace!(target: "nu::run::internal", "{}", command.args.debug(&source));
|
||||
}
|
||||
|
||||
let objects: InputStream =
|
||||
trace_stream!(target: "nu::trace_stream::internal", "input" = input.objects);
|
||||
let objects: InputStream = if let Some(input) = input {
|
||||
trace_stream!(target: "nu::trace_stream::internal", "input" = input)
|
||||
} else {
|
||||
InputStream::empty()
|
||||
};
|
||||
|
||||
let internal_command = context.expect_command(&command.name);
|
||||
|
||||
let result = {
|
||||
context.run_command(
|
||||
internal_command,
|
||||
internal_command?,
|
||||
command.name_tag.clone(),
|
||||
command.args.clone(),
|
||||
&source,
|
||||
@ -103,12 +104,12 @@ pub(crate) async fn run_internal_command(
|
||||
HelpShell::for_command(
|
||||
UntaggedValue::string(cmd).into_value(tag),
|
||||
&context.registry(),
|
||||
).unwrap(),
|
||||
)?,
|
||||
));
|
||||
}
|
||||
_ => {
|
||||
context.shell_manager.insert_at_current(Box::new(
|
||||
HelpShell::index(&context.registry()).unwrap(),
|
||||
HelpShell::index(&context.registry())?,
|
||||
));
|
||||
}
|
||||
}
|
||||
@ -120,7 +121,7 @@ pub(crate) async fn run_internal_command(
|
||||
}
|
||||
CommandAction::EnterShell(location) => {
|
||||
context.shell_manager.insert_at_current(Box::new(
|
||||
FilesystemShell::with_location(location, context.registry().clone()).unwrap(),
|
||||
FilesystemShell::with_location(location, context.registry().clone()),
|
||||
));
|
||||
}
|
||||
CommandAction::PreviousShell => {
|
||||
@ -137,6 +138,14 @@ pub(crate) async fn run_internal_command(
|
||||
}
|
||||
},
|
||||
|
||||
Ok(ReturnSuccess::Value(Value {
|
||||
value: UntaggedValue::Error(err),
|
||||
..
|
||||
})) => {
|
||||
context.error(err);
|
||||
break;
|
||||
}
|
||||
|
||||
Ok(ReturnSuccess::Value(v)) => {
|
||||
yielded = true;
|
||||
yield Ok(v);
|
||||
@ -148,10 +157,10 @@ pub(crate) async fn run_internal_command(
|
||||
let doc = PrettyDebug::pretty_doc(&v);
|
||||
let mut buffer = termcolor::Buffer::ansi();
|
||||
|
||||
doc.render_raw(
|
||||
let _ = doc.render_raw(
|
||||
context.with_host(|host| host.width() - 5),
|
||||
&mut nu_source::TermColored::new(&mut buffer),
|
||||
).unwrap();
|
||||
);
|
||||
|
||||
let value = String::from_utf8_lossy(buffer.as_slice());
|
||||
|
||||
@ -166,5 +175,5 @@ pub(crate) async fn run_internal_command(
|
||||
}
|
||||
};
|
||||
|
||||
Ok(stream.to_input_stream())
|
||||
Ok(Some(stream.to_input_stream()))
|
||||
}
|
7
crates/nu-cli/src/commands/classified/mod.rs
Normal file
@ -0,0 +1,7 @@
|
||||
mod dynamic;
|
||||
pub(crate) mod external;
|
||||
pub(crate) mod internal;
|
||||
pub(crate) mod pipeline;
|
||||
|
||||
#[allow(unused_imports)]
|
||||
pub(crate) use dynamic::Command as DynamicCommand;
|
50
crates/nu-cli/src/commands/classified/pipeline.rs
Normal file
@@ -0,0 +1,50 @@
use crate::commands::classified::external::run_external_command;
use crate::commands::classified::internal::run_internal_command;
use crate::context::Context;
use crate::stream::InputStream;
use nu_errors::ShellError;
use nu_parser::{ClassifiedCommand, ClassifiedPipeline};
use nu_source::Text;

pub(crate) async fn run_pipeline(
    pipeline: ClassifiedPipeline,
    ctx: &mut Context,
    mut input: Option<InputStream>,
    line: &str,
) -> Result<Option<InputStream>, ShellError> {
    let mut iter = pipeline.commands.list.into_iter().peekable();

    loop {
        let item: Option<ClassifiedCommand> = iter.next();
        let next: Option<&ClassifiedCommand> = iter.peek();

        input = match (item, next) {
            (Some(ClassifiedCommand::Dynamic(_)), _) | (_, Some(ClassifiedCommand::Dynamic(_))) => {
                return Err(ShellError::unimplemented("Dynamic commands"))
            }

            (Some(ClassifiedCommand::Expr(_)), _) | (_, Some(ClassifiedCommand::Expr(_))) => {
                return Err(ShellError::unimplemented("Expression-only commands"))
            }

            (Some(ClassifiedCommand::Error(err)), _) => return Err(err.into()),
            (_, Some(ClassifiedCommand::Error(err))) => return Err(err.clone().into()),

            (Some(ClassifiedCommand::Internal(left)), _) => {
                run_internal_command(left, ctx, input, Text::from(line))?
            }

            (Some(ClassifiedCommand::External(left)), None) => {
                run_external_command(left, ctx, input, true).await?
            }

            (Some(ClassifiedCommand::External(left)), _) => {
                run_external_command(left, ctx, input, false).await?
            }

            (None, _) => break,
        };
    }

    Ok(input)
}
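
// Note (sketch, not part of the diff): the `iter.peek()` above is what decides the final
// `bool` argument — only the last stage (where `next` is `None`) runs the external command
// with `is_last = true`, which pairs with the `if !is_last` stdout handling in external.rs
// earlier in this diff.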
40
crates/nu-cli/src/commands/clear.rs
Normal file
@@ -0,0 +1,40 @@
use crate::commands::WholeStreamCommand;
use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::Signature;
use std::process::Command;

pub struct Clear;

impl WholeStreamCommand for Clear {
    fn name(&self) -> &str {
        "clear"
    }
    fn signature(&self) -> Signature {
        Signature::build("clear")
    }
    fn usage(&self) -> &str {
        "clears the terminal"
    }
    fn run(
        &self,
        args: CommandArgs,
        registry: &CommandRegistry,
    ) -> Result<OutputStream, ShellError> {
        clear(args, registry)
    }
}
fn clear(_args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
    if cfg!(windows) {
        Command::new("cmd")
            .args(&["/C", "cls"])
            .status()
            .expect("failed to execute process");
    } else if cfg!(unix) {
        Command::new("/bin/sh")
            .args(&["-c", "clear"])
            .status()
            .expect("failed to execute process");
    }
    Ok(OutputStream::empty())
}
@ -55,35 +55,52 @@ pub mod clipboard {
|
||||
}
|
||||
|
||||
async fn inner_clip(input: Vec<Value>, name: Tag) -> OutputStream {
|
||||
let mut clip_context: ClipboardContext = ClipboardProvider::new().unwrap();
|
||||
let mut new_copy_data = String::new();
|
||||
if let Ok(clip_context) = ClipboardProvider::new() {
|
||||
let mut clip_context: ClipboardContext = clip_context;
|
||||
let mut new_copy_data = String::new();
|
||||
|
||||
if input.len() > 0 {
|
||||
let mut first = true;
|
||||
for i in input.iter() {
|
||||
if !first {
|
||||
new_copy_data.push_str("\n");
|
||||
} else {
|
||||
first = false;
|
||||
}
|
||||
|
||||
let string: String = match i.as_string() {
|
||||
Ok(string) => string.to_string(),
|
||||
Err(_) => {
|
||||
return OutputStream::one(Err(ShellError::labeled_error(
|
||||
"Given non-string data",
|
||||
"expected strings from pipeline",
|
||||
name,
|
||||
)))
|
||||
if !input.is_empty() {
|
||||
let mut first = true;
|
||||
for i in input.iter() {
|
||||
if !first {
|
||||
new_copy_data.push_str("\n");
|
||||
} else {
|
||||
first = false;
|
||||
}
|
||||
};
|
||||
|
||||
new_copy_data.push_str(&string);
|
||||
let string: String = match i.as_string() {
|
||||
Ok(string) => string.to_string(),
|
||||
Err(_) => {
|
||||
return OutputStream::one(Err(ShellError::labeled_error(
|
||||
"Given non-string data",
|
||||
"expected strings from pipeline",
|
||||
name,
|
||||
)))
|
||||
}
|
||||
};
|
||||
|
||||
new_copy_data.push_str(&string);
|
||||
}
|
||||
}
|
||||
|
||||
match clip_context.set_contents(new_copy_data) {
|
||||
Ok(_) => {}
|
||||
Err(_) => {
|
||||
return OutputStream::one(Err(ShellError::labeled_error(
|
||||
"Could not set contents of clipboard",
|
||||
"could not set contents of clipboard",
|
||||
name,
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
OutputStream::empty()
|
||||
} else {
|
||||
OutputStream::one(Err(ShellError::labeled_error(
|
||||
"Could not open clipboard",
|
||||
"could not open clipboard",
|
||||
name,
|
||||
)))
|
||||
}
|
||||
|
||||
clip_context.set_contents(new_copy_data).unwrap();
|
||||
|
||||
OutputStream::empty()
|
||||
}
|
||||
}
|
@ -1,3 +1,4 @@
|
||||
use crate::commands::help::get_help;
|
||||
use crate::context::CommandRegistry;
|
||||
use crate::deserializer::ConfigDeserializer;
|
||||
use crate::evaluate::evaluate_args::evaluate_args;
|
||||
@ -31,12 +32,17 @@ impl UnevaluatedCallInfo {
|
||||
name_tag: self.name_tag,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn switch_present(&self, switch: &str) -> bool {
|
||||
self.args.switch_preset(switch)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait CallInfoExt {
|
||||
fn process<'de, T: Deserialize<'de>>(
|
||||
&self,
|
||||
shell_manager: &ShellManager,
|
||||
ctrl_c: Arc<AtomicBool>,
|
||||
callback: fn(T, &RunnablePerItemContext) -> Result<OutputStream, ShellError>,
|
||||
) -> Result<RunnablePerItemArgs<T>, ShellError>;
|
||||
}
|
||||
@ -45,6 +51,7 @@ impl CallInfoExt for CallInfo {
|
||||
fn process<'de, T: Deserialize<'de>>(
|
||||
&self,
|
||||
shell_manager: &ShellManager,
|
||||
ctrl_c: Arc<AtomicBool>,
|
||||
callback: fn(T, &RunnablePerItemContext) -> Result<OutputStream, ShellError>,
|
||||
) -> Result<RunnablePerItemArgs<T>, ShellError> {
|
||||
let mut deserializer = ConfigDeserializer::from_call_info(self.clone());
|
||||
@ -54,6 +61,7 @@ impl CallInfoExt for CallInfo {
|
||||
context: RunnablePerItemContext {
|
||||
shell_manager: shell_manager.clone(),
|
||||
name: self.name_tag.clone(),
|
||||
ctrl_c,
|
||||
},
|
||||
callback,
|
||||
})
|
||||
@ -63,7 +71,7 @@ impl CallInfoExt for CallInfo {
|
||||
#[derive(Getters)]
|
||||
#[get = "pub(crate)"]
|
||||
pub struct CommandArgs {
|
||||
pub host: Arc<Mutex<Box<dyn Host>>>,
|
||||
pub host: Arc<parking_lot::Mutex<Box<dyn Host>>>,
|
||||
pub ctrl_c: Arc<AtomicBool>,
|
||||
pub shell_manager: ShellManager,
|
||||
pub call_info: UnevaluatedCallInfo,
|
||||
@ -73,14 +81,14 @@ pub struct CommandArgs {
|
||||
#[derive(Getters, Clone)]
|
||||
#[get = "pub(crate)"]
|
||||
pub struct RawCommandArgs {
|
||||
pub host: Arc<Mutex<Box<dyn Host>>>,
|
||||
pub host: Arc<parking_lot::Mutex<Box<dyn Host>>>,
|
||||
pub ctrl_c: Arc<AtomicBool>,
|
||||
pub shell_manager: ShellManager,
|
||||
pub call_info: UnevaluatedCallInfo,
|
||||
}
|
||||
|
||||
impl RawCommandArgs {
|
||||
pub fn with_input(self, input: Vec<Value>) -> CommandArgs {
|
||||
pub fn with_input(self, input: impl Into<InputStream>) -> CommandArgs {
|
||||
CommandArgs {
|
||||
host: self.host,
|
||||
ctrl_c: self.ctrl_c,
|
||||
@ -192,7 +200,7 @@ impl CommandArgs {
|
||||
|
||||
let (input, args) = args.split();
|
||||
let name_tag = args.call_info.name_tag;
|
||||
let mut deserializer = ConfigDeserializer::from_call_info(call_info.clone());
|
||||
let mut deserializer = ConfigDeserializer::from_call_info(call_info);
|
||||
|
||||
Ok(RunnableRawArgs {
|
||||
args: T::deserialize(&mut deserializer)?,
|
||||
@ -214,12 +222,13 @@ impl CommandArgs {
|
||||
pub struct RunnablePerItemContext {
|
||||
pub shell_manager: ShellManager,
|
||||
pub name: Tag,
|
||||
pub ctrl_c: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
pub struct RunnableContext {
|
||||
pub input: InputStream,
|
||||
pub shell_manager: ShellManager,
|
||||
pub host: Arc<Mutex<Box<dyn Host>>>,
|
||||
pub host: Arc<parking_lot::Mutex<Box<dyn Host>>>,
|
||||
pub source: Text,
|
||||
pub ctrl_c: Arc<AtomicBool>,
|
||||
pub commands: CommandRegistry,
|
||||
@ -286,7 +295,7 @@ impl Deref for EvaluatedWholeStreamCommandArgs {
|
||||
|
||||
impl EvaluatedWholeStreamCommandArgs {
|
||||
pub fn new(
|
||||
host: Arc<Mutex<dyn Host>>,
|
||||
host: Arc<parking_lot::Mutex<dyn Host>>,
|
||||
ctrl_c: Arc<AtomicBool>,
|
||||
shell_manager: ShellManager,
|
||||
call_info: CallInfo,
|
||||
@ -335,7 +344,7 @@ impl Deref for EvaluatedFilterCommandArgs {
|
||||
|
||||
impl EvaluatedFilterCommandArgs {
|
||||
pub fn new(
|
||||
host: Arc<Mutex<dyn Host>>,
|
||||
host: Arc<parking_lot::Mutex<dyn Host>>,
|
||||
ctrl_c: Arc<AtomicBool>,
|
||||
shell_manager: ShellManager,
|
||||
call_info: CallInfo,
|
||||
@ -354,7 +363,7 @@ impl EvaluatedFilterCommandArgs {
|
||||
#[derive(Getters, new)]
|
||||
#[get = "pub(crate)"]
|
||||
pub struct EvaluatedCommandArgs {
|
||||
pub host: Arc<Mutex<dyn Host>>,
|
||||
pub host: Arc<parking_lot::Mutex<dyn Host>>,
|
||||
pub ctrl_c: Arc<AtomicBool>,
|
||||
pub shell_manager: ShellManager,
|
||||
pub call_info: CallInfo,
|
||||
@ -476,12 +485,18 @@ impl Command {
|
||||
}
|
||||
|
||||
pub fn run(&self, args: CommandArgs, registry: &CommandRegistry) -> OutputStream {
|
||||
match self {
|
||||
Command::WholeStream(command) => match command.run(args, registry) {
|
||||
Ok(stream) => stream,
|
||||
Err(err) => OutputStream::one(Err(err)),
|
||||
},
|
||||
Command::PerItem(command) => self.run_helper(command.clone(), args, registry.clone()),
|
||||
if args.call_info.switch_present("help") {
|
||||
get_help(self.name(), self.usage(), self.signature()).into()
|
||||
} else {
|
||||
match self {
|
||||
Command::WholeStream(command) => match command.run(args, registry) {
|
||||
Ok(stream) => stream,
|
||||
Err(err) => OutputStream::one(Err(err)),
|
||||
},
|
||||
Command::PerItem(command) => {
|
||||
self.run_helper(command.clone(), args, registry.clone())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -505,11 +520,16 @@ impl Command {
|
||||
let call_info = raw_args
|
||||
.clone()
|
||||
.call_info
|
||||
.evaluate(®istry, &Scope::it_value(x.clone()))
|
||||
.unwrap();
|
||||
match command.run(&call_info, ®istry, &raw_args, x) {
|
||||
Ok(o) => o,
|
||||
Err(e) => VecDeque::from(vec![ReturnValue::Err(e)]).to_output_stream(),
|
||||
.evaluate(®istry, &Scope::it_value(x.clone()));
|
||||
|
||||
match call_info {
|
||||
Ok(call_info) => match command.run(&call_info, ®istry, &raw_args, x) {
|
||||
Ok(o) => o,
|
||||
Err(e) => {
|
||||
futures::stream::iter(vec![ReturnValue::Err(e)]).to_output_stream()
|
||||
}
|
||||
},
|
||||
Err(e) => futures::stream::iter(vec![ReturnValue::Err(e)]).to_output_stream(),
|
||||
}
|
||||
})
|
||||
.flatten();
|
||||
@ -552,8 +572,7 @@ impl WholeStreamCommand for FnFilterCommand {
|
||||
input,
|
||||
} = args;
|
||||
|
||||
let host: Arc<Mutex<dyn Host>> = host.clone();
|
||||
let shell_manager = shell_manager.clone();
|
||||
let host: Arc<parking_lot::Mutex<dyn Host>> = host.clone();
|
||||
let registry: CommandRegistry = registry.clone();
|
||||
let func = self.func;
|
||||
|
@ -31,21 +31,34 @@ impl WholeStreamCommand for Config {
|
||||
"load",
|
||||
SyntaxShape::Path,
|
||||
"load the config from the path give",
|
||||
Some('l'),
|
||||
)
|
||||
.named(
|
||||
"set",
|
||||
SyntaxShape::Any,
|
||||
"set a value in the config, eg) --set [key value]",
|
||||
Some('s'),
|
||||
)
|
||||
.named(
|
||||
"set_into",
|
||||
SyntaxShape::Member,
|
||||
"sets a variable from values in the pipeline",
|
||||
Some('i'),
|
||||
)
|
||||
.named("get", SyntaxShape::Any, "get a value from the config")
|
||||
.named("remove", SyntaxShape::Any, "remove a value from the config")
|
||||
.switch("clear", "clear the config")
|
||||
.switch("path", "return the path to the config file")
|
||||
.named(
|
||||
"get",
|
||||
SyntaxShape::Any,
|
||||
"get a value from the config",
|
||||
Some('g'),
|
||||
)
|
||||
.named(
|
||||
"remove",
|
||||
SyntaxShape::Any,
|
||||
"remove a value from the config",
|
||||
Some('r'),
|
||||
)
|
||||
.switch("clear", "clear the config", Some('c'))
|
||||
.switch("path", "return the path to the config file", Some('p'))
|
||||
}
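// Example invocations implied by these flags (sketch; `some_key` and `some_value` are placeholders):
//     config --set [some_key some_value]     # -s
//     config --get some_key                  # -g
//     config --remove some_key               # -r
//     config --clear                         # -c
//     config --path                          # -p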
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -155,7 +168,7 @@ pub fn config(
|
||||
|
||||
if result.contains_key(&key) {
|
||||
result.swap_remove(&key);
|
||||
config::write(&result, &configuration).unwrap();
|
||||
config::write(&result, &configuration)?
|
||||
} else {
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Key does not exist in config",
|
@ -24,7 +24,11 @@ impl PerItemCommand for Cpy {
|
||||
Signature::build("cp")
|
||||
.required("src", SyntaxShape::Pattern, "the place to copy from")
|
||||
.required("dst", SyntaxShape::Path, "the place to copy to")
|
||||
.switch("recursive", "copy recursively through subdirectories")
|
||||
.switch(
|
||||
"recursive",
|
||||
"copy recursively through subdirectories",
|
||||
Some('r'),
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -38,7 +42,9 @@ impl PerItemCommand for Cpy {
|
||||
raw_args: &RawCommandArgs,
|
||||
_input: Value,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
call_info.process(&raw_args.shell_manager, cp)?.run()
|
||||
call_info
|
||||
.process(&raw_args.shell_manager, raw_args.ctrl_c.clone(), cp)?
|
||||
.run()
|
||||
}
|
||||
}
|
||||
|
@ -18,8 +18,8 @@ impl WholeStreamCommand for Date {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("date")
|
||||
.switch("utc", "use universal time (UTC)")
|
||||
.switch("local", "use the local time")
|
||||
.switch("utc", "use universal time (UTC)", Some('u'))
|
||||
.switch("local", "use the local time", Some('l'))
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -91,5 +91,5 @@ pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
|
||||
|
||||
date_out.push_back(value);
|
||||
|
||||
Ok(date_out.to_output_stream())
|
||||
Ok(futures::stream::iter(date_out).to_output_stream())
|
||||
}
|
@ -6,7 +6,9 @@ use nu_protocol::{ReturnSuccess, Signature, UntaggedValue};
|
||||
pub struct Debug;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct DebugArgs {}
|
||||
pub struct DebugArgs {
|
||||
raw: bool,
|
||||
}
|
||||
|
||||
impl WholeStreamCommand for Debug {
|
||||
fn name(&self) -> &str {
|
||||
@ -14,7 +16,7 @@ impl WholeStreamCommand for Debug {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("debug")
|
||||
Signature::build("debug").switch("raw", "Prints the raw value representation.", Some('r'))
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -31,13 +33,19 @@ impl WholeStreamCommand for Debug {
|
||||
}
|
||||
|
||||
fn debug_value(
|
||||
_args: DebugArgs,
|
||||
DebugArgs { raw }: DebugArgs,
|
||||
RunnableContext { input, .. }: RunnableContext,
|
||||
) -> Result<impl ToOutputStream, ShellError> {
|
||||
Ok(input
|
||||
.values
|
||||
.map(|v| {
|
||||
ReturnSuccess::value(UntaggedValue::string(format!("{:?}", v)).into_untagged_value())
|
||||
.map(move |v| {
|
||||
if raw {
|
||||
ReturnSuccess::value(
|
||||
UntaggedValue::string(format!("{:#?}", v)).into_untagged_value(),
|
||||
)
|
||||
} else {
|
||||
ReturnSuccess::debug_value(v)
|
||||
}
|
||||
})
|
||||
.to_output_stream())
|
||||
}
|
@ -67,7 +67,8 @@ fn default(
|
||||
} else {
|
||||
result.push_back(ReturnSuccess::value(item));
|
||||
}
|
||||
result
|
||||
|
||||
futures::stream::iter(result)
|
||||
})
|
||||
.flatten();
|
||||
|
376
crates/nu-cli/src/commands/du.rs
Normal file
@ -0,0 +1,376 @@
|
||||
extern crate filesize;
|
||||
|
||||
use crate::commands::command::RunnablePerItemContext;
|
||||
use crate::prelude::*;
|
||||
use filesize::file_real_size_fast;
|
||||
use glob::*;
|
||||
use indexmap::map::IndexMap;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{CallInfo, ReturnSuccess, Signature, SyntaxShape, UntaggedValue, Value};
|
||||
use nu_source::Tagged;
|
||||
use std::path::PathBuf;
|
||||
|
||||
const NAME: &str = "du";
|
||||
const GLOB_PARAMS: MatchOptions = MatchOptions {
|
||||
case_sensitive: true,
|
||||
require_literal_separator: true,
|
||||
require_literal_leading_dot: false,
|
||||
};
|
||||
|
||||
pub struct Du;
|
||||
|
||||
#[derive(Deserialize, Clone)]
|
||||
pub struct DuArgs {
|
||||
path: Option<Tagged<PathBuf>>,
|
||||
all: bool,
|
||||
deref: bool,
|
||||
exclude: Option<Tagged<String>>,
|
||||
#[serde(rename = "max-depth")]
|
||||
max_depth: Option<Tagged<u64>>,
|
||||
#[serde(rename = "min-size")]
|
||||
min_size: Option<Tagged<u64>>,
|
||||
}
|
||||
|
||||
impl PerItemCommand for Du {
|
||||
fn name(&self) -> &str {
|
||||
NAME
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(NAME)
|
||||
.optional("path", SyntaxShape::Pattern, "starting directory")
|
||||
.switch(
|
||||
"all",
|
||||
"Output file sizes as well as directory sizes",
|
||||
Some('a'),
|
||||
)
|
||||
.switch(
|
||||
"deref",
|
||||
"Dereference symlinks to their targets for size",
|
||||
Some('r'),
|
||||
)
|
||||
.named(
|
||||
"exclude",
|
||||
SyntaxShape::Pattern,
|
||||
"Exclude these file names",
|
||||
Some('x'),
|
||||
)
|
||||
.named(
|
||||
"max-depth",
|
||||
SyntaxShape::Int,
|
||||
"Directory recursion limit",
|
||||
Some('d'),
|
||||
)
|
||||
.named(
|
||||
"min-size",
|
||||
SyntaxShape::Int,
|
||||
"Exclude files below this size",
|
||||
Some('m'),
|
||||
)
|
||||
}
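// Example invocations implied by this signature (sketch; paths, patterns and the size value
// are placeholders):
//     du                              # directory totals under the current directory
//     du --all --max-depth 2          # include per-file rows, limit recursion depth
//     du src --exclude "*.lock" --min-size 1024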
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Find disk usage sizes of specified items"
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
call_info: &CallInfo,
|
||||
_registry: &CommandRegistry,
|
||||
raw_args: &RawCommandArgs,
|
||||
_input: Value,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
call_info
|
||||
.process(&raw_args.shell_manager, raw_args.ctrl_c.clone(), du)?
|
||||
.run()
|
||||
}
|
||||
}
|
||||
|
||||
fn du(args: DuArgs, ctx: &RunnablePerItemContext) -> Result<OutputStream, ShellError> {
|
||||
let tag = ctx.name.clone();
|
||||
|
||||
let exclude = args.exclude.map_or(Ok(None), move |x| {
|
||||
Pattern::new(&x.item)
|
||||
.map(Option::Some)
|
||||
.map_err(|e| ShellError::labeled_error(e.msg, "glob error", x.tag.clone()))
|
||||
})?;
|
||||
|
||||
let include_files = args.all;
|
||||
let paths = match args.path {
|
||||
Some(p) => {
|
||||
let p = p.item.to_str().expect("Why isn't this encoded properly?");
|
||||
glob::glob_with(p, GLOB_PARAMS)
|
||||
}
|
||||
None => glob::glob_with("*", GLOB_PARAMS),
|
||||
}
|
||||
.map_err(|e| ShellError::labeled_error(e.msg, "glob error", tag.clone()))?
|
||||
.filter(move |p| {
|
||||
if include_files {
|
||||
true
|
||||
} else {
|
||||
match p {
|
||||
Ok(f) if f.is_dir() => true,
|
||||
Err(e) if e.path().is_dir() => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
})
|
||||
.map(|v| v.map_err(glob_err_into));
|
||||
|
||||
let ctrl_c = ctx.ctrl_c.clone();
|
||||
let all = args.all;
|
||||
let deref = args.deref;
|
||||
let max_depth = args.max_depth.map(|f| f.item);
|
||||
let min_size = args.min_size.map(|f| f.item);
|
||||
|
||||
let params = DirBuilder {
|
||||
tag: tag.clone(),
|
||||
min: min_size,
|
||||
deref,
|
||||
exclude,
|
||||
all,
|
||||
};
|
||||
|
||||
let stream = futures::stream::iter(paths)
|
||||
.interruptible(ctrl_c)
|
||||
.map(move |path| match path {
|
||||
Ok(p) => {
|
||||
if p.is_dir() {
|
||||
Ok(ReturnSuccess::Value(
|
||||
DirInfo::new(p, ¶ms, max_depth).into(),
|
||||
))
|
||||
} else {
|
||||
FileInfo::new(p, deref, tag.clone()).map(|v| ReturnSuccess::Value(v.into()))
|
||||
}
|
||||
}
|
||||
Err(e) => Err(e),
|
||||
});
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
||||
|
||||
struct DirBuilder {
|
||||
tag: Tag,
|
||||
min: Option<u64>,
|
||||
deref: bool,
|
||||
exclude: Option<Pattern>,
|
||||
all: bool,
|
||||
}
|
||||
|
||||
struct DirInfo {
|
||||
dirs: Vec<DirInfo>,
|
||||
files: Vec<FileInfo>,
|
||||
errors: Vec<ShellError>,
|
||||
size: u64,
|
||||
blocks: u64,
|
||||
path: PathBuf,
|
||||
tag: Tag,
|
||||
}
|
||||
|
||||
struct FileInfo {
|
||||
path: PathBuf,
|
||||
size: u64,
|
||||
blocks: Option<u64>,
|
||||
tag: Tag,
|
||||
}
|
||||
|
||||
impl FileInfo {
|
||||
fn new(path: impl Into<PathBuf>, deref: bool, tag: Tag) -> Result<Self, ShellError> {
|
||||
let path = path.into();
|
||||
let m = if deref {
|
||||
std::fs::metadata(&path)
|
||||
} else {
|
||||
std::fs::symlink_metadata(&path)
|
||||
};
|
||||
|
||||
match m {
|
||||
Ok(d) => {
|
||||
let block_size = file_real_size_fast(&path, &d).ok();
|
||||
|
||||
Ok(FileInfo {
|
||||
path,
|
||||
blocks: block_size,
|
||||
size: d.len(),
|
||||
tag,
|
||||
})
|
||||
}
|
||||
Err(e) => Err(e.into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl DirInfo {
|
||||
fn new(path: impl Into<PathBuf>, params: &DirBuilder, depth: Option<u64>) -> Self {
|
||||
let path = path.into();
|
||||
|
||||
let mut s = Self {
|
||||
dirs: Vec::new(),
|
||||
errors: Vec::new(),
|
||||
files: Vec::new(),
|
||||
size: 0,
|
||||
blocks: 0,
|
||||
tag: params.tag.clone(),
|
||||
path,
|
||||
};
|
||||
|
||||
match std::fs::read_dir(&s.path) {
|
||||
Ok(d) => {
|
||||
for f in d {
|
||||
match f {
|
||||
Ok(i) => match i.file_type() {
|
||||
Ok(t) if t.is_dir() => s = s.add_dir(i.path(), depth, ¶ms),
|
||||
Ok(_t) => s = s.add_file(i.path(), ¶ms),
|
||||
Err(e) => s = s.add_error(e.into()),
|
||||
},
|
||||
Err(e) => s = s.add_error(e.into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => s = s.add_error(e.into()),
|
||||
}
|
||||
s
|
||||
}
|
||||
|
||||
fn add_dir(
|
||||
mut self,
|
||||
path: impl Into<PathBuf>,
|
||||
mut depth: Option<u64>,
|
||||
params: &DirBuilder,
|
||||
) -> Self {
|
||||
if let Some(current) = depth {
|
||||
if let Some(new) = current.checked_sub(1) {
|
||||
depth = Some(new);
|
||||
} else {
|
||||
return self;
|
||||
}
|
||||
}
|
||||
|
||||
let d = DirInfo::new(path, ¶ms, depth);
|
||||
self.size += d.size;
|
||||
self.blocks += d.blocks;
|
||||
self.dirs.push(d);
|
||||
self
|
||||
}
|
||||
|
||||
fn add_file(mut self, f: impl Into<PathBuf>, params: &DirBuilder) -> Self {
|
||||
let f = f.into();
|
||||
let include = params
|
||||
.exclude
|
||||
.as_ref()
|
||||
.map_or(true, |x| !x.matches_path(&f));
|
||||
if include {
|
||||
match FileInfo::new(f, params.deref, self.tag.clone()) {
|
||||
Ok(file) => {
|
||||
let inc = params.min.map_or(true, |s| file.size >= s);
|
||||
if inc {
|
||||
self.size += file.size;
|
||||
self.blocks += file.blocks.unwrap_or(0);
|
||||
if params.all {
|
||||
self.files.push(file);
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => self = self.add_error(e),
|
||||
}
|
||||
}
|
||||
self
|
||||
}
|
||||
|
||||
fn add_error(mut self, e: ShellError) -> Self {
|
||||
self.errors.push(e);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
fn glob_err_into(e: GlobError) -> ShellError {
|
||||
let e = e.into_error();
|
||||
ShellError::from(e)
|
||||
}
|
||||
|
||||
fn value_from_vec<V>(vec: Vec<V>, tag: &Tag) -> Value
|
||||
where
|
||||
V: Into<Value>,
|
||||
{
|
||||
if vec.is_empty() {
|
||||
UntaggedValue::nothing()
|
||||
} else {
|
||||
let values = vec.into_iter().map(Into::into).collect::<Vec<Value>>();
|
||||
UntaggedValue::Table(values)
|
||||
}
|
||||
.retag(tag)
|
||||
}
|
||||
|
||||
impl From<DirInfo> for Value {
|
||||
fn from(d: DirInfo) -> Self {
|
||||
let mut r: IndexMap<String, Value> = IndexMap::new();
|
||||
|
||||
r.insert(
|
||||
"path".to_string(),
|
||||
UntaggedValue::path(d.path).retag(&d.tag),
|
||||
);
|
||||
|
||||
r.insert(
|
||||
"apparent".to_string(),
|
||||
UntaggedValue::bytes(d.size).retag(&d.tag),
|
||||
);
|
||||
|
||||
r.insert(
|
||||
"physical".to_string(),
|
||||
UntaggedValue::bytes(d.blocks).retag(&d.tag),
|
||||
);
|
||||
|
||||
r.insert("directories".to_string(), value_from_vec(d.dirs, &d.tag));
|
||||
|
||||
r.insert("files".to_string(), value_from_vec(d.files, &d.tag));
|
||||
|
||||
if !d.errors.is_empty() {
|
||||
let v = UntaggedValue::Table(
|
||||
d.errors
|
||||
.into_iter()
|
||||
.map(move |e| UntaggedValue::Error(e).into_untagged_value())
|
||||
.collect::<Vec<Value>>(),
|
||||
)
|
||||
.retag(&d.tag);
|
||||
|
||||
r.insert("errors".to_string(), v);
|
||||
}
|
||||
|
||||
Value {
|
||||
value: UntaggedValue::row(r),
|
||||
tag: d.tag,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<FileInfo> for Value {
|
||||
fn from(f: FileInfo) -> Self {
|
||||
let mut r: IndexMap<String, Value> = IndexMap::new();
|
||||
|
||||
r.insert(
|
||||
"path".to_string(),
|
||||
UntaggedValue::path(f.path).retag(&f.tag),
|
||||
);
|
||||
|
||||
r.insert(
|
||||
"apparent".to_string(),
|
||||
UntaggedValue::bytes(f.size).retag(&f.tag),
|
||||
);
|
||||
|
||||
let b = f
|
||||
.blocks
|
||||
.map(UntaggedValue::bytes)
|
||||
.unwrap_or_else(UntaggedValue::nothing)
|
||||
.retag(&f.tag);
|
||||
|
||||
r.insert("physical".to_string(), b);
|
||||
|
||||
r.insert(
|
||||
"directories".to_string(),
|
||||
UntaggedValue::nothing().retag(&f.tag),
|
||||
);
|
||||
|
||||
r.insert("files".to_string(), UntaggedValue::nothing().retag(&f.tag));
|
||||
|
||||
UntaggedValue::row(r).retag(&f.tag)
|
||||
}
|
||||
}
|
@ -60,7 +60,8 @@ fn run(
|
||||
}
|
||||
}
|
||||
|
||||
let stream = VecDeque::from(output);
|
||||
// TODO: This whole block can probably be replaced with `.map()`
|
||||
let stream = futures::stream::iter(output);
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
@ -38,15 +38,17 @@ impl PerItemCommand for Edit {
|
||||
value: Value,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let value_tag = value.tag();
|
||||
let field = call_info.args.expect_nth(0)?.as_column_path().unwrap();
|
||||
let field = call_info.args.expect_nth(0)?.as_column_path()?;
|
||||
let replacement = call_info.args.expect_nth(1)?.tagged_unknown();
|
||||
|
||||
let stream = match value {
|
||||
obj @ Value {
|
||||
obj
|
||||
@
|
||||
Value {
|
||||
value: UntaggedValue::Row(_),
|
||||
..
|
||||
} => match obj.replace_data_at_column_path(&field, replacement.item.clone()) {
|
||||
Some(v) => VecDeque::from(vec![Ok(ReturnSuccess::Value(v))]),
|
||||
Some(v) => futures::stream::iter(vec![Ok(ReturnSuccess::Value(v))]),
|
||||
None => {
|
||||
return Err(ShellError::labeled_error(
|
||||
"edit could not find place to insert column",
|
@ -6,7 +6,6 @@ use nu_errors::ShellError;
|
||||
use nu_protocol::{
|
||||
CallInfo, CommandAction, Primitive, ReturnSuccess, Signature, SyntaxShape, UntaggedValue, Value,
|
||||
};
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub struct Enter;
|
||||
|
||||
@ -46,23 +45,26 @@ impl PerItemCommand for Enter {
|
||||
let location_clone = location_string.clone();
|
||||
let tag_clone = tag.clone();
|
||||
|
||||
if location.starts_with("help") {
|
||||
if location_string.starts_with("help") {
|
||||
let spec = location_string.split(':').collect::<Vec<&str>>();
|
||||
|
||||
let (_, command) = (spec[0], spec[1]);
|
||||
if spec.len() == 2 {
|
||||
let (_, command) = (spec[0], spec[1]);
|
||||
|
||||
if registry.has(command) {
|
||||
Ok(vec![Ok(ReturnSuccess::Action(CommandAction::EnterHelpShell(
|
||||
UntaggedValue::string(command).into_value(Tag::unknown()),
|
||||
)))]
|
||||
.into())
|
||||
} else {
|
||||
Ok(vec![Ok(ReturnSuccess::Action(CommandAction::EnterHelpShell(
|
||||
UntaggedValue::nothing().into_value(Tag::unknown()),
|
||||
)))]
|
||||
.into())
|
||||
if registry.has(command) {
|
||||
return Ok(vec![Ok(ReturnSuccess::Action(
|
||||
CommandAction::EnterHelpShell(
|
||||
UntaggedValue::string(command).into_value(Tag::unknown()),
|
||||
),
|
||||
))]
|
||||
.into());
|
||||
}
|
||||
}
|
||||
} else if PathBuf::from(location).is_dir() {
|
||||
Ok(vec![Ok(ReturnSuccess::Action(CommandAction::EnterHelpShell(
|
||||
UntaggedValue::nothing().into_value(Tag::unknown()),
|
||||
)))]
|
||||
.into())
|
||||
} else if location.is_dir() {
|
||||
Ok(vec![Ok(ReturnSuccess::Action(CommandAction::EnterShell(
|
||||
location_clone,
|
||||
)))]
|
72
crates/nu-cli/src/commands/evaluate_by.rs
Normal file
@ -0,0 +1,72 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use crate::utils::data_processing::{evaluate, fetch};
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{ReturnSuccess, Signature, SyntaxShape, UntaggedValue, Value};
|
||||
use nu_source::{SpannedItem, Tagged};
|
||||
use nu_value_ext::ValueExt;
|
||||
|
||||
pub struct EvaluateBy;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct EvaluateByArgs {
|
||||
evaluate_with: Option<Tagged<String>>,
|
||||
}
|
||||
|
||||
impl WholeStreamCommand for EvaluateBy {
|
||||
fn name(&self) -> &str {
|
||||
"evaluate-by"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("evaluate-by").named(
|
||||
"evaluate_with",
|
||||
SyntaxShape::String,
|
||||
"the name of the column to evaluate by",
|
||||
Some('w'),
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Creates a new table with the data from the tables rows evaluated by the column given."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
args.process(registry, evaluate_by)?.run()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn evaluate_by(
|
||||
EvaluateByArgs { evaluate_with }: EvaluateByArgs,
|
||||
RunnableContext { input, name, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
|
||||
if values.is_empty() {
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Expected table from pipeline",
|
||||
"requires a table input",
|
||||
name
|
||||
))
|
||||
} else {
|
||||
|
||||
let evaluate_with = if let Some(evaluator) = evaluate_with {
|
||||
Some(evaluator.item().clone())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
match evaluate(&values[0], evaluate_with, name) {
|
||||
Ok(evaluated) => yield ReturnSuccess::value(evaluated),
|
||||
Err(err) => yield Err(err)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
@ -12,7 +12,7 @@ impl WholeStreamCommand for Exit {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("exit").switch("now", "exit out of the shell immediately")
|
||||
Signature::build("exit").switch("now", "exit out of the shell immediately", Some('n'))
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
@ -9,7 +9,7 @@ pub struct First;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct FirstArgs {
|
||||
rows: Option<Tagged<u64>>,
|
||||
rows: Option<Tagged<usize>>,
|
||||
}
|
||||
|
||||
impl WholeStreamCommand for First {
|
157
crates/nu-cli/src/commands/format.rs
Normal file
@ -0,0 +1,157 @@
|
||||
use crate::commands::PerItemCommand;
|
||||
use crate::context::CommandRegistry;
|
||||
use crate::prelude::*;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{
|
||||
CallInfo, ColumnPath, ReturnSuccess, Signature, SyntaxShape, UntaggedValue, Value,
|
||||
};
|
||||
use nu_source::Tagged;
|
||||
use nu_value_ext::{as_column_path, get_data_by_column_path};
|
||||
use std::borrow::Borrow;
|
||||
|
||||
use nom::{
|
||||
bytes::complete::{tag, take_while},
|
||||
IResult,
|
||||
};
|
||||
|
||||
pub struct Format;
|
||||
|
||||
impl PerItemCommand for Format {
|
||||
fn name(&self) -> &str {
|
||||
"format"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("format").required(
|
||||
"pattern",
|
||||
SyntaxShape::Any,
|
||||
"the pattern to output. Eg) \"{foo}: {bar}\"",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Format columns into a string using a simple pattern."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
call_info: &CallInfo,
|
||||
_registry: &CommandRegistry,
|
||||
_raw_args: &RawCommandArgs,
|
||||
value: Value,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
//let value_tag = value.tag();
|
||||
let pattern = call_info.args.expect_nth(0)?;
|
||||
let pattern_tag = pattern.tag.clone();
|
||||
let pattern = pattern.as_string()?;
|
||||
|
||||
let format_pattern = format(&pattern).map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not create format pattern",
|
||||
"could not create format pattern",
|
||||
&pattern_tag,
|
||||
)
|
||||
})?;
|
||||
let commands = format_pattern.1;
|
||||
|
||||
let output = match value {
|
||||
value
|
||||
@
|
||||
Value {
|
||||
value: UntaggedValue::Row(_),
|
||||
..
|
||||
} => {
|
||||
let mut output = String::new();
|
||||
|
||||
for command in &commands {
|
||||
match command {
|
||||
FormatCommand::Text(s) => {
|
||||
output.push_str(s);
|
||||
}
|
||||
FormatCommand::Column(c) => {
|
||||
let key = to_column_path(&c, &pattern_tag)?;
|
||||
|
||||
let fetcher = get_data_by_column_path(
|
||||
&value,
|
||||
&key,
|
||||
Box::new(move |(_, _, error)| error),
|
||||
);
|
||||
|
||||
if let Ok(c) = fetcher {
|
||||
output
|
||||
.push_str(&value::format_leaf(c.borrow()).plain_string(100_000))
|
||||
}
|
||||
// That column doesn't match, so don't emit anything
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
output
|
||||
}
|
||||
_ => String::new(),
|
||||
};
|
||||
|
||||
Ok(futures::stream::iter(vec![ReturnSuccess::value(
|
||||
UntaggedValue::string(output).into_untagged_value(),
|
||||
)])
|
||||
.to_output_stream())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum FormatCommand {
|
||||
Text(String),
|
||||
Column(String),
|
||||
}
|
||||
|
||||
fn format(input: &str) -> IResult<&str, Vec<FormatCommand>> {
|
||||
let mut output = vec![];
|
||||
|
||||
let mut loop_input = input;
|
||||
loop {
|
||||
let (input, before) = take_while(|c| c != '{')(loop_input)?;
|
||||
if !before.is_empty() {
|
||||
output.push(FormatCommand::Text(before.to_string()));
|
||||
}
|
||||
if input != "" {
|
||||
// Look for column as we're now at one
|
||||
let (input, _) = tag("{")(input)?;
|
||||
let (input, column) = take_while(|c| c != '}')(input)?;
|
||||
let (input, _) = tag("}")(input)?;
|
||||
|
||||
output.push(FormatCommand::Column(column.to_string()));
|
||||
loop_input = input;
|
||||
} else {
|
||||
loop_input = input;
|
||||
}
|
||||
if loop_input == "" {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Ok((loop_input, output))
|
||||
}
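// Sketch of what this parser yields for the pattern given in the signature example above:
//     format("{foo}: {bar}")  =>  Ok(("", vec![Column("foo"), Text(": "), Column("bar")]))
// (the FormatCommand variants hold owned Strings; literals are shown here for brevity)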
|
||||
|
||||
fn to_column_path(
|
||||
path_members: &str,
|
||||
tag: impl Into<Tag>,
|
||||
) -> Result<Tagged<ColumnPath>, ShellError> {
|
||||
let tag = tag.into();
|
||||
|
||||
as_column_path(
|
||||
&UntaggedValue::Table(
|
||||
path_members
|
||||
.split('.')
|
||||
.map(|x| {
|
||||
let member = match x.parse::<u64>() {
|
||||
Ok(v) => UntaggedValue::int(v),
|
||||
Err(_) => UntaggedValue::string(x),
|
||||
};
|
||||
|
||||
member.into_value(&tag)
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
.into_value(&tag),
|
||||
)
|
||||
}
|
@ -102,7 +102,7 @@ fn convert_bson_value_to_nu_value(v: &Bson, tag: impl Into<Tag>) -> Result<Value
|
||||
);
|
||||
collected.insert_value(
|
||||
"$scope".to_string(),
|
||||
convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag.clone())?,
|
||||
convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag)?,
|
||||
);
|
||||
collected.into_value()
|
||||
}
|
||||
@ -205,32 +205,18 @@ fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
let bytes = input.collect_binary(tag.clone()).await?;
|
||||
|
||||
for value in values {
|
||||
let value_tag = &value.tag;
|
||||
match value.value {
|
||||
UntaggedValue::Primitive(Primitive::Binary(vb)) =>
|
||||
match from_bson_bytes_to_value(vb, tag.clone()) {
|
||||
Ok(x) => yield ReturnSuccess::value(x),
|
||||
Err(_) => {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Could not parse as BSON",
|
||||
"input cannot be parsed as BSON",
|
||||
tag.clone(),
|
||||
"value originates from here",
|
||||
value_tag,
|
||||
))
|
||||
}
|
||||
}
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
match from_bson_bytes_to_value(bytes.item, tag.clone()) {
|
||||
Ok(x) => yield ReturnSuccess::value(x),
|
||||
Err(_) => {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Could not parse as BSON",
|
||||
"input cannot be parsed as BSON",
|
||||
tag.clone(),
|
||||
"value originates from here",
|
||||
value_tag,
|
||||
)),
|
||||
|
||||
bytes.tag,
|
||||
))
|
||||
}
|
||||
}
|
||||
};
|
@ -23,8 +23,13 @@ impl WholeStreamCommand for FromCSV {
|
||||
"separator",
|
||||
SyntaxShape::String,
|
||||
"a character to separate columns, defaults to ','",
|
||||
Some('s'),
|
||||
)
|
||||
.switch(
|
||||
"headerless",
|
||||
"don't treat the first row as column names",
|
||||
None,
|
||||
)
|
||||
.switch("headerless", "don't treat the first row as column names")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
169
crates/nu-cli/src/commands/from_delimited_data.rs
Normal file
@ -0,0 +1,169 @@
|
||||
use crate::prelude::*;
|
||||
use nu_errors::ShellError;
|
||||
use nu_parser::hir::syntax_shape::{ExpandContext, SignatureRegistry};
|
||||
use nu_parser::utils::{parse_line_with_separator as parse, LineSeparatedShape};
|
||||
use nu_parser::TokensIterator;
|
||||
use nu_protocol::{ReturnSuccess, Signature, TaggedDictBuilder, UntaggedValue, Value};
|
||||
use nu_source::nom_input;
|
||||
|
||||
use derive_new::new;
|
||||
|
||||
pub fn from_delimited_data(
|
||||
headerless: bool,
|
||||
sep: char,
|
||||
format_name: &'static str,
|
||||
RunnableContext { input, name, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let name_tag = name;
|
||||
|
||||
let stream = async_stream! {
|
||||
let concat_string = input.collect_string(name_tag.clone()).await?;
|
||||
|
||||
match from_delimited_string_to_value(concat_string.item, headerless, sep, name_tag.clone()) {
|
||||
Ok(rows) => {
|
||||
for row in rows {
|
||||
match row {
|
||||
Value { value: UntaggedValue::Table(list), .. } => {
|
||||
for l in list {
|
||||
yield ReturnSuccess::value(l);
|
||||
}
|
||||
}
|
||||
x => yield ReturnSuccess::value(x),
|
||||
}
|
||||
}
|
||||
},
|
||||
Err(err) => {
|
||||
let line_one = format!("Could not parse as {}", format_name);
|
||||
let line_two = format!("input cannot be parsed as {}", format_name);
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
line_one,
|
||||
line_two,
|
||||
name_tag.clone(),
|
||||
"value originates from here",
|
||||
concat_string.tag,
|
||||
))
|
||||
} ,
|
||||
}
|
||||
};
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, new)]
|
||||
pub struct EmptyRegistry {
|
||||
#[new(default)]
|
||||
signatures: indexmap::IndexMap<String, Signature>,
|
||||
}
|
||||
|
||||
impl EmptyRegistry {}
|
||||
|
||||
impl SignatureRegistry for EmptyRegistry {
|
||||
fn has(&self, _name: &str) -> bool {
|
||||
false
|
||||
}
|
||||
fn get(&self, _name: &str) -> Option<Signature> {
|
||||
None
|
||||
}
|
||||
fn clone_box(&self) -> Box<dyn SignatureRegistry> {
|
||||
Box::new(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
fn from_delimited_string_to_value(
|
||||
s: String,
|
||||
headerless: bool,
|
||||
sep: char,
|
||||
tag: impl Into<Tag>,
|
||||
) -> Result<Vec<Value>, ShellError> {
|
||||
let tag = tag.into();
|
||||
|
||||
let mut entries = s.lines();
|
||||
|
||||
let mut fields = vec![];
|
||||
let mut out = vec![];
|
||||
|
||||
if let Some(first_entry) = entries.next() {
|
||||
let tokens = match parse(&sep.to_string(), nom_input(first_entry)) {
|
||||
Ok((_, tokens)) => tokens,
|
||||
Err(err) => return Err(ShellError::parse_error(err)),
|
||||
};
|
||||
|
||||
let tokens_span = tokens.span;
|
||||
let source: nu_source::Text = tokens_span.slice(&first_entry).into();
|
||||
|
||||
if !headerless {
|
||||
fields = tokens
|
||||
.item
|
||||
.iter()
|
||||
.filter(|token| !token.is_separator())
|
||||
.map(|field| field.source(&source).to_string())
|
||||
.collect::<Vec<_>>();
|
||||
}
|
||||
|
||||
let registry = Box::new(EmptyRegistry::new());
|
||||
let ctx = ExpandContext::new(registry, &source, None);
|
||||
|
||||
let mut iterator = TokensIterator::new(&tokens.item, ctx, tokens_span);
|
||||
let (results, tokens_identified) = iterator.expand(LineSeparatedShape);
|
||||
let results = results?;
|
||||
|
||||
let mut row = TaggedDictBuilder::new(&tag);
|
||||
|
||||
if headerless {
|
||||
let fallback_columns = (1..=tokens_identified)
|
||||
.map(|i| format!("Column{}", i))
|
||||
.collect::<Vec<String>>();
|
||||
|
||||
for (idx, field) in results.into_iter().enumerate() {
|
||||
let key = if headerless {
|
||||
&fallback_columns[idx]
|
||||
} else {
|
||||
&fields[idx]
|
||||
};
|
||||
|
||||
row.insert_value(key, field.into_value(&tag));
|
||||
}
|
||||
|
||||
out.push(row.into_value())
|
||||
}
|
||||
}
|
||||
|
||||
for entry in entries {
|
||||
let tokens = match parse(&sep.to_string(), nom_input(entry)) {
|
||||
Ok((_, tokens)) => tokens,
|
||||
Err(err) => return Err(ShellError::parse_error(err)),
|
||||
};
|
||||
let tokens_span = tokens.span;
|
||||
|
||||
let source: nu_source::Text = tokens_span.slice(&entry).into();
|
||||
let registry = Box::new(EmptyRegistry::new());
|
||||
let ctx = ExpandContext::new(registry, &source, None);
|
||||
|
||||
let mut iterator = TokensIterator::new(&tokens.item, ctx, tokens_span);
|
||||
let (results, tokens_identified) = iterator.expand(LineSeparatedShape);
|
||||
let results = results?;
|
||||
|
||||
let mut row = TaggedDictBuilder::new(&tag);
|
||||
|
||||
let fallback_columns = (1..=tokens_identified)
|
||||
.map(|i| format!("Column{}", i))
|
||||
.collect::<Vec<String>>();
|
||||
|
||||
for (idx, field) in results.into_iter().enumerate() {
|
||||
let key = if headerless {
|
||||
&fallback_columns[idx]
|
||||
} else {
|
||||
match fields.get(idx) {
|
||||
Some(key) => key,
|
||||
None => &fallback_columns[idx],
|
||||
}
|
||||
};
|
||||
|
||||
row.insert_value(key, field.into_value(&tag));
|
||||
}
|
||||
|
||||
out.push(row.into_value())
|
||||
}
|
||||
|
||||
Ok(out)
|
||||
}
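// Rough shape of the result (sketch, hypothetical input): with sep = ',' and headerless = false,
// the text "name,size\nfoo,1" becomes a single row value tagged with `tag`, whose columns are
// "name" and "size"; with headerless = true the column names fall back to "Column1", "Column2", ...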
|
240
crates/nu-cli/src/commands/from_ics.rs
Normal file
@ -0,0 +1,240 @@
|
||||
extern crate ical;
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use ical::parser::ical::component::*;
|
||||
use ical::property::Property;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{Primitive, ReturnSuccess, Signature, TaggedDictBuilder, UntaggedValue, Value};
|
||||
use std::io::BufReader;
|
||||
|
||||
pub struct FromIcs;
|
||||
|
||||
impl WholeStreamCommand for FromIcs {
|
||||
fn name(&self) -> &str {
|
||||
"from-ics"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("from-ics")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Parse text as .ics and create table."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
from_ics(args, registry)
|
||||
}
|
||||
}
|
||||
|
||||
fn from_ics(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let tag = args.name_tag();
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream! {
|
||||
let input_string = input.collect_string(tag.clone()).await?.item;
|
||||
let input_bytes = input_string.as_bytes();
|
||||
let buf_reader = BufReader::new(input_bytes);
|
||||
let parser = ical::IcalParser::new(buf_reader);
|
||||
|
||||
for calendar in parser {
|
||||
match calendar {
|
||||
Ok(c) => yield ReturnSuccess::value(calendar_to_value(c, tag.clone())),
|
||||
Err(_) => yield Err(ShellError::labeled_error(
|
||||
"Could not parse as .ics",
|
||||
"input cannot be parsed as .ics",
|
||||
tag.clone()
|
||||
)),
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
||||
|
||||
fn calendar_to_value(calendar: IcalCalendar, tag: Tag) -> Value {
|
||||
let mut row = TaggedDictBuilder::new(tag.clone());
|
||||
|
||||
row.insert_untagged(
|
||||
"properties",
|
||||
properties_to_value(calendar.properties, tag.clone()),
|
||||
);
|
||||
row.insert_untagged("events", events_to_value(calendar.events, tag.clone()));
|
||||
row.insert_untagged("alarms", alarms_to_value(calendar.alarms, tag.clone()));
|
||||
row.insert_untagged("to-Dos", todos_to_value(calendar.todos, tag.clone()));
|
||||
row.insert_untagged(
|
||||
"journals",
|
||||
journals_to_value(calendar.journals, tag.clone()),
|
||||
);
|
||||
row.insert_untagged(
|
||||
"free-busys",
|
||||
free_busys_to_value(calendar.free_busys, tag.clone()),
|
||||
);
|
||||
row.insert_untagged("timezones", timezones_to_value(calendar.timezones, tag));
|
||||
|
||||
row.into_value()
|
||||
}
|
||||
|
||||
fn events_to_value(events: Vec<IcalEvent>, tag: Tag) -> UntaggedValue {
|
||||
UntaggedValue::table(
|
||||
&events
|
||||
.into_iter()
|
||||
.map(|event| {
|
||||
let mut row = TaggedDictBuilder::new(tag.clone());
|
||||
row.insert_untagged(
|
||||
"properties",
|
||||
properties_to_value(event.properties, tag.clone()),
|
||||
);
|
||||
row.insert_untagged("alarms", alarms_to_value(event.alarms, tag.clone()));
|
||||
row.into_value()
|
||||
})
|
||||
.collect::<Vec<Value>>(),
|
||||
)
|
||||
}
|
||||
|
||||
fn alarms_to_value(alarms: Vec<IcalAlarm>, tag: Tag) -> UntaggedValue {
|
||||
UntaggedValue::table(
|
||||
&alarms
|
||||
.into_iter()
|
||||
.map(|alarm| {
|
||||
let mut row = TaggedDictBuilder::new(tag.clone());
|
||||
row.insert_untagged(
|
||||
"properties",
|
||||
properties_to_value(alarm.properties, tag.clone()),
|
||||
);
|
||||
row.into_value()
|
||||
})
|
||||
.collect::<Vec<Value>>(),
|
||||
)
|
||||
}
|
||||
|
||||
fn todos_to_value(todos: Vec<IcalTodo>, tag: Tag) -> UntaggedValue {
|
||||
UntaggedValue::table(
|
||||
&todos
|
||||
.into_iter()
|
||||
.map(|todo| {
|
||||
let mut row = TaggedDictBuilder::new(tag.clone());
|
||||
row.insert_untagged(
|
||||
"properties",
|
||||
properties_to_value(todo.properties, tag.clone()),
|
||||
);
|
||||
row.insert_untagged("alarms", alarms_to_value(todo.alarms, tag.clone()));
|
||||
row.into_value()
|
||||
})
|
||||
.collect::<Vec<Value>>(),
|
||||
)
|
||||
}
|
||||
|
||||
fn journals_to_value(journals: Vec<IcalJournal>, tag: Tag) -> UntaggedValue {
|
||||
UntaggedValue::table(
|
||||
&journals
|
||||
.into_iter()
|
||||
.map(|journal| {
|
||||
let mut row = TaggedDictBuilder::new(tag.clone());
|
||||
row.insert_untagged(
|
||||
"properties",
|
||||
properties_to_value(journal.properties, tag.clone()),
|
||||
);
|
||||
row.into_value()
|
||||
})
|
||||
.collect::<Vec<Value>>(),
|
||||
)
|
||||
}
|
||||
|
||||
fn free_busys_to_value(free_busys: Vec<IcalFreeBusy>, tag: Tag) -> UntaggedValue {
|
||||
UntaggedValue::table(
|
||||
&free_busys
|
||||
.into_iter()
|
||||
.map(|free_busy| {
|
||||
let mut row = TaggedDictBuilder::new(tag.clone());
|
||||
row.insert_untagged(
|
||||
"properties",
|
||||
properties_to_value(free_busy.properties, tag.clone()),
|
||||
);
|
||||
row.into_value()
|
||||
})
|
||||
.collect::<Vec<Value>>(),
|
||||
)
|
||||
}
|
||||
|
||||
fn timezones_to_value(timezones: Vec<IcalTimeZone>, tag: Tag) -> UntaggedValue {
|
||||
UntaggedValue::table(
|
||||
&timezones
|
||||
.into_iter()
|
||||
.map(|timezone| {
|
||||
let mut row = TaggedDictBuilder::new(tag.clone());
|
||||
row.insert_untagged(
|
||||
"properties",
|
||||
properties_to_value(timezone.properties, tag.clone()),
|
||||
);
|
||||
row.insert_untagged(
|
||||
"transitions",
|
||||
timezone_transitions_to_value(timezone.transitions, tag.clone()),
|
||||
);
|
||||
row.into_value()
|
||||
})
|
||||
.collect::<Vec<Value>>(),
|
||||
)
|
||||
}
|
||||
|
||||
fn timezone_transitions_to_value(
|
||||
transitions: Vec<IcalTimeZoneTransition>,
|
||||
tag: Tag,
|
||||
) -> UntaggedValue {
|
||||
UntaggedValue::table(
|
||||
&transitions
|
||||
.into_iter()
|
||||
.map(|transition| {
|
||||
let mut row = TaggedDictBuilder::new(tag.clone());
|
||||
row.insert_untagged(
|
||||
"properties",
|
||||
properties_to_value(transition.properties, tag.clone()),
|
||||
);
|
||||
row.into_value()
|
||||
})
|
||||
.collect::<Vec<Value>>(),
|
||||
)
|
||||
}
|
||||
|
||||
fn properties_to_value(properties: Vec<Property>, tag: Tag) -> UntaggedValue {
|
||||
UntaggedValue::table(
|
||||
&properties
|
||||
.into_iter()
|
||||
.map(|prop| {
|
||||
let mut row = TaggedDictBuilder::new(tag.clone());
|
||||
|
||||
let name = UntaggedValue::string(prop.name);
|
||||
let value = match prop.value {
|
||||
Some(val) => UntaggedValue::string(val),
|
||||
None => UntaggedValue::Primitive(Primitive::Nothing),
|
||||
};
|
||||
let params = match prop.params {
|
||||
Some(param_list) => params_to_value(param_list, tag.clone()).into(),
|
||||
None => UntaggedValue::Primitive(Primitive::Nothing),
|
||||
};
|
||||
|
||||
row.insert_untagged("name", name);
|
||||
row.insert_untagged("value", value);
|
||||
row.insert_untagged("params", params);
|
||||
row.into_value()
|
||||
})
|
||||
.collect::<Vec<Value>>(),
|
||||
)
|
||||
}
|
||||
|
||||
fn params_to_value(params: Vec<(String, Vec<String>)>, tag: Tag) -> Value {
|
||||
let mut row = TaggedDictBuilder::new(tag);
|
||||
|
||||
for (param_name, param_values) in params {
|
||||
let values: Vec<Value> = param_values.into_iter().map(|val| val.into()).collect();
|
||||
let values = UntaggedValue::table(&values);
|
||||
row.insert_untagged(param_name, values);
|
||||
}
|
||||
|
||||
row.into_value()
|
||||
}
|
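Note: the `from-ics` command above leans on the `ical` crate (`ical::IcalParser::new` wraps any `BufRead` and yields one parsed calendar per `VCALENDAR` block, exactly as used in the diff). A minimal standalone sketch of that parsing loop, with a made-up two-property calendar and without the nu streaming plumbing:

extern crate ical;

use std::io::BufReader;

fn main() {
    // Tiny calendar used purely for illustration.
    let text = "BEGIN:VCALENDAR\nVERSION:2.0\nPRODID:example\nEND:VCALENDAR\n";
    let reader = BufReader::new(text.as_bytes());

    // Each item is one Result: a parsed calendar or a parse error.
    for calendar in ical::IcalParser::new(reader) {
        match calendar {
            Ok(c) => println!("calendar with {} properties", c.properties.len()),
            Err(_) => eprintln!("could not parse as .ics"),
        }
    }
}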
@ -66,32 +66,12 @@ pub fn from_ini_string_to_value(
|
||||
fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let tag = args.name_tag();
|
||||
let span = tag.span;
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
let concat_string = input.collect_string(tag.clone()).await?;
|
||||
|
||||
let mut concat_string = String::new();
|
||||
let mut latest_tag: Option<Tag> = None;
|
||||
|
||||
for value in values {
|
||||
latest_tag = Some(value.tag.clone());
|
||||
let value_span = value.tag.span;
|
||||
if let Ok(s) = value.as_string() {
|
||||
concat_string.push_str(&s);
|
||||
} else {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
span,
|
||||
"value originates from here",
|
||||
value_span,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
match from_ini_string_to_value(concat_string, tag.clone()) {
|
||||
match from_ini_string_to_value(concat_string.item, tag.clone()) {
|
||||
Ok(x) => match x {
|
||||
Value { value: UntaggedValue::Table(list), .. } => {
|
||||
for l in list {
|
||||
@ -100,15 +80,15 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
|
||||
}
|
||||
x => yield ReturnSuccess::value(x),
|
||||
},
|
||||
Err(_) => if let Some(last_tag) = latest_tag {
|
||||
Err(_) => {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Could not parse as INI",
|
||||
"input cannot be parsed as INI",
|
||||
&tag,
|
||||
"value originates from here",
|
||||
last_tag,
|
||||
concat_string.tag,
|
||||
))
|
||||
} ,
|
||||
}
|
||||
}
|
||||
};
|
||||
|
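Note: this hunk shows the refactor repeated across most of the `from-*` commands in this compare: the hand-rolled loop that checked each pipeline value was a string, concatenated it, and tracked `latest_tag` is replaced by a single `input.collect_string(tag)` call, whose result carries both the concatenated text (`.item`) and a tag for error reporting (`.tag`). Conceptually the helper is just a fold over a stream of strings; a standalone sketch of that idea with the `futures` crate (illustrative only, not nu's internal implementation):

use futures::executor::block_on;
use futures::stream::{self, StreamExt};

fn main() {
    // Stand-in for the pipeline: a stream of string chunks.
    let chunks = stream::iter(vec!["[section]\n".to_string(), "key = value\n".to_string()]);

    // Concatenate every chunk, which is roughly what collect_string provides
    // (minus the tagging used for error spans).
    let concatenated: String = block_on(chunks.collect::<String>());
    assert_eq!(concatenated, "[section]\nkey = value\n");
}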
@ -16,7 +16,11 @@ impl WholeStreamCommand for FromJSON {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("from-json").switch("objects", "treat each line as a separate value")
|
||||
Signature::build("from-json").switch(
|
||||
"objects",
|
||||
"treat each line as a separate value",
|
||||
Some('o'),
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -70,35 +74,13 @@ fn from_json(
|
||||
FromJSONArgs { objects }: FromJSONArgs,
|
||||
RunnableContext { input, name, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let name_span = name.span;
|
||||
let name_tag = name;
|
||||
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
|
||||
let mut concat_string = String::new();
|
||||
let mut latest_tag: Option<Tag> = None;
|
||||
|
||||
for value in values {
|
||||
latest_tag = Some(value.tag.clone());
|
||||
let value_span = value.tag.span;
|
||||
|
||||
if let Ok(s) = value.as_string() {
|
||||
concat_string.push_str(&s);
|
||||
} else {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
name_span,
|
||||
"value originates from here",
|
||||
value_span,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
let concat_string = input.collect_string(name_tag.clone()).await?;
|
||||
|
||||
if objects {
|
||||
for json_str in concat_string.lines() {
|
||||
for json_str in concat_string.item.lines() {
|
||||
if json_str.is_empty() {
|
||||
continue;
|
||||
}
|
||||
@ -106,20 +88,22 @@ fn from_json(
|
||||
match from_json_string_to_value(json_str.to_string(), &name_tag) {
|
||||
Ok(x) =>
|
||||
yield ReturnSuccess::value(x),
|
||||
Err(_) => {
|
||||
if let Some(ref last_tag) = latest_tag {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Could nnot parse as JSON",
|
||||
"input cannot be parsed as JSON",
|
||||
&name_tag,
|
||||
"value originates from here",
|
||||
last_tag))
|
||||
}
|
||||
Err(e) => {
|
||||
let mut message = "Could not parse as JSON (".to_string();
|
||||
message.push_str(&e.to_string());
|
||||
message.push_str(")");
|
||||
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
message,
|
||||
"input cannot be parsed as JSON",
|
||||
&name_tag,
|
||||
"value originates from here",
|
||||
concat_string.tag.clone()))
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
match from_json_string_to_value(concat_string, name_tag.clone()) {
|
||||
match from_json_string_to_value(concat_string.item, name_tag.clone()) {
|
||||
Ok(x) =>
|
||||
match x {
|
||||
Value { value: UntaggedValue::Table(list), .. } => {
|
||||
@ -129,15 +113,17 @@ fn from_json(
|
||||
}
|
||||
x => yield ReturnSuccess::value(x),
|
||||
}
|
||||
Err(_) => {
|
||||
if let Some(last_tag) = latest_tag {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Could not parse as JSON",
|
||||
"input cannot be parsed as JSON",
|
||||
name_tag,
|
||||
"value originates from here",
|
||||
last_tag))
|
||||
}
|
||||
Err(e) => {
|
||||
let mut message = "Could not parse as JSON (".to_string();
|
||||
message.push_str(&e.to_string());
|
||||
message.push_str(")");
|
||||
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
message,
|
||||
"input cannot be parsed as JSON",
|
||||
name_tag,
|
||||
"value originates from here",
|
||||
concat_string.tag))
|
||||
}
|
||||
}
|
||||
}
|
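Note: besides gaining the short flag `-o`, the `--objects` path now skips blank lines and includes the underlying parse error text in the label. The command goes through nu's internal `from_json_string_to_value`; the sketch below shows the same per-line shape with plain `serde_json`, which is an assumption for illustration rather than the helper the command actually uses:

use serde_json::Value;

fn main() {
    // One JSON document per line, as `from-json --objects` expects.
    let input = "{\"name\":\"nu\"}\n\n{\"name\":\"shell\"}";

    for json_str in input.lines() {
        if json_str.is_empty() {
            continue; // blank lines are skipped, as in the hunk above
        }
        match serde_json::from_str::<Value>(json_str) {
            Ok(v) => println!("parsed: {}", v),
            Err(e) => eprintln!("Could not parse as JSON ({})", e),
        }
    }
}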
crates/nu-cli/src/commands/from_ods.rs (new file, 98 lines)
@ -0,0 +1,98 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use crate::TaggedListBuilder;
|
||||
use calamine::*;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{ReturnSuccess, Signature, TaggedDictBuilder, UntaggedValue};
|
||||
use std::io::Cursor;
|
||||
|
||||
pub struct FromODS;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct FromODSArgs {
|
||||
headerless: bool,
|
||||
}
|
||||
|
||||
impl WholeStreamCommand for FromODS {
|
||||
fn name(&self) -> &str {
|
||||
"from-ods"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("from-ods").switch(
|
||||
"headerless",
|
||||
"don't treat the first row as column names",
|
||||
None,
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Parse OpenDocument Spreadsheet(.ods) data and create table."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
args.process(registry, from_ods)?.run()
|
||||
}
|
||||
}
|
||||
|
||||
fn from_ods(
|
||||
FromODSArgs {
|
||||
headerless: _headerless,
|
||||
}: FromODSArgs,
|
||||
runnable_context: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let input = runnable_context.input;
|
||||
let tag = runnable_context.name;
|
||||
|
||||
let stream = async_stream! {
|
||||
let bytes = input.collect_binary(tag.clone()).await?;
|
||||
let mut buf: Cursor<Vec<u8>> = Cursor::new(bytes.item);
|
||||
let mut ods = Ods::<_>::new(buf).map_err(|_| ShellError::labeled_error(
|
||||
"Could not load ods file",
|
||||
"could not load ods file",
|
||||
&tag))?;
|
||||
|
||||
let mut dict = TaggedDictBuilder::new(&tag);
|
||||
|
||||
let sheet_names = ods.sheet_names().to_owned();
|
||||
|
||||
for sheet_name in &sheet_names {
|
||||
let mut sheet_output = TaggedListBuilder::new(&tag);
|
||||
|
||||
if let Some(Ok(current_sheet)) = ods.worksheet_range(sheet_name) {
|
||||
for row in current_sheet.rows() {
|
||||
let mut row_output = TaggedDictBuilder::new(&tag);
|
||||
for (i, cell) in row.iter().enumerate() {
|
||||
let value = match cell {
|
||||
DataType::Empty => UntaggedValue::nothing(),
|
||||
DataType::String(s) => UntaggedValue::string(s),
|
||||
DataType::Float(f) => UntaggedValue::decimal(*f),
|
||||
DataType::Int(i) => UntaggedValue::int(*i),
|
||||
DataType::Bool(b) => UntaggedValue::boolean(*b),
|
||||
_ => UntaggedValue::nothing(),
|
||||
};
|
||||
|
||||
row_output.insert_untagged(&format!("Column{}", i), value);
|
||||
}
|
||||
|
||||
sheet_output.push_untagged(row_output.into_untagged_value());
|
||||
}
|
||||
|
||||
dict.insert_untagged(sheet_name, sheet_output.into_untagged_value());
|
||||
} else {
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Could not load sheet",
|
||||
"could not load sheet",
|
||||
&tag));
|
||||
}
|
||||
}
|
||||
|
||||
yield ReturnSuccess::value(dict.into_value());
|
||||
};
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
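Note: `from-ods` (and `from-xlsx` further down) read the collected binary input through the `calamine` crate: wrap the bytes in a `Cursor`, open them with `Ods::new`/`Xlsx::new`, then walk each sheet's rows and map `DataType` cells to values. A standalone sketch of the same flow that reads a file from disk instead of the pipeline (the path is made up; the crate version is assumed to match the one used here):

use calamine::{DataType, Ods, Reader};
use std::io::Cursor;

fn main() {
    // Stand-in for the collected binary input: raw .ods bytes from a made-up file.
    let bytes: Vec<u8> = std::fs::read("sheet.ods").expect("sample file");
    let mut ods = Ods::<_>::new(Cursor::new(bytes)).expect("could not load ods file");

    let sheet_names = ods.sheet_names().to_owned();
    for sheet_name in &sheet_names {
        if let Some(Ok(current_sheet)) = ods.worksheet_range(sheet_name) {
            for row in current_sheet.rows() {
                for cell in row {
                    // Same cell mapping as the command above.
                    match cell {
                        DataType::Empty => print!("<nothing>\t"),
                        DataType::String(s) => print!("{}\t", s),
                        DataType::Float(f) => print!("{}\t", f),
                        DataType::Int(i) => print!("{}\t", i),
                        DataType::Bool(b) => print!("{}\t", b),
                        _ => print!("<other>\t"),
                    }
                }
                println!();
            }
        }
    }
}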
@ -107,9 +107,9 @@ fn convert_sqlite_value_to_nu_value(value: ValueRef, tag: impl Into<Tag> + Clone
|
||||
}
|
||||
ValueRef::Integer(i) => UntaggedValue::int(i).into_value(tag),
|
||||
ValueRef::Real(f) => UntaggedValue::decimal(f).into_value(tag),
|
||||
t @ ValueRef::Text(_) => {
|
||||
ValueRef::Text(s) => {
|
||||
// this unwrap is safe because we know the ValueRef is Text.
|
||||
UntaggedValue::Primitive(Primitive::String(t.as_str().unwrap().to_string()))
|
||||
UntaggedValue::Primitive(Primitive::String(String::from_utf8_lossy(s).to_string()))
|
||||
.into_value(tag)
|
||||
}
|
||||
ValueRef::Blob(u) => UntaggedValue::binary(u.to_owned()).into_value(tag),
|
||||
@ -138,39 +138,25 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
|
||||
for value in values {
|
||||
let value_tag = &value.tag;
|
||||
match value.value {
|
||||
UntaggedValue::Primitive(Primitive::Binary(vb)) =>
|
||||
match from_sqlite_bytes_to_value(vb, tag.clone()) {
|
||||
Ok(x) => match x {
|
||||
Value { value: UntaggedValue::Table(list), .. } => {
|
||||
for l in list {
|
||||
yield ReturnSuccess::value(l);
|
||||
}
|
||||
}
|
||||
_ => yield ReturnSuccess::value(x),
|
||||
}
|
||||
Err(_) => {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Could not parse as SQLite",
|
||||
"input cannot be parsed as SQLite",
|
||||
&tag,
|
||||
"value originates from here",
|
||||
value_tag,
|
||||
))
|
||||
}
|
||||
let bytes = input.collect_binary(tag.clone()).await?;
|
||||
match from_sqlite_bytes_to_value(bytes.item, tag.clone()) {
|
||||
Ok(x) => match x {
|
||||
Value { value: UntaggedValue::Table(list), .. } => {
|
||||
for l in list {
|
||||
yield ReturnSuccess::value(l);
|
||||
}
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected binary data from pipeline",
|
||||
"requires binary data input",
|
||||
}
|
||||
_ => yield ReturnSuccess::value(x),
|
||||
}
|
||||
Err(err) => {
|
||||
println!("{:?}", err);
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Could not parse as SQLite",
|
||||
"input cannot be parsed as SQLite",
|
||||
&tag,
|
||||
"value originates from here",
|
||||
value_tag,
|
||||
)),
|
||||
|
||||
bytes.tag,
|
||||
))
|
||||
}
|
||||
}
|
||||
};
|
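Note: two things change in the SQLite path: text cells are decoded with `String::from_utf8_lossy` instead of an `unwrap`, and the whole pipeline is now collected as binary up front (`collect_binary`) rather than iterating values one by one. A small sketch of the cell conversion, assuming `rusqlite`'s `ValueRef` (whose `Text` and `Blob` variants carry raw byte slices), which is what the surrounding code appears to use:

use rusqlite::types::ValueRef;

// Render a single SQLite cell as text; lossy on invalid UTF-8 instead of panicking.
fn cell_to_string(value: ValueRef) -> String {
    match value {
        ValueRef::Null => String::from("<null>"),
        ValueRef::Integer(i) => i.to_string(),
        ValueRef::Real(f) => f.to_string(),
        ValueRef::Text(s) => String::from_utf8_lossy(s).to_string(),
        ValueRef::Blob(b) => format!("<{} bytes of binary>", b.len()),
    }
}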
@ -27,12 +27,17 @@ impl WholeStreamCommand for FromSSV {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(STRING_REPRESENTATION)
|
||||
.switch("headerless", "don't treat the first row as column names")
|
||||
.switch("aligned-columns", "assume columns are aligned")
|
||||
.switch(
|
||||
"headerless",
|
||||
"don't treat the first row as column names",
|
||||
None,
|
||||
)
|
||||
.switch("aligned-columns", "assume columns are aligned", Some('a'))
|
||||
.named(
|
||||
"minimum-spaces",
|
||||
SyntaxShape::Int,
|
||||
"the mininum spaces to separate columns",
|
||||
"the minimum spaces to separate columns",
|
||||
Some('m'),
|
||||
)
|
||||
}
|
||||
|
||||
@ -183,7 +188,7 @@ fn parse_separated_columns<'a>(
|
||||
let headers = (1..=num_columns)
|
||||
.map(|i| format!("Column{}", i))
|
||||
.collect::<Vec<String>>();
|
||||
collect(headers, ls.iter().map(|s| s.as_ref()), separator)
|
||||
collect(headers, ls.into_iter(), separator)
|
||||
};
|
||||
|
||||
match headers {
|
||||
@ -197,15 +202,17 @@ fn string_to_table(
|
||||
headerless: bool,
|
||||
aligned_columns: bool,
|
||||
split_at: usize,
|
||||
) -> Option<Vec<Vec<(String, String)>>> {
|
||||
) -> Vec<Vec<(String, String)>> {
|
||||
let mut lines = s.lines().filter(|l| !l.trim().is_empty());
|
||||
let separator = " ".repeat(std::cmp::max(split_at, 1));
|
||||
|
||||
let (ls, header_options) = if headerless {
|
||||
(lines, HeaderOptions::WithoutHeaders)
|
||||
} else {
|
||||
let headers = lines.next()?;
|
||||
(lines, HeaderOptions::WithHeaders(headers))
|
||||
match lines.next() {
|
||||
Some(header) => (lines, HeaderOptions::WithHeaders(header)),
|
||||
None => return vec![],
|
||||
}
|
||||
};
|
||||
|
||||
let f = if aligned_columns {
|
||||
@ -214,11 +221,7 @@ fn string_to_table(
|
||||
parse_separated_columns
|
||||
};
|
||||
|
||||
let parsed = f(ls, header_options, &separator);
|
||||
match parsed.len() {
|
||||
0 => None,
|
||||
_ => Some(parsed),
|
||||
}
|
||||
f(ls, header_options, &separator)
|
||||
}
|
||||
|
||||
fn from_ssv_string_to_value(
|
||||
@ -229,7 +232,7 @@ fn from_ssv_string_to_value(
|
||||
tag: impl Into<Tag>,
|
||||
) -> Option<Value> {
|
||||
let tag = tag.into();
|
||||
let rows = string_to_table(s, headerless, aligned_columns, split_at)?
|
||||
let rows = string_to_table(s, headerless, aligned_columns, split_at)
|
||||
.iter()
|
||||
.map(|row| {
|
||||
let mut tagged_dict = TaggedDictBuilder::new(&tag);
|
||||
@ -256,45 +259,26 @@ fn from_ssv(
|
||||
RunnableContext { input, name, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
let mut concat_string = String::new();
|
||||
let mut latest_tag: Option<Tag> = None;
|
||||
let concat_string = input.collect_string(name.clone()).await?;
|
||||
let split_at = match minimum_spaces {
|
||||
Some(number) => number.item,
|
||||
None => DEFAULT_MINIMUM_SPACES
|
||||
};
|
||||
|
||||
for value in values {
|
||||
let value_tag = value.tag.clone();
|
||||
latest_tag = Some(value_tag.clone());
|
||||
if let Ok(s) = value.as_string() {
|
||||
concat_string.push_str(&s);
|
||||
}
|
||||
else {
|
||||
yield Err(ShellError::labeled_error_with_secondary (
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
&name,
|
||||
"value originates from here",
|
||||
&value_tag
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
match from_ssv_string_to_value(&concat_string, headerless, aligned_columns, split_at, name.clone()) {
|
||||
match from_ssv_string_to_value(&concat_string.item, headerless, aligned_columns, split_at, name.clone()) {
|
||||
Some(x) => match x {
|
||||
Value { value: UntaggedValue::Table(list), ..} => {
|
||||
for l in list { yield ReturnSuccess::value(l) }
|
||||
}
|
||||
x => yield ReturnSuccess::value(x)
|
||||
},
|
||||
None => if let Some(tag) = latest_tag {
|
||||
None => {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Could not parse as SSV",
|
||||
"input cannot be parsed ssv",
|
||||
&name,
|
||||
"value originates from here",
|
||||
&tag,
|
||||
&concat_string.tag,
|
||||
))
|
||||
},
|
||||
}
|
||||
@ -323,10 +307,10 @@ mod tests {
|
||||
let result = string_to_table(input, false, true, 1);
|
||||
assert_eq!(
|
||||
result,
|
||||
Some(vec![
|
||||
vec![
|
||||
vec![owned("a", "1"), owned("b", "2")],
|
||||
vec![owned("a", "3"), owned("b", "4")]
|
||||
])
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
@ -338,10 +322,7 @@ mod tests {
|
||||
2
|
||||
"#;
|
||||
let result = string_to_table(input, false, true, 1);
|
||||
assert_eq!(
|
||||
result,
|
||||
Some(vec![vec![owned("a", "1")], vec![owned("a", "2")]])
|
||||
);
|
||||
assert_eq!(result, vec![vec![owned("a", "1")], vec![owned("a", "2")]]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -354,21 +335,14 @@ mod tests {
|
||||
let result = string_to_table(input, true, true, 1);
|
||||
assert_eq!(
|
||||
result,
|
||||
Some(vec![
|
||||
vec![
|
||||
vec![owned("Column1", "a"), owned("Column2", "b")],
|
||||
vec![owned("Column1", "1"), owned("Column2", "2")],
|
||||
vec![owned("Column1", "3"), owned("Column2", "4")]
|
||||
])
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_returns_none_given_an_empty_string() {
|
||||
let input = "";
|
||||
let result = string_to_table(input, true, true, 1);
|
||||
assert!(result.is_none());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_allows_a_predefined_number_of_spaces() {
|
||||
let input = r#"
|
||||
@ -380,13 +354,13 @@ mod tests {
|
||||
let result = string_to_table(input, false, true, 3);
|
||||
assert_eq!(
|
||||
result,
|
||||
Some(vec![
|
||||
vec![
|
||||
vec![
|
||||
owned("column a", "entry 1"),
|
||||
owned("column b", "entry number 2")
|
||||
],
|
||||
vec![owned("column a", "3"), owned("column b", "four")]
|
||||
])
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
@ -399,10 +373,10 @@ mod tests {
|
||||
|
||||
let trimmed = |s: &str| s.trim() == s;
|
||||
|
||||
let result = string_to_table(input, false, true, 2).unwrap();
|
||||
let result = string_to_table(input, false, true, 2);
|
||||
assert!(result
|
||||
.iter()
|
||||
.all(|row| row.iter().all(|(a, b)| trimmed(a) && trimmed(b))))
|
||||
.all(|row| row.iter().all(|(a, b)| trimmed(a) && trimmed(b))));
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -414,7 +388,7 @@ mod tests {
|
||||
val7 val8
|
||||
"#;
|
||||
|
||||
let result = string_to_table(input, false, true, 2).unwrap();
|
||||
let result = string_to_table(input, false, true, 2);
|
||||
assert_eq!(
|
||||
result,
|
||||
vec![
|
||||
@ -434,7 +408,16 @@ mod tests {
|
||||
owned("col C", "val8")
|
||||
],
|
||||
]
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_can_produce_an_empty_stream_for_header_only_input() {
|
||||
let input = "colA col B";
|
||||
|
||||
let result = string_to_table(input, false, true, 2);
|
||||
let expected: Vec<Vec<(String, String)>> = vec![];
|
||||
assert_eq!(expected, result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -444,14 +427,14 @@ mod tests {
|
||||
val1 val2 trailing value that should be included
|
||||
"#;
|
||||
|
||||
let result = string_to_table(input, false, true, 2).unwrap();
|
||||
let result = string_to_table(input, false, true, 2);
|
||||
assert_eq!(
|
||||
result,
|
||||
vec![vec![
|
||||
owned("colA", "val1"),
|
||||
owned("col B", "val2 trailing value that should be included"),
|
||||
],]
|
||||
)
|
||||
]]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -462,7 +445,7 @@ mod tests {
|
||||
last
|
||||
"#;
|
||||
|
||||
let result = string_to_table(input, true, true, 2).unwrap();
|
||||
let result = string_to_table(input, true, true, 2);
|
||||
assert_eq!(
|
||||
result,
|
||||
vec![
|
||||
@ -488,7 +471,7 @@ mod tests {
|
||||
owned("Column5", "last")
|
||||
],
|
||||
]
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -499,10 +482,10 @@ mod tests {
|
||||
kubernetes-ro component=apiserver,provider=kubernetes <none> 172.30.0.1 80/TCP
|
||||
"#;
|
||||
|
||||
let aligned_columns_headerless = string_to_table(input, true, true, 2).unwrap();
|
||||
let separator_headerless = string_to_table(input, true, false, 2).unwrap();
|
||||
let aligned_columns_with_headers = string_to_table(input, false, true, 2).unwrap();
|
||||
let separator_with_headers = string_to_table(input, false, false, 2).unwrap();
|
||||
let aligned_columns_headerless = string_to_table(input, true, true, 2);
|
||||
let separator_headerless = string_to_table(input, true, false, 2);
|
||||
let aligned_columns_with_headers = string_to_table(input, false, true, 2);
|
||||
let separator_with_headers = string_to_table(input, false, false, 2);
|
||||
assert_eq!(aligned_columns_headerless, separator_headerless);
|
||||
assert_eq!(aligned_columns_with_headers, separator_with_headers);
|
||||
}
|
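Note: `string_to_table` now always returns a `Vec` (empty for blank or header-only input) instead of an `Option`, which is what lets the tests below compare against plain vectors and lets header-only input produce an empty stream. A condensed, illustrative sketch of the separator-based path only (names and simplifications are mine, not nu's implementation, and the aligned-columns mode is omitted):

// Split whitespace-separated text into (header, value) rows, using
// `min_spaces` consecutive spaces as the column separator.
fn rows_from_separated_text(text: &str, min_spaces: usize) -> Vec<Vec<(String, String)>> {
    let separator = " ".repeat(std::cmp::max(min_spaces, 1));
    let mut lines = text.lines().filter(|l| !l.trim().is_empty());

    // Header-only or empty input yields an empty table rather than None.
    let headers: Vec<String> = match lines.next() {
        Some(header) => header
            .split(separator.as_str())
            .map(|h| h.trim().to_string())
            .filter(|h| !h.is_empty())
            .collect(),
        None => return vec![],
    };

    lines
        .map(|line| {
            headers
                .iter()
                .cloned()
                .zip(
                    line.split(separator.as_str())
                        .map(|cell| cell.trim().to_string())
                        .filter(|cell| !cell.is_empty()),
                )
                .collect()
        })
        .collect()
}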
@ -69,33 +69,11 @@ pub fn from_toml(
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let tag = args.name_tag();
|
||||
let name_span = tag.span;
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
|
||||
let mut concat_string = String::new();
|
||||
let mut latest_tag: Option<Tag> = None;
|
||||
|
||||
for value in values {
|
||||
latest_tag = Some(value.tag.clone());
|
||||
let value_span = value.tag.span;
|
||||
if let Ok(s) = value.as_string() {
|
||||
concat_string.push_str(&s);
|
||||
}
|
||||
else {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
name_span,
|
||||
"value originates from here",
|
||||
value_span,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
match from_toml_string_to_value(concat_string, tag.clone()) {
|
||||
let concat_string = input.collect_string(tag.clone()).await?;
|
||||
match from_toml_string_to_value(concat_string.item, tag.clone()) {
|
||||
Ok(x) => match x {
|
||||
Value { value: UntaggedValue::Table(list), .. } => {
|
||||
for l in list {
|
||||
@ -104,15 +82,15 @@ pub fn from_toml(
|
||||
}
|
||||
x => yield ReturnSuccess::value(x),
|
||||
},
|
||||
Err(_) => if let Some(last_tag) = latest_tag {
|
||||
Err(_) => {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Could not parse as TOML",
|
||||
"input cannot be parsed as TOML",
|
||||
&tag,
|
||||
"value originates from here",
|
||||
last_tag,
|
||||
concat_string.tag,
|
||||
))
|
||||
} ,
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -17,8 +17,11 @@ impl WholeStreamCommand for FromTSV {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("from-tsv")
|
||||
.switch("headerless", "don't treat the first row as column names")
|
||||
Signature::build("from-tsv").switch(
|
||||
"headerless",
|
||||
"don't treat the first row as column names",
|
||||
None,
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
@ -1,7 +1,7 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{ReturnSuccess, Signature, TaggedDictBuilder, UntaggedValue, Value};
|
||||
use nu_protocol::{ReturnSuccess, Signature, TaggedDictBuilder, UntaggedValue};
|
||||
|
||||
pub struct FromURL;
|
||||
|
||||
@ -30,32 +30,12 @@ impl WholeStreamCommand for FromURL {
|
||||
fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let tag = args.name_tag();
|
||||
let name_span = tag.span;
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
let concat_string = input.collect_string(tag.clone()).await?;
|
||||
|
||||
let mut concat_string = String::new();
|
||||
let mut latest_tag: Option<Tag> = None;
|
||||
|
||||
for value in values {
|
||||
latest_tag = Some(value.tag.clone());
|
||||
let value_span = value.tag.span;
|
||||
if let Ok(s) = value.as_string() {
|
||||
concat_string.push_str(&s);
|
||||
} else {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
name_span,
|
||||
"value originates from here",
|
||||
value_span,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
let result = serde_urlencoded::from_str::<Vec<(String, String)>>(&concat_string);
|
||||
let result = serde_urlencoded::from_str::<Vec<(String, String)>>(&concat_string.item);
|
||||
|
||||
match result {
|
||||
Ok(result) => {
|
||||
@ -68,15 +48,13 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
|
||||
yield ReturnSuccess::value(row.into_value());
|
||||
}
|
||||
_ => {
|
||||
if let Some(last_tag) = latest_tag {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"String not compatible with url-encoding",
|
||||
"input not url-encoded",
|
||||
tag,
|
||||
"value originates from here",
|
||||
last_tag,
|
||||
));
|
||||
}
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"String not compatible with url-encoding",
|
||||
"input not url-encoded",
|
||||
tag,
|
||||
"value originates from here",
|
||||
concat_string.tag,
|
||||
));
|
||||
}
|
||||
}
|
||||
};
|
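Note: the URL decoding itself is untouched; `serde_urlencoded::from_str` turns a query string into key/value pairs, and only the error reporting now points at the collected input's tag instead of a per-value tag. Standalone, with a made-up query string:

fn main() {
    let query = "bread=baguette&cheese=comt%C3%A9";

    match serde_urlencoded::from_str::<Vec<(String, String)>>(query) {
        Ok(pairs) => {
            for (key, value) in pairs {
                println!("{} = {}", key, value); // cheese = comté
            }
        }
        Err(e) => eprintln!("input not url-encoded: {}", e),
    }
}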
crates/nu-cli/src/commands/from_vcf.rs (new file, 102 lines)
@ -0,0 +1,102 @@
|
||||
extern crate ical;
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use ical::parser::vcard::component::*;
|
||||
use ical::property::Property;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{Primitive, ReturnSuccess, Signature, TaggedDictBuilder, UntaggedValue, Value};
|
||||
use std::io::BufReader;
|
||||
|
||||
pub struct FromVcf;
|
||||
|
||||
impl WholeStreamCommand for FromVcf {
|
||||
fn name(&self) -> &str {
|
||||
"from-vcf"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("from-vcf")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Parse text as .vcf and create table."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
from_vcf(args, registry)
|
||||
}
|
||||
}
|
||||
|
||||
fn from_vcf(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let tag = args.name_tag();
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream! {
|
||||
let input_string = input.collect_string(tag.clone()).await?.item;
|
||||
let input_bytes = input_string.as_bytes();
|
||||
let buf_reader = BufReader::new(input_bytes);
|
||||
let parser = ical::VcardParser::new(buf_reader);
|
||||
|
||||
for contact in parser {
|
||||
match contact {
|
||||
Ok(c) => yield ReturnSuccess::value(contact_to_value(c, tag.clone())),
|
||||
Err(_) => yield Err(ShellError::labeled_error(
|
||||
"Could not parse as .vcf",
|
||||
"input cannot be parsed as .vcf",
|
||||
tag.clone()
|
||||
)),
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
||||
|
||||
fn contact_to_value(contact: VcardContact, tag: Tag) -> Value {
|
||||
let mut row = TaggedDictBuilder::new(tag.clone());
|
||||
row.insert_untagged("properties", properties_to_value(contact.properties, tag));
|
||||
row.into_value()
|
||||
}
|
||||
|
||||
fn properties_to_value(properties: Vec<Property>, tag: Tag) -> UntaggedValue {
|
||||
UntaggedValue::table(
|
||||
&properties
|
||||
.into_iter()
|
||||
.map(|prop| {
|
||||
let mut row = TaggedDictBuilder::new(tag.clone());
|
||||
|
||||
let name = UntaggedValue::string(prop.name);
|
||||
let value = match prop.value {
|
||||
Some(val) => UntaggedValue::string(val),
|
||||
None => UntaggedValue::Primitive(Primitive::Nothing),
|
||||
};
|
||||
let params = match prop.params {
|
||||
Some(param_list) => params_to_value(param_list, tag.clone()).into(),
|
||||
None => UntaggedValue::Primitive(Primitive::Nothing),
|
||||
};
|
||||
|
||||
row.insert_untagged("name", name);
|
||||
row.insert_untagged("value", value);
|
||||
row.insert_untagged("params", params);
|
||||
row.into_value()
|
||||
})
|
||||
.collect::<Vec<Value>>(),
|
||||
)
|
||||
}
|
||||
|
||||
fn params_to_value(params: Vec<(String, Vec<String>)>, tag: Tag) -> Value {
|
||||
let mut row = TaggedDictBuilder::new(tag);
|
||||
|
||||
for (param_name, param_values) in params {
|
||||
let values: Vec<Value> = param_values.into_iter().map(|val| val.into()).collect();
|
||||
let values = UntaggedValue::table(&values);
|
||||
row.insert_untagged(param_name, values);
|
||||
}
|
||||
|
||||
row.into_value()
|
||||
}
|
crates/nu-cli/src/commands/from_xlsx.rs (new file, 99 lines)
@ -0,0 +1,99 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use crate::TaggedListBuilder;
|
||||
use calamine::*;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{ReturnSuccess, Signature, TaggedDictBuilder, UntaggedValue};
|
||||
use std::io::Cursor;
|
||||
|
||||
pub struct FromXLSX;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct FromXLSXArgs {
|
||||
headerless: bool,
|
||||
}
|
||||
|
||||
impl WholeStreamCommand for FromXLSX {
|
||||
fn name(&self) -> &str {
|
||||
"from-xlsx"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("from-xlsx").switch(
|
||||
"headerless",
|
||||
"don't treat the first row as column names",
|
||||
None,
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Parse binary Excel(.xlsx) data and create table."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
args.process(registry, from_xlsx)?.run()
|
||||
}
|
||||
}
|
||||
|
||||
fn from_xlsx(
|
||||
FromXLSXArgs {
|
||||
headerless: _headerless,
|
||||
}: FromXLSXArgs,
|
||||
runnable_context: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let input = runnable_context.input;
|
||||
let tag = runnable_context.name;
|
||||
|
||||
let stream = async_stream! {
|
||||
let value = input.collect_binary(tag.clone()).await?;
|
||||
|
||||
let mut buf: Cursor<Vec<u8>> = Cursor::new(value.item);
|
||||
let mut xls = Xlsx::<_>::new(buf).map_err(|_| {
|
||||
ShellError::labeled_error("Could not load xlsx file", "could not load xlsx file", &tag)
|
||||
})?;
|
||||
|
||||
let mut dict = TaggedDictBuilder::new(&tag);
|
||||
|
||||
let sheet_names = xls.sheet_names().to_owned();
|
||||
|
||||
for sheet_name in &sheet_names {
|
||||
let mut sheet_output = TaggedListBuilder::new(&tag);
|
||||
|
||||
if let Some(Ok(current_sheet)) = xls.worksheet_range(sheet_name) {
|
||||
for row in current_sheet.rows() {
|
||||
let mut row_output = TaggedDictBuilder::new(&tag);
|
||||
for (i, cell) in row.iter().enumerate() {
|
||||
let value = match cell {
|
||||
DataType::Empty => UntaggedValue::nothing(),
|
||||
DataType::String(s) => UntaggedValue::string(s),
|
||||
DataType::Float(f) => UntaggedValue::decimal(*f),
|
||||
DataType::Int(i) => UntaggedValue::int(*i),
|
||||
DataType::Bool(b) => UntaggedValue::boolean(*b),
|
||||
_ => UntaggedValue::nothing(),
|
||||
};
|
||||
|
||||
row_output.insert_untagged(&format!("Column{}", i), value);
|
||||
}
|
||||
|
||||
sheet_output.push_untagged(row_output.into_untagged_value());
|
||||
}
|
||||
|
||||
dict.insert_untagged(sheet_name, sheet_output.into_untagged_value());
|
||||
} else {
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Could not load sheet",
|
||||
"could not load sheet",
|
||||
&tag,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
yield ReturnSuccess::value(dict.into_value());
|
||||
};
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
crates/nu-cli/src/commands/from_xml.rs (new file, 303 lines)
@ -0,0 +1,303 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{Primitive, ReturnSuccess, Signature, TaggedDictBuilder, UntaggedValue, Value};
|
||||
|
||||
pub struct FromXML;
|
||||
|
||||
impl WholeStreamCommand for FromXML {
|
||||
fn name(&self) -> &str {
|
||||
"from-xml"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("from-xml")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Parse text as .xml and create table."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
from_xml(args, registry)
|
||||
}
|
||||
}
|
||||
|
||||
fn from_attributes_to_value(attributes: &[roxmltree::Attribute], tag: impl Into<Tag>) -> Value {
|
||||
let tag = tag.into();
|
||||
|
||||
let mut collected = TaggedDictBuilder::new(tag);
|
||||
for a in attributes {
|
||||
collected.insert_untagged(String::from(a.name()), UntaggedValue::string(a.value()));
|
||||
}
|
||||
|
||||
collected.into_value()
|
||||
}
|
||||
|
||||
fn from_node_to_value<'a, 'd>(n: &roxmltree::Node<'a, 'd>, tag: impl Into<Tag>) -> Value {
|
||||
let tag = tag.into();
|
||||
|
||||
if n.is_element() {
|
||||
let name = n.tag_name().name().trim().to_string();
|
||||
|
||||
let mut children_values = vec![];
|
||||
for c in n.children() {
|
||||
children_values.push(from_node_to_value(&c, &tag));
|
||||
}
|
||||
|
||||
let children_values: Vec<Value> = children_values
|
||||
.into_iter()
|
||||
.filter(|x| match x {
|
||||
Value {
|
||||
value: UntaggedValue::Primitive(Primitive::String(f)),
|
||||
..
|
||||
} => {
|
||||
!f.trim().is_empty() // non-whitespace characters?
|
||||
}
|
||||
_ => true,
|
||||
})
|
||||
.collect();
|
||||
|
||||
let mut collected = TaggedDictBuilder::new(&tag);
|
||||
|
||||
let attribute_value: Value = from_attributes_to_value(&n.attributes(), &tag);
|
||||
|
||||
let mut row = TaggedDictBuilder::new(&tag);
|
||||
row.insert_untagged(
|
||||
String::from("children"),
|
||||
UntaggedValue::Table(children_values),
|
||||
);
|
||||
row.insert_untagged(String::from("attributes"), attribute_value);
|
||||
collected.insert_untagged(name, row.into_value());
|
||||
|
||||
collected.into_value()
|
||||
} else if n.is_comment() {
|
||||
UntaggedValue::string("<comment>").into_value(tag)
|
||||
} else if n.is_pi() {
|
||||
UntaggedValue::string("<processing_instruction>").into_value(tag)
|
||||
} else if n.is_text() {
|
||||
match n.text() {
|
||||
Some(text) => UntaggedValue::string(text).into_value(tag),
|
||||
None => UntaggedValue::string("<error>").into_value(tag),
|
||||
}
|
||||
} else {
|
||||
UntaggedValue::string("<unknown>").into_value(tag)
|
||||
}
|
||||
}
|
||||
|
||||
fn from_document_to_value(d: &roxmltree::Document, tag: impl Into<Tag>) -> Value {
|
||||
from_node_to_value(&d.root_element(), tag)
|
||||
}
|
||||
|
||||
pub fn from_xml_string_to_value(s: String, tag: impl Into<Tag>) -> Result<Value, roxmltree::Error> {
|
||||
let parsed = roxmltree::Document::parse(&s)?;
|
||||
Ok(from_document_to_value(&parsed, tag))
|
||||
}
|
||||
|
||||
fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let tag = args.name_tag();
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream! {
|
||||
let concat_string = input.collect_string(tag.clone()).await?;
|
||||
|
||||
match from_xml_string_to_value(concat_string.item, tag.clone()) {
|
||||
Ok(x) => match x {
|
||||
Value { value: UntaggedValue::Table(list), .. } => {
|
||||
for l in list {
|
||||
yield ReturnSuccess::value(l);
|
||||
}
|
||||
}
|
||||
x => yield ReturnSuccess::value(x),
|
||||
},
|
||||
Err(_) => {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Could not parse as XML",
|
||||
"input cannot be parsed as XML",
|
||||
&tag,
|
||||
"value originates from here",
|
||||
&concat_string.tag,
|
||||
))
|
||||
} ,
|
||||
}
|
||||
};
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
|
||||
use crate::commands::from_xml;
|
||||
use indexmap::IndexMap;
|
||||
use nu_protocol::{UntaggedValue, Value};
|
||||
use nu_source::*;
|
||||
|
||||
fn string(input: impl Into<String>) -> Value {
|
||||
UntaggedValue::string(input.into()).into_untagged_value()
|
||||
}
|
||||
|
||||
fn row(entries: IndexMap<String, Value>) -> Value {
|
||||
UntaggedValue::row(entries).into_untagged_value()
|
||||
}
|
||||
|
||||
fn table(list: &[Value]) -> Value {
|
||||
UntaggedValue::table(list).into_untagged_value()
|
||||
}
|
||||
|
||||
fn parse(xml: &str) -> Result<Value, roxmltree::Error> {
|
||||
from_xml::from_xml_string_to_value(xml.to_string(), Tag::unknown())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_empty_element() -> Result<(), roxmltree::Error> {
|
||||
let source = "<nu></nu>";
|
||||
|
||||
assert_eq!(
|
||||
parse(source)?,
|
||||
row(indexmap! {
|
||||
"nu".into() => row(indexmap! {
|
||||
"children".into() => table(&[]),
|
||||
"attributes".into() => row(indexmap! {})
|
||||
})
|
||||
})
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_element_with_text() -> Result<(), roxmltree::Error> {
|
||||
let source = "<nu>La era de los tres caballeros</nu>";
|
||||
|
||||
assert_eq!(
|
||||
parse(source)?,
|
||||
row(indexmap! {
|
||||
"nu".into() => row(indexmap! {
|
||||
"children".into() => table(&[string("La era de los tres caballeros")]),
|
||||
"attributes".into() => row(indexmap! {})
|
||||
})
|
||||
})
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_element_with_elements() -> Result<(), roxmltree::Error> {
|
||||
let source = "\
|
||||
<nu>
|
||||
<dev>Andrés</dev>
|
||||
<dev>Jonathan</dev>
|
||||
<dev>Yehuda</dev>
|
||||
</nu>";
|
||||
|
||||
assert_eq!(
|
||||
parse(source)?,
|
||||
row(indexmap! {
|
||||
"nu".into() => row(indexmap! {
|
||||
"children".into() => table(&[
|
||||
row(indexmap! {
|
||||
"dev".into() => row(indexmap! {
|
||||
"children".into() => table(&[string("Andrés")]),
|
||||
"attributes".into() => row(indexmap! {})
|
||||
})
|
||||
}),
|
||||
row(indexmap! {
|
||||
"dev".into() => row(indexmap! {
|
||||
"children".into() => table(&[string("Jonathan")]),
|
||||
"attributes".into() => row(indexmap! {})
|
||||
})
|
||||
}),
|
||||
row(indexmap! {
|
||||
"dev".into() => row(indexmap! {
|
||||
"children".into() => table(&[string("Yehuda")]),
|
||||
"attributes".into() => row(indexmap! {})
|
||||
})
|
||||
})
|
||||
]),
|
||||
"attributes".into() => row(indexmap! {})
|
||||
})
|
||||
})
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_element_with_attribute() -> Result<(), roxmltree::Error> {
|
||||
let source = "\
|
||||
<nu version=\"2.0\">
|
||||
</nu>";
|
||||
|
||||
assert_eq!(
|
||||
parse(source)?,
|
||||
row(indexmap! {
|
||||
"nu".into() => row(indexmap! {
|
||||
"children".into() => table(&[]),
|
||||
"attributes".into() => row(indexmap! {
|
||||
"version".into() => string("2.0")
|
||||
})
|
||||
})
|
||||
})
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_element_with_attribute_and_element() -> Result<(), roxmltree::Error> {
|
||||
let source = "\
|
||||
<nu version=\"2.0\">
|
||||
<version>2.0</version>
|
||||
</nu>";
|
||||
|
||||
assert_eq!(
|
||||
parse(source)?,
|
||||
row(indexmap! {
|
||||
"nu".into() => row(indexmap! {
|
||||
"children".into() => table(&[
|
||||
row(indexmap! {
|
||||
"version".into() => row(indexmap! {
|
||||
"children".into() => table(&[string("2.0")]),
|
||||
"attributes".into() => row(indexmap! {})
|
||||
})
|
||||
})
|
||||
]),
|
||||
"attributes".into() => row(indexmap! {
|
||||
"version".into() => string("2.0")
|
||||
})
|
||||
})
|
||||
})
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parses_element_with_multiple_attributes() -> Result<(), roxmltree::Error> {
|
||||
let source = "\
|
||||
<nu version=\"2.0\" age=\"25\">
|
||||
</nu>";
|
||||
|
||||
assert_eq!(
|
||||
parse(source)?,
|
||||
row(indexmap! {
|
||||
"nu".into() => row(indexmap! {
|
||||
"children".into() => table(&[]),
|
||||
"attributes".into() => row(indexmap! {
|
||||
"version".into() => string("2.0"),
|
||||
"age".into() => string("25")
|
||||
})
|
||||
})
|
||||
})
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
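Note: `from-xml` recurses over a `roxmltree` document: every element becomes a row holding a `children` table and an `attributes` row, while comments, processing instructions, and text nodes get placeholder strings or their text. A minimal standalone walk with the same crate and a made-up document:

fn main() {
    let source = "<nu version=\"2.0\"><dev>Andrés</dev><dev>Jonathan</dev></nu>";

    let doc = roxmltree::Document::parse(source).expect("valid XML");
    let root = doc.root_element();

    println!("root element: {}", root.tag_name().name());
    for attribute in root.attributes() {
        println!("attribute {} = {}", attribute.name(), attribute.value());
    }
    for child in root.children().filter(|n| n.is_element()) {
        // text() returns the element's first text child, if any.
        println!("child {}: {:?}", child.tag_name().name(), child.text());
    }
}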
@ -51,31 +51,49 @@ impl WholeStreamCommand for FromYML {
|
||||
}
|
||||
}
|
||||
|
||||
fn convert_yaml_value_to_nu_value(v: &serde_yaml::Value, tag: impl Into<Tag>) -> Value {
|
||||
fn convert_yaml_value_to_nu_value(
|
||||
v: &serde_yaml::Value,
|
||||
tag: impl Into<Tag>,
|
||||
) -> Result<Value, ShellError> {
|
||||
let tag = tag.into();
|
||||
|
||||
match v {
|
||||
Ok(match v {
|
||||
serde_yaml::Value::Bool(b) => UntaggedValue::boolean(*b).into_value(tag),
|
||||
serde_yaml::Value::Number(n) if n.is_i64() => {
|
||||
UntaggedValue::int(n.as_i64().unwrap()).into_value(tag)
|
||||
UntaggedValue::int(n.as_i64().ok_or_else(|| {
|
||||
ShellError::labeled_error(
|
||||
"Expected a compatible number",
|
||||
"expected a compatible number",
|
||||
&tag,
|
||||
)
|
||||
})?)
|
||||
.into_value(tag)
|
||||
}
|
||||
serde_yaml::Value::Number(n) if n.is_f64() => {
|
||||
UntaggedValue::decimal(n.as_f64().unwrap()).into_value(tag)
|
||||
UntaggedValue::decimal(n.as_f64().ok_or_else(|| {
|
||||
ShellError::labeled_error(
|
||||
"Expected a compatible number",
|
||||
"expected a compatible number",
|
||||
&tag,
|
||||
)
|
||||
})?)
|
||||
.into_value(tag)
|
||||
}
|
||||
serde_yaml::Value::String(s) => UntaggedValue::string(s).into_value(tag),
|
||||
serde_yaml::Value::Sequence(a) => UntaggedValue::Table(
|
||||
a.iter()
|
||||
serde_yaml::Value::Sequence(a) => {
|
||||
let result: Result<Vec<Value>, ShellError> = a
|
||||
.iter()
|
||||
.map(|x| convert_yaml_value_to_nu_value(x, &tag))
|
||||
.collect(),
|
||||
)
|
||||
.into_value(tag),
|
||||
.collect();
|
||||
UntaggedValue::Table(result?).into_value(tag)
|
||||
}
|
||||
serde_yaml::Value::Mapping(t) => {
|
||||
let mut collected = TaggedDictBuilder::new(&tag);
|
||||
|
||||
for (k, v) in t.iter() {
|
||||
match k {
|
||||
serde_yaml::Value::String(k) => {
|
||||
collected.insert_value(k.clone(), convert_yaml_value_to_nu_value(v, &tag));
|
||||
collected.insert_value(k.clone(), convert_yaml_value_to_nu_value(v, &tag)?);
|
||||
}
|
||||
_ => unimplemented!("Unknown key type"),
|
||||
}
|
||||
@ -85,45 +103,30 @@ fn convert_yaml_value_to_nu_value(v: &serde_yaml::Value, tag: impl Into<Tag>) ->
|
||||
}
|
||||
serde_yaml::Value::Null => UntaggedValue::Primitive(Primitive::Nothing).into_value(tag),
|
||||
x => unimplemented!("Unsupported yaml case: {:?}", x),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn from_yaml_string_to_value(s: String, tag: impl Into<Tag>) -> serde_yaml::Result<Value> {
|
||||
let v: serde_yaml::Value = serde_yaml::from_str(&s)?;
|
||||
Ok(convert_yaml_value_to_nu_value(&v, tag))
|
||||
pub fn from_yaml_string_to_value(s: String, tag: impl Into<Tag>) -> Result<Value, ShellError> {
|
||||
let tag = tag.into();
|
||||
let v: serde_yaml::Value = serde_yaml::from_str(&s).map_err(|x| {
|
||||
ShellError::labeled_error(
|
||||
format!("Could not load yaml: {}", x),
|
||||
"could not load yaml from text",
|
||||
&tag,
|
||||
)
|
||||
})?;
|
||||
Ok(convert_yaml_value_to_nu_value(&v, tag)?)
|
||||
}
|
||||
|
||||
fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let tag = args.name_tag();
|
||||
let name_span = tag.span;
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
let concat_string = input.collect_string(tag.clone()).await?;
|
||||
|
||||
let mut concat_string = String::new();
|
||||
let mut latest_tag: Option<Tag> = None;
|
||||
|
||||
for value in values {
|
||||
latest_tag = Some(value.tag.clone());
|
||||
let value_span = value.tag.span;
|
||||
|
||||
if let Ok(s) = value.as_string() {
|
||||
concat_string.push_str(&s);
|
||||
}
|
||||
else {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
name_span,
|
||||
"value originates from here",
|
||||
value_span,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
match from_yaml_string_to_value(concat_string, tag.clone()) {
|
||||
match from_yaml_string_to_value(concat_string.item, tag.clone()) {
|
||||
Ok(x) => match x {
|
||||
Value { value: UntaggedValue::Table(list), .. } => {
|
||||
for l in list {
|
||||
@ -132,15 +135,15 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
|
||||
}
|
||||
x => yield ReturnSuccess::value(x),
|
||||
},
|
||||
Err(_) => if let Some(last_tag) = latest_tag {
|
||||
Err(_) => {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Could not parse as YAML",
|
||||
"input cannot be parsed as YAML",
|
||||
&tag,
|
||||
"value originates from here",
|
||||
&last_tag,
|
||||
&concat_string.tag,
|
||||
))
|
||||
} ,
|
||||
}
|
||||
}
|
||||
};
|
||||
|
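Note: the YAML converter is now fallible end to end: loading errors and non-convertible numbers surface as `ShellError`s instead of `unwrap`/`unimplemented!` panics, and sequences collect their children through a `Result`. The same "match on `serde_yaml::Value` without panicking" shape, standalone and simplified to plain `String` errors:

use serde_yaml::Value;

// Render a YAML value as text, reporting unsupported cases instead of panicking.
fn describe(v: &Value) -> Result<String, String> {
    match v {
        Value::Null => Ok("nothing".to_string()),
        Value::Bool(b) => Ok(b.to_string()),
        Value::Number(n) if n.is_i64() => n
            .as_i64()
            .map(|i| i.to_string())
            .ok_or_else(|| "expected a compatible number".to_string()),
        Value::Number(n) => n
            .as_f64()
            .map(|f| f.to_string())
            .ok_or_else(|| "expected a compatible number".to_string()),
        Value::String(s) => Ok(s.clone()),
        Value::Sequence(items) => {
            let parts: Result<Vec<String>, String> = items.iter().map(describe).collect();
            Ok(parts?.join(", "))
        }
        other => Err(format!("unsupported yaml case: {:?}", other)),
    }
}

fn main() {
    let v: Value = serde_yaml::from_str("- 1\n- two\n- 3.5").expect("could not load yaml");
    println!("{}", describe(&v).expect("convertible")); // 1, two, 3.5
}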
crates/nu-cli/src/commands/get.rs (new file, 242 lines)
@ -0,0 +1,242 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use indexmap::set::IndexSet;
|
||||
use log::trace;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{
|
||||
did_you_mean, ColumnPath, PathMember, Primitive, ReturnSuccess, ReturnValue, Signature,
|
||||
SyntaxShape, UnspannedPathMember, UntaggedValue, Value,
|
||||
};
|
||||
use nu_source::span_for_spanned_list;
|
||||
use nu_value_ext::get_data_by_column_path;
|
||||
|
||||
pub struct Get;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct GetArgs {
|
||||
rest: Vec<ColumnPath>,
|
||||
}
|
||||
|
||||
impl WholeStreamCommand for Get {
|
||||
fn name(&self) -> &str {
|
||||
"get"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("get").rest(
|
||||
SyntaxShape::ColumnPath,
|
||||
"optionally return additional data by path",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Open given cells as text."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
args.process(registry, get)?.run()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_column_path(path: &ColumnPath, obj: &Value) -> Result<Value, ShellError> {
|
||||
let fields = path.clone();
|
||||
|
||||
get_data_by_column_path(
|
||||
obj,
|
||||
path,
|
||||
Box::new(move |(obj_source, column_path_tried, error)| {
|
||||
let path_members_span = span_for_spanned_list(fields.members().iter().map(|p| p.span));
|
||||
|
||||
match &obj_source.value {
|
||||
UntaggedValue::Table(rows) => match column_path_tried {
|
||||
PathMember {
|
||||
unspanned: UnspannedPathMember::String(column),
|
||||
..
|
||||
} => {
|
||||
let primary_label = format!("There isn't a column named '{}'", &column);
|
||||
|
||||
let suggestions: IndexSet<_> = rows
|
||||
.iter()
|
||||
.filter_map(|r| did_you_mean(&r, &column_path_tried))
|
||||
.map(|s| s[0].1.to_owned())
|
||||
.collect();
|
||||
let mut existing_columns: IndexSet<_> = IndexSet::default();
|
||||
let mut names: Vec<String> = vec![];
|
||||
|
||||
for row in rows {
|
||||
for field in row.data_descriptors() {
|
||||
if !existing_columns.contains(&field[..]) {
|
||||
existing_columns.insert(field.clone());
|
||||
names.push(field);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if names.is_empty() {
|
||||
return ShellError::labeled_error_with_secondary(
|
||||
"Unknown column",
|
||||
primary_label,
|
||||
column_path_tried.span,
|
||||
"Appears to contain rows. Try indexing instead.",
|
||||
column_path_tried.span.since(path_members_span),
|
||||
);
|
||||
} else {
|
||||
return ShellError::labeled_error_with_secondary(
|
||||
"Unknown column",
|
||||
primary_label,
|
||||
column_path_tried.span,
|
||||
format!(
|
||||
"Perhaps you meant '{}'? Columns available: {}",
|
||||
suggestions
|
||||
.iter()
|
||||
.map(|x| x.to_owned())
|
||||
.collect::<Vec<String>>()
|
||||
.join(","),
|
||||
names.join(",")
|
||||
),
|
||||
column_path_tried.span.since(path_members_span),
|
||||
);
|
||||
};
|
||||
}
|
||||
PathMember {
|
||||
unspanned: UnspannedPathMember::Int(idx),
|
||||
..
|
||||
} => {
|
||||
let total = rows.len();
|
||||
|
||||
let secondary_label = if total == 1 {
|
||||
"The table only has 1 row".to_owned()
|
||||
} else {
|
||||
format!("The table only has {} rows (0 to {})", total, total - 1)
|
||||
};
|
||||
|
||||
return ShellError::labeled_error_with_secondary(
|
||||
"Row not found",
|
||||
format!("There isn't a row indexed at {}", idx),
|
||||
column_path_tried.span,
|
||||
secondary_label,
|
||||
column_path_tried.span.since(path_members_span),
|
||||
);
|
||||
}
|
||||
},
|
||||
UntaggedValue::Row(columns) => match column_path_tried {
|
||||
PathMember {
|
||||
unspanned: UnspannedPathMember::String(column),
|
||||
..
|
||||
} => {
|
||||
let primary_label = format!("There isn't a column named '{}'", &column);
|
||||
|
||||
if let Some(suggestions) = did_you_mean(&obj_source, column_path_tried) {
|
||||
return ShellError::labeled_error_with_secondary(
|
||||
"Unknown column",
|
||||
primary_label,
|
||||
column_path_tried.span,
|
||||
format!(
|
||||
"Perhaps you meant '{}'? Columns available: {}",
|
||||
suggestions[0].1,
|
||||
&obj_source.data_descriptors().join(",")
|
||||
),
|
||||
column_path_tried.span.since(path_members_span),
|
||||
);
|
||||
}
|
||||
}
|
||||
PathMember {
|
||||
unspanned: UnspannedPathMember::Int(idx),
|
||||
..
|
||||
} => {
|
||||
return ShellError::labeled_error_with_secondary(
|
||||
"No rows available",
|
||||
format!("A row at '{}' can't be indexed.", &idx),
|
||||
column_path_tried.span,
|
||||
format!(
|
||||
"Appears to contain columns. Columns available: {}",
|
||||
columns.keys().join(",")
|
||||
),
|
||||
column_path_tried.span.since(path_members_span),
|
||||
)
|
||||
}
|
||||
},
|
||||
_ => {}
|
||||
}
|
||||
|
||||
if let Some(suggestions) = did_you_mean(&obj_source, column_path_tried) {
|
||||
return ShellError::labeled_error(
|
||||
"Unknown column",
|
||||
format!("did you mean '{}'?", suggestions[0].1),
|
||||
column_path_tried.span.since(path_members_span),
|
||||
);
|
||||
}
|
||||
|
||||
error
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn get(
|
||||
GetArgs { rest: mut fields }: GetArgs,
|
||||
RunnableContext { mut input, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
if fields.is_empty() {
|
||||
let stream = async_stream! {
|
||||
let mut vec = input.drain_vec().await;
|
||||
|
||||
let descs = nu_protocol::merge_descriptors(&vec);
|
||||
for desc in descs {
|
||||
yield ReturnSuccess::value(desc);
|
||||
}
|
||||
};
|
||||
|
||||
let stream: BoxStream<'static, ReturnValue> = stream.boxed();
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
} else {
|
||||
let member = fields.remove(0);
|
||||
trace!("get {:?} {:?}", member, fields);
|
||||
let stream = input
|
||||
.values
|
||||
.map(move |item| {
|
||||
let mut result = VecDeque::new();
|
||||
|
||||
let member = vec![member.clone()];
|
||||
|
||||
let column_paths = vec![&member, &fields]
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect::<Vec<&ColumnPath>>();
|
||||
|
||||
for path in column_paths {
|
||||
let res = get_column_path(&path, &item);
|
||||
|
||||
match res {
|
||||
Ok(got) => match got {
|
||||
Value {
|
||||
value: UntaggedValue::Table(rows),
|
||||
..
|
||||
} => {
|
||||
for item in rows {
|
||||
result.push_back(ReturnSuccess::value(item.clone()));
|
||||
}
|
||||
}
|
||||
Value {
|
||||
value: UntaggedValue::Primitive(Primitive::Nothing),
|
||||
..
|
||||
} => {}
|
||||
other => result.push_back(ReturnSuccess::value(other.clone())),
|
||||
},
|
||||
Err(reason) => result.push_back(ReturnSuccess::value(
|
||||
UntaggedValue::Error(reason).into_untagged_value(),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
futures::stream::iter(result)
|
||||
})
|
||||
.flatten();
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
||||
}
|
@ -74,7 +74,11 @@ pub fn group(
|
||||
for value in values {
|
||||
let group_key = get_data_by_key(&value, column_name.borrow_spanned());
|
||||
|
||||
if group_key.is_none() {
|
||||
if let Some(group_key) = group_key {
|
||||
let group_key = group_key.as_string()?.to_string();
|
||||
let group = groups.entry(group_key).or_insert(vec![]);
|
||||
group.push(value);
|
||||
} else {
|
||||
let possibilities = value.data_descriptors();
|
||||
|
||||
let mut possible_matches: Vec<_> = possibilities
|
||||
@ -98,10 +102,6 @@ pub fn group(
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
let group_key = group_key.unwrap().as_string()?.to_string();
|
||||
let group = groups.entry(group_key).or_insert(vec![]);
|
||||
group.push(value);
|
||||
}
|
||||
|
||||
let mut out = TaggedDictBuilder::new(&tag);
|
||||
@ -117,6 +117,7 @@ pub fn group(
|
||||
mod tests {
|
||||
use crate::commands::group_by::group;
|
||||
use indexmap::IndexMap;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{UntaggedValue, Value};
|
||||
use nu_source::*;
|
||||
|
||||
@ -128,7 +129,7 @@ mod tests {
|
||||
UntaggedValue::row(entries).into_untagged_value()
|
||||
}
|
||||
|
||||
fn table(list: &Vec<Value>) -> Value {
|
||||
fn table(list: &[Value]) -> Value {
|
||||
UntaggedValue::table(list).into_untagged_value()
|
||||
}
|
||||
|
||||
@ -165,54 +166,58 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn groups_table_by_date_column() {
|
||||
fn groups_table_by_date_column() -> Result<(), ShellError> {
|
||||
let for_key = String::from("date").tagged_unknown();
|
||||
|
||||
assert_eq!(
|
||||
group(&for_key, nu_releases_commiters(), Tag::unknown()).unwrap(),
|
||||
group(&for_key, nu_releases_commiters(), Tag::unknown())?,
|
||||
row(indexmap! {
|
||||
"August 23-2019".into() => table(&vec![
|
||||
"August 23-2019".into() => table(&[
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}),
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}),
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")})
|
||||
]),
|
||||
"October 10-2019".into() => table(&vec![
|
||||
"October 10-2019".into() => table(&[
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}),
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}),
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")})
|
||||
]),
|
||||
"Sept 24-2019".into() => table(&vec![
|
||||
"Sept 24-2019".into() => table(&[
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}),
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}),
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")})
|
||||
]),
|
||||
})
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn groups_table_by_country_column() {
|
||||
fn groups_table_by_country_column() -> Result<(), ShellError> {
|
||||
let for_key = String::from("country").tagged_unknown();
|
||||
|
||||
assert_eq!(
|
||||
group(&for_key, nu_releases_commiters(), Tag::unknown()).unwrap(),
|
||||
group(&for_key, nu_releases_commiters(), Tag::unknown())?,
|
||||
row(indexmap! {
|
||||
"EC".into() => table(&vec![
|
||||
"EC".into() => table(&[
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}),
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}),
|
||||
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")})
|
||||
]),
|
||||
"NZ".into() => table(&vec![
|
||||
"NZ".into() => table(&[
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}),
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}),
|
||||
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")})
|
||||
]),
|
||||
"US".into() => table(&vec![
|
||||
"US".into() => table(&[
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}),
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}),
|
||||
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}),
|
||||
]),
|
||||
})
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
crates/nu-cli/src/commands/headers.rs (new file, 80 lines)
@@ -0,0 +1,80 @@
use crate::commands::WholeStreamCommand;
use crate::context::CommandRegistry;
use crate::prelude::*;
use futures::stream::StreamExt;
use indexmap::IndexMap;
use nu_errors::ShellError;
use nu_protocol::Dictionary;
use nu_protocol::{ReturnSuccess, Signature, UntaggedValue, Value};

pub struct Headers;
#[derive(Deserialize)]
pub struct HeadersArgs {}

impl WholeStreamCommand for Headers {
    fn name(&self) -> &str {
        "headers"
    }
    fn signature(&self) -> Signature {
        Signature::build("headers")
    }
    fn usage(&self) -> &str {
        "Use the first row of the table as column names"
    }
    fn run(
        &self,
        args: CommandArgs,
        registry: &CommandRegistry,
    ) -> Result<OutputStream, ShellError> {
        args.process(registry, headers)?.run()
    }
}

pub fn headers(
    HeadersArgs {}: HeadersArgs,
    RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
    let stream = async_stream! {
        let rows: Vec<Value> = input.values.collect().await;

        if rows.len() < 1 {
            yield Err(ShellError::untagged_runtime_error("Couldn't find headers, was the input a properly formatted, non-empty table?"));
        }

        //the headers are the first row in the table
        let headers: Vec<String> = match &rows[0].value {
            UntaggedValue::Row(d) => {
                Ok(d.entries.iter().map(|(k, v)| {
                    match v.as_string() {
                        Ok(s) => s,
                        Err(_) => { //If a cell that should contain a header name is empty, we name the column Column[index]
                            match d.entries.get_full(k) {
                                Some((index, _, _)) => format!("Column{}", index),
                                None => "unknownColumn".to_string()
                            }
                        }
                    }
                }).collect())
            }
            _ => Err(ShellError::unexpected_eof("Could not get headers, is the table empty?", rows[0].tag.span))
        }?;

        //Each row is a dictionary with the headers as keys
        for r in rows.iter().skip(1) {
            match &r.value {
                UntaggedValue::Row(d) => {
                    let mut i = 0;
                    let mut entries = IndexMap::new();
                    for (_, v) in d.entries.iter() {
                        entries.insert(headers[i].clone(), v.clone());
                        i += 1;
                    }
                    yield Ok(ReturnSuccess::Value(UntaggedValue::Row(Dictionary{entries}).into_value(r.tag.clone())))
                }
                _ => yield Err(ShellError::unexpected_eof("Couldn't iterate through rows, was the input a properly formatted table?", r.tag.span))
            }
        }
    };

    Ok(stream.to_output_stream())
}
crates/nu-cli/src/commands/help.rs (new file, 242 lines)
@ -0,0 +1,242 @@
|
||||
use crate::commands::PerItemCommand;
|
||||
use crate::data::command_dict;
|
||||
|
||||
use crate::prelude::*;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{
|
||||
CallInfo, NamedType, PositionalType, Primitive, ReturnSuccess, Signature, SyntaxShape,
|
||||
TaggedDictBuilder, UntaggedValue, Value,
|
||||
};
|
||||
use nu_source::SpannedItem;
|
||||
use nu_value_ext::get_data_by_key;
|
||||
|
||||
pub struct Help;
|
||||
|
||||
impl PerItemCommand for Help {
|
||||
fn name(&self) -> &str {
|
||||
"help"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("help").rest(SyntaxShape::Any, "the name of command(s) to get help on")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Display help information about commands."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
call_info: &CallInfo,
|
||||
registry: &CommandRegistry,
|
||||
_raw_args: &RawCommandArgs,
|
||||
_input: Value,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let tag = &call_info.name_tag;
|
||||
|
||||
match call_info.args.nth(0) {
|
||||
Some(Value {
|
||||
value: UntaggedValue::Primitive(Primitive::String(document)),
|
||||
tag,
|
||||
}) => {
|
||||
let mut help = VecDeque::new();
|
||||
if document == "commands" {
|
||||
let mut sorted_names = registry.names();
|
||||
sorted_names.sort();
|
||||
for cmd in sorted_names {
|
||||
let mut short_desc = TaggedDictBuilder::new(tag.clone());
|
||||
let value = command_dict(
|
||||
registry.get_command(&cmd).ok_or_else(|| {
|
||||
ShellError::labeled_error(
|
||||
format!("Could not load {}", cmd),
|
||||
"could not load command",
|
||||
tag,
|
||||
)
|
||||
})?,
|
||||
tag.clone(),
|
||||
);
|
||||
|
||||
short_desc.insert_untagged("name", cmd);
|
||||
short_desc.insert_untagged(
|
||||
"description",
|
||||
get_data_by_key(&value, "usage".spanned_unknown())
|
||||
.ok_or_else(|| {
|
||||
ShellError::labeled_error(
|
||||
"Expected a usage key",
|
||||
"expected a 'usage' key",
|
||||
&value.tag,
|
||||
)
|
||||
})?
|
||||
.as_string()?,
|
||||
);
|
||||
|
||||
help.push_back(ReturnSuccess::value(short_desc.into_value()));
|
||||
}
|
||||
} else if let Some(command) = registry.get_command(document) {
|
||||
return Ok(
|
||||
get_help(&command.name(), &command.usage(), command.signature()).into(),
|
||||
);
|
||||
}
|
||||
let help = futures::stream::iter(help);
|
||||
Ok(help.to_output_stream())
|
||||
}
|
||||
_ => {
|
||||
let msg = r#"Welcome to Nushell.
|
||||
|
||||
Here are some tips to help you get started.
|
||||
* help commands - list all available commands
|
||||
* help <command name> - display help about a particular command
|
||||
|
||||
Nushell works on the idea of a "pipeline". Pipelines are commands connected with the '|' character.
|
||||
Each stage in the pipeline works together to load, parse, and display information to you.
|
||||
|
||||
[Examples]
|
||||
|
||||
List the files in the current directory, sorted by size:
|
||||
ls | sort-by size
|
||||
|
||||
Get information about the current system:
|
||||
sys | get host
|
||||
|
||||
Get the processes on your system actively using CPU:
|
||||
ps | where cpu > 0
|
||||
|
||||
You can also learn more at https://www.nushell.sh/book/"#;
|
||||
|
||||
let output_stream = futures::stream::iter(vec![ReturnSuccess::value(
|
||||
UntaggedValue::string(msg).into_value(tag),
|
||||
)]);
|
||||
|
||||
Ok(output_stream.to_output_stream())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn get_help(
|
||||
cmd_name: &str,
|
||||
cmd_usage: &str,
|
||||
cmd_sig: Signature,
|
||||
) -> impl Into<OutputStream> {
|
||||
let mut help = VecDeque::new();
|
||||
let mut long_desc = String::new();
|
||||
|
||||
long_desc.push_str(&cmd_usage);
|
||||
long_desc.push_str("\n");
|
||||
|
||||
let signature = cmd_sig;
|
||||
|
||||
let mut one_liner = String::new();
|
||||
one_liner.push_str(&signature.name);
|
||||
one_liner.push_str(" ");
|
||||
|
||||
for positional in &signature.positional {
|
||||
match &positional.0 {
|
||||
PositionalType::Mandatory(name, _m) => {
|
||||
one_liner.push_str(&format!("<{}> ", name));
|
||||
}
|
||||
PositionalType::Optional(name, _o) => {
|
||||
one_liner.push_str(&format!("({}) ", name));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if signature.rest_positional.is_some() {
|
||||
one_liner.push_str(" ...args");
|
||||
}
|
||||
|
||||
if !signature.named.is_empty() {
|
||||
one_liner.push_str("{flags} ");
|
||||
}
|
||||
|
||||
long_desc.push_str(&format!("\nUsage:\n > {}\n", one_liner));
|
||||
|
||||
if !signature.positional.is_empty() || signature.rest_positional.is_some() {
|
||||
long_desc.push_str("\nparameters:\n");
|
||||
for positional in signature.positional {
|
||||
match positional.0 {
|
||||
PositionalType::Mandatory(name, _m) => {
|
||||
long_desc.push_str(&format!(" <{}> {}\n", name, positional.1));
|
||||
}
|
||||
PositionalType::Optional(name, _o) => {
|
||||
long_desc.push_str(&format!(" ({}) {}\n", name, positional.1));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(rest_positional) = signature.rest_positional {
|
||||
long_desc.push_str(&format!(" ...args: {}\n", rest_positional.1));
|
||||
}
|
||||
}
|
||||
if !signature.named.is_empty() {
|
||||
long_desc.push_str("\nflags:\n");
|
||||
for (flag, ty) in signature.named {
|
||||
let msg = match ty.0 {
|
||||
NamedType::Switch(s) => {
|
||||
if let Some(c) = s {
|
||||
format!(
|
||||
" -{}, --{}{} {}\n",
|
||||
c,
|
||||
flag,
|
||||
if !ty.1.is_empty() { ":" } else { "" },
|
||||
ty.1
|
||||
)
|
||||
} else {
|
||||
format!(
|
||||
" --{}{} {}\n",
|
||||
flag,
|
||||
if !ty.1.is_empty() { ":" } else { "" },
|
||||
ty.1
|
||||
)
|
||||
}
|
||||
}
|
||||
NamedType::Mandatory(s, m) => {
|
||||
if let Some(c) = s {
|
||||
format!(
|
||||
" -{}, --{} <{}> (required parameter){} {}\n",
|
||||
c,
|
||||
flag,
|
||||
m.display(),
|
||||
if !ty.1.is_empty() { ":" } else { "" },
|
||||
ty.1
|
||||
)
|
||||
} else {
|
||||
format!(
|
||||
" --{} <{}> (required parameter){} {}\n",
|
||||
flag,
|
||||
m.display(),
|
||||
if !ty.1.is_empty() { ":" } else { "" },
|
||||
ty.1
|
||||
)
|
||||
}
|
||||
}
|
||||
NamedType::Optional(s, o) => {
|
||||
if let Some(c) = s {
|
||||
format!(
|
||||
" -{}, --{} <{}>{} {}\n",
|
||||
c,
|
||||
flag,
|
||||
o.display(),
|
||||
if !ty.1.is_empty() { ":" } else { "" },
|
||||
ty.1
|
||||
)
|
||||
} else {
|
||||
format!(
|
||||
" --{} <{}>{} {}\n",
|
||||
flag,
|
||||
o.display(),
|
||||
if !ty.1.is_empty() { ":" } else { "" },
|
||||
ty.1
|
||||
)
|
||||
}
|
||||
}
|
||||
};
|
||||
long_desc.push_str(&msg);
|
||||
}
|
||||
}
|
||||
|
||||
help.push_back(ReturnSuccess::value(
|
||||
UntaggedValue::string(long_desc).into_value(Tag::from((0, cmd_name.len(), None))),
|
||||
));
|
||||
help
|
||||
}
|
@ -1,17 +1,13 @@
|
||||
use crate::commands::evaluate_by::evaluate;
|
||||
use crate::commands::group_by::group;
|
||||
use crate::commands::map_max_by::map_max;
|
||||
use crate::commands::reduce_by::reduce;
|
||||
use crate::commands::t_sort_by::columns_sorted;
|
||||
use crate::commands::t_sort_by::t_sort;
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use crate::utils::data_processing::{columns_sorted, evaluate, map_max, reduce, t_sort};
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{
|
||||
Primitive, ReturnSuccess, Signature, SyntaxShape, TaggedDictBuilder, UntaggedValue, Value,
|
||||
};
|
||||
use nu_source::Tagged;
|
||||
use num_traits::cast::ToPrimitive;
|
||||
use num_traits::{ToPrimitive, Zero};
|
||||
|
||||
pub struct Histogram;
|
||||
|
||||
@ -87,15 +83,15 @@ pub fn histogram(
|
||||
|
||||
let column = (*column_name).clone();
|
||||
|
||||
if let Value { value: UntaggedValue::Table(start), .. } = datasets.get(0).unwrap() {
|
||||
if let Value { value: UntaggedValue::Table(start), .. } = datasets.get(0).ok_or_else(|| ShellError::labeled_error("Unable to load dataset", "unabled to load dataset", &name))? {
|
||||
for percentage in start.iter() {
|
||||
|
||||
let mut fact = TaggedDictBuilder::new(&name);
|
||||
let value: Tagged<String> = group_labels.get(idx).unwrap().clone();
|
||||
let value: Tagged<String> = group_labels.get(idx).ok_or_else(|| ShellError::labeled_error("Unable to load group labels", "unabled to load group labels", &name))?.clone();
|
||||
fact.insert_value(&column, UntaggedValue::string(value.item).into_value(value.tag));
|
||||
|
||||
if let Value { value: UntaggedValue::Primitive(Primitive::Int(ref num)), .. } = percentage.clone() {
|
||||
let string = std::iter::repeat("*").take(num.to_i32().unwrap() as usize).collect::<String>();
|
||||
if let Value { value: UntaggedValue::Primitive(Primitive::Int(ref num)), ref tag } = percentage.clone() {
|
||||
let string = std::iter::repeat("*").take(num.to_i32().ok_or_else(|| ShellError::labeled_error("Expected a number", "expected a number", tag))? as usize).collect::<String>();
|
||||
fact.insert_untagged(&frequency_column_name, UntaggedValue::string(string));
|
||||
}
|
||||
|
||||
@ -127,31 +123,28 @@ fn percentages(values: &Value, max: Value, tag: impl Into<Tag>) -> Result<Value,
|
||||
value: UntaggedValue::Table(data),
|
||||
..
|
||||
} => {
|
||||
let data =
|
||||
data.iter()
|
||||
.map(|d| match d {
|
||||
let data = data
|
||||
.iter()
|
||||
.map(|d| match d {
|
||||
Value {
|
||||
value: UntaggedValue::Primitive(Primitive::Int(n)),
|
||||
..
|
||||
} => {
|
||||
let max = match &max {
|
||||
Value {
|
||||
value: UntaggedValue::Primitive(Primitive::Int(n)),
|
||||
value: UntaggedValue::Primitive(Primitive::Int(maxima)),
|
||||
..
|
||||
} => {
|
||||
let max = match max {
|
||||
Value {
|
||||
value:
|
||||
UntaggedValue::Primitive(Primitive::Int(
|
||||
ref maxima,
|
||||
)),
|
||||
..
|
||||
} => maxima.to_i32().unwrap(),
|
||||
_ => 0,
|
||||
};
|
||||
} => maxima.clone(),
|
||||
_ => Zero::zero(),
|
||||
};
|
||||
|
||||
let n = { n.to_i32().unwrap() * 100 / max };
|
||||
let n = (n * 100) / max;
|
||||
|
||||
UntaggedValue::int(n).into_value(&tag)
|
||||
}
|
||||
_ => UntaggedValue::int(0).into_value(&tag),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
UntaggedValue::int(n).into_value(&tag)
|
||||
}
|
||||
_ => UntaggedValue::int(0).into_value(&tag),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
UntaggedValue::Table(data).into_value(&tag)
|
||||
}
|
||||
_ => UntaggedValue::Table(vec![]).into_value(&tag),
|
@ -38,15 +38,17 @@ impl PerItemCommand for Insert {
|
||||
value: Value,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let value_tag = value.tag();
|
||||
let field = call_info.args.expect_nth(0)?.as_column_path().unwrap();
|
||||
let field = call_info.args.expect_nth(0)?.as_column_path()?;
|
||||
let replacement = call_info.args.expect_nth(1)?.tagged_unknown();
|
||||
|
||||
let stream = match value {
|
||||
obj @ Value {
|
||||
obj
|
||||
@
|
||||
Value {
|
||||
value: UntaggedValue::Row(_),
|
||||
..
|
||||
} => match obj.insert_data_at_column_path(&field, replacement.item.clone()) {
|
||||
Ok(v) => VecDeque::from(vec![Ok(ReturnSuccess::Value(v))]),
|
||||
Ok(v) => futures::stream::iter(vec![Ok(ReturnSuccess::Value(v))]),
|
||||
Err(err) => return Err(err),
|
||||
},
|
||||
|
crates/nu-cli/src/commands/kill.rs (new file, 104 lines)
@ -0,0 +1,104 @@
|
||||
use crate::commands::command::RunnablePerItemContext;
|
||||
use crate::context::CommandRegistry;
|
||||
use crate::prelude::*;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{CallInfo, Signature, SyntaxShape, Value};
|
||||
use nu_source::Tagged;
|
||||
use std::process::{Command, Stdio};
|
||||
|
||||
pub struct Kill;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct KillArgs {
|
||||
pub pid: Tagged<u64>,
|
||||
pub rest: Vec<Tagged<u64>>,
|
||||
pub force: Tagged<bool>,
|
||||
pub quiet: Tagged<bool>,
|
||||
}
|
||||
|
||||
impl PerItemCommand for Kill {
|
||||
fn name(&self) -> &str {
|
||||
"kill"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("kill")
|
||||
.required(
|
||||
"pid",
|
||||
SyntaxShape::Int,
|
||||
"process id of process that is to be killed",
|
||||
)
|
||||
.rest(SyntaxShape::Int, "rest of processes to kill")
|
||||
.switch("force", "forcefully kill the process", Some('f'))
|
||||
.switch("quiet", "won't print anything to the console", Some('q'))
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Kill a process using the process id."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
call_info: &CallInfo,
|
||||
_registry: &CommandRegistry,
|
||||
raw_args: &RawCommandArgs,
|
||||
_input: Value,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
call_info
|
||||
.process(&raw_args.shell_manager, raw_args.ctrl_c.clone(), kill)?
|
||||
.run()
|
||||
}
|
||||
}
|
||||
|
||||
fn kill(
|
||||
KillArgs {
|
||||
pid,
|
||||
rest,
|
||||
force,
|
||||
quiet,
|
||||
}: KillArgs,
|
||||
_context: &RunnablePerItemContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let mut cmd = if cfg!(windows) {
|
||||
let mut cmd = Command::new("taskkill");
|
||||
|
||||
if *force {
|
||||
cmd.arg("/F");
|
||||
}
|
||||
|
||||
cmd.arg("/PID");
|
||||
cmd.arg(pid.item().to_string());
|
||||
|
||||
// each pid must written as `/PID 0` otherwise
|
||||
// taskkill will act as `killall` unix command
|
||||
for id in &rest {
|
||||
cmd.arg("/PID");
|
||||
cmd.arg(id.item().to_string());
|
||||
}
|
||||
|
||||
cmd
|
||||
} else {
|
||||
let mut cmd = Command::new("kill");
|
||||
|
||||
if *force {
|
||||
cmd.arg("-9");
|
||||
}
|
||||
|
||||
cmd.arg(pid.item().to_string());
|
||||
|
||||
cmd.args(rest.iter().map(move |id| id.item().to_string()));
|
||||
|
||||
cmd
|
||||
};
|
||||
|
||||
// pipe everything to null
|
||||
if *quiet {
|
||||
cmd.stdin(Stdio::null())
|
||||
.stdout(Stdio::null())
|
||||
.stderr(Stdio::null());
|
||||
}
|
||||
|
||||
cmd.status().expect("failed to execute shell command");
|
||||
|
||||
Ok(OutputStream::empty())
|
||||
}
|
crates/nu-cli/src/commands/lines.rs (new file, 116 lines)
@ -0,0 +1,116 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{Primitive, ReturnSuccess, Signature, UntaggedValue, Value};
|
||||
|
||||
pub struct Lines;
|
||||
|
||||
impl WholeStreamCommand for Lines {
|
||||
fn name(&self) -> &str {
|
||||
"lines"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("lines")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Split single string into rows, one per line."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
lines(args, registry)
|
||||
}
|
||||
}
|
||||
|
||||
fn ends_with_line_ending(st: &str) -> bool {
|
||||
let mut temp = st.to_string();
|
||||
let last = temp.pop();
|
||||
if let Some(c) = last {
|
||||
c == '\n'
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
fn lines(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let tag = args.name_tag();
|
||||
let name_span = tag.span;
|
||||
let mut input = args.input;
|
||||
|
||||
let mut leftover = vec![];
|
||||
let mut leftover_string = String::new();
|
||||
let stream = async_stream! {
|
||||
loop {
|
||||
match input.values.next().await {
|
||||
Some(Value { value: UntaggedValue::Primitive(Primitive::String(st)), ..}) => {
|
||||
let mut st = leftover_string.clone() + &st;
|
||||
leftover.clear();
|
||||
|
||||
let mut lines: Vec<String> = st.lines().map(|x| x.to_string()).collect();
|
||||
|
||||
if !ends_with_line_ending(&st) {
|
||||
if let Some(last) = lines.pop() {
|
||||
leftover_string = last;
|
||||
} else {
|
||||
leftover_string.clear();
|
||||
}
|
||||
} else {
|
||||
leftover_string.clear();
|
||||
}
|
||||
|
||||
let success_lines: Vec<_> = lines.iter().map(|x| ReturnSuccess::value(UntaggedValue::line(x).into_untagged_value())).collect();
|
||||
yield futures::stream::iter(success_lines)
|
||||
}
|
||||
Some(Value { value: UntaggedValue::Primitive(Primitive::Line(st)), ..}) => {
|
||||
let mut st = leftover_string.clone() + &st;
|
||||
leftover.clear();
|
||||
|
||||
let mut lines: Vec<String> = st.lines().map(|x| x.to_string()).collect();
|
||||
if !ends_with_line_ending(&st) {
|
||||
if let Some(last) = lines.pop() {
|
||||
leftover_string = last;
|
||||
} else {
|
||||
leftover_string.clear();
|
||||
}
|
||||
} else {
|
||||
leftover_string.clear();
|
||||
}
|
||||
|
||||
let success_lines: Vec<_> = lines.iter().map(|x| ReturnSuccess::value(UntaggedValue::line(x).into_untagged_value())).collect();
|
||||
yield futures::stream::iter(success_lines)
|
||||
}
|
||||
Some( Value { tag: value_span, ..}) => {
|
||||
yield futures::stream::iter(vec![Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
name_span,
|
||||
"value originates from here",
|
||||
value_span,
|
||||
))]);
|
||||
}
|
||||
None => {
|
||||
if !leftover.is_empty() {
|
||||
let mut st = leftover_string.clone();
|
||||
if let Ok(extra) = String::from_utf8(leftover) {
|
||||
st.push_str(&extra);
|
||||
}
|
||||
yield futures::stream::iter(vec![ReturnSuccess::value(UntaggedValue::string(st).into_untagged_value())])
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if !leftover_string.is_empty() {
|
||||
yield futures::stream::iter(vec![ReturnSuccess::value(UntaggedValue::string(leftover_string).into_untagged_value())]);
|
||||
}
|
||||
}
|
||||
.flatten();
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
crates/nu-cli/src/commands/ls.rs (new file, 70 lines)
@ -0,0 +1,70 @@
|
||||
use crate::commands::command::RunnablePerItemContext;
|
||||
use crate::prelude::*;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{CallInfo, Signature, SyntaxShape, Value};
|
||||
use nu_source::Tagged;
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub struct Ls;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct LsArgs {
|
||||
pub path: Option<Tagged<PathBuf>>,
|
||||
pub all: bool,
|
||||
pub full: bool,
|
||||
#[serde(rename = "short-names")]
|
||||
pub short_names: bool,
|
||||
#[serde(rename = "with-symlink-targets")]
|
||||
pub with_symlink_targets: bool,
|
||||
}
|
||||
|
||||
impl PerItemCommand for Ls {
|
||||
fn name(&self) -> &str {
|
||||
"ls"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("ls")
|
||||
.optional(
|
||||
"path",
|
||||
SyntaxShape::Pattern,
|
||||
"a path to get the directory contents from",
|
||||
)
|
||||
.switch("all", "also show hidden files", Some('a'))
|
||||
.switch(
|
||||
"full",
|
||||
"list all available columns for each entry",
|
||||
Some('f'),
|
||||
)
|
||||
.switch(
|
||||
"short-names",
|
||||
"only print the file names and not the path",
|
||||
Some('s'),
|
||||
)
|
||||
.switch(
|
||||
"with-symlink-targets",
|
||||
"display the paths to the target files that symlinks point to",
|
||||
Some('w'),
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"View the contents of the current or given path."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
call_info: &CallInfo,
|
||||
_registry: &CommandRegistry,
|
||||
raw_args: &RawCommandArgs,
|
||||
_input: Value,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
call_info
|
||||
.process(&raw_args.shell_manager, raw_args.ctrl_c.clone(), ls)?
|
||||
.run()
|
||||
}
|
||||
}
|
||||
|
||||
fn ls(args: LsArgs, context: &RunnablePerItemContext) -> Result<OutputStream, ShellError> {
|
||||
context.shell_manager.ls(args, context)
|
||||
}
|
@ -114,7 +114,7 @@ macro_rules! command {
|
||||
$($extract)* {
|
||||
use std::convert::TryInto;
|
||||
|
||||
$args.get(stringify!($param_name)).clone().try_into()?
|
||||
$args.get(stringify!($param_name)).try_into()?
|
||||
}
|
||||
}
|
||||
);
|
||||
@ -164,7 +164,7 @@ macro_rules! command {
|
||||
$($extract)* {
|
||||
use std::convert::TryInto;
|
||||
|
||||
$args.get(stringify!($param_name)).clone().try_into()?
|
||||
$args.get(stringify!($param_name)).try_into()?
|
||||
}
|
||||
}
|
||||
);
|
||||
@ -214,7 +214,7 @@ macro_rules! command {
|
||||
$($extract)* {
|
||||
use std::convert::TryInto;
|
||||
|
||||
$args.get(stringify!($param_name)).clone().try_into()?
|
||||
$args.get(stringify!($param_name)).try_into()?
|
||||
}
|
||||
}
|
||||
);
|
crates/nu-cli/src/commands/map_max_by.rs (new file, 74 lines)
@ -0,0 +1,74 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::data::value;
|
||||
use crate::prelude::*;
|
||||
use crate::utils::data_processing::map_max;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{Primitive, ReturnSuccess, Signature, SyntaxShape, UntaggedValue, Value};
|
||||
use nu_source::Tagged;
|
||||
use num_traits::cast::ToPrimitive;
|
||||
|
||||
pub struct MapMaxBy;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct MapMaxByArgs {
|
||||
column_name: Option<Tagged<String>>,
|
||||
}
|
||||
|
||||
impl WholeStreamCommand for MapMaxBy {
|
||||
fn name(&self) -> &str {
|
||||
"map-max-by"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("map-max-by").named(
|
||||
"column_name",
|
||||
SyntaxShape::String,
|
||||
"the name of the column to map-max the table's rows",
|
||||
Some('c'),
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Creates a new table with the data from the tables rows maxed by the column given."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
args.process(registry, map_max_by)?.run()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn map_max_by(
|
||||
MapMaxByArgs { column_name }: MapMaxByArgs,
|
||||
RunnableContext { input, name, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
|
||||
|
||||
if values.is_empty() {
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Expected table from pipeline",
|
||||
"requires a table input",
|
||||
name
|
||||
))
|
||||
} else {
|
||||
|
||||
let map_by_column = if let Some(column_to_map) = column_name {
|
||||
Some(column_to_map.item().clone())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
match map_max(&values[0], map_by_column, name) {
|
||||
Ok(table_maxed) => yield ReturnSuccess::value(table_maxed),
|
||||
Err(err) => yield Err(err)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
@@ -33,7 +33,9 @@ impl PerItemCommand for Mkdir {
        raw_args: &RawCommandArgs,
        _input: Value,
    ) -> Result<OutputStream, ShellError> {
        call_info.process(&raw_args.shell_manager, mkdir)?.run()
        call_info
            .process(&raw_args.shell_manager, raw_args.ctrl_c.clone(), mkdir)?
            .run()
    }
}

@@ -44,7 +44,9 @@ impl PerItemCommand for Move {
        raw_args: &RawCommandArgs,
        _input: Value,
    ) -> Result<OutputStream, ShellError> {
        call_info.process(&raw_args.shell_manager, mv)?.run()
        call_info
            .process(&raw_args.shell_manager, raw_args.ctrl_c.clone(), mv)?
            .run()
    }
}

@@ -65,10 +65,10 @@ fn nth(
            .iter()
            .any(|requested| requested.item == idx as u64)
        {
            result.push_back(ReturnSuccess::value(item.clone()));
            result.push_back(ReturnSuccess::value(item));
        }

        result
        futures::stream::iter(result)
    })
    .flatten();

@ -20,7 +20,11 @@ impl PerItemCommand for Open {
|
||||
SyntaxShape::Path,
|
||||
"the file path to load values from",
|
||||
)
|
||||
.switch("raw", "load content as a string insead of a table")
|
||||
.switch(
|
||||
"raw",
|
||||
"load content as a string instead of a table",
|
||||
Some('r'),
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -64,7 +68,7 @@ fn run(call_info: &CallInfo, raw_args: &RawCommandArgs) -> Result<OutputStream,
|
||||
yield Err(e);
|
||||
return;
|
||||
}
|
||||
let (file_extension, contents, contents_tag) = result.unwrap();
|
||||
let (file_extension, contents, contents_tag) = result?;
|
||||
|
||||
let file_extension = if has_raw {
|
||||
None
|
@ -101,13 +101,21 @@ impl PerItemCommand for Parse {
|
||||
value: Value,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
//let value_tag = value.tag();
|
||||
let pattern = call_info.args.expect_nth(0)?.as_string().unwrap();
|
||||
let pattern = call_info.args.expect_nth(0)?.as_string()?;
|
||||
|
||||
let parse_pattern = parse(&pattern).unwrap();
|
||||
let parse_pattern = parse(&pattern).map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not create parse pattern",
|
||||
"could not create parse pattern",
|
||||
&value.tag,
|
||||
)
|
||||
})?;
|
||||
let parse_regex = build_regex(&parse_pattern.1);
|
||||
|
||||
let column_names = column_names(&parse_pattern.1);
|
||||
let regex = Regex::new(&parse_regex).unwrap();
|
||||
let regex = Regex::new(&parse_regex).map_err(|_| {
|
||||
ShellError::labeled_error("Could not parse regex", "could not parse regex", &value.tag)
|
||||
})?;
|
||||
|
||||
let output = if let Ok(s) = value.as_string() {
|
||||
let mut results = vec![];
|
||||
@ -128,6 +136,6 @@ impl PerItemCommand for Parse {
|
||||
} else {
|
||||
VecDeque::new()
|
||||
};
|
||||
Ok(output.to_output_stream())
|
||||
Ok(output.into())
|
||||
}
|
||||
}
|
crates/nu-cli/src/commands/pick.rs (new file, 167 lines)
@ -0,0 +1,167 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::context::CommandRegistry;
|
||||
use crate::prelude::*;
|
||||
use futures_util::pin_mut;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{
|
||||
ColumnPath, PathMember, Primitive, ReturnSuccess, ReturnValue, Signature, SyntaxShape,
|
||||
TaggedDictBuilder, UnspannedPathMember, UntaggedValue, Value,
|
||||
};
|
||||
use nu_source::span_for_spanned_list;
|
||||
use nu_value_ext::{as_string, get_data_by_column_path};
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct PickArgs {
|
||||
rest: Vec<ColumnPath>,
|
||||
}
|
||||
|
||||
pub struct Pick;
|
||||
|
||||
impl WholeStreamCommand for Pick {
|
||||
fn name(&self) -> &str {
|
||||
"pick"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("pick").rest(
|
||||
SyntaxShape::ColumnPath,
|
||||
"the columns to select from the table",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Down-select table to only these columns."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
args.process(registry, pick)?.run()
|
||||
}
|
||||
}
|
||||
|
||||
fn pick(
|
||||
PickArgs { rest: mut fields }: PickArgs,
|
||||
RunnableContext { input, name, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
if fields.is_empty() {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Pick requires columns to pick",
|
||||
"needs parameter",
|
||||
name,
|
||||
));
|
||||
}
|
||||
|
||||
let member = fields.remove(0);
|
||||
let member = vec![member];
|
||||
|
||||
let column_paths = vec![&member, &fields]
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.cloned()
|
||||
.collect::<Vec<ColumnPath>>();
|
||||
|
||||
let stream = async_stream! {
|
||||
let values = input.values;
|
||||
pin_mut!(values);
|
||||
|
||||
let mut empty = true;
|
||||
let mut bring_back: indexmap::IndexMap<String, Vec<Value>> = indexmap::IndexMap::new();
|
||||
|
||||
while let Some(value) = values.next().await {
|
||||
for path in &column_paths {
|
||||
let path_members_span = span_for_spanned_list(path.members().iter().map(|p| p.span));
|
||||
|
||||
let fetcher = get_data_by_column_path(&value, &path, Box::new(move |(obj_source, path_member_tried, error)| {
|
||||
if let PathMember { unspanned: UnspannedPathMember::String(column), .. } = path_member_tried {
|
||||
return ShellError::labeled_error_with_secondary(
|
||||
"No data to fetch.",
|
||||
format!("Couldn't pick column \"{}\"", column),
|
||||
path_member_tried.span,
|
||||
format!("How about exploring it with \"get\"? Check the input is appropriate originating from here"),
|
||||
obj_source.tag.span)
|
||||
}
|
||||
|
||||
error
|
||||
}));
|
||||
|
||||
|
||||
let field = path.clone();
|
||||
let key = as_string(&UntaggedValue::Primitive(Primitive::ColumnPath(field.clone())).into_untagged_value())?;
|
||||
|
||||
match fetcher {
|
||||
Ok(results) => {
|
||||
match results.value {
|
||||
UntaggedValue::Table(records) => {
|
||||
for x in records {
|
||||
let mut out = TaggedDictBuilder::new(name.clone());
|
||||
out.insert_untagged(&key, x.value.clone());
|
||||
let group = bring_back.entry(key.clone()).or_insert(vec![]);
|
||||
group.push(out.into_value());
|
||||
}
|
||||
},
|
||||
x => {
|
||||
let mut out = TaggedDictBuilder::new(name.clone());
|
||||
out.insert_untagged(&key, x.clone());
|
||||
let group = bring_back.entry(key.clone()).or_insert(vec![]);
|
||||
group.push(out.into_value());
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
Err(reason) => {
|
||||
// At the moment, we can't add switches, named flags
|
||||
// and the like while already using .rest since it
|
||||
// breaks the parser.
|
||||
//
|
||||
// We allow flexibility for now and skip the error
|
||||
// if a given column isn't present.
|
||||
let strict: Option<bool> = None;
|
||||
|
||||
if strict.is_some() {
|
||||
yield Err(reason);
|
||||
return;
|
||||
}
|
||||
|
||||
bring_back.entry(key.clone()).or_insert(vec![]);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut max = 0;
|
||||
|
||||
if let Some(max_column) = bring_back.values().max() {
|
||||
max = max_column.len();
|
||||
}
|
||||
|
||||
let keys = bring_back.keys().map(|x| x.clone()).collect::<Vec<String>>();
|
||||
|
||||
for mut current in 0..max {
|
||||
let mut out = TaggedDictBuilder::new(name.clone());
|
||||
|
||||
for k in &keys {
|
||||
let nothing = UntaggedValue::Primitive(Primitive::Nothing).into_untagged_value();
|
||||
let subsets = bring_back.get(k);
|
||||
|
||||
match subsets {
|
||||
Some(set) => {
|
||||
match set.get(current) {
|
||||
Some(row) => out.insert_untagged(k, row.get_data(k).borrow().clone()),
|
||||
None => out.insert_untagged(k, nothing.clone()),
|
||||
}
|
||||
}
|
||||
None => out.insert_untagged(k, nothing.clone()),
|
||||
}
|
||||
}
|
||||
|
||||
yield ReturnSuccess::value(out.into_value());
|
||||
}
|
||||
};
|
||||
|
||||
let stream: BoxStream<'static, ReturnValue> = stream.boxed();
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
@ -1,7 +1,9 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{ReturnSuccess, Signature, SyntaxShape, TaggedDictBuilder, UntaggedValue, Value};
|
||||
use nu_protocol::{
|
||||
merge_descriptors, ReturnSuccess, Signature, SyntaxShape, TaggedDictBuilder, UntaggedValue,
|
||||
};
|
||||
use nu_source::{SpannedItem, Tagged};
|
||||
use nu_value_ext::get_data_by_key;
|
||||
|
||||
@ -23,8 +25,16 @@ impl WholeStreamCommand for Pivot {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("pivot")
|
||||
.switch("header-row", "treat the first row as column names")
|
||||
.switch("ignore-titles", "don't pivot the column names into values")
|
||||
.switch(
|
||||
"header-row",
|
||||
"treat the first row as column names",
|
||||
Some('r'),
|
||||
)
|
||||
.switch(
|
||||
"ignore-titles",
|
||||
"don't pivot the column names into values",
|
||||
Some('i'),
|
||||
)
|
||||
.rest(
|
||||
SyntaxShape::String,
|
||||
"the names to give columns once pivoted",
|
||||
@ -44,18 +54,6 @@ impl WholeStreamCommand for Pivot {
|
||||
}
|
||||
}
|
||||
|
||||
fn merge_descriptors(values: &[Value]) -> Vec<String> {
|
||||
let mut ret = vec![];
|
||||
for value in values {
|
||||
for desc in value.data_descriptors() {
|
||||
if !ret.contains(&desc) {
|
||||
ret.push(desc);
|
||||
}
|
||||
}
|
||||
}
|
||||
ret
|
||||
}
|
||||
|
||||
pub fn pivot(args: PivotArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
|
||||
let stream = async_stream! {
|
||||
let input = context.input.into_vec().await;
|
@ -84,9 +84,11 @@ pub fn filter_plugin(
|
||||
|
||||
let mut bos: VecDeque<Value> = VecDeque::new();
|
||||
bos.push_back(UntaggedValue::Primitive(Primitive::BeginningOfStream).into_untagged_value());
|
||||
let bos = futures::stream::iter(bos);
|
||||
|
||||
let mut eos: VecDeque<Value> = VecDeque::new();
|
||||
eos.push_back(UntaggedValue::Primitive(Primitive::EndOfStream).into_untagged_value());
|
||||
let eos = futures::stream::iter(eos);
|
||||
|
||||
let call_info = args.call_info.clone();
|
||||
|
||||
@ -106,14 +108,26 @@ pub fn filter_plugin(
|
||||
let mut reader = BufReader::new(stdout);
|
||||
|
||||
let request = JsonRpc::new("begin_filter", call_info.clone());
|
||||
let request_raw = serde_json::to_string(&request).unwrap();
|
||||
match stdin.write(format!("{}\n", request_raw).as_bytes()) {
|
||||
Ok(_) => {}
|
||||
Err(err) => {
|
||||
let request_raw = serde_json::to_string(&request);
|
||||
|
||||
match request_raw {
|
||||
Err(_) => {
|
||||
let mut result = VecDeque::new();
|
||||
result.push_back(Err(ShellError::unexpected(format!("{}", err))));
|
||||
result.push_back(Err(ShellError::labeled_error(
|
||||
"Could not load json from plugin",
|
||||
"could not load json from plugin",
|
||||
&call_info.name_tag,
|
||||
)));
|
||||
return result;
|
||||
}
|
||||
Ok(request_raw) => match stdin.write(format!("{}\n", request_raw).as_bytes()) {
|
||||
Ok(_) => {}
|
||||
Err(err) => {
|
||||
let mut result = VecDeque::new();
|
||||
result.push_back(Err(ShellError::unexpected(format!("{}", err))));
|
||||
return result;
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
let mut input = String::new();
|
||||
@ -179,8 +193,20 @@ pub fn filter_plugin(
|
||||
Ok(params) => {
|
||||
let request: JsonRpc<std::vec::Vec<Value>> =
|
||||
JsonRpc::new("quit", vec![]);
|
||||
let request_raw = serde_json::to_string(&request).unwrap();
|
||||
let _ = stdin.write(format!("{}\n", request_raw).as_bytes()); // TODO: Handle error
|
||||
let request_raw = serde_json::to_string(&request);
|
||||
match request_raw {
|
||||
Ok(request_raw) => {
|
||||
let _ = stdin.write(format!("{}\n", request_raw).as_bytes()); // TODO: Handle error
|
||||
}
|
||||
Err(e) => {
|
||||
let mut result = VecDeque::new();
|
||||
result.push_back(Err(ShellError::untagged_runtime_error(format!(
|
||||
"Error while processing begin_filter response: {:?} {}",
|
||||
e, input
|
||||
))));
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
params
|
||||
}
|
||||
@ -221,8 +247,20 @@ pub fn filter_plugin(
|
||||
let mut reader = BufReader::new(stdout);
|
||||
|
||||
let request = JsonRpc::new("filter", v);
|
||||
let request_raw = serde_json::to_string(&request).unwrap();
|
||||
let _ = stdin.write(format!("{}\n", request_raw).as_bytes()); // TODO: Handle error
|
||||
let request_raw = serde_json::to_string(&request);
|
||||
match request_raw {
|
||||
Ok(request_raw) => {
|
||||
let _ = stdin.write(format!("{}\n", request_raw).as_bytes()); // TODO: Handle error
|
||||
}
|
||||
Err(e) => {
|
||||
let mut result = VecDeque::new();
|
||||
result.push_back(Err(ShellError::untagged_runtime_error(format!(
|
||||
"Error while processing filter response: {:?}",
|
||||
e
|
||||
))));
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
let mut input = String::new();
|
||||
match reader.read_line(&mut input) {
|
||||
@ -258,6 +296,7 @@ pub fn filter_plugin(
|
||||
}
|
||||
}
|
||||
})
|
||||
.map(futures::stream::iter) // convert to a stream
|
||||
.flatten();
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
@ -304,21 +343,31 @@ pub fn sink_plugin(
|
||||
let input: Vec<Value> = args.input.values.collect().await;
|
||||
|
||||
let request = JsonRpc::new("sink", (call_info.clone(), input));
|
||||
let request_raw = serde_json::to_string(&request).unwrap();
|
||||
let mut tmpfile = tempfile::NamedTempFile::new().unwrap();
|
||||
let _ = writeln!(tmpfile, "{}", request_raw);
|
||||
let _ = tmpfile.flush();
|
||||
let request_raw = serde_json::to_string(&request);
|
||||
if let Ok(request_raw) = request_raw {
|
||||
if let Ok(mut tmpfile) = tempfile::NamedTempFile::new() {
|
||||
let _ = writeln!(tmpfile, "{}", request_raw);
|
||||
let _ = tmpfile.flush();
|
||||
|
||||
let mut child = std::process::Command::new(path)
|
||||
.arg(tmpfile.path())
|
||||
.spawn()
|
||||
.expect("Failed to spawn child process");
|
||||
let mut child = std::process::Command::new(path)
|
||||
.arg(tmpfile.path())
|
||||
.spawn();
|
||||
|
||||
let _ = child.wait();
|
||||
if let Ok(mut child) = child {
|
||||
let _ = child.wait();
|
||||
|
||||
// Needed for async_stream to type check
|
||||
if false {
|
||||
yield ReturnSuccess::value(UntaggedValue::nothing().into_untagged_value());
|
||||
// Needed for async_stream to type check
|
||||
if false {
|
||||
yield ReturnSuccess::value(UntaggedValue::nothing().into_untagged_value());
|
||||
}
|
||||
} else {
|
||||
yield Err(ShellError::untagged_runtime_error("Could not create process for sink command"));
|
||||
}
|
||||
} else {
|
||||
yield Err(ShellError::untagged_runtime_error("Could not open file to send sink command message"));
|
||||
}
|
||||
} else {
|
||||
yield Err(ShellError::untagged_runtime_error("Could not create message to sink command"));
|
||||
}
|
||||
};
|
||||
Ok(OutputStream::new(stream))
|
@ -41,8 +41,7 @@ fn prepend(
|
||||
PrependArgs { row }: PrependArgs,
|
||||
RunnableContext { input, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let mut prepend: VecDeque<Value> = VecDeque::new();
|
||||
prepend.push_back(row);
|
||||
let prepend = futures::stream::iter(vec![row]);
|
||||
|
||||
Ok(OutputStream::from_input(prepend.chain(input.values)))
|
||||
}
|
@@ -3,9 +3,9 @@ use crate::prelude::*;
use nu_errors::ShellError;
use nu_protocol::Signature;

pub struct PWD;
pub struct Pwd;

impl WholeStreamCommand for PWD {
impl WholeStreamCommand for Pwd {
    fn name(&self) -> &str {
        "pwd"
    }
@ -41,13 +41,16 @@ impl WholeStreamCommand for Range {
|
||||
|
||||
fn range(
|
||||
RangeArgs { area }: RangeArgs,
|
||||
RunnableContext { input, name: _, .. }: RunnableContext,
|
||||
RunnableContext { input, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let range = area.item;
|
||||
let (from, _) = range.from;
|
||||
let (to, _) = range.to;
|
||||
|
||||
return Ok(OutputStream::from_input(
|
||||
input.values.skip(*from).take(*to - *from + 1),
|
||||
));
|
||||
let from = *from as usize;
|
||||
let to = *to as usize;
|
||||
|
||||
Ok(OutputStream::from_input(
|
||||
input.values.skip(from).take(to - from + 1),
|
||||
))
|
||||
}
|
crates/nu-cli/src/commands/reduce_by.rs (new file, 72 lines)
@ -0,0 +1,72 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use crate::utils::data_processing::reduce;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{Primitive, ReturnSuccess, Signature, SyntaxShape, UntaggedValue, Value};
|
||||
use nu_source::Tagged;
|
||||
use num_traits::cast::ToPrimitive;
|
||||
|
||||
pub struct ReduceBy;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct ReduceByArgs {
|
||||
reduce_with: Option<Tagged<String>>,
|
||||
}
|
||||
|
||||
impl WholeStreamCommand for ReduceBy {
|
||||
fn name(&self) -> &str {
|
||||
"reduce-by"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("reduce-by").named(
|
||||
"reduce_with",
|
||||
SyntaxShape::String,
|
||||
"the command to reduce by with",
|
||||
Some('w'),
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Creates a new table with the data from the tables rows reduced by the command given."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
args.process(registry, reduce_by)?.run()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn reduce_by(
|
||||
ReduceByArgs { reduce_with }: ReduceByArgs,
|
||||
RunnableContext { input, name, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
|
||||
if values.is_empty() {
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Expected table from pipeline",
|
||||
"requires a table input",
|
||||
name
|
||||
))
|
||||
} else {
|
||||
|
||||
let reduce_with = if let Some(reducer) = reduce_with {
|
||||
Some(reducer.item().clone())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
match reduce(&values[0], reduce_with, name) {
|
||||
Ok(reduced) => yield ReturnSuccess::value(reduced),
|
||||
Err(err) => yield Err(err)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
crates/nu-cli/src/commands/rename.rs (new file, 97 lines)
@ -0,0 +1,97 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use indexmap::IndexMap;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{ReturnSuccess, Signature, SyntaxShape, UntaggedValue, Value};
|
||||
use nu_source::Tagged;
|
||||
|
||||
pub struct Rename;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct Arguments {
|
||||
column_name: Tagged<String>,
|
||||
rest: Vec<Tagged<String>>,
|
||||
}
|
||||
|
||||
impl WholeStreamCommand for Rename {
|
||||
fn name(&self) -> &str {
|
||||
"rename"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("rename")
|
||||
.required(
|
||||
"column_name",
|
||||
SyntaxShape::String,
|
||||
"the name of the column to rename for",
|
||||
)
|
||||
.rest(
|
||||
SyntaxShape::Member,
|
||||
"Additional column name(s) to rename for",
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Creates a new table with columns renamed."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
args.process(registry, rename)?.run()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn rename(
|
||||
Arguments { column_name, rest }: Arguments,
|
||||
RunnableContext { input, name, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let mut new_column_names = vec![vec![column_name]];
|
||||
new_column_names.push(rest);
|
||||
|
||||
let new_column_names = new_column_names.into_iter().flatten().collect::<Vec<_>>();
|
||||
|
||||
let stream = input
|
||||
.values
|
||||
.map(move |item| {
|
||||
let mut result = VecDeque::new();
|
||||
|
||||
if let Value {
|
||||
value: UntaggedValue::Row(row),
|
||||
tag,
|
||||
} = item
|
||||
{
|
||||
let mut renamed_row = IndexMap::new();
|
||||
|
||||
for (idx, (key, value)) in row.entries.iter().enumerate() {
|
||||
let key = if idx < new_column_names.len() {
|
||||
&new_column_names[idx].item
|
||||
} else {
|
||||
key
|
||||
};
|
||||
|
||||
renamed_row.insert(key.clone(), value.clone());
|
||||
}
|
||||
|
||||
let out = UntaggedValue::Row(renamed_row.into()).into_value(tag);
|
||||
|
||||
result.push_back(ReturnSuccess::value(out));
|
||||
} else {
|
||||
result.push_back(ReturnSuccess::value(
|
||||
UntaggedValue::Error(ShellError::labeled_error(
|
||||
"no column names available",
|
||||
"can't rename",
|
||||
&name,
|
||||
))
|
||||
.into_untagged_value(),
|
||||
));
|
||||
}
|
||||
|
||||
futures::stream::iter(result)
|
||||
})
|
||||
.flatten();
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
@ -32,11 +32,11 @@ fn reverse(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let (input, _args) = args.parts();
|
||||
|
||||
let output = input.values.collect::<Vec<_>>();
|
||||
let input = input.values.collect::<Vec<_>>();
|
||||
|
||||
let output = output.map(move |mut vec| {
|
||||
let output = input.map(move |mut vec| {
|
||||
vec.reverse();
|
||||
vec.into_iter().collect::<VecDeque<_>>()
|
||||
futures::stream::iter(vec)
|
||||
});
|
||||
|
||||
Ok(output.flatten_stream().from_input_stream())
|
@ -10,7 +10,7 @@ pub struct Remove;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct RemoveArgs {
|
||||
pub target: Tagged<PathBuf>,
|
||||
pub rest: Vec<Tagged<PathBuf>>,
|
||||
pub recursive: Tagged<bool>,
|
||||
pub trash: Tagged<bool>,
|
||||
}
|
||||
@ -22,16 +22,17 @@ impl PerItemCommand for Remove {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("rm")
|
||||
.required("path", SyntaxShape::Pattern, "the file path to remove")
|
||||
.switch(
|
||||
"trash",
|
||||
"use the platform's recycle bin instead of permanently deleting",
|
||||
Some('t'),
|
||||
)
|
||||
.switch("recursive", "delete subdirectories recursively")
|
||||
.switch("recursive", "delete subdirectories recursively", Some('r'))
|
||||
.rest(SyntaxShape::Pattern, "the file path(s) to remove")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Remove a file"
|
||||
"Remove file(s)"
|
||||
}
|
||||
|
||||
fn run(
|
||||
@ -41,7 +42,9 @@ impl PerItemCommand for Remove {
|
||||
raw_args: &RawCommandArgs,
|
||||
_input: Value,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
call_info.process(&raw_args.shell_manager, rm)?.run()
|
||||
call_info
|
||||
.process(&raw_args.shell_manager, raw_args.ctrl_c.clone(), rm)?
|
||||
.run()
|
||||
}
|
||||
}
|
||||
|
@ -7,6 +7,22 @@ use std::path::{Path, PathBuf};
|
||||
|
||||
pub struct Save;
|
||||
|
||||
macro_rules! process_unknown {
|
||||
($scope:tt, $input:ident, $name_tag:ident) => {{
|
||||
if $input.len() > 0 {
|
||||
match $input[0] {
|
||||
Value {
|
||||
value: UntaggedValue::Primitive(Primitive::Binary(_)),
|
||||
..
|
||||
} => process_binary!($scope, $input, $name_tag),
|
||||
_ => process_string!($scope, $input, $name_tag),
|
||||
}
|
||||
} else {
|
||||
process_string!($scope, $input, $name_tag)
|
||||
}
|
||||
}};
|
||||
}
|
||||
|
||||
macro_rules! process_string {
|
||||
($scope:tt, $input:ident, $name_tag:ident) => {{
|
||||
let mut result_string = String::new();
|
||||
@ -31,6 +47,32 @@ macro_rules! process_string {
|
||||
}};
|
||||
}
|
||||
|
||||
macro_rules! process_binary {
|
||||
($scope:tt, $input:ident, $name_tag:ident) => {{
|
||||
let mut result_binary: Vec<u8> = Vec::new();
|
||||
for res in $input {
|
||||
match res {
|
||||
Value {
|
||||
value: UntaggedValue::Primitive(Primitive::Binary(b)),
|
||||
..
|
||||
} => {
|
||||
for u in b.into_iter() {
|
||||
result_binary.push(u);
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
break $scope Err(ShellError::labeled_error(
|
||||
"Save could not successfully save",
|
||||
"unexpected data during binary save",
|
||||
$name_tag,
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(result_binary)
|
||||
}};
|
||||
}
|
||||
|
||||
macro_rules! process_string_return_success {
|
||||
($scope:tt, $result_vec:ident, $name_tag:ident) => {{
|
||||
let mut result_string = String::new();
|
||||
@ -98,6 +140,7 @@ impl WholeStreamCommand for Save {
|
||||
.switch(
|
||||
"raw",
|
||||
"treat values as-is rather than auto-converting based on file extension",
|
||||
Some('r'),
|
||||
)
|
||||
}
|
||||
|
||||
@ -178,7 +221,7 @@ fn save(
|
||||
let content : Result<Vec<u8>, ShellError> = 'scope: loop {
|
||||
break if !save_raw {
|
||||
if let Some(extension) = full_path.extension() {
|
||||
let command_name = format!("to-{}", extension.to_str().unwrap());
|
||||
let command_name = format!("to-{}", extension.to_string_lossy());
|
||||
if let Some(converter) = registry.get_command(&command_name) {
|
||||
let new_args = RawCommandArgs {
|
||||
host,
|
||||
@ -203,10 +246,10 @@ fn save(
|
||||
process_string_return_success!('scope, result_vec, name_tag)
|
||||
}
|
||||
} else {
|
||||
process_string!('scope, input, name_tag)
|
||||
process_unknown!('scope, input, name_tag)
|
||||
}
|
||||
} else {
|
||||
process_string!('scope, input, name_tag)
|
||||
process_unknown!('scope, input, name_tag)
|
||||
}
|
||||
} else {
|
||||
Ok(string_from(&input).into_bytes())
|
@@ -32,7 +32,7 @@ fn shells(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream
    let mut shells_out = VecDeque::new();
    let tag = args.call_info.name_tag;

    for (index, shell) in args.shell_manager.shells.lock().unwrap().iter().enumerate() {
    for (index, shell) in args.shell_manager.shells.lock().iter().enumerate() {
        let mut dict = TaggedDictBuilder::new(&tag);

        if index == (*args.shell_manager.current_shell).load(Ordering::SeqCst) {
@@ -46,5 +46,5 @@ fn shells(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream
        shells_out.push_back(dict.into_value());
    }

    Ok(shells_out.to_output_stream())
    Ok(shells_out.into())
}
crates/nu-cli/src/commands/shuffle.rs (new file, 69 lines)
@ -0,0 +1,69 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::context::CommandRegistry;
|
||||
use crate::prelude::*;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{ReturnSuccess, ReturnValue, Signature, SyntaxShape, Value};
|
||||
use nu_source::Tagged;
|
||||
|
||||
use rand::seq::SliceRandom;
|
||||
use rand::thread_rng;
|
||||
|
||||
pub struct Shuffle;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct Arguments {
|
||||
#[serde(rename = "num")]
|
||||
limit: Option<Tagged<u64>>,
|
||||
}
|
||||
|
||||
impl WholeStreamCommand for Shuffle {
|
||||
fn name(&self) -> &str {
|
||||
"shuffle"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("shuffle").named(
|
||||
"num",
|
||||
SyntaxShape::Int,
|
||||
"Limit `num` number of rows",
|
||||
Some('n'),
|
||||
)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Shuffle rows randomly."
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
args.process(registry, shuffle)?.run()
|
||||
}
|
||||
}
|
||||
|
||||
fn shuffle(
|
||||
Arguments { limit }: Arguments,
|
||||
RunnableContext { input, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let stream = async_stream! {
|
||||
let mut values: Vec<Value> = input.values.collect().await;
|
||||
|
||||
let out = if let Some(n) = limit {
|
||||
let (shuffled, _) = values.partial_shuffle(&mut thread_rng(), *n as usize);
|
||||
shuffled.to_vec()
|
||||
} else {
|
||||
values.shuffle(&mut thread_rng());
|
||||
values.clone()
|
||||
};
|
||||
|
||||
for val in out.into_iter() {
|
||||
yield ReturnSuccess::value(val);
|
||||
}
|
||||
};
|
||||
|
||||
let stream: BoxStream<'static, ReturnValue> = stream.boxed();
|
||||
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
@@ -9,7 +9,7 @@ pub struct Skip;

#[derive(Deserialize)]
pub struct SkipArgs {
    rows: Option<Tagged<u64>>,
    rows: Option<Tagged<usize>>,
}

impl WholeStreamCommand for Skip {
@ -1,7 +1,7 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{Signature, SyntaxShape, Value};
|
||||
use nu_protocol::{Signature, SyntaxShape, UntaggedValue, Value};
|
||||
use nu_source::Tagged;
|
||||
use nu_value_ext::get_data_by_key;
|
||||
|
||||
@ -41,12 +41,26 @@ fn sort_by(
|
||||
Ok(OutputStream::new(async_stream! {
|
||||
let mut vec = context.input.drain_vec().await;
|
||||
|
||||
let calc_key = |item: &Value| {
|
||||
rest.iter()
|
||||
.map(|f| get_data_by_key(item, f.borrow_spanned()).map(|i| i.clone()))
|
||||
.collect::<Vec<Option<Value>>>()
|
||||
if vec.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
match &vec[0] {
|
||||
Value {
|
||||
value: UntaggedValue::Primitive(_),
|
||||
..
|
||||
} => {
|
||||
vec.sort();
|
||||
},
|
||||
_ => {
|
||||
let calc_key = |item: &Value| {
|
||||
rest.iter()
|
||||
.map(|f| get_data_by_key(item, f.borrow_spanned()))
|
||||
.collect::<Vec<Option<Value>>>()
|
||||
};
|
||||
vec.sort_by_cached_key(calc_key);
|
||||
},
|
||||
};
|
||||
vec.sort_by_cached_key(calc_key);
|
||||
|
||||
for item in vec {
|
||||
yield item.into();
|
Some files were not shown because too many files have changed in this diff.