forked from extern/nushell
Compare commits
233 Commits
Author | SHA1 | Date | |
---|---|---|---|
7d3025176f | |||
74111dddb7 | |||
74b0e4e541 | |||
587bb13be5 | |||
79d3237bf5 | |||
f8d44e732b | |||
0d2044e72e | |||
1bb301aafa | |||
5635b8378d | |||
294c2c600d | |||
b4c639a5d9 | |||
e7b37bee08 | |||
d32e97b812 | |||
81affaa584 | |||
f2d54f201d | |||
0373006710 | |||
ec2e35ad81 | |||
821ee5e726 | |||
5ed1ed54a6 | |||
96ef478fbc | |||
3f60c9d416 | |||
ed39377840 | |||
e250a3f213 | |||
3a99456371 | |||
bd6d8189f8 | |||
452b5c58e8 | |||
d1ebc55ed7 | |||
f20f3f56c7 | |||
65008bb912 | |||
d21389d549 | |||
f858a127ad | |||
de12393eaf | |||
b2c53a0967 | |||
65546646a7 | |||
ee8cd671cb | |||
d4df70c53f | |||
43ead45db6 | |||
22d2360c4b | |||
d38b8cf851 | |||
43cf52275b | |||
104b7824f5 | |||
a9293f62a8 | |||
0b210ce5bf | |||
38225d0dba | |||
473b6f727c | |||
63039666b0 | |||
a4a1588fbc | |||
4eafb22d5b | |||
aa09967173 | |||
7c40aed738 | |||
6c0bf6e0ab | |||
20e891db6e | |||
38b5979881 | |||
8422d40e2c | |||
de1c4e6c88 | |||
648d4865b1 | |||
7d4fec4db3 | |||
0f7e73646f | |||
bd6ca75032 | |||
341cc1ea63 | |||
2716bb020f | |||
193b00764b | |||
8ca678440a | |||
439889dcef | |||
5ec6bac7d9 | |||
af2ec60980 | |||
f0ca0312f3 | |||
3317b137e5 | |||
c2c10e2bc0 | |||
d2eb6f6646 | |||
1ad9d6f199 | |||
e18892000a | |||
4d70255696 | |||
77c34acb03 | |||
e72bc8ea8b | |||
a882e640e4 | |||
c09d866a77 | |||
4467e59122 | |||
9c096d320a | |||
9f15017032 | |||
81fec11f88 | |||
8a6a688131 | |||
77a4de31fa | |||
09e88d127e | |||
7ff5734d5d | |||
1d19595996 | |||
7d115da782 | |||
b066775630 | |||
8bb6bcb6eb | |||
20031861b9 | |||
eb297d3b8f | |||
8faa0126eb | |||
6aec03708f | |||
2f7b1e4282 | |||
7492131142 | |||
3c6ee63e59 | |||
45ad18f654 | |||
01829f04d5 | |||
cc1c471877 | |||
de14f9fce8 | |||
6c3ed1dbc2 | |||
cf0fa3141a | |||
539e232f3c | |||
9ed889ccbb | |||
872e26b524 | |||
5bfff0c39b | |||
0505a9d6f7 | |||
9181a046ec | |||
1b0eaac470 | |||
e54cd98a9c | |||
f3eb4fb24e | |||
04854d5d99 | |||
124a814f4d | |||
2e1670fcb8 | |||
7d2747ea9a | |||
36f2b09cad | |||
be51aad9ad | |||
97695b74dd | |||
9d84e47214 | |||
9fb9adb6b4 | |||
91e6d31dc6 | |||
9a1c537854 | |||
2476c8d579 | |||
27e59ea49c | |||
27882efd6b | |||
5e98751c66 | |||
8dec2da564 | |||
27272d3754 | |||
f689434bbc | |||
03728c1868 | |||
ce771903e5 | |||
c78bce2af4 | |||
0b3c9b760e | |||
7e7eba8f4d | |||
a77c222db0 | |||
149961e8f1 | |||
caf3015e66 | |||
459bfdd783 | |||
a2f1cca85c | |||
c09b4b045f | |||
94d81445eb | |||
94744c626c | |||
e62a2509ae | |||
417ac4b69e | |||
b7bf31df99 | |||
a7a0f48286 | |||
1bf0f7110a | |||
ad53eb4e76 | |||
c81d20a069 | |||
08df76486d | |||
fe3753ea68 | |||
abf671da1b | |||
91b4d27931 | |||
310897897e | |||
8ba917b704 | |||
219da892b2 | |||
bbb4cc7d5f | |||
9d04a7cc40 | |||
70d0ae7b42 | |||
ce9e4a61e7 | |||
af8e2f6961 | |||
093b9c1c5b | |||
348d75112f | |||
3c7b1ba854 | |||
b7a8758845 | |||
3812037e2a | |||
c5fdbdb8a1 | |||
c15b5df674 | |||
00f0fd2873 | |||
7269cf7427 | |||
83d82a09b2 | |||
64345b2985 | |||
9c23d78513 | |||
ff92123d93 | |||
e1357a9541 | |||
3b7aa5124c | |||
ce947d70b0 | |||
caed87c125 | |||
e12ba5be8f | |||
d52e087453 | |||
982ebacddd | |||
ee2f54fbb0 | |||
4f5c0314cf | |||
542a3995ea | |||
4af0dbe441 | |||
78ccd4181c | |||
680aeb12c2 | |||
ddcf0b4f5f | |||
74e60fbef8 | |||
b4c783f23d | |||
6f6d2abdac | |||
b123f35d4b | |||
02d6614ae2 | |||
20de0ea01f | |||
9f352ace23 | |||
48cbc5b23c | |||
aa495f4d74 | |||
0d8768b827 | |||
0d076d97be | |||
5b5c33a86f | |||
12f34cc698 | |||
ac116f4f7c | |||
6617731d5b | |||
f7d5ddbc07 | |||
ba778eaff9 | |||
1801c006ec | |||
7a124518c3 | |||
1183d28b15 | |||
1da6ac8de7 | |||
2b89ddfb9e | |||
29734a1dce | |||
def33206d9 | |||
6aad0b8443 | |||
9891e5ab81 | |||
7113c702ff | |||
54edf571af | |||
f85968aba4 | |||
440f553aa8 | |||
a492b019fe | |||
2941740df6 | |||
f0b638063d | |||
0377efdc16 | |||
3d89d2961c | |||
8c240ca3fd | |||
85cd03f899 | |||
3480cdb3b4 | |||
fec83e5164 | |||
837d12decd | |||
ffa536bea3 | |||
a1f26d947d | |||
e6bdef696d | |||
707af3f3ca | |||
480467447e |
@@ -5,10 +5,25 @@ strategy:
  matrix:
    linux-nightly:
      image: ubuntu-16.04
      style: 'unflagged'
    macos-nightly:
      image: macos-10.14
      style: 'unflagged'
    windows-nightly:
      image: vs2017-win2016
      style: 'unflagged'
    linux-nightly-canary:
      image: ubuntu-16.04
      style: 'canary'
    macos-nightly-canary:
      image: macos-10.14
      style: 'canary'
    windows-nightly-canary:
      image: vs2017-win2016
      style: 'canary'
    fmt:
      image: ubuntu-16.04
      style: 'fmt'

pool:
  vmImage: $(image)
@@ -16,6 +31,10 @@ pool:

steps:
  - bash: |
      set -e
      if [ -e /etc/debian_version ]
      then
        sudo apt-get -y install libxcb-composite0-dev libx11-dev
      fi
      curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain `cat rust-toolchain`
      export PATH=$HOME/.cargo/bin:$PATH
      rustc -Vv
@@ -23,6 +42,11 @@ steps:
      rustup component add rustfmt --toolchain `cat rust-toolchain`
    displayName: Install Rust
  - bash: RUSTFLAGS="-D warnings" cargo test --all-features
    condition: eq(variables['style'], 'unflagged')
    displayName: Run tests
  - bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all-features
    condition: eq(variables['style'], 'canary')
    displayName: Run tests
  - bash: cargo fmt --all -- --check
    condition: eq(variables['style'], 'fmt')
    displayName: Lint
7
.gitpod.Dockerfile
vendored
Normal file
@@ -0,0 +1,7 @@
FROM gitpod/workspace-full
USER root
RUN apt-get update && apt-get install -y libssl-dev \
    libxcb-composite0-dev \
    pkg-config \
    curl \
    rustc
21
.gitpod.yml
Normal file
@@ -0,0 +1,21 @@
image:
  file: .gitpod.Dockerfile
tasks:
  - init: cargo build
    command: cargo run
github:
  prebuilds:
    # enable for the master/default branch (defaults to true)
    master: true
    # enable for all branches in this repo (defaults to false)
    branches: true
    # enable for pull requests coming from this repo (defaults to true)
    pullRequests: true
    # enable for pull requests coming from forks (defaults to false)
    pullRequestsFromForks: true
    # add a "Review in Gitpod" button as a comment to pull requests (defaults to true)
    addComment: true
    # add a "Review in Gitpod" button to pull requests (defaults to false)
    addBadge: false
    # add a label once the prebuild is ready to pull requests (defaults to false)
    addLabel: prebuilt-in-gitpod
1299
Cargo.lock
generated
File diff suppressed because it is too large
30
Cargo.toml
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "nu"
|
||||
version = "0.3.0"
|
||||
version = "0.4.0"
|
||||
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
||||
description = "A shell for the GitHub era"
|
||||
license = "MIT"
|
||||
@ -8,7 +8,7 @@ edition = "2018"
|
||||
readme = "README.md"
|
||||
default-run = "nu"
|
||||
repository = "https://github.com/nushell/nushell"
|
||||
homepage = "http://nushell.sh"
|
||||
homepage = "https://www.nushell.sh"
|
||||
documentation = "https://book.nushell.sh"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
@ -27,7 +27,7 @@ chrono-humanize = "0.0.11"
|
||||
byte-unit = "3.0.1"
|
||||
base64 = "0.10.1"
|
||||
futures-preview = { version = "=0.3.0-alpha.18", features = ["compat", "io-compat"] }
|
||||
futures-async-stream = "=0.1.0-alpha.5"
|
||||
async-stream = "0.1.1"
|
||||
futures_codec = "0.2.5"
|
||||
num-traits = "0.2.8"
|
||||
term = "0.5.2"
|
||||
@ -41,7 +41,7 @@ serde-hjson = "0.9.1"
|
||||
serde_yaml = "0.8"
|
||||
serde_bytes = "0.11.2"
|
||||
getset = "0.0.8"
|
||||
language-reporting = "0.3.1"
|
||||
language-reporting = "0.4.0"
|
||||
app_dirs = "1.2.1"
|
||||
csv = "1.1"
|
||||
toml = "0.5.3"
|
||||
@ -54,7 +54,7 @@ surf = "1.0.2"
|
||||
url = "2.1.0"
|
||||
roxmltree = "0.7.0"
|
||||
nom_locate = "1.0.0"
|
||||
enum-utils = "0.1.1"
|
||||
nom-tracable = "0.4.0"
|
||||
unicode-xid = "0.2.0"
|
||||
serde_ini = "0.2.0"
|
||||
subprocess = "0.1.18"
|
||||
@ -64,7 +64,6 @@ hex = "0.3.2"
|
||||
tempfile = "3.1.0"
|
||||
semver = "0.9.0"
|
||||
which = "2.0.1"
|
||||
uuid = {version = "0.7.4", features = [ "v4", "serde" ]}
|
||||
textwrap = {version = "0.11.0", features = ["term_size"]}
|
||||
shellexpand = "1.0.0"
|
||||
futures-timer = "0.4.0"
|
||||
@ -75,11 +74,12 @@ natural = "0.3.0"
|
||||
serde_urlencoded = "0.6.1"
|
||||
sublime_fuzzy = "0.5"
|
||||
|
||||
regex = {version = "1", optional = true }
|
||||
neso = { version = "0.5.0", optional = true }
|
||||
crossterm = { version = "0.10.2", optional = true }
|
||||
syntect = {version = "3.2.0", optional = true }
|
||||
onig_sys = {version = "=69.1.0", optional = true }
|
||||
heim = {version = "0.0.8-alpha.1", optional = true }
|
||||
heim = {version = "0.0.8", optional = true }
|
||||
battery = {version = "0.7.4", optional = true }
|
||||
rawkey = {version = "0.1.2", optional = true }
|
||||
clipboard = {version = "0.5", optional = true }
|
||||
@ -93,6 +93,8 @@ textview = ["syntect", "onig_sys", "crossterm"]
|
||||
binaryview = ["image", "crossterm"]
|
||||
sys = ["heim", "battery"]
|
||||
ps = ["heim"]
|
||||
# trace = ["nom-tracable/trace"]
|
||||
all = ["raw-key", "textview", "binaryview", "sys", "ps", "clipboard", "ptree"]
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "0.20.0"
|
||||
@ -101,6 +103,10 @@ features = ["bundled", "blob"]
|
||||
[dev-dependencies]
|
||||
pretty_assertions = "0.6.1"
|
||||
|
||||
[build-dependencies]
|
||||
toml = "0.5.3"
|
||||
serde = { version = "1.0.101", features = ["derive"] }
|
||||
|
||||
[lib]
|
||||
name = "nu"
|
||||
path = "src/lib.rs"
|
||||
@ -133,6 +139,11 @@ path = "src/plugins/str.rs"
|
||||
name = "nu_plugin_skip"
|
||||
path = "src/plugins/skip.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_match"
|
||||
path = "src/plugins/match.rs"
|
||||
required-features = ["regex"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_sys"
|
||||
path = "src/plugins/sys.rs"
|
||||
@ -158,6 +169,11 @@ name = "nu_plugin_textview"
|
||||
path = "src/plugins/textview.rs"
|
||||
required-features = ["textview"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_docker"
|
||||
path = "src/plugins/docker.rs"
|
||||
required-features = ["docker"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu"
|
||||
path = "src/main.rs"
|
||||
|
90
README.md
@ -1,31 +1,36 @@
|
||||
[](https://crates.io/crates/nu)
|
||||
[](https://dev.azure.com/nushell/nushell/_build/latest?definitionId=2&branchName=master)
|
||||
[](https://dev.azure.com/nushell/nushell/_build/latest?definitionId=2&branchName=master)
|
||||
[](https://discord.gg/NtAbbGn)
|
||||
[](https://changelog.com/podcast/363)
|
||||
|
||||
|
||||
|
||||
# Nu Shell
|
||||
|
||||
A modern shell for the GitHub era
|
||||
A modern shell for the GitHub era.
|
||||
|
||||

|
||||
|
||||
# Status
|
||||
|
||||
This project has reached a minimum-viable product level of quality. While contributors dogfood it as their daily driver, it may be unstable for some commands. Future releases will work fill out missing features and improve stability. Its design is also subject to change as it matures.
|
||||
This project has reached a minimum-viable product level of quality. While contributors dogfood it as their daily driver, it may be unstable for some commands. Future releases will work to fill out missing features and improve stability. Its design is also subject to change as it matures.
|
||||
|
||||
Nu comes with a set of built-in commands (listed below). If a command is unknown, the command will shell-out and execute it (using cmd on Windows or bash on Linux and MacOS), correctly passing through stdin, stdout and stderr, so things like your daily git workflows and even `vim` will work just fine.
|
||||
Nu comes with a set of built-in commands (listed below). If a command is unknown, the command will shell-out and execute it (using cmd on Windows or bash on Linux and macOS), correctly passing through stdin, stdout, and stderr, so things like your daily git workflows and even `vim` will work just fine.
|
||||
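For example, in a purely hypothetical session, a plain `git` invocation simply falls through to the system shell, and its output can even be fed back into Nu's own commands:

```shell
# Hypothetical session: `git` is not a Nu built-in, so it is handed to cmd/bash,
# with stdin, stdout, and stderr passed straight through.
/home/jonathan/Source/nushell(master)> git status
/home/jonathan/Source/nushell(master)> git log | lines | first 3
```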
|
||||
# Learning more
|
||||
|
||||
There are a few good resources to learn about Nu. First, there is a [book](https://book.nushell.sh) about Nu, currently in progress. The book focuses on using Nu and its core concepts.
|
||||
There are a few good resources to learn about Nu. There is a [book](https://book.nushell.sh) about Nu that is currently in progress. The book focuses on using Nu and its core concepts.
|
||||
|
||||
If you're a developer who would like to contribute to Nu, we're also working on a [book for developers](https://github.com/nushell/contributor-book/tree/master/en) to help get started. There are also [good first issues](https://github.com/nushell/nushell/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) to help you dive in.
|
||||
If you're a developer who would like to contribute to Nu, we're also working on a [book for developers](https://github.com/nushell/contributor-book/tree/master/en) to help you get started. There are also [good first issues](https://github.com/nushell/nushell/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) to help you dive in.
|
||||
|
||||
We also have an active [discord](https://discord.gg/NtAbbGn) and [twitter](https://twitter.com/nu_shell) if you'd like to come chat with us.
|
||||
We also have an active [Discord](https://discord.gg/NtAbbGn) and [Twitter](https://twitter.com/nu_shell) if you'd like to come and chat with us.
|
||||
|
||||
Try it in Gitpod.
|
||||
|
||||
[](https://gitpod.io/#https://github.com/nushell/nushell)
|
||||
|
||||
# Installation
|
||||
|
||||
## Local
|
||||
## Local
|
||||
|
||||
Up-to-date installation instructions can be found in the [installation chapter of the book](https://book.nushell.sh/en/installation).
|
||||
|
||||
@ -41,16 +46,16 @@ Optional dependencies:
|
||||
* To use Nu with all possible optional features enabled, you'll also need the following:
|
||||
* on Linux (on Debian/Ubuntu): `apt install libxcb-composite0-dev libx11-dev`
|
||||
|
||||
To install Nu via cargo:
|
||||
To install Nu via cargo (make sure you have installed [rustup](https://rustup.rs/) and the beta compiler via `rustup install beta`):
|
||||
|
||||
```
|
||||
cargo +nightly install nu
|
||||
cargo +beta install nu
|
||||
```
|
||||
|
||||
You can also install Nu with all the bells and whistles:
|
||||
You can also install Nu with all the bells and whistles (be sure to have installed the [dependencies](https://book.nushell.sh/en/installation#dependencies) for your platform):
|
||||
|
||||
```
|
||||
cargo +nightly install nu --all-features
|
||||
cargo +beta install nu --all-features
|
||||
```
|
||||
|
||||
## Docker
|
||||
@ -71,13 +76,13 @@ To build the base image:
|
||||
|
||||
```bash
|
||||
$ docker build -f docker/Dockerfile.nu-base -t nushell/nu-base .
|
||||
```
|
||||
```
|
||||
|
||||
And then to build the smaller container (using a Multistage build):
|
||||
|
||||
```bash
|
||||
$ docker build -f docker/Dockerfile -t nushell/nu .
|
||||
```
|
||||
```
|
||||
|
||||
Either way, you can run either container as follows:
|
||||
|
||||
@ -87,42 +92,44 @@ $ docker run -it nushell/nu
|
||||
/> exit
|
||||
```
|
||||
|
||||
The second container is a bit smaller, if size is important to you.
|
||||
The second container is a bit smaller if the size is important to you.
|
||||
|
||||
## Packaging status
|
||||
|
||||
[](https://repology.org/project/nushell/versions)
|
||||
|
||||
### Fedora
|
||||
|
||||
[COPR repo](https://copr.fedorainfracloud.org/coprs/atim/nushell/): `sudo dnf copr enable atim/nushell -y && sudo dnf install nushell -y`
|
||||
|
||||
# Philosophy
|
||||
|
||||
Nu draws inspiration from projects like PowerShell, functional programming languages, and modern cli tools. Rather than thinking of files and services as raw streams of text, Nu looks at each input as something with structure. For example, when you list the contents of a directory, what you get back is a table of rows, where each row represents an item in that directory. These values can be piped through a series of steps, in a series of commands called a 'pipeline'.
|
||||
Nu draws inspiration from projects like PowerShell, functional programming languages, and modern CLI tools. Rather than thinking of files and services as raw streams of text, Nu looks at each input as something with structure. For example, when you list the contents of a directory, what you get back is a table of rows, where each row represents an item in that directory. These values can be piped through a series of steps, in a series of commands called a 'pipeline'.
|
||||
|
||||
## Pipelines
|
||||
|
||||
In Unix, it's common to pipe between commands to split up a sophisticated command over multiple steps. Nu takes this a step further and builds heavily on the idea of _pipelines_. Just as the Unix philosophy, Nu allows commands to output from stdout and read from stdin. Additionally, commands can output structured data (you can think of this as a third kind of stream). Commands that work in the pipeline fit into one of three categories
|
||||
In Unix, it's common to pipe between commands to split up a sophisticated command over multiple steps. Nu takes this a step further and builds heavily on the idea of _pipelines_. Just as the Unix philosophy, Nu allows commands to output from stdout and read from stdin. Additionally, commands can output structured data (you can think of this as a third kind of stream). Commands that work in the pipeline fit into one of three categories:
|
||||
|
||||
* Commands that produce a stream (eg, `ls`)
|
||||
* Commands that filter a stream (eg, `where type == "Directory"`)
|
||||
* Commands that consumes the output of the pipeline (eg, `autoview`)
|
||||
* Commands that consume the output of the pipeline (eg, `autoview`)
|
||||
|
||||
Commands are separated by the pipe symbol (`|`) to denote a pipeline flowing left to right.
|
||||
|
||||
```
|
||||
/home/jonathan/Source/nushell(master)> ls | where type == "Directory" | autoview
|
||||
━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━
|
||||
# │ name │ type │ readonly │ size │ accessed │ modified
|
||||
# │ name │ type │ readonly │ size │ accessed │ modified
|
||||
────┼───────────┼───────────┼──────────┼────────┼──────────────┼────────────────
|
||||
0 │ .azure │ Directory │ │ 4.1 KB │ 2 months ago │ a day ago
|
||||
1 │ target │ Directory │ │ 4.1 KB │ 3 days ago │ 3 days ago
|
||||
2 │ images │ Directory │ │ 4.1 KB │ 2 months ago │ 2 weeks ago
|
||||
3 │ tests │ Directory │ │ 4.1 KB │ 2 months ago │ 37 minutes ago
|
||||
4 │ tmp │ Directory │ │ 4.1 KB │ 2 weeks ago │ 2 weeks ago
|
||||
5 │ src │ Directory │ │ 4.1 KB │ 2 months ago │ 37 minutes ago
|
||||
6 │ assets │ Directory │ │ 4.1 KB │ a month ago │ a month ago
|
||||
0 │ .azure │ Directory │ │ 4.1 KB │ 2 months ago │ a day ago
|
||||
1 │ target │ Directory │ │ 4.1 KB │ 3 days ago │ 3 days ago
|
||||
2 │ images │ Directory │ │ 4.1 KB │ 2 months ago │ 2 weeks ago
|
||||
3 │ tests │ Directory │ │ 4.1 KB │ 2 months ago │ 37 minutes ago
|
||||
4 │ tmp │ Directory │ │ 4.1 KB │ 2 weeks ago │ 2 weeks ago
|
||||
5 │ src │ Directory │ │ 4.1 KB │ 2 months ago │ 37 minutes ago
|
||||
6 │ assets │ Directory │ │ 4.1 KB │ a month ago │ a month ago
|
||||
7 │ docs │ Directory │ │ 4.1 KB │ 2 months ago │ 2 months ago
|
||||
━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━
|
||||
━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
Because most of the time you'll want to see the output of a pipeline, `autoview` is assumed. We could have also written the above:
|
||||
@ -136,12 +143,12 @@ Being able to use the same commands and compose them differently is an important
|
||||
```text
|
||||
/home/jonathan/Source/nushell(master)> ps | where cpu > 0
|
||||
━━━┯━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━━
|
||||
# │ pid │ name │ status │ cpu
|
||||
# │ pid │ name │ status │ cpu
|
||||
───┼───────┼─────────────────┼──────────┼──────────
|
||||
0 │ 992 │ chrome │ Sleeping │ 6.988768
|
||||
1 │ 4240 │ chrome │ Sleeping │ 5.645982
|
||||
2 │ 13973 │ qemu-system-x86 │ Sleeping │ 4.996551
|
||||
3 │ 15746 │ nu │ Sleeping │ 84.59905
|
||||
0 │ 992 │ chrome │ Sleeping │ 6.988768
|
||||
1 │ 4240 │ chrome │ Sleeping │ 5.645982
|
||||
2 │ 13973 │ qemu-system-x86 │ Sleeping │ 4.996551
|
||||
3 │ 15746 │ nu │ Sleeping │ 84.59905
|
||||
━━━┷━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━━━
|
||||
|
||||
```
|
||||
@ -153,9 +160,9 @@ Nu can load file and URL contents as raw text or as structured data (if it recog
|
||||
```
|
||||
/home/jonathan/Source/nushell(master)> open Cargo.toml
|
||||
━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━
|
||||
bin │ dependencies │ dev-dependencies
|
||||
bin │ dependencies │ dev-dependencies
|
||||
──────────────────┼────────────────┼──────────────────
|
||||
[table: 12 rows] │ [table: 1 row] │ [table: 1 row]
|
||||
[table: 12 rows] │ [table: 1 row] │ [table: 1 row]
|
||||
━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
@ -164,9 +171,9 @@ We can pipeline this into a command that gets the contents of one of the columns
|
||||
```
|
||||
/home/jonathan/Source/nushell(master)> open Cargo.toml | get package
|
||||
━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━┯━━━━━━┯━━━━━━━━━
|
||||
authors │ description │ edition │ license │ name │ version
|
||||
authors │ description │ edition │ license │ name │ version
|
||||
─────────────────┼────────────────────────────┼─────────┼─────────┼──────┼─────────
|
||||
[table: 3 rows] │ A shell for the GitHub era │ 2018 │ ISC │ nu │ 0.3.0
|
||||
[table: 3 rows] │ A shell for the GitHub era │ 2018 │ MIT │ nu │ 0.4.0
|
||||
━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━┷━━━━━━┷━━━━━━━━━
|
||||
```
|
||||
|
||||
@ -174,14 +181,14 @@ Finally, we can use commands outside of Nu once we have the data we want:
|
||||
|
||||
```
|
||||
/home/jonathan/Source/nushell(master)> open Cargo.toml | get package.version | echo $it
|
||||
0.3.0
|
||||
0.4.0
|
||||
```
|
||||
|
||||
Here we use the variable `$it` to refer to the value being piped to the external command.
|
||||
|
||||
## Shells
|
||||
|
||||
By default, Nu will work inside of a single directory and allow you to navigate around your filesystem. Sometimes, you'll want to work in multiple directories at the same time. For this, Nu offers a way of adding additional working directories that you can jump between.
|
||||
Nu will work inside of a single directory and allow you to navigate around your filesystem by default. Nu also offers a way of adding additional working directories that you can jump between, allowing you to work in multiple directories at the same time.
|
||||
|
||||
To do so, use the `enter` command, which will allow you create a new "shell" and enter it at the specified path. You can toggle between this new shell and the original shell with the `p` (for previous) and `n` (for next), allowing you to navigate around a ring buffer of shells. Once you're done with a shell, you can `exit` it and remove it from the ring buffer.
|
||||
|
||||
@ -193,7 +200,7 @@ Nu supports plugins that offer additional functionality to the shell and follow
|
||||
|
||||
There are a few examples in the `plugins` directory.
|
||||
|
||||
Plugins are binaries that are available in your path and follow a "nu_plugin_*" naming convention. These binaries interact with nu via a simple JSON-RPC protocol where the command identifies itself and passes along its configuration, which then makes it available for use. If the plugin is a filter, data streams to it one element at a time, and it can stream data back in return via stdin/stdout. If the plugin is a sink, it is given the full vector of final data and is given free reign over stdin/stdout to use as it pleases.
|
||||
Plugins are binaries that are available in your path and follow a `nu_plugin_*` naming convention. These binaries interact with nu via a simple JSON-RPC protocol where the command identifies itself and passes along its configuration, which then makes it available for use. If the plugin is a filter, data streams to it one element at a time, and it can stream data back in return via stdin/stdout. If the plugin is a sink, it is given the full vector of final data and is given free reign over stdin/stdout to use as it pleases.
|
||||
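As a rough sketch of that convention (not part of this changeset), the plugin binaries declared in the `[[bin]]` sections of `Cargo.toml` above all share the `nu_plugin_` prefix, so after a full build they can be listed with an ordinary glob from a regular shell:

```shell
# Sketch only (bash): list the plugin binaries produced by `cargo build --all-features`.
# The names come from the [[bin]] sections of Cargo.toml in this repository.
ls target/debug/nu_plugin_*
# e.g. nu_plugin_skip  nu_plugin_str  nu_plugin_sys  nu_plugin_textview  ...
```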
|
||||
# Goals
|
||||
|
||||
@ -230,6 +237,8 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat
|
||||
| version | Display Nu version |
|
||||
|
||||
## Shell commands
|
||||
| command | description |
|
||||
| ------- | ----------- |
|
||||
| exit (--now) | Exit the current shell (or all shells) |
|
||||
| enter (path) | Create a new shell and begin at this path |
|
||||
| p | Go to previous shell |
|
||||
@ -275,6 +284,7 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat
|
||||
| from-ini | Parse text as .ini and create table |
|
||||
| from-json | Parse text as .json and create table |
|
||||
| from-sqlite | Parse binary data as sqlite .db and create table |
|
||||
| from-ssv --minimum-spaces <minimum number of spaces to count as a separator> | Parse text as space-separated values and create table |
|
||||
| from-toml | Parse text as .toml and create table |
|
||||
| from-tsv | Parse text as .tsv and create table |
|
||||
| from-url | Parse urlencoded string and create a table |
|
||||
|
39
build.rs
Normal file
@@ -0,0 +1,39 @@
use serde::Deserialize;
use std::collections::HashMap;
use std::collections::HashSet;
use std::env;
use std::path::Path;

#[derive(Deserialize)]
struct Feature {
    #[allow(unused)]
    description: String,
    enabled: bool,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let input = env::var("CARGO_MANIFEST_DIR").unwrap();
    let all_on = env::var("NUSHELL_ENABLE_ALL_FLAGS").is_ok();
    let flags: HashSet<String> = env::var("NUSHELL_ENABLE_FLAGS")
        .map(|s| s.split(",").map(|s| s.to_string()).collect())
        .unwrap_or_else(|_| HashSet::new());

    if all_on && !flags.is_empty() {
        println!(
            "cargo:warning={}",
            "Both NUSHELL_ENABLE_ALL_FLAGS and NUSHELL_ENABLE_FLAGS were set. You don't need both."
        );
    }

    let path = Path::new(&input).join("features.toml");

    let toml: HashMap<String, Feature> = toml::from_str(&std::fs::read_to_string(path)?)?;

    for (key, value) in toml.iter() {
        if value.enabled == true || all_on || flags.contains(key) {
            println!("cargo:rustc-cfg={}", key);
        }
    }

    Ok(())
}
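A brief sketch of how this build script is driven from the outside: the two environment variables it reads are the same ones used by the CI configuration earlier in this diff, while `some_feature` below is only a placeholder — the real names live in `features.toml`, which is not shown here.

```shell
# Enable every feature listed in features.toml (as the "canary" CI jobs do):
NUSHELL_ENABLE_ALL_FLAGS=1 cargo build

# Or enable a comma-separated subset; `some_feature` is a placeholder name:
NUSHELL_ENABLE_FLAGS=some_feature cargo build
```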
1
debian/install
vendored
@ -8,3 +8,4 @@ target/release/nu_plugin_sum usr/bin
|
||||
target/release/nu_plugin_sys usr/bin
|
||||
target/release/nu_plugin_textview usr/bin
|
||||
target/release/nu_plugin_tree usr/bin
|
||||
target/release/nu_plugin_docker usr/bin
|
||||
|
@ -17,7 +17,7 @@ ENV PATH=/root/.cargo/bin:$PATH
|
||||
COPY . /code
|
||||
RUN echo "##vso[task.prependpath]/root/.cargo/bin" && \
|
||||
rustc -Vv && \
|
||||
if $RELEASE; then cargo build --release && cargo run --release; \
|
||||
if $RELEASE; then cargo build --release; \
|
||||
cp target/release/nu /usr/local/bin; \
|
||||
else cargo build; \
|
||||
cp target/debug/nu /usr/local/bin; fi;
|
||||
|
25
docs/commands/README.md
Normal file
@ -0,0 +1,25 @@
|
||||
# How do I get started?
|
||||
|
||||
Pick any command from the checklist and write a comment acknowledging you started work.
|
||||
|
||||
# Instructions for documenting a Nu command of your choosing
|
||||
|
||||
Name the file after the command, like so:
|
||||
|
||||
`command.md`
|
||||
|
||||
Example: If you want to add documentation for the Nu command `enter`, create a file named `enter.md`, write documentation, save it at `/docs/commands/[your_command_picked].md`, and create your pull request.
|
||||
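As a sketch only, those steps might translate into a workflow like the following; the branch name and commit message are illustrative:

```shell
# Document the `enter` command: create a file named after the command,
# write the documentation, then open a pull request.
git checkout -b document-enter-command
$EDITOR docs/commands/enter.md
git add docs/commands/enter.md
git commit -m "Add documentation for the enter command"
```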
|
||||
# What kind of documentation should I write?
|
||||
|
||||
Anything you want that you believe *best* documents the command, in the way you would like to see it. Here are some of our ideas of documentation we would *love* to see (feel free to add yours):
|
||||
|
||||
* Examples of using the command (max creativity welcomed!)
|
||||
* Description of the command.
|
||||
* Command usage.
|
||||
|
||||
# Anything else?
|
||||
|
||||
Of course! These are drafts, so feel free to leave feedback and suggestions in the same file.
|
||||
|
||||
Happy Documenting.
|
28
docs/commands/add.md
Normal file
@ -0,0 +1,28 @@
|
||||
# add
|
||||
|
||||
This command adds a column to any table output. The first parameter takes the heading, and the second parameter takes the value for all the rows.
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
> ls | add is_on_a_computer yes_obviously
|
||||
━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━
|
||||
# │ name │ type │ readonly │ size │ accessed │ modified │ is_on_a_computer
|
||||
───┼────────────────────────────┼──────┼──────────┼────────┼───────────┼───────────┼──────────────────
|
||||
0 │ zeusiscrazy.txt │ File │ │ 556 B │ a day ago │ a day ago │ yes_obviously
|
||||
1 │ coww.txt │ File │ │ 24 B │ a day ago │ a day ago │ yes_obviously
|
||||
2 │ randomweirdstuff.txt │ File │ │ 197 B │ a day ago │ a day ago │ yes_obviously
|
||||
3 │ abaracadabra.txt │ File │ │ 401 B │ a day ago │ a day ago │ yes_obviously
|
||||
4 │ youshouldeatmorecereal.txt │ File │ │ 768 B │ a day ago │ a day ago │ yes_obviously
|
||||
━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
```shell
|
||||
> shells | add os linux_on_this_machine
|
||||
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━
|
||||
# │ │ name │ path │ os
|
||||
───┼───┼────────────┼────────────────────────────────┼───────────────────────
|
||||
0 │ X │ filesystem │ /home/shaurya/stuff/expr/stuff │ linux_on_this_machine
|
||||
1 │ │ filesystem │ / │ linux_on_this_machine
|
||||
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━
|
||||
```
|
33
docs/commands/cd.md
Normal file
@ -0,0 +1,33 @@
|
||||
# cd
|
||||
|
||||
The `cd` command is very simple: it stands for 'change directory', and it does exactly that. It changes the current directory to the one specified. If no directory is specified, it takes you to your home directory. Additionally, using `cd ..` takes you to the parent directory.
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
/home/username> cd Desktop
|
||||
/home/username/Desktop> now your current directory has been changed
|
||||
```
|
||||
|
||||
```shell
|
||||
/home/username/Desktop/nested/folders> cd ..
|
||||
/home/username/Desktop/nested> cd ..
|
||||
/home/username/Desktop> cd ../Documents/school_related
|
||||
/home/username/Documents/school_related> cd ../../..
|
||||
/home/>
|
||||
```
|
||||
|
||||
```shell
|
||||
/home/username/Desktop/super/duper/crazy/nested/folders> cd
|
||||
/home/username> cd ../../usr
|
||||
/usr> cd
|
||||
/home/username>
|
||||
```
|
||||
|
||||
Using `cd -` will take you to the previous directory:
|
||||
|
||||
```shell
|
||||
/home/username/Desktop/super/duper/crazy/nested/folders> cd
|
||||
/home/username> cd -
|
||||
/home/username/Desktop/super/duper/crazy/nested/folders> cd
|
||||
```
|
34
docs/commands/date.md
Normal file
@ -0,0 +1,34 @@
|
||||
# date
|
||||
|
||||
Use `date` to get the current date and time. It defaults to your local timezone, but you can also get it in UTC.
|
||||
|
||||
## Flags
|
||||
|
||||
--utc
|
||||
Returns the current date and time in UTC
|
||||
|
||||
--local
|
||||
Returns the current date and time in your local timezone
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
> date
|
||||
━━━━━━┯━━━━━━━┯━━━━━┯━━━━━━┯━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━
|
||||
year │ month │ day │ hour │ minute │ second │ timezone
|
||||
──────┼───────┼─────┼──────┼────────┼────────┼──────────
|
||||
2019 │ 9 │ 30 │ 21 │ 52 │ 30 │ -03:00
|
||||
━━━━━━┷━━━━━━━┷━━━━━┷━━━━━━┷━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━
|
||||
> date --utc
|
||||
━━━━━━┯━━━━━━━┯━━━━━┯━━━━━━┯━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━
|
||||
year │ month │ day │ hour │ minute │ second │ timezone
|
||||
──────┼───────┼─────┼──────┼────────┼────────┼──────────
|
||||
2019 │ 10 │ 1 │ 0 │ 52 │ 32 │ UTC
|
||||
━━━━━━┷━━━━━━━┷━━━━━┷━━━━━━┷━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━
|
||||
> date --local
|
||||
━━━━━━┯━━━━━━━┯━━━━━┯━━━━━━┯━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━
|
||||
year │ month │ day │ hour │ minute │ second │ timezone
|
||||
──────┼───────┼─────┼──────┼────────┼────────┼──────────
|
||||
2019 │ 9 │ 30 │ 21 │ 52 │ 34 │ -03:00
|
||||
━━━━━━┷━━━━━━━┷━━━━━┷━━━━━━┷━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━
|
||||
```
|
12
docs/commands/echo.md
Normal file
@ -0,0 +1,12 @@
|
||||
# echo
|
||||
|
||||
Use `echo` to repeat arguments back to the user.
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
> echo Hello world
|
||||
Hello world
|
||||
> echo "Hello, world!"
|
||||
Hello, world!
|
||||
```
|
45
docs/commands/edit.md
Normal file
@ -0,0 +1,45 @@
|
||||
# edit
|
||||
|
||||
Edits an existing column on a table. The first parameter is the column to edit, and the second parameter is the value to put in it.
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
> ls
|
||||
━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━
|
||||
# │ name │ type │ readonly │ size │ accessed │ modified
|
||||
───┼────────────────────────────┼──────┼──────────┼────────┼───────────┼───────────
|
||||
0 │ zeusiscrazy.txt │ File │ │ 556 B │ a day ago │ a day ago
|
||||
1 │ coww.txt │ File │ │ 24 B │ a day ago │ a day ago
|
||||
2 │ randomweirdstuff.txt │ File │ │ 197 B │ a day ago │ a day ago
|
||||
3 │ abaracadabra.txt │ File │ │ 401 B │ a day ago │ a day ago
|
||||
4 │ youshouldeatmorecereal.txt │ File │ │ 768 B │ a day ago │ a day ago
|
||||
━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━
|
||||
> ls | edit modified neverrrr
|
||||
━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━
|
||||
# │ name │ type │ readonly │ size │ accessed │ modified
|
||||
───┼────────────────────────────┼──────┼──────────┼────────┼───────────┼──────────
|
||||
0 │ zeusiscrazy.txt │ File │ │ 556 B │ a day ago │ neverrrr
|
||||
1 │ coww.txt │ File │ │ 24 B │ a day ago │ neverrrr
|
||||
2 │ randomweirdstuff.txt │ File │ │ 197 B │ a day ago │ neverrrr
|
||||
3 │ abaracadabra.txt │ File │ │ 401 B │ a day ago │ neverrrr
|
||||
4 │ youshouldeatmorecereal.txt │ File │ │ 768 B │ a day ago │ neverrrr
|
||||
━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━
|
||||
```
|
||||
|
||||
```shell
|
||||
> shells
|
||||
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
# │ │ name │ path
|
||||
───┼───┼────────────┼────────────────────────────────
|
||||
0 │ X │ filesystem │ /home/username/stuff/expr/stuff
|
||||
1 │ │ filesystem │ /
|
||||
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
> shells | edit " " X | edit path /
|
||||
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━
|
||||
# │ │ name │ path
|
||||
───┼───┼────────────┼──────
|
||||
0 │ X │ filesystem │ /
|
||||
1 │ X │ filesystem │ /
|
||||
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━
|
||||
```
|
39
docs/commands/enter.md
Normal file
@ -0,0 +1,39 @@
|
||||
# enter
|
||||
|
||||
This command creates a new shell and begins at the specified path.
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
/home/foobar> cat user.json
|
||||
{
|
||||
"Name": "Peter",
|
||||
"Age": 30,
|
||||
"Telephone": 88204828,
|
||||
"Country": "Singapore"
|
||||
}
|
||||
/home/foobar> enter user.json
|
||||
/> ls
|
||||
━━━━━━━┯━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━
|
||||
Name │ Age │ Telephone │ Country
|
||||
───────┼─────┼───────────┼───────────
|
||||
Peter │ 30 │ 88204828 │ Singapore
|
||||
━━━━━━━┷━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━
|
||||
/> exit
|
||||
/home/foobar>
|
||||
```
|
||||
|
||||
It also provides the ability to work with multiple directories at the same time. This command will allow you to create a new "shell" and enter it at the specified path. You can toggle between this new shell and the original shell with the `p` (for previous) and `n` (for next), allowing you to navigate around a ring buffer of shells. Once you're done with a shell, you can `exit` it and remove it from the ring buffer.
|
||||
|
||||
```shell
|
||||
/> enter /tmp
|
||||
/tmp> enter /usr
|
||||
/usr> enter /bin
|
||||
/bin> enter /opt
|
||||
/opt> p
|
||||
/bin> p
|
||||
/usr> p
|
||||
/tmp> p
|
||||
/> n
|
||||
/tmp>
|
||||
```
|
27
docs/commands/env.md
Normal file
@ -0,0 +1,27 @@
|
||||
# env
|
||||
|
||||
The `env` command prints the environment of nushell to the terminal.
|
||||
|
||||
This includes:
- cwd : the path to the current working directory (`cwd`),
|
||||
- home : the path to the home directory
|
||||
- config : the path to the config file for nushell
|
||||
- history : the path to the nushell command history
|
||||
- temp : the path to the temp file
|
||||
- vars : descriptor variable for the table
|
||||
|
||||
`env` does not take any arguments, and ignores any arguments given.
|
||||
|
||||
|
||||
## Examples
|
||||
|
||||
|
||||
```shell
|
||||
/home/username/mynushell/docs/commands(master)> env
|
||||
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━━━━━━━
|
||||
cwd │ home │ config │ history │ temp │ vars
|
||||
────────────────────────────────────────┼────────────────┼───────────────────────────────────────┼────────────────────────────────────────────┼──────┼────────────────
|
||||
/home/username/mynushell/docs/commands │ /home/username │ /home/username/.config/nu/config.toml │ /home/username/.local/share/nu/history.txt │ /tmp │ [table: 1 row]
|
||||
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
30
docs/commands/exit.md
Normal file
@ -0,0 +1,30 @@
|
||||
# exit
|
||||
|
||||
Exits the nu shell. If you have multiple nu shells, use `exit --now` to exit all of them.
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
> exit
|
||||
```
|
||||
|
||||
```
|
||||
/home/username/stuff/books> shells
|
||||
---+---+------------+----------------------------
|
||||
# | | name | path
|
||||
---+---+------------+----------------------------
|
||||
0 | | filesystem | /home/username/stuff/notes
|
||||
1 | | filesystem | /home/username/stuff/videos
|
||||
2 | X | filesystem | /home/username/stuff/books
|
||||
---+---+------------+----------------------------
|
||||
/home/username/stuff/books> exit
|
||||
/home/username/stuff/videos> shells
|
||||
---+---+------------+----------------------------
|
||||
# | | name | path
|
||||
---+---+------------+----------------------------
|
||||
0 | | filesystem | /home/username/stuff/notes
|
||||
1 | X | filesystem | /home/username/stuff/videos
|
||||
---+---+------------+----------------------------
|
||||
/home/username/stuff/videos> exit --now
|
||||
exits both the shells
|
||||
```
|
32
docs/commands/fetch.md
Normal file
@ -0,0 +1,32 @@
|
||||
# fetch
|
||||
|
||||
This command loads content from a URL into a cell, converting it to a table if possible (append the `--raw` flag to avoid this).
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
> fetch http://headers.jsontest.com
|
||||
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
X-Cloud-Trace-Context │ Accept │ Host │ Content-Length │ user-agent
|
||||
───────────────────────────────────────────────────────┼────────┼──────────────────────┼────────────────┼─────────────────────────
|
||||
aeee1a8abf08820f6fe19d114dc3bb87/16772233176633589121 │ */* │ headers.jsontest.com │ 0 │ curl/7.54.0 isahc/0.7.1
|
||||
━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
> fetch http://headers.jsontest.com --raw
|
||||
{
|
||||
"X-Cloud-Trace-Context": "aeee1a8abf08820f6fe19d114dc3bb87/16772233176633589121",
|
||||
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
|
||||
"Upgrade-Insecure-Requests": "1",
|
||||
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.90 Safari/537.36",
|
||||
"Host": "headers.jsontest.com",
|
||||
"Accept-Language": "en-GB,en-US;q=0.9,en;q=0.8"
|
||||
}
|
||||
```
|
||||
|
||||
```shell
|
||||
> fetch https://www.jonathanturner.org/feed.xml
|
||||
━━━━━━━━━━━━━━━━
|
||||
rss
|
||||
────────────────
|
||||
[table: 1 row]
|
||||
━━━━━━━━━━━━━━━━
|
||||
```
|
28
docs/commands/first.md
Normal file
@ -0,0 +1,28 @@
|
||||
# first
|
||||
|
||||
Use `first` to retrieve the first "n" rows of a table. `first` has a required amount parameter that indicates how many rows you would like returned. If more than one row is returned, an index column will be included showing the row number.
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
> ps | first 1
|
||||
━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━
|
||||
pid │ name │ status │ cpu
|
||||
───────┼──────────────┼─────────┼───────────────────
|
||||
60358 │ nu_plugin_ps │ Running │ 5.399802999999999
|
||||
━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
```shell
|
||||
> ps | first 5
|
||||
━━━┯━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━
|
||||
# │ pid │ name │ status │ cpu
|
||||
───┼───────┼──────────────┼─────────┼───────────────────
|
||||
0 │ 60754 │ nu_plugin_ps │ Running │ 4.024156000000000
|
||||
1 │ 60107 │ quicklookd │ Running │ 0.000000000000000
|
||||
2 │ 59356 │ nu │ Running │ 0.000000000000000
|
||||
3 │ 59216 │ zsh │ Running │ 0.000000000000000
|
||||
4 │ 59162 │ vim │ Running │ 0.000000000000000
|
||||
━━━┷━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
47
docs/commands/help.md
Normal file
@ -0,0 +1,47 @@
|
||||
# help
|
||||
|
||||
Use `help` for more information on a command.
|
||||
Use `help commands` to list all available commands.
|
||||
Use `help <command name>` to display help about a particular command.
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
> help
|
||||
Welcome to Nushell.
|
||||
|
||||
Here are some tips to help you get started.
|
||||
* help commands - list all available commands
|
||||
* help <command name> - display help about a particular command
|
||||
|
||||
You can also learn more at https://book.nushell.sh
|
||||
```
|
||||
|
||||
```shell
|
||||
> help commands
|
||||
━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
# │ name │ description
|
||||
────┼──────────────┼────────────────────────────────────────────────────────────────────────────────────────
|
||||
0 │ add │ Add a new field to the table.
|
||||
1 │ autoview │ View the contents of the pipeline as a table or list.
|
||||
2 │ cd │ Change to a new path.
|
||||
3 │ config │ Configuration management.
|
||||
4 │ cp │ Copy files.
|
||||
5 │ date │ Get the current datetime.
|
||||
...
|
||||
70 │ trim │ Trim leading and following whitespace from text data.
|
||||
71 │ version │ Display Nu version
|
||||
72 │ where │ Filter table to match the condition.
|
||||
73 │ which │ Finds a program file.
|
||||
━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
```shell
|
||||
> help cd
|
||||
Change to a new path.
|
||||
|
||||
Usage:
|
||||
> cd (directory)
|
||||
```
|
||||
|
||||
|
31
docs/commands/inc.md
Normal file
@ -0,0 +1,31 @@
|
||||
# inc
|
||||
|
||||
This command increments the value of a variable by one. It can also bump a semantic version with the `--major`, `--minor`, or `--patch` flags, as shown in the examples below.
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
> open rustfmt.toml
|
||||
---------
|
||||
edition
|
||||
---------
|
||||
2018
|
||||
---------
|
||||
> open rustfmt.toml | inc edition
|
||||
---------
|
||||
edition
|
||||
---------
|
||||
2019
|
||||
---------
|
||||
```
|
||||
|
||||
```shell
|
||||
> open Cargo.toml | get package.version
|
||||
0.1.3
|
||||
> open Cargo.toml | inc package.version --major | get package.version
|
||||
1.0.0
|
||||
> open Cargo.toml | inc package.version --minor | get package.version
|
||||
0.2.0
|
||||
> open Cargo.toml | inc package.version --patch | get package.version
|
||||
0.1.4
|
||||
```
|
29
docs/commands/last.md
Normal file
@ -0,0 +1,29 @@
|
||||
# last
|
||||
|
||||
Use `last` to retrieve the last "n" rows of a table. `last` has a required amount parameter that indicates how many rows you would like returned. If more than one row is returned, an index column will be included showing the row number. `last` does not alter the order of the rows of the table.
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
> ps | last 1
|
||||
━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━
|
||||
pid │ name │ status │ cpu
|
||||
─────┼─────────────┼─────────┼───────────────────
|
||||
121 │ loginwindow │ Running │ 0.000000000000000
|
||||
━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
```shell
|
||||
> ps | last 5
|
||||
━━━┯━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━
|
||||
# │ pid │ name │ status │ cpu
|
||||
───┼─────┼────────────────┼─────────┼───────────────────
|
||||
0 │ 360 │ CommCenter │ Running │ 0.000000000000000
|
||||
1 │ 358 │ distnoted │ Running │ 0.000000000000000
|
||||
2 │ 356 │ UserEventAgent │ Running │ 0.000000000000000
|
||||
3 │ 354 │ cfprefsd │ Running │ 0.000000000000000
|
||||
4 │ 121 │ loginwindow │ Running │ 0.000000000000000
|
||||
━━━┷━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
|
28
docs/commands/lines.md
Normal file
@ -0,0 +1,28 @@
|
||||
# lines
|
||||
This command takes a string from a pipeline as input, and returns a table where each line of the input string is a row in the table. Empty lines are ignored. This command is capable of feeding other commands, such as `nth`, with its output.
|
||||
|
||||
## Usage
|
||||
```shell
|
||||
> [input-command] | lines
|
||||
```
|
||||
|
||||
## Examples
|
||||
Basic usage:
|
||||
```shell
|
||||
> printf "Hello\nWorld!\nLove, nushell." | lines
|
||||
━━━┯━━━━━━━━━━━━━━━━
|
||||
# │ value
|
||||
───┼────────────────
|
||||
0 │ Hello
|
||||
1 │ World!
|
||||
2 │ Love, nushell.
|
||||
━━━┷━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
One useful application is piping the contents of a file into `lines`. This example extracts a certain line from a given file.
|
||||
```shell
|
||||
> cat lines.md | lines | nth 6
|
||||
## Examples
|
||||
```
|
||||
|
||||
As in this example, `lines` can be used to extract portions of, or apply transformations to, the output of any program that returns a string.
|
31
docs/commands/nth.md
Normal file
@ -0,0 +1,31 @@
|
||||
# nth
|
||||
|
||||
This command returns the nth row of a table, starting from 0.
|
||||
If the number given is less than 0 or more than the number of rows, nothing is returned.
|
||||
|
||||
## Usage
|
||||
```shell
|
||||
> [input-command] | nth [row-number]
|
||||
```
|
||||
|
||||
## Examples
|
||||
```shell
|
||||
> ls
|
||||
━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━
|
||||
# │ name │ type │ readonly │ size │ accessed │ modified
|
||||
───┼────────────┼───────────┼──────────┼────────┼───────────────┼───────────────
|
||||
0 │ Cargo.toml │ File │ │ 239 B │ 2 minutes ago │ 2 minutes ago
|
||||
1 │ .git │ Directory │ │ 4.1 KB │ 2 minutes ago │ 2 minutes ago
|
||||
2 │ .gitignore │ File │ │ 19 B │ 2 minutes ago │ 2 minutes ago
|
||||
3 │ src │ Directory │ │ 4.1 KB │ 2 minutes ago │ 2 minutes ago
|
||||
━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━
|
||||
|
||||
> ls | nth 0
|
||||
━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━
|
||||
name │ type │ readonly │ size │ accessed │ modified
|
||||
────────────┼──────┼──────────┼────────┼───────────────┼───────────────
|
||||
Cargo.toml │ File │ │ 239 B │ 2 minutes ago │ 2 minutes ago
|
||||
━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━
|
||||
|
||||
> ls | nth 5
|
||||
```
|
95
docs/commands/open.md
Normal file
@ -0,0 +1,95 @@
|
||||
# open
|
||||
|
||||
Loads a file into a cell, converting it to a table if possible (append the `--raw` flag to avoid this).
|
||||
|
||||
## Example
|
||||
|
||||
```shell
|
||||
> cat user.yaml
|
||||
- Name: Peter
|
||||
Age: 30
|
||||
Telephone: 88204828
|
||||
Country: Singapore
|
||||
- Name: Michael
|
||||
Age: 42
|
||||
Telephone: 44002010
|
||||
Country: Spain
|
||||
- Name: Will
|
||||
Age: 50
|
||||
Telephone: 99521080
|
||||
Country: Germany
|
||||
> open user.yaml
|
||||
━━━┯━━━━━━━━━┯━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━
|
||||
# │ Name │ Age │ Telephone │ Country
|
||||
───┼─────────┼─────┼───────────┼───────────
|
||||
0 │ Peter │ 30 │ 88204828 │ Singapore
|
||||
1 │ Michael │ 42 │ 44002010 │ Spain
|
||||
2 │ Will │ 50 │ 99521080 │ Germany
|
||||
━━━┷━━━━━━━━━┷━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━
|
||||
> open user.yaml --raw
|
||||
- Name: Peter
|
||||
Age: 30
|
||||
Telephone: 88204828
|
||||
Country: Singapore
|
||||
- Name: Michael
|
||||
Age: 42
|
||||
Telephone: 44002010
|
||||
Country: Spain
|
||||
- Name: Will
|
||||
Age: 50
|
||||
Telephone: 99521080
|
||||
Country: Germany
|
||||
```
|
||||
|
||||
```shell
|
||||
> cat user.json
|
||||
[
|
||||
{
|
||||
"Name": "Peter",
|
||||
"Age": 30,
|
||||
"Telephone": 88204828,
|
||||
"Country": "Singapore"
|
||||
},
|
||||
{
|
||||
"Name": "Michael",
|
||||
"Age": 42,
|
||||
"Telephone": 44002010,
|
||||
"Country": "Spain"
|
||||
},
|
||||
{
|
||||
"Name": "Will",
|
||||
"Age": 50,
|
||||
"Telephone": 99521080,
|
||||
"Country": "Germany"
|
||||
}
|
||||
]
|
||||
> open user.json
|
||||
━━━┯━━━━━━━━━┯━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━
|
||||
# │ Name │ Age │ Telephone │ Country
|
||||
───┼─────────┼─────┼───────────┼───────────
|
||||
0 │ Peter │ 30 │ 88204828 │ Singapore
|
||||
1 │ Michael │ 42 │ 44002010 │ Spain
|
||||
2 │ Will │ 50 │ 99521080 │ Germany
|
||||
━━━┷━━━━━━━━━┷━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━
|
||||
> open user.json --raw
|
||||
[
|
||||
{
|
||||
"Name": "Peter",
|
||||
"Age": 30,
|
||||
"Telephone": 88204828,
|
||||
"Country": "Singapore"
|
||||
},
|
||||
{
|
||||
"Name": "Michael",
|
||||
"Age": 42,
|
||||
"Telephone": 44002010,
|
||||
"Country": "Spain"
|
||||
},
|
||||
{
|
||||
"Name": "Will",
|
||||
"Age": 50,
|
||||
"Telephone": 99521080,
|
||||
"Country": "Germany"
|
||||
}
|
||||
]
|
||||
```
|
51
docs/commands/reverse.md
Normal file
@ -0,0 +1,51 @@
|
||||
# reverse
|
||||
|
||||
This command reverses the order of the rows in a table.
|
||||
|
||||
## Examples
|
||||
|
||||
```shell
|
||||
> ls | sort-by name
|
||||
━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━
|
||||
# │ name │ type │ readonly │ size │ accessed │ modified
|
||||
───┼────────────────────────────┼──────┼──────────┼────────┼────────────────┼────────────────
|
||||
0 │ abaracadabra.txt │ File │ │ 401 B │ 23 minutes ago │ 16 minutes ago
|
||||
1 │ coww.txt │ File │ │ 24 B │ 22 minutes ago │ 17 minutes ago
|
||||
2 │ randomweirdstuff.txt │ File │ │ 197 B │ 21 minutes ago │ 18 minutes ago
|
||||
3 │ youshouldeatmorecereal.txt │ File │ │ 768 B │ 30 seconds ago │ now
|
||||
4 │ zeusiscrazy.txt │ File │ │ 556 B │ 22 minutes ago │ 18 minutes ago
|
||||
━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━
|
||||
> ls | sort-by name | reverse
|
||||
━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━
|
||||
# │ name │ type │ readonly │ size │ accessed │ modified
|
||||
───┼────────────────────────────┼──────┼──────────┼────────┼────────────────┼────────────────
|
||||
0 │ zeusiscrazy.txt │ File │ │ 556 B │ 22 minutes ago │ 19 minutes ago
|
||||
1 │ youshouldeatmorecereal.txt │ File │ │ 768 B │ 39 seconds ago │ 18 seconds ago
|
||||
2 │ randomweirdstuff.txt │ File │ │ 197 B │ 21 minutes ago │ 18 minutes ago
|
||||
3 │ coww.txt │ File │ │ 24 B │ 22 minutes ago │ 18 minutes ago
|
||||
4 │ abaracadabra.txt │ File │ │ 401 B │ 23 minutes ago │ 16 minutes ago
|
||||
━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
```shell
|
||||
> ls | sort-by size
|
||||
━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━
|
||||
# │ name │ type │ readonly │ size │ accessed │ modified
|
||||
───┼────────────────────────────┼──────┼──────────┼────────┼────────────────┼────────────────
|
||||
0 │ coww.txt │ File │ │ 24 B │ 22 minutes ago │ 18 minutes ago
|
||||
1 │ randomweirdstuff.txt │ File │ │ 197 B │ 21 minutes ago │ 18 minutes ago
|
||||
2 │ abaracadabra.txt │ File │ │ 401 B │ 23 minutes ago │ 16 minutes ago
|
||||
3 │ zeusiscrazy.txt │ File │ │ 556 B │ 22 minutes ago │ 19 minutes ago
|
||||
4 │ youshouldeatmorecereal.txt │ File │ │ 768 B │ a minute ago │ 26 seconds ago
|
||||
━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━
|
||||
> ls | sort-by size | reverse
|
||||
━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━
|
||||
# │ name │ type │ readonly │ size │ accessed │ modified
|
||||
───┼────────────────────────────┼──────┼──────────┼────────┼────────────────┼────────────────
|
||||
0 │ youshouldeatmorecereal.txt │ File │ │ 768 B │ a minute ago │ 32 seconds ago
|
||||
1 │ zeusiscrazy.txt │ File │ │ 556 B │ 22 minutes ago │ 19 minutes ago
|
||||
2 │ abaracadabra.txt │ File │ │ 401 B │ 23 minutes ago │ 16 minutes ago
|
||||
3 │ randomweirdstuff.txt │ File │ │ 197 B │ 21 minutes ago │ 18 minutes ago
|
||||
4 │ coww.txt │ File │ │ 24 B │ 22 minutes ago │ 18 minutes ago
|
||||
━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━
|
||||
```
|
26
docs/commands/shells.md
Normal file
@ -0,0 +1,26 @@
|
||||
# shells
|
||||
|
||||
Lists all the active nu shells with a number/index, a name, and the path. It also marks the current nu shell.
|
||||
|
||||
## Examples
|
||||
|
||||
```
|
||||
> shells
|
||||
---+---+------------+---------------
|
||||
# | | name | path
|
||||
---+---+------------+---------------
|
||||
0 | | filesystem | /usr
|
||||
1 | | filesystem | /home
|
||||
2 | X | filesystem | /home/username
|
||||
---+---+------------+---------------
|
||||
```
|
||||
|
||||
```
|
||||
/> shells
|
||||
---+---+-------------------------------------------------+------------------------------------
|
||||
# | | name | path
|
||||
---+---+-------------------------------------------------+------------------------------------
|
||||
0 | | filesystem | /Users/username/Code/nushell
|
||||
1 | X | {/Users/username/Code/nushell/Cargo.toml} | /
|
||||
---+---+-------------------------------------------------+------------------------------------
|
||||
```
|
35
docs/commands/sum.md
Normal file
@ -0,0 +1,35 @@
|
||||
# sum
|
||||
|
||||
This command allows you to calculate the sum of values in a column.
|
||||
|
||||
## Examples
|
||||
|
||||
To get the sum of the file sizes in a directory, simply pipe the size column from the ls command to the sum command.
|
||||
|
||||
```shell
|
||||
> ls | get size | sum
|
||||
━━━━━━━━━
|
||||
value
|
||||
━━━━━━━━━
|
||||
51.0 MB
|
||||
━━━━━━━━━
|
||||
```
|
||||
|
||||
Note that sum only works for integer and byte values at the moment, and if the shell doesn't recognize the values in a column as one of those types, it will return an error.
|
||||
|
||||
```shell
|
||||
> open example.csv
|
||||
━━━┯━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━
|
||||
# │ fruit │ amount │ quality
|
||||
───┼─────────┼────────┼──────────
|
||||
0 │ apples │ 1 │ fresh
|
||||
1 │ bananas │ 2 │ old
|
||||
2 │ oranges │ 7 │ fresh
|
||||
3 │ kiwis │ 25 │ rotten
|
||||
━━━┷━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━
|
||||
```
|
||||
|
||||
```shell
|
||||
> open example.csv | get amount | sum
|
||||
error: Unrecognized type in stream: Primitive(String("1"))
|
||||
```
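
Since `sum` understands byte values, it also composes with row filters; a hedged sketch, using only commands documented in these pages, that totals the sizes of regular files only:

```shell
> ls | where type == File | get size | sum
```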
32 docs/commands/sys.md Normal file
@ -0,0 +1,32 @@

# sys

This command gives information about the system nu is running on.

## Examples

```shell
> sys
━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━
host │ cpu │ disks │ mem │ net │ battery
────────────────┼────────────────┼─────────────────┼────────────────┼──────────────────┼────────────────
[table: 1 row] │ [table: 1 row] │ [table: 3 rows] │ [table: 1 row] │ [table: 18 rows] │ [table: 1 row]
━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━
> sys | get host
━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━
name │ release │ hostname │ arch │ uptime │ users
────────┼─────────┼──────────────┼────────┼────────────────┼──────────────────
Darwin │ 18.7.0 │ C02Y437GJGH6 │ x86_64 │ [table: 1 row] │ [table: 17 rows]
━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━
> sys | get cpu
━━━━━━━┯━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━
cores │ current ghz │ min ghz │ max ghz
───────┼───────────────────┼───────────────────┼───────────────────
12 │ 2.600000000000000 │ 2.600000000000000 │ 2.600000000000000
━━━━━━━┷━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━
> sys | get mem
━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━
total │ free │ swap total │ swap free
─────────┼──────────┼────────────┼───────────
34.4 GB │ 545.0 MB │ 2.1 GB │ 723.0 MB
━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━
```
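
Each of the cells above is itself a table, so `get` can be chained to drill further into the data; a hedged sketch using the column names shown above:

```shell
> sys | get host | get name
```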
80 docs/commands/to-csv.md Normal file
@ -0,0 +1,80 @@

# to-csv

Converts table data into CSV text.

## Example

```shell
> shells
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━
# │ │ name │ path
───┼───┼────────────┼────────────────────────
0 │ X │ filesystem │ /home/shaurya
1 │ │ filesystem │ /home/shaurya/Pictures
2 │ │ filesystem │ /home/shaurya/Desktop
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━
> shells | to-csv
,name,path
X,filesystem,/home/shaurya
,filesystem,/home/shaurya/Pictures
,filesystem,/home/shaurya/Desktop
```

```shell
> open caco3_plastics.csv
━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━━━━
# │ importer │ shipper │ tariff_item │ name │ origin │ shipped_at │ arrived_at │ net_weight │ fob_price │ cif_price │ cif_per_net_
│ │ │ │ │ │ │ │ │ │ │ weight
───┼──────────────┼──────────────┼─────────────┼──────────────┼──────────┼────────────┼────────────┼────────────┼───────────┼───────────┼──────────────
0 │ PLASTICOS │ S A REVERTE │ 2509000000 │ CARBONATO DE │ SPAIN │ 18/03/2016 │ 17/04/2016 │ 81,000.00 │ 14,417.58 │ 18,252.34 │ 0.23
│ RIVAL CIA │ │ │ CALCIO TIPO │ │ │ │ │ │ │
│ LTDA │ │ │ CALCIPORE │ │ │ │ │ │ │
│ │ │ │ 160 T AL │ │ │ │ │ │ │
1 │ MEXICHEM │ OMYA ANDINA │ 2836500000 │ CARBONATO │ COLOMBIA │ 07/07/2016 │ 10/07/2016 │ 26,000.00 │ 7,072.00 │ 8,127.18 │ 0.31
│ ECUADOR S.A. │ S A │ │ │ │ │ │ │ │ │
2 │ PLASTIAZUAY │ SA REVERTE │ 2836500000 │ CARBONATO DE │ SPAIN │ 27/07/2016 │ 09/08/2016 │ 81,000.00 │ 8,100.00 │ 11,474.55 │ 0.14
│ SA │ │ │ CALCIO │ │ │ │ │ │ │
3 │ PLASTICOS │ AND │ 2836500000 │ CALCIUM │ TURKEY │ 04/10/2016 │ 11/11/2016 │ 100,000.00 │ 17,500.00 │ 22,533.75 │ 0.23
│ RIVAL CIA │ ENDUSTRIYEL │ │ CARBONATE │ │ │ │ │ │ │
│ LTDA │ HAMMADDELER │ │ ANADOLU │ │ │ │ │ │ │
│ │ DIS TCARET │ │ ANDCARB CT-1 │ │ │ │ │ │ │
│ │ LTD.STI. │ │ │ │ │ │ │ │ │
4 │ QUIMICA │ SA REVERTE │ 2836500000 │ CARBONATO DE │ SPAIN │ 24/06/2016 │ 12/07/2016 │ 27,000.00 │ 3,258.90 │ 5,585.00 │ 0.21
│ COMERCIAL │ │ │ CALCIO │ │ │ │ │ │ │
│ QUIMICIAL │ │ │ │ │ │ │ │ │ │
│ CIA. LTDA. │ │ │ │ │ │ │ │ │ │
5 │ PICA │ OMYA ANDINA │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/01/1900 │ 18/01/2016 │ 66,500.00 │ 12,635.00 │ 18,670.52 │ 0.28
│ PLASTICOS │ S.A │ │ CALCIO │ │ │ │ │ │ │
│ INDUSTRIALES │ │ │ │ │ │ │ │ │ │
│ C.A. │ │ │ │ │ │ │ │ │ │
6 │ PLASTIQUIM │ OMYA ANDINA │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/01/1900 │ 25/10/2016 │ 33,000.00 │ 6,270.00 │ 9,999.00 │ 0.30
│ S.A. │ S.A NIT │ │ CALCIO │ │ │ │ │ │ │
│ │ 830.027.386- │ │ RECUBIERTO │ │ │ │ │ │ │
│ │ 6 │ │ CON ACIDO │ │ │ │ │ │ │
│ │ │ │ ESTEARICO │ │ │ │ │ │ │
│ │ │ │ OMYA CARB 1T │ │ │ │ │ │ │
│ │ │ │ CG BBS 1000 │ │ │ │ │ │ │
7 │ QUIMICOS │ SIBELCO │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/11/2016 │ 03/11/2016 │ 52,000.00 │ 8,944.00 │ 13,039.05 │ 0.25
│ ANDINOS │ COLOMBIA SAS │ │ CALCIO │ │ │ │ │ │ │
│ QUIMANDI │ │ │ RECUBIERTO │ │ │ │ │ │ │
│ S.A. │ │ │ │ │ │ │ │ │ │
8 │ TIGRE │ OMYA ANDINA │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/01/1900 │ 28/10/2016 │ 66,000.00 │ 11,748.00 │ 18,216.00 │ 0.28
│ ECUADOR S.A. │ S.A NIT │ │ CALCIO │ │ │ │ │ │ │
│ ECUATIGRE │ 830.027.386- │ │ RECUBIERTO │ │ │ │ │ │ │
│ │ 6 │ │ CON ACIDO │ │ │ │ │ │ │
│ │ │ │ ESTEARICO │ │ │ │ │ │ │
│ │ │ │ OMYACARB 1T │ │ │ │ │ │ │
│ │ │ │ CG BPA 25 NO │ │ │ │ │ │ │
━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━━━━
> open caco3_plastics.csv | to-csv
importer,shipper,tariff_item,name,origin,shipped_at,arrived_at,net_weight,fob_price,cif_price,cif_per_net_weight
PLASTICOS RIVAL CIA LTDA,S A REVERTE,2509000000,CARBONATO DE CALCIO TIPO CALCIPORE 160 T AL,SPAIN,18/03/2016,17/04/2016,"81,000.00","14,417.58","18,252.34",0.23
MEXICHEM ECUADOR S.A.,OMYA ANDINA S A,2836500000,CARBONATO,COLOMBIA,07/07/2016,10/07/2016,"26,000.00","7,072.00","8,127.18",0.31
PLASTIAZUAY SA,SA REVERTE,2836500000,CARBONATO DE CALCIO,SPAIN,27/07/2016,09/08/2016,"81,000.00","8,100.00","11,474.55",0.14
PLASTICOS RIVAL CIA LTDA,AND ENDUSTRIYEL HAMMADDELER DIS TCARET LTD.STI.,2836500000,CALCIUM CARBONATE ANADOLU ANDCARB CT-1,TURKEY,04/10/2016,11/11/2016,"100,000.00","17,500.00","22,533.75",0.23
QUIMICA COMERCIAL QUIMICIAL CIA. LTDA.,SA REVERTE,2836500000,CARBONATO DE CALCIO,SPAIN,24/06/2016,12/07/2016,"27,000.00","3,258.90","5,585.00",0.21
PICA PLASTICOS INDUSTRIALES C.A.,OMYA ANDINA S.A,3824909999,CARBONATO DE CALCIO,COLOMBIA,01/01/1900,18/01/2016,"66,500.00","12,635.00","18,670.52",0.28
PLASTIQUIM S.A.,OMYA ANDINA S.A NIT 830.027.386-6,3824909999,CARBONATO DE CALCIO RECUBIERTO CON ACIDO ESTEARICO OMYA CARB 1T CG BBS 1000,COLOMBIA,01/01/1900,25/10/2016,"33,000.00","6,270.00","9,999.00",0.30
QUIMICOS ANDINOS QUIMANDI S.A.,SIBELCO COLOMBIA SAS,3824909999,CARBONATO DE CALCIO RECUBIERTO,COLOMBIA,01/11/2016,03/11/2016,"52,000.00","8,944.00","13,039.05",0.25
TIGRE ECUADOR S.A. ECUATIGRE,OMYA ANDINA S.A NIT 830.027.386-6,3824909999,CARBONATO DE CALCIO RECUBIERTO CON ACIDO ESTEARICO OMYACARB 1T CG BPA 25 NO,COLOMBIA,01/01/1900,28/10/2016,"66,000.00","11,748.00","18,216.00",0.28
```
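
To keep the generated CSV rather than just printing it, the text can be piped into `save`; a minimal sketch, assuming `save` is available (the file name here is arbitrary):

```shell
> open caco3_plastics.csv | to-csv | save plastics_copy.csv
```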
40 docs/commands/to-json.md Normal file
@ -0,0 +1,40 @@

# to-json

Converts table data into JSON text.

## Example

```shell
> shells
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━
# │ │ name │ path
───┼───┼────────────┼────────────────────────
0 │ X │ filesystem │ /home/shaurya
1 │ │ filesystem │ /home/shaurya/Pictures
2 │ │ filesystem │ /home/shaurya/Desktop
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━
> shells | to-json
[{" ":"X","name":"filesystem","path":"/home/shaurya"},{" ":" ","name":"filesystem","path":"/home/shaurya/Pictures"},{" ":" ","name":"filesystem","path":"/home/shaurya/Desktop"}]
```

```shell
> open sgml_description.json
━━━━━━━━━━━━━━━━
glossary
────────────────
[table: 1 row]
━━━━━━━━━━━━━━━━
> open sgml_description.json | to-json
{"glossary":{"title":"example glossary","GlossDiv":{"title":"S","GlossList":{"GlossEntry":{"ID":"SGML","SortAs":"SGML","GlossTerm":"Standard Generalized Markup Language","Acronym":"SGML","Abbrev":"ISO 8879:1986","Height":10,"GlossDef":{"para":"A meta-markup language, used to create markup languages such as DocBook.","GlossSeeAlso":["GML","XML"]},"Sections":[101,102],"GlossSee":"markup"}}}}}
```

We can also convert between formats:

```shell
> open jonathan.xml
━━━━━━━━━━━━━━━━
rss
────────────────
[table: 1 row]
━━━━━━━━━━━━━━━━
> open jonathan.xml | to-json
{"rss":[{"channel":[{"title":["Jonathan Turner"]},{"link":["http://www.jonathanturner.org"]},{"link":[]},{"item":[{"title":["Creating crossplatform Rust terminal apps"]},{"description":["<p><img src=\"/images/pikachu.jpg\" alt=\"Pikachu animation in Windows\" /></p>\n\n<p><em>Look Mom, Pikachu running in Windows CMD!</em></p>\n\n<p>Part of the adventure is not seeing the way ahead and going anyway.</p>\n"]},{"pubDate":["Mon, 05 Oct 2015 00:00:00 +0000"]},{"link":["http://www.jonathanturner.org/2015/10/off-to-new-adventures.html"]},{"guid":["http://www.jonathanturner.org/2015/10/off-to-new-adventures.html"]}]}]}]}
```
112 docs/commands/to-toml.md Normal file
@ -0,0 +1,112 @@

# to-toml

Converts table data into TOML text.

## Example

```shell
> shells
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━
# │ │ name │ path
───┼───┼────────────┼────────────────────────
0 │ X │ filesystem │ /home/shaurya
1 │ │ filesystem │ /home/shaurya/Pictures
2 │ │ filesystem │ /home/shaurya/Desktop
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━
> shells | to-toml
[[]]
" " = "X"
name = "filesystem"
path = "/home/shaurya"

[[]]
" " = " "
name = "filesystem"
path = "/home/shaurya/Pictures"

[[]]
" " = " "
name = "filesystem"
path = "/home/shaurya/Desktop"

```

```shell
> open cargo_sample.toml
━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━
dependencies │ dev-dependencies │ package
────────────────┼──────────────────┼────────────────
[table: 1 row] │ [table: 1 row] │ [table: 1 row]
━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━
> open cargo_sample.toml | to-toml
[dependencies]
ansi_term = "0.11.0"
app_dirs = "1.2.1"
byte-unit = "2.1.0"
bytes = "0.4.12"
chrono-humanize = "0.0.11"
chrono-tz = "0.5.1"
clap = "2.33.0"
conch-parser = "0.1.1"
derive-new = "0.5.6"
dunce = "1.0.0"
futures-sink-preview = "0.3.0-alpha.16"
futures_codec = "0.2.2"
getset = "0.0.7"
git2 = "0.8.0"
itertools = "0.8.0"
lalrpop-util = "0.17.0"
language-reporting = "0.3.0"
log = "0.4.6"
logos = "0.10.0-rc2"
logos-derive = "0.10.0-rc2"
nom = "5.0.0-beta1"
ordered-float = "1.0.2"
pretty_env_logger = "0.3.0"
prettyprint = "0.6.0"
prettytable-rs = "0.8.0"
regex = "1.1.6"
rustyline = "4.1.0"
serde = "1.0.91"
serde_derive = "1.0.91"
serde_json = "1.0.39"
subprocess = "0.1.18"
sysinfo = "0.8.4"
term = "0.5.2"
tokio-fs = "0.1.6"
toml = "0.5.1"
toml-query = "0.9.0"

[dependencies.chrono]
features = ["serde"]
version = "0.4.6"

[dependencies.cursive]
default-features = false
features = ["pancurses-backend"]
version = "0.12.0"

[dependencies.futures-preview]
features = ["compat", "io-compat"]
version = "0.3.0-alpha.16"

[dependencies.indexmap]
features = ["serde-1"]
version = "1.0.2"

[dependencies.pancurses]
features = ["win32a"]
version = "0.16"

[dev-dependencies]
pretty_assertions = "0.6.1"

[package]
authors = ["Yehuda Katz <wycats@gmail.com>"]
description = "A shell for the GitHub era"
edition = "2018"
license = "ISC"
name = "nu"
version = "0.1.1"

```
80 docs/commands/to-tsv.md Normal file
@ -0,0 +1,80 @@

# to-tsv

Converts table data into TSV text.

## Example

```shell
> shells
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━
# │ │ name │ path
───┼───┼────────────┼────────────────────────
0 │ X │ filesystem │ /home/shaurya
1 │ │ filesystem │ /home/shaurya/Pictures
2 │ │ filesystem │ /home/shaurya/Desktop
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━
> shells | to-tsv
name path
X filesystem /home/shaurya

```

```shell
> open caco3_plastics.tsv
━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━━━━
# │ importer │ shipper │ tariff_item │ name │ origin │ shipped_at │ arrived_at │ net_weight │ fob_price │ cif_price │ cif_per_net_
│ │ │ │ │ │ │ │ │ │ │ weight
───┼──────────────┼──────────────┼─────────────┼──────────────┼──────────┼────────────┼────────────┼────────────┼───────────┼───────────┼──────────────
0 │ PLASTICOS │ S A REVERTE │ 2509000000 │ CARBONATO DE │ SPAIN │ 18/03/2016 │ 17/04/2016 │ 81,000.00 │ 14,417.58 │ 18,252.34 │ 0.23
│ RIVAL CIA │ │ │ CALCIO TIPO │ │ │ │ │ │ │
│ LTDA │ │ │ CALCIPORE │ │ │ │ │ │ │
│ │ │ │ 160 T AL │ │ │ │ │ │ │
1 │ MEXICHEM │ OMYA ANDINA │ 2836500000 │ CARBONATO │ COLOMBIA │ 07/07/2016 │ 10/07/2016 │ 26,000.00 │ 7,072.00 │ 8,127.18 │ 0.31
│ ECUADOR S.A. │ S A │ │ │ │ │ │ │ │ │
2 │ PLASTIAZUAY │ SA REVERTE │ 2836500000 │ CARBONATO DE │ SPAIN │ 27/07/2016 │ 09/08/2016 │ 81,000.00 │ 8,100.00 │ 11,474.55 │ 0.14
│ SA │ │ │ CALCIO │ │ │ │ │ │ │
3 │ PLASTICOS │ AND │ 2836500000 │ CALCIUM │ TURKEY │ 04/10/2016 │ 11/11/2016 │ 100,000.00 │ 17,500.00 │ 22,533.75 │ 0.23
│ RIVAL CIA │ ENDUSTRIYEL │ │ CARBONATE │ │ │ │ │ │ │
│ LTDA │ HAMMADDELER │ │ ANADOLU │ │ │ │ │ │ │
│ │ DIS TCARET │ │ ANDCARB CT-1 │ │ │ │ │ │ │
│ │ LTD.STI. │ │ │ │ │ │ │ │ │
4 │ QUIMICA │ SA REVERTE │ 2836500000 │ CARBONATO DE │ SPAIN │ 24/06/2016 │ 12/07/2016 │ 27,000.00 │ 3,258.90 │ 5,585.00 │ 0.21
│ COMERCIAL │ │ │ CALCIO │ │ │ │ │ │ │
│ QUIMICIAL │ │ │ │ │ │ │ │ │ │
│ CIA. LTDA. │ │ │ │ │ │ │ │ │ │
5 │ PICA │ OMYA ANDINA │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/01/1900 │ 18/01/2016 │ 66,500.00 │ 12,635.00 │ 18,670.52 │ 0.28
│ PLASTICOS │ S.A │ │ CALCIO │ │ │ │ │ │ │
│ INDUSTRIALES │ │ │ │ │ │ │ │ │ │
│ C.A. │ │ │ │ │ │ │ │ │ │
6 │ PLASTIQUIM │ OMYA ANDINA │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/01/1900 │ 25/10/2016 │ 33,000.00 │ 6,270.00 │ 9,999.00 │ 0.30
│ S.A. │ S.A NIT │ │ CALCIO │ │ │ │ │ │ │
│ │ 830.027.386- │ │ RECUBIERTO │ │ │ │ │ │ │
│ │ 6 │ │ CON ACIDO │ │ │ │ │ │ │
│ │ │ │ ESTEARICO │ │ │ │ │ │ │
│ │ │ │ OMYA CARB 1T │ │ │ │ │ │ │
│ │ │ │ CG BBS 1000 │ │ │ │ │ │ │
7 │ QUIMICOS │ SIBELCO │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/11/2016 │ 03/11/2016 │ 52,000.00 │ 8,944.00 │ 13,039.05 │ 0.25
│ ANDINOS │ COLOMBIA SAS │ │ CALCIO │ │ │ │ │ │ │
│ QUIMANDI │ │ │ RECUBIERTO │ │ │ │ │ │ │
│ S.A. │ │ │ │ │ │ │ │ │ │
8 │ TIGRE │ OMYA ANDINA │ 3824909999 │ CARBONATO DE │ COLOMBIA │ 01/01/1900 │ 28/10/2016 │ 66,000.00 │ 11,748.00 │ 18,216.00 │ 0.28
│ ECUADOR S.A. │ S.A NIT │ │ CALCIO │ │ │ │ │ │ │
│ ECUATIGRE │ 830.027.386- │ │ RECUBIERTO │ │ │ │ │ │ │
│ │ 6 │ │ CON ACIDO │ │ │ │ │ │ │
│ │ │ │ ESTEARICO │ │ │ │ │ │ │
│ │ │ │ OMYACARB 1T │ │ │ │ │ │ │
│ │ │ │ CG BPA 25 NO │ │ │ │ │ │ │
━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━━━━
> open caco3_plastics.tsv | to-tsv
importer shipper tariff_item name origin shipped_at arrived_at net_weight fob_price cif_price cif_per_net_weight
PLASTICOS RIVAL CIA LTDA S A REVERTE 2509000000 CARBONATO DE CALCIO TIPO CALCIPORE 160 T AL SPAIN 18/03/2016 17/04/2016 81,000.00 14,417.58 18,252.34 0.23
MEXICHEM ECUADOR S.A. OMYA ANDINA S A 2836500000 CARBONATO COLOMBIA 07/07/2016 10/07/2016 26,000.00 7,072.00 8,127.18 0.31
PLASTIAZUAY SA SA REVERTE 2836500000 CARBONATO DE CALCIO SPAIN 27/07/2016 09/08/2016 81,000.00 8,100.00 11,474.55 0.14
PLASTICOS RIVAL CIA LTDA AND ENDUSTRIYEL HAMMADDELER DIS TCARET LTD.STI. 2836500000 CALCIUM CARBONATE ANADOLU ANDCARB CT-1 TURKEY 04/10/2016 11/11/2016 100,000.00 17,500.00 22,533.75 0.23
QUIMICA COMERCIAL QUIMICIAL CIA. LTDA. SA REVERTE 2836500000 CARBONATO DE CALCIO SPAIN 24/06/2016 12/07/2016 27,000.00 3,258.90 5,585.00 0.21
PICA PLASTICOS INDUSTRIALES C.A. OMYA ANDINA S.A 3824909999 CARBONATO DE CALCIO COLOMBIA 01/01/1900 18/01/2016 66,500.00 12,635.00 18,670.52 0.28
PLASTIQUIM S.A. OMYA ANDINA S.A NIT 830.027.386-6 3824909999 CARBONATO DE CALCIO RECUBIERTO CON ACIDO ESTEARICO OMYA CARB 1T CG BBS 1000 COLOMBIA 01/01/1900 25/10/2016 33,000.00 6,270.00 9,999.00 0.30
QUIMICOS ANDINOS QUIMANDI S.A. SIBELCO COLOMBIA SAS 3824909999 CARBONATO DE CALCIO RECUBIERTO COLOMBIA 01/11/2016 03/11/2016 52,000.00 8,944.00 13,039.05 0.25
TIGRE ECUADOR S.A. ECUATIGRE OMYA ANDINA S.A NIT 830.027.386-6 3824909999 CARBONATO DE CALCIO RECUBIERTO CON ACIDO ESTEARICO OMYACARB 1T CG BPA 25 NO COLOMBIA 01/01/1900 28/10/2016 66,000.00 11,748.00 18,216.00 0.28

```
35 docs/commands/to-url.md Normal file
@ -0,0 +1,35 @@

# to-url

Converts table data into URL-encoded text.

## Example

```shell
> shells
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━
# │ │ name │ path
───┼───┼────────────┼────────────────────────
0 │ X │ filesystem │ /home/shaurya
1 │ │ filesystem │ /home/shaurya/Pictures
2 │ │ filesystem │ /home/shaurya/Desktop
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━
> shells | to-url
━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
# │ value
───┼───────────────────────────────────────────────────────
0 │ +=X&name=filesystem&path=%2Fhome%2Fshaurya
1 │ +=+&name=filesystem&path=%2Fhome%2Fshaurya%2FPictures
2 │ +=+&name=filesystem&path=%2Fhome%2Fshaurya%2FDesktop
━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
```

```shell
> open sample.url
━━━━━━━━━━┯━━━━━━━━┯━━━━━━┯━━━━━━━━
bread │ cheese │ meat │ fat
──────────┼────────┼──────┼────────
baguette │ comté │ ham │ butter
━━━━━━━━━━┷━━━━━━━━┷━━━━━━┷━━━━━━━━
> open sample.url | to-url
bread=baguette&cheese=comt%C3%A9&meat=ham&fat=butter
```
60 docs/commands/to-yaml.md Normal file
@ -0,0 +1,60 @@

# to-yaml

Converts table data into YAML text.

## Example

```shell
> shells
━━━┯━━━┯━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━
# │ │ name │ path
───┼───┼────────────┼────────────────────────
0 │ X │ filesystem │ /home/shaurya
1 │ │ filesystem │ /home/shaurya/Pictures
2 │ │ filesystem │ /home/shaurya/Desktop
━━━┷━━━┷━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━
> shells | to-yaml
---
- " ": X
  name: filesystem
  path: /home/shaurya
- " ": " "
  name: filesystem
  path: /home/shaurya/Pictures
- " ": " "
  name: filesystem
  path: /home/shaurya/Desktop
```

```shell
> open appveyor.yml
━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━
image │ environment │ install │ build │ test_script │ cache
────────────────────┼────────────────┼─────────────────┼───────┼─────────────────┼─────────────────
Visual Studio 2017 │ [table: 1 row] │ [table: 5 rows] │ │ [table: 2 rows] │ [table: 2 rows]
━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━
> open appveyor.yml | to-yaml
---
image: Visual Studio 2017
environment:
  global:
    PROJECT_NAME: nushell
    RUST_BACKTRACE: 1
  matrix:
    - TARGET: x86_64-pc-windows-msvc
      CHANNEL: nightly
      BITS: 64
install:
  - "set PATH=C:\\msys64\\mingw%BITS%\\bin;C:\\msys64\\usr\\bin;%PATH%"
  - "curl -sSf -o rustup-init.exe https://win.rustup.rs"
  - rustup-init.exe -y --default-host %TARGET% --default-toolchain %CHANNEL%-%TARGET%
  - "set PATH=%PATH%;C:\\Users\\appveyor\\.cargo\\bin"
  - "call \"C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\Community\\VC\\Auxiliary\\Build\\vcvars64.bat\""
build: false
test_script:
  - cargo build --verbose
  - cargo test --all --verbose
cache:
  - target -> Cargo.lock
  - "C:\\Users\\appveyor\\.cargo\\registry -> Cargo.lock"
```
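
Like `to-json`, this command can be used to convert between formats; a hedged sketch that converts the TOML file used elsewhere in these docs into YAML:

```shell
> open cargo_sample.toml | to-yaml
```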
12 docs/commands/trim.md Normal file
@ -0,0 +1,12 @@

# trim

Trims leading and trailing whitespace from text data.

## Example

```shell
> echo " Hello world"
 Hello world
> echo " Hello world" | trim
Hello world
```
14 docs/commands/version.md Normal file
@ -0,0 +1,14 @@

# version

Outputs the nushell version.

## Examples

```shell
> version
━━━━━━━━━
version
─────────
0.3.0
━━━━━━━━━
```
34 docs/commands/where.md Normal file
@ -0,0 +1,34 @@

# where

This command filters the contents of a table based on a condition passed as a parameter, which must be a boolean expression that can use any of the table's columns. Other commands, such as `ls`, can feed `where` their output through a pipeline.

## Usage
```shell
> [input-command] | where [condition]
```

## Examples

```shell
> ls | where size > 4kb
----+----------------+------+----------+----------+----------------+----------------
# | name | type | readonly | size | accessed | modified
----+----------------+------+----------+----------+----------------+----------------
0 | IMG_1291.jpg | File | | 115.5 KB | a month ago | 4 months ago
1 | README.md | File | | 11.1 KB | 2 days ago | 2 days ago
2 | IMG_1291.png | File | | 589.0 KB | a month ago | a month ago
3 | IMG_1381.jpg | File | | 81.0 KB | a month ago | 4 months ago
4 | butterfly.jpeg | File | | 4.2 KB | a month ago | a month ago
5 | Cargo.lock | File | | 199.6 KB | 22 minutes ago | 22 minutes ago
```

```shell
> ps | where cpu > 10
---+-------+----------+-------+-----------------------------
# | pid | status | cpu | name
---+-------+----------+-------+-----------------------------
0 | 1992 | Sleeping | 44.52 | /usr/bin/gnome-shell
1 | 1069 | Sleeping | 16.15 |
2 | 24116 | Sleeping | 13.70 | /opt/google/chrome/chrome
3 | 21976 | Sleeping | 12.67 | /usr/share/discord/Discord
```
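
Because `where` simply passes the matching rows along, it composes with the other table commands; a hedged sketch combining it with `sort-by` and `reverse` from these docs:

```shell
> ls | where size > 4kb | sort-by size | reverse
```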
@ -41,7 +41,7 @@ docker run -it .

This image does not contain the common packages contained in the default tag and only contains the minimal packages needed to run `nu`. Unless you are working in an environment where only the `nu` image will be deployed and you have space constraints, it's highly recommended to use the alpine image if you aim for a small image size. Only use this image if you really need **both** `glibc` and a small image size.

### `nu:<version>-alpine`

-This image is based on the popular [Alpine Linux project](http://alpinelinux.org/), available in [the alpine official image][alpine]. Alpine Linux is much smaller than most distribution base images (~5MB), and thus leads to much slimmer images in general.
+This image is based on the popular [Alpine Linux project](https://alpinelinux.org/), available in [the alpine official image][alpine]. Alpine Linux is much smaller than most distribution base images (~5MB), and thus leads to much slimmer images in general.

This variant is highly recommended when you want the final image size to be as small as possible. The main caveat is that it uses `musl` libc instead of `glibc` and friends, so certain software might run into issues depending on the depth of their libc requirements. However, most software doesn't have a problem with this, so this variant is usually a very safe choice. See [this Hacker News comment thread](https://news.ycombinator.com/item?id=10782897) for more discussion of the issues that might arise and some pro/con comparisons of using Alpine-based images.

@ -104,7 +104,7 @@ ENTRYPOINT ["nu"]
</details>

### `nu:<version>-<libc-variant>-busybox`

-This image is based on [Busybox](http://www.busybox.net/) which is a very good ingredient to craft space-efficient distributions. It combines tiny versions of many common UNIX utilities into a single small executable. It also provides replacements for most of the utilities you usually find in GNU fileutils, shellutils, etc. The utilities in BusyBox generally have fewer options than their full-featured GNU cousins; however, the options that are included provide the expected functionality and behave very much like their GNU counterparts. Basically, this image provides a fairly complete environment for any small or embedded system.
+This image is based on [Busybox](https://www.busybox.net/) which is a very good ingredient to craft space-efficient distributions. It combines tiny versions of many common UNIX utilities into a single small executable. It also provides replacements for most of the utilities you usually find in GNU fileutils, shellutils, etc. The utilities in BusyBox generally have fewer options than their full-featured GNU cousins; however, the options that are included provide the expected functionality and behave very much like their GNU counterparts. Basically, this image provides a fairly complete environment for any small or embedded system.

> Use this only if you need common utilities like `tar`, `awk`, and many more, but don't want extra bloat such as the nushell plugins and other extras.

@ -120,5 +120,5 @@ ENTRYPOINT ["nu"]
```
</details>

-[musl]: http://www.musl-libc.org/
+[musl]: https://www.musl-libc.org/
[alpine]: https://hub.docker.com/_/alpine/
13 features.toml Normal file
@ -0,0 +1,13 @@

[hintsv1]

description = "Adding hints based upon error states in the syntax highlighter"
enabled = false

[coloring_in_tokens]

description = "Move coloring into the TokensIterator so they can be atomic with the rest of the iterator"
reason = """
This is laying the groundwork for merging coloring and parsing. It also makes token_nodes.atomic() naturally
work with coloring, which is pretty useful on its own.
"""
enabled = false

@ -1 +1 @@
-nightly-2019-09-11
+beta-2019-09-25
270 src/cli.rs
@ -1,4 +1,3 @@
|
||||
use crate::commands::autoview;
|
||||
use crate::commands::classified::{
|
||||
ClassifiedCommand, ClassifiedInputStream, ClassifiedPipeline, ExternalCommand, InternalCommand,
|
||||
StreamNext,
|
||||
@ -13,7 +12,12 @@ pub(crate) use crate::errors::ShellError;
|
||||
use crate::fuzzysearch::{interactive_fuzzy_search, SelectionResult};
|
||||
use crate::git::current_branch;
|
||||
use crate::parser::registry::Signature;
|
||||
use crate::parser::{hir, CallNode, Pipeline, PipelineElement, TokenNode};
|
||||
use crate::parser::{
|
||||
hir,
|
||||
hir::syntax_shape::{expand_syntax, PipelineShape},
|
||||
hir::{expand_external_tokens::expand_external_tokens, tokens_iterator::TokensIterator},
|
||||
TokenNode,
|
||||
};
|
||||
use crate::prelude::*;
|
||||
|
||||
use log::{debug, trace};
|
||||
@ -24,7 +28,7 @@ use std::error::Error;
|
||||
use std::io::{BufRead, BufReader, Write};
|
||||
use std::iter::Iterator;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::sync::atomic::Ordering;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum MaybeOwned<'a, T> {
|
||||
@ -75,7 +79,7 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel
|
||||
let name = params.name.clone();
|
||||
let fname = fname.to_string();
|
||||
|
||||
if context.has_command(&name) {
|
||||
if let Some(_) = context.get_command(&name) {
|
||||
trace!("plugin {:?} already loaded.", &name);
|
||||
} else {
|
||||
if params.is_filter {
|
||||
@ -94,11 +98,17 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel
|
||||
},
|
||||
Err(e) => {
|
||||
trace!("incompatible plugin {:?}", input);
|
||||
Err(ShellError::string(format!("Error: {:?}", e)))
|
||||
Err(ShellError::untagged_runtime_error(format!(
|
||||
"Error: {:?}",
|
||||
e
|
||||
)))
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => Err(ShellError::string(format!("Error: {:?}", e))),
|
||||
Err(e) => Err(ShellError::untagged_runtime_error(format!(
|
||||
"Error: {:?}",
|
||||
e
|
||||
))),
|
||||
};
|
||||
|
||||
let _ = child.wait();
|
||||
@ -248,7 +258,6 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
whole_stream_command(Next),
|
||||
whole_stream_command(Previous),
|
||||
whole_stream_command(Debug),
|
||||
whole_stream_command(Lines),
|
||||
whole_stream_command(Shells),
|
||||
whole_stream_command(SplitColumn),
|
||||
whole_stream_command(SplitRow),
|
||||
@ -267,11 +276,13 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
whole_stream_command(ToYAML),
|
||||
whole_stream_command(SortBy),
|
||||
whole_stream_command(Tags),
|
||||
whole_stream_command(Count),
|
||||
whole_stream_command(First),
|
||||
whole_stream_command(Last),
|
||||
whole_stream_command(Env),
|
||||
whole_stream_command(FromCSV),
|
||||
whole_stream_command(FromTSV),
|
||||
whole_stream_command(FromSSV),
|
||||
whole_stream_command(FromINI),
|
||||
whole_stream_command(FromBSON),
|
||||
whole_stream_command(FromJSON),
|
||||
@ -314,6 +325,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
)]);
|
||||
}
|
||||
}
|
||||
|
||||
let _ = load_plugins(&mut context);
|
||||
|
||||
let config = Config::builder().color_mode(ColorMode::Forced).build();
|
||||
@ -327,24 +339,21 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
// we are ok if history does not exist
|
||||
let _ = rl.load_history(&History::path());
|
||||
|
||||
let ctrl_c = Arc::new(AtomicBool::new(false));
|
||||
let cc = ctrl_c.clone();
|
||||
let cc = context.ctrl_c.clone();
|
||||
ctrlc::set_handler(move || {
|
||||
cc.store(true, Ordering::SeqCst);
|
||||
})
|
||||
.expect("Error setting Ctrl-C handler");
|
||||
let mut ctrlcbreak = false;
|
||||
loop {
|
||||
if ctrl_c.load(Ordering::SeqCst) {
|
||||
ctrl_c.store(false, Ordering::SeqCst);
|
||||
if context.ctrl_c.load(Ordering::SeqCst) {
|
||||
context.ctrl_c.store(false, Ordering::SeqCst);
|
||||
continue;
|
||||
}
|
||||
|
||||
let cwd = context.shell_manager.path();
|
||||
|
||||
rl.set_helper(Some(crate::shell::Helper::new(
|
||||
context.shell_manager.clone(),
|
||||
)));
|
||||
rl.set_helper(Some(crate::shell::Helper::new(context.clone())));
|
||||
|
||||
let edit_mode = config::config(Tag::unknown())?
|
||||
.get("edit_mode")
|
||||
@ -359,7 +368,8 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
|
||||
// Register Ctrl-r for history fuzzy search
|
||||
// rustyline doesn't support custom commands, so we override Ctrl-D (EOF)
|
||||
#[cfg(not(windows))] // https://github.com/nushell/nushell/issues/689
|
||||
// https://github.com/nushell/nushell/issues/689
|
||||
#[cfg(all(not(windows), feature = "crossterm"))]
|
||||
rl.bind_sequence(rustyline::KeyPress::Ctrl('R'), rustyline::Cmd::EndOfFile);
|
||||
// Redefine Ctrl-D to same command as Ctrl-C
|
||||
rl.bind_sequence(rustyline::KeyPress::Ctrl('D'), rustyline::Cmd::Interrupt);
|
||||
@ -405,6 +415,18 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
}
|
||||
|
||||
LineResult::CtrlC => {
|
||||
let config_ctrlc_exit = config::config(Tag::unknown())?
|
||||
.get("ctrlc_exit")
|
||||
.map(|s| match s.as_string().unwrap().as_ref() {
|
||||
"true" => true,
|
||||
_ => false,
|
||||
})
|
||||
.unwrap_or(false); // default behavior is to allow CTRL-C spamming similar to other shells
|
||||
|
||||
if !config_ctrlc_exit {
|
||||
continue;
|
||||
}
|
||||
|
||||
if ctrlcbreak {
|
||||
let _ = rl.save_history(&History::path());
|
||||
std::process::exit(0);
|
||||
@ -415,21 +437,11 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
}
|
||||
}
|
||||
|
||||
LineResult::Error(mut line, err) => {
|
||||
LineResult::Error(line, err) => {
|
||||
rl.add_history_entry(line.clone());
|
||||
let diag = err.to_diagnostic();
|
||||
|
||||
context.with_host(|host| {
|
||||
let writer = host.err_termcolor();
|
||||
line.push_str(" ");
|
||||
let files = crate::parser::Files::new(line);
|
||||
let _ = std::panic::catch_unwind(move || {
|
||||
let _ = language_reporting::emit(
|
||||
&mut writer.lock(),
|
||||
&files,
|
||||
&diag,
|
||||
&language_reporting::DefaultConfig,
|
||||
);
|
||||
});
|
||||
print_err(err, host, &Text::from(line));
|
||||
})
|
||||
}
|
||||
|
||||
@ -446,6 +458,14 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn chomp_newline(s: &str) -> &str {
|
||||
if s.ends_with('\n') {
|
||||
&s[..s.len() - 1]
|
||||
} else {
|
||||
s
|
||||
}
|
||||
}
|
||||
|
||||
enum LineResult {
|
||||
Success(String),
|
||||
Error(String, ShellError),
|
||||
@ -458,9 +478,11 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
Ok(line) if line.trim() == "" => LineResult::Success(line.clone()),
|
||||
|
||||
Ok(line) => {
|
||||
let result = match crate::parser::parse(&line, uuid::Uuid::nil()) {
|
||||
let line = chomp_newline(line);
|
||||
|
||||
let result = match crate::parser::parse(&line) {
|
||||
Err(err) => {
|
||||
return LineResult::Error(line.clone(), err);
|
||||
return LineResult::Error(line.to_string(), err);
|
||||
}
|
||||
|
||||
Ok(val) => val,
|
||||
@ -471,7 +493,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
|
||||
let mut pipeline = match classify_pipeline(&result, ctx, &Text::from(line)) {
|
||||
Ok(pipeline) => pipeline,
|
||||
Err(err) => return LineResult::Error(line.clone(), err),
|
||||
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||
};
|
||||
|
||||
match pipeline.commands.last() {
|
||||
@ -479,7 +501,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
_ => pipeline
|
||||
.commands
|
||||
.push(ClassifiedCommand::Internal(InternalCommand {
|
||||
command: whole_stream_command(autoview::Autoview),
|
||||
name: "autoview".to_string(),
|
||||
name_tag: Tag::unknown(),
|
||||
args: hir::Call::new(
|
||||
Box::new(hir::Expression::synthetic_string("autoview")),
|
||||
@ -501,16 +523,24 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
input = match (item, next) {
|
||||
(None, _) => break,
|
||||
|
||||
(Some(ClassifiedCommand::Dynamic(_)), _)
|
||||
| (_, Some(ClassifiedCommand::Dynamic(_))) => {
|
||||
return LineResult::Error(
|
||||
line.to_string(),
|
||||
ShellError::unimplemented("Dynamic commands"),
|
||||
)
|
||||
}
|
||||
|
||||
(Some(ClassifiedCommand::Expr(_)), _) => {
|
||||
return LineResult::Error(
|
||||
line.clone(),
|
||||
line.to_string(),
|
||||
ShellError::unimplemented("Expression-only commands"),
|
||||
)
|
||||
}
|
||||
|
||||
(_, Some(ClassifiedCommand::Expr(_))) => {
|
||||
return LineResult::Error(
|
||||
line.clone(),
|
||||
line.to_string(),
|
||||
ShellError::unimplemented("Expression-only commands"),
|
||||
)
|
||||
}
|
||||
@ -518,31 +548,46 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
(
|
||||
Some(ClassifiedCommand::Internal(left)),
|
||||
Some(ClassifiedCommand::External(_)),
|
||||
) => match left
|
||||
.run(ctx, input, Text::from(line), is_first_command)
|
||||
.await
|
||||
{
|
||||
) => match left.run(ctx, input, Text::from(line), is_first_command) {
|
||||
Ok(val) => ClassifiedInputStream::from_input_stream(val),
|
||||
Err(err) => return LineResult::Error(line.clone(), err),
|
||||
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||
},
|
||||
|
||||
(Some(ClassifiedCommand::Internal(left)), Some(_)) => {
|
||||
match left
|
||||
.run(ctx, input, Text::from(line), is_first_command)
|
||||
.await
|
||||
{
|
||||
match left.run(ctx, input, Text::from(line), is_first_command) {
|
||||
Ok(val) => ClassifiedInputStream::from_input_stream(val),
|
||||
Err(err) => return LineResult::Error(line.clone(), err),
|
||||
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||
}
|
||||
}
|
||||
|
||||
(Some(ClassifiedCommand::Internal(left)), None) => {
|
||||
match left
|
||||
.run(ctx, input, Text::from(line), is_first_command)
|
||||
.await
|
||||
{
|
||||
Ok(val) => ClassifiedInputStream::from_input_stream(val),
|
||||
Err(err) => return LineResult::Error(line.clone(), err),
|
||||
match left.run(ctx, input, Text::from(line), is_first_command) {
|
||||
Ok(val) => {
|
||||
use futures::stream::TryStreamExt;
|
||||
|
||||
let mut output_stream: OutputStream = val.into();
|
||||
loop {
|
||||
match output_stream.try_next().await {
|
||||
Ok(Some(ReturnSuccess::Value(Tagged {
|
||||
item: Value::Error(e),
|
||||
..
|
||||
}))) => {
|
||||
return LineResult::Error(line.to_string(), e);
|
||||
}
|
||||
Ok(Some(_item)) => {
|
||||
if ctx.ctrl_c.load(Ordering::SeqCst) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return LineResult::Success(line.to_string());
|
||||
}
|
||||
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||
}
|
||||
}
|
||||
|
||||
@ -551,20 +596,20 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
Some(ClassifiedCommand::External(_)),
|
||||
) => match left.run(ctx, input, StreamNext::External).await {
|
||||
Ok(val) => val,
|
||||
Err(err) => return LineResult::Error(line.clone(), err),
|
||||
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||
},
|
||||
|
||||
(Some(ClassifiedCommand::External(left)), Some(_)) => {
|
||||
match left.run(ctx, input, StreamNext::Internal).await {
|
||||
Ok(val) => val,
|
||||
Err(err) => return LineResult::Error(line.clone(), err),
|
||||
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||
}
|
||||
}
|
||||
|
||||
(Some(ClassifiedCommand::External(left)), None) => {
|
||||
match left.run(ctx, input, StreamNext::Last).await {
|
||||
Ok(val) => val,
|
||||
Err(err) => return LineResult::Error(line.clone(), err),
|
||||
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||
}
|
||||
}
|
||||
};
|
||||
@ -572,7 +617,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
is_first_command = false;
|
||||
}
|
||||
|
||||
LineResult::Success(line.clone())
|
||||
LineResult::Success(line.to_string())
|
||||
}
|
||||
Err(ReadlineError::Interrupted) => LineResult::CtrlC,
|
||||
Err(ReadlineError::Eof) => LineResult::Break,
|
||||
@ -588,95 +633,52 @@ fn classify_pipeline(
|
||||
context: &Context,
|
||||
source: &Text,
|
||||
) -> Result<ClassifiedPipeline, ShellError> {
|
||||
let pipeline = pipeline.as_pipeline()?;
|
||||
let mut pipeline_list = vec![pipeline.clone()];
|
||||
let mut iterator = TokensIterator::all(&mut pipeline_list, pipeline.span());
|
||||
|
||||
let Pipeline { parts, .. } = pipeline;
|
||||
|
||||
let commands: Result<Vec<_>, ShellError> = parts
|
||||
.iter()
|
||||
.map(|item| classify_command(&item, context, &source))
|
||||
.collect();
|
||||
|
||||
Ok(ClassifiedPipeline {
|
||||
commands: commands?,
|
||||
})
|
||||
}
|
||||
|
||||
fn classify_command(
|
||||
command: &PipelineElement,
|
||||
context: &Context,
|
||||
source: &Text,
|
||||
) -> Result<ClassifiedCommand, ShellError> {
|
||||
let call = command.call();
|
||||
|
||||
match call {
|
||||
// If the command starts with `^`, treat it as an external command no matter what
|
||||
call if call.head().is_external() => {
|
||||
let name_tag = call.head().expect_external();
|
||||
let name = name_tag.slice(source);
|
||||
|
||||
Ok(external_command(call, source, name.tagged(name_tag)))
|
||||
}
|
||||
|
||||
// Otherwise, if the command is a bare word, we'll need to triage it
|
||||
call if call.head().is_bare() => {
|
||||
let head = call.head();
|
||||
let name = head.source(source);
|
||||
|
||||
match context.has_command(name) {
|
||||
// if the command is in the registry, it's an internal command
|
||||
true => {
|
||||
let command = context.get_command(name);
|
||||
let config = command.signature();
|
||||
|
||||
trace!(target: "nu::build_pipeline", "classifying {:?}", config);
|
||||
|
||||
let args: hir::Call = config.parse_args(call, &context, source)?;
|
||||
|
||||
trace!(target: "nu::build_pipeline", "args :: {}", args.debug(source));
|
||||
|
||||
Ok(ClassifiedCommand::Internal(InternalCommand {
|
||||
command,
|
||||
name_tag: head.tag(),
|
||||
args,
|
||||
}))
|
||||
}
|
||||
|
||||
// otherwise, it's an external command
|
||||
false => Ok(external_command(call, source, name.tagged(head.tag()))),
|
||||
}
|
||||
}
|
||||
|
||||
// If the command is something else (like a number or a variable), that is currently unsupported.
|
||||
// We might support `$somevar` as a curried command in the future.
|
||||
call => Err(ShellError::invalid_command(call.head().tag())),
|
||||
}
|
||||
expand_syntax(
|
||||
&PipelineShape,
|
||||
&mut iterator,
|
||||
&context.expand_context(source, pipeline.span()),
|
||||
)
|
||||
}
|
||||
|
||||
// Classify this command as an external command, which doesn't give special meaning
|
||||
// to nu syntactic constructs, and passes all arguments to the external command as
|
||||
// strings.
|
||||
fn external_command(
|
||||
call: &Tagged<CallNode>,
|
||||
pub(crate) fn external_command(
|
||||
tokens: &mut TokensIterator,
|
||||
source: &Text,
|
||||
name: Tagged<&str>,
|
||||
) -> ClassifiedCommand {
|
||||
let arg_list_strings: Vec<Tagged<String>> = match call.children() {
|
||||
Some(args) => args
|
||||
) -> Result<ClassifiedCommand, ShellError> {
|
||||
let arg_list_strings = expand_external_tokens(tokens, source)?;
|
||||
|
||||
Ok(ClassifiedCommand::External(ExternalCommand {
|
||||
name: name.to_string(),
|
||||
name_tag: name.tag(),
|
||||
args: arg_list_strings
|
||||
.iter()
|
||||
.filter_map(|i| match i {
|
||||
TokenNode::Whitespace(_) => None,
|
||||
other => Some(other.as_external_arg(source).tagged(other.tag())),
|
||||
.map(|x| Tagged {
|
||||
tag: x.span.into(),
|
||||
item: x.item.clone(),
|
||||
})
|
||||
.collect(),
|
||||
None => vec![],
|
||||
};
|
||||
|
||||
let (name, tag) = name.into_parts();
|
||||
|
||||
ClassifiedCommand::External(ExternalCommand {
|
||||
name: name.to_string(),
|
||||
name_tag: tag,
|
||||
args: arg_list_strings,
|
||||
})
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn print_err(err: ShellError, host: &dyn Host, source: &Text) {
|
||||
let diag = err.to_diagnostic();
|
||||
|
||||
let writer = host.err_termcolor();
|
||||
let mut source = source.to_string();
|
||||
source.push_str(" ");
|
||||
let files = crate::parser::Files::new(source);
|
||||
let _ = std::panic::catch_unwind(move || {
|
||||
let _ = language_reporting::emit(
|
||||
&mut writer.lock(),
|
||||
&files,
|
||||
&diag,
|
||||
&language_reporting::DefaultConfig,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
@ -8,6 +8,7 @@ pub(crate) mod classified;
|
||||
pub(crate) mod clip;
|
||||
pub(crate) mod command;
|
||||
pub(crate) mod config;
|
||||
pub(crate) mod count;
|
||||
pub(crate) mod cp;
|
||||
pub(crate) mod date;
|
||||
pub(crate) mod debug;
|
||||
@ -22,6 +23,7 @@ pub(crate) mod from_csv;
|
||||
pub(crate) mod from_ini;
|
||||
pub(crate) mod from_json;
|
||||
pub(crate) mod from_sqlite;
|
||||
pub(crate) mod from_ssv;
|
||||
pub(crate) mod from_toml;
|
||||
pub(crate) mod from_tsv;
|
||||
pub(crate) mod from_url;
|
||||
@ -75,7 +77,9 @@ pub(crate) use command::{
|
||||
UnevaluatedCallInfo, WholeStreamCommand,
|
||||
};
|
||||
|
||||
pub(crate) use classified::ClassifiedCommand;
|
||||
pub(crate) use config::Config;
|
||||
pub(crate) use count::Count;
|
||||
pub(crate) use cp::Cpy;
|
||||
pub(crate) use date::Date;
|
||||
pub(crate) use debug::Debug;
|
||||
@ -91,6 +95,7 @@ pub(crate) use from_ini::FromINI;
|
||||
pub(crate) use from_json::FromJSON;
|
||||
pub(crate) use from_sqlite::FromDB;
|
||||
pub(crate) use from_sqlite::FromSQLite;
|
||||
pub(crate) use from_ssv::FromSSV;
|
||||
pub(crate) use from_toml::FromTOML;
|
||||
pub(crate) use from_tsv::FromTSV;
|
||||
pub(crate) use from_url::FromURL;
|
||||
|
@ -1,9 +1,14 @@
|
||||
use crate::commands::{RawCommandArgs, WholeStreamCommand};
|
||||
use crate::errors::ShellError;
|
||||
use crate::parser::hir::{Expression, NamedArguments};
|
||||
use crate::prelude::*;
|
||||
use futures::stream::TryStreamExt;
|
||||
use std::sync::atomic::Ordering;
|
||||
|
||||
pub struct Autoview;
|
||||
|
||||
const STREAM_PAGE_SIZE: u64 = 50;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct AutoviewArgs {}
|
||||
|
||||
@ -31,93 +36,138 @@ impl WholeStreamCommand for Autoview {
|
||||
|
||||
pub fn autoview(
|
||||
AutoviewArgs {}: AutoviewArgs,
|
||||
mut context: RunnableContext,
|
||||
context: RunnableContext,
|
||||
raw: RawCommandArgs,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
Ok(OutputStream::new(async_stream_block! {
|
||||
let input = context.input.drain_vec().await;
|
||||
let binary = context.get_command("binaryview");
|
||||
let text = context.get_command("textview");
|
||||
let table = context.get_command("table");
|
||||
|
||||
if input.len() > 0 {
|
||||
if let Tagged {
|
||||
item: Value::Primitive(Primitive::Binary(_)),
|
||||
..
|
||||
} = input[0usize]
|
||||
{
|
||||
let binary = context.get_command("binaryview");
|
||||
if let Some(binary) = binary {
|
||||
let result = binary.run(raw.with_input(input), &context.commands, false);
|
||||
result.collect::<Vec<_>>().await;
|
||||
} else {
|
||||
for i in input {
|
||||
match i.item {
|
||||
Value::Primitive(Primitive::Binary(b)) => {
|
||||
use pretty_hex::*;
|
||||
println!("{:?}", b.hex_dump());
|
||||
Ok(OutputStream::new(async_stream! {
|
||||
let mut output_stream: OutputStream = context.input.into();
|
||||
|
||||
match output_stream.try_next().await {
|
||||
Ok(Some(x)) => {
|
||||
match output_stream.try_next().await {
|
||||
Ok(Some(y)) => {
|
||||
let ctrl_c = context.ctrl_c.clone();
|
||||
let stream = async_stream! {
|
||||
yield Ok(x);
|
||||
yield Ok(y);
|
||||
|
||||
loop {
|
||||
match output_stream.try_next().await {
|
||||
Ok(Some(z)) => {
|
||||
if ctrl_c.load(Ordering::SeqCst) {
|
||||
break;
|
||||
}
|
||||
yield Ok(z);
|
||||
}
|
||||
_ => break,
|
||||
}
|
||||
}
|
||||
};
|
||||
if let Some(table) = table {
|
||||
let mut new_output_stream: OutputStream = stream.to_output_stream();
|
||||
let mut finished = false;
|
||||
let mut current_idx = 0;
|
||||
loop {
|
||||
let mut new_input = VecDeque::new();
|
||||
|
||||
for _ in 0..STREAM_PAGE_SIZE {
|
||||
match new_output_stream.try_next().await {
|
||||
|
||||
Ok(Some(a)) => {
|
||||
if let ReturnSuccess::Value(v) = a {
|
||||
new_input.push_back(v);
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
finished = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let raw = raw.clone();
|
||||
|
||||
let mut command_args = raw.with_input(new_input.into());
|
||||
let mut named_args = NamedArguments::new();
|
||||
named_args.insert_optional("start_number", Some(Expression::number(current_idx, Tag::unknown())));
|
||||
command_args.call_info.args.named = Some(named_args);
|
||||
|
||||
let result = table.run(command_args, &context.commands, false);
|
||||
result.collect::<Vec<_>>().await;
|
||||
|
||||
if finished {
|
||||
break;
|
||||
} else {
|
||||
current_idx += STREAM_PAGE_SIZE;
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
};
|
||||
} else if is_single_origined_text_value(&input) {
|
||||
let text = context.get_command("textview");
|
||||
if let Some(text) = text {
|
||||
let result = text.run(raw.with_input(input), &context.commands, false);
|
||||
result.collect::<Vec<_>>().await;
|
||||
} else {
|
||||
for i in input {
|
||||
match i.item {
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
println!("{}", s);
|
||||
_ => {
|
||||
if let ReturnSuccess::Value(x) = x {
|
||||
match x {
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::String(ref s)),
|
||||
tag: Tag { anchor, span },
|
||||
} if anchor.is_some() => {
|
||||
if let Some(text) = text {
|
||||
let mut stream = VecDeque::new();
|
||||
stream.push_back(Value::string(s).tagged(Tag { anchor, span }));
|
||||
let result = text.run(raw.with_input(stream.into()), &context.commands, false);
|
||||
result.collect::<Vec<_>>().await;
|
||||
} else {
|
||||
println!("{}", s);
|
||||
}
|
||||
}
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::String(s)),
|
||||
..
|
||||
} => {
|
||||
println!("{}", s);
|
||||
}
|
||||
|
||||
Tagged { item: Value::Primitive(Primitive::Binary(ref b)), .. } => {
|
||||
if let Some(binary) = binary {
|
||||
let mut stream = VecDeque::new();
|
||||
stream.push_back(x.clone());
|
||||
let result = binary.run(raw.with_input(stream.into()), &context.commands, false);
|
||||
result.collect::<Vec<_>>().await;
|
||||
} else {
|
||||
use pretty_hex::*;
|
||||
println!("{:?}", b.hex_dump());
|
||||
}
|
||||
}
|
||||
|
||||
Tagged { item: Value::Error(e), .. } => {
|
||||
yield Err(e);
|
||||
}
|
||||
Tagged { item: ref item, .. } => {
|
||||
if let Some(table) = table {
|
||||
let mut stream = VecDeque::new();
|
||||
stream.push_back(x.clone());
|
||||
let result = table.run(raw.with_input(stream.into()), &context.commands, false);
|
||||
result.collect::<Vec<_>>().await;
|
||||
} else {
|
||||
println!("{:?}", item);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if is_single_text_value(&input) {
|
||||
for i in input {
|
||||
match i.item {
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
println!("{}", s);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let table = context.expect_command("table");
|
||||
let result = table.run(raw.with_input(input), &context.commands, false);
|
||||
result.collect::<Vec<_>>().await;
|
||||
}
|
||||
_ => {
|
||||
//println!("<no results>");
|
||||
}
|
||||
}
|
||||
|
||||
// Needed for async_stream to type check
|
||||
if false {
|
||||
yield ReturnSuccess::value(Value::nothing().tagged_unknown());
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
||||
fn is_single_text_value(input: &Vec<Tagged<Value>>) -> bool {
|
||||
if input.len() != 1 {
|
||||
return false;
|
||||
}
|
||||
if let Tagged {
|
||||
item: Value::Primitive(Primitive::String(_)),
|
||||
..
|
||||
} = input[0]
|
||||
{
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
fn is_single_origined_text_value(input: &Vec<Tagged<Value>>) -> bool {
|
||||
if input.len() != 1 {
|
||||
return false;
|
||||
}
|
||||
|
||||
if let Tagged {
|
||||
item: Value::Primitive(Primitive::String(_)),
|
||||
tag: Tag { origin, .. },
|
||||
} = input[0]
|
||||
{
|
||||
origin != uuid::Uuid::nil()
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
@@ -1,12 +1,11 @@
use crate::commands::Command;
use crate::parser::{hir, TokenNode};
use crate::prelude::*;
use bytes::{BufMut, BytesMut};
use derive_new::new;
use futures::stream::StreamExt;
use futures_codec::{Decoder, Encoder, Framed};
use log::{log_enabled, trace};
use std::io::{Error, ErrorKind};
use std::sync::Arc;
use subprocess::Exec;

/// A simple `Codec` implementation that splits up data into lines.
@@ -73,25 +72,35 @@ impl ClassifiedInputStream {
}
}

#[derive(Debug)]
pub(crate) struct ClassifiedPipeline {
pub(crate) commands: Vec<ClassifiedCommand>,
}

#[derive(Debug, Eq, PartialEq)]
pub(crate) enum ClassifiedCommand {
#[allow(unused)]
Expr(TokenNode),
Internal(InternalCommand),
#[allow(unused)]
Dynamic(hir::Call),
External(ExternalCommand),
}

#[derive(new, Debug, Eq, PartialEq)]
pub(crate) struct InternalCommand {
pub(crate) command: Arc<Command>,
pub(crate) name: String,
pub(crate) name_tag: Tag,
pub(crate) args: hir::Call,
}

#[derive(new, Debug, Eq, PartialEq)]
pub(crate) struct DynamicCommand {
pub(crate) args: hir::Call,
}

impl InternalCommand {
pub(crate) async fn run(
pub(crate) fn run(
self,
context: &mut Context,
input: ClassifiedInputStream,
@@ -100,91 +109,99 @@ impl InternalCommand {
) -> Result<InputStream, ShellError> {
if log_enabled!(log::Level::Trace) {
trace!(target: "nu::run::internal", "->");
trace!(target: "nu::run::internal", "{}", self.command.name());
trace!(target: "nu::run::internal", "{}", self.name);
trace!(target: "nu::run::internal", "{}", self.args.debug(&source));
}

let objects: InputStream =
trace_stream!(target: "nu::trace_stream::internal", "input" = input.objects);

let result = context.run_command(
self.command,
self.name_tag.clone(),
context.source_map.clone(),
self.args,
&source,
objects,
is_first_command,
);
let command = context.expect_command(&self.name);

let result = {
context.run_command(
command,
self.name_tag.clone(),
self.args,
&source,
objects,
is_first_command,
)
};

let result = trace_out_stream!(target: "nu::trace_stream::internal", source: &source, "output" = result);
let mut result = result.values;
let mut context = context.clone();

let mut stream = VecDeque::new();
while let Some(item) = result.next().await {
match item? {
ReturnSuccess::Action(action) => match action {
CommandAction::ChangePath(path) => {
context.shell_manager.set_path(path);
}
CommandAction::AddSpanSource(uuid, span_source) => {
context.add_span_source(uuid, span_source);
}
CommandAction::Exit => std::process::exit(0), // TODO: save history.txt
CommandAction::EnterHelpShell(value) => {
match value {
Tagged {
item: Value::Primitive(Primitive::String(cmd)),
tag,
} => {
context.shell_manager.insert_at_current(Box::new(
HelpShell::for_command(
Value::string(cmd).tagged(tag),
&context.registry(),
)?,
));
}
_ => {
context.shell_manager.insert_at_current(Box::new(
HelpShell::index(&context.registry())?,
));
let stream = async_stream! {
while let Some(item) = result.next().await {
match item {
Ok(ReturnSuccess::Action(action)) => match action {
CommandAction::ChangePath(path) => {
context.shell_manager.set_path(path);
}
CommandAction::Exit => std::process::exit(0), // TODO: save history.txt
CommandAction::EnterHelpShell(value) => {
match value {
Tagged {
item: Value::Primitive(Primitive::String(cmd)),
tag,
} => {
context.shell_manager.insert_at_current(Box::new(
HelpShell::for_command(
Value::string(cmd).tagged(tag),
&context.registry(),
).unwrap(),
));
}
_ => {
context.shell_manager.insert_at_current(Box::new(
HelpShell::index(&context.registry()).unwrap(),
));
}
}
}
}
CommandAction::EnterValueShell(value) => {
context
.shell_manager
.insert_at_current(Box::new(ValueShell::new(value)));
}
CommandAction::EnterShell(location) => {
context.shell_manager.insert_at_current(Box::new(
FilesystemShell::with_location(location, context.registry().clone())?,
));
}
CommandAction::PreviousShell => {
context.shell_manager.prev();
}
CommandAction::NextShell => {
context.shell_manager.next();
}
CommandAction::LeaveShell => {
context.shell_manager.remove_at_current();
if context.shell_manager.is_empty() {
std::process::exit(0); // TODO: save history.txt
CommandAction::EnterValueShell(value) => {
context
.shell_manager
.insert_at_current(Box::new(ValueShell::new(value)));
}
}
},
CommandAction::EnterShell(location) => {
context.shell_manager.insert_at_current(Box::new(
FilesystemShell::with_location(location, context.registry().clone()).unwrap(),
));
}
CommandAction::PreviousShell => {
context.shell_manager.prev();
}
CommandAction::NextShell => {
context.shell_manager.next();
}
CommandAction::LeaveShell => {
context.shell_manager.remove_at_current();
if context.shell_manager.is_empty() {
std::process::exit(0); // TODO: save history.txt
}
}
},

ReturnSuccess::Value(v) => {
stream.push_back(v);
Ok(ReturnSuccess::Value(v)) => {
yield Ok(v);
}

Err(x) => {
yield Ok(Value::Error(x).tagged_unknown());
break;
}
}
}
}
};

Ok(stream.into())
Ok(stream.to_input_stream())
}
}

#[derive(Debug, Eq, PartialEq)]
pub(crate) struct ExternalCommand {
pub(crate) name: String,

@@ -192,6 +209,7 @@ pub(crate) struct ExternalCommand {
pub(crate) args: Vec<Tagged<String>>,
}

#[derive(Debug)]
pub(crate) enum StreamNext {
Last,
External,
@@ -207,7 +225,6 @@ impl ExternalCommand {
) -> Result<ClassifiedInputStream, ShellError> {
let stdin = input.stdin;
let inputs: Vec<Tagged<Value>> = input.objects.into_vec().await;
let name_tag = self.name_tag.clone();

trace!(target: "nu::run::external", "-> {}", self.name);
trace!(target: "nu::run::external", "inputs = {:?}", inputs);
@@ -217,47 +234,47 @@ impl ExternalCommand {
arg_string.push_str(&arg);
}

trace!(target: "nu::run::external", "command = {:?}", self.name);

let mut process;

process = Exec::cmd(&self.name);

if arg_string.contains("$it") {
let mut first = true;

for i in &inputs {
if i.as_string().is_err() {
let mut tag = None;
for arg in &self.args {
if arg.item.contains("$it") {
tag = Some(arg.tag());
let input_strings = inputs
.iter()
.map(|i| {
i.as_string().map_err(|_| {
let arg = self.args.iter().find(|arg| arg.item.contains("$it"));
if let Some(arg) = arg {
ShellError::labeled_error(
"External $it needs string data",
"given row instead of string data",
arg.tag(),
)
} else {
ShellError::labeled_error(
"$it needs string data",
"given something else",
self.name_tag.clone(),
)
}
}
if let Some(tag) = tag {
return Err(ShellError::labeled_error(
"External $it needs string data",
"given row instead of string data",
tag,
));
} else {
return Err(ShellError::string("Error: $it needs string data"));
}
}
if !first {
process = process.arg("&&");
process = process.arg(&self.name);
} else {
first = false;
}
})
})
.collect::<Result<Vec<String>, ShellError>>()?;

for arg in &self.args {
let commands = input_strings.iter().map(|i| {
let args = self.args.iter().filter_map(|arg| {
if arg.chars().all(|c| c.is_whitespace()) {
continue;
None
} else {
Some(arg.replace("$it", &i))
}
});

process = process.arg(&arg.replace("$it", &i.as_string()?));
}
}
format!("{} {}", self.name, itertools::join(args, " "))
});

process = Exec::shell(itertools::join(commands, " && "))
} else {
process = Exec::cmd(&self.name);
for arg in &self.args {
let arg_chars: Vec<_> = arg.chars().collect();
if arg_chars.len() > 1
@@ -275,6 +292,8 @@ impl ExternalCommand {

process = process.cwd(context.shell_manager.path());

trace!(target: "nu::run::external", "cwd = {:?}", context.shell_manager.path());

let mut process = match stream_next {
StreamNext::Last => process,
StreamNext::External | StreamNext::Internal => {
@@ -282,44 +301,60 @@ impl ExternalCommand {
}
};

trace!(target: "nu::run::external", "set up stdout pipe");

if let Some(stdin) = stdin {
process = process.stdin(stdin);
}

let mut popen = process.popen()?;
trace!(target: "nu::run::external", "set up stdin pipe");
trace!(target: "nu::run::external", "built process {:?}", process);

match stream_next {
StreamNext::Last => {
let _ = popen.detach();
loop {
match popen.poll() {
None => {
let _ = std::thread::sleep(std::time::Duration::new(0, 100000000));
}
_ => {
let _ = popen.terminate();
break;
let popen = process.popen();

trace!(target: "nu::run::external", "next = {:?}", stream_next);

let name_tag = self.name_tag.clone();
if let Ok(mut popen) = popen {
match stream_next {
StreamNext::Last => {
let _ = popen.detach();
loop {
match popen.poll() {
None => {
let _ = std::thread::sleep(std::time::Duration::new(0, 100000000));
}
_ => {
let _ = popen.terminate();
break;
}
}
}
Ok(ClassifiedInputStream::new())
}
StreamNext::External => {
let _ = popen.detach();
let stdout = popen.stdout.take().unwrap();
Ok(ClassifiedInputStream::from_stdout(stdout))
}
StreamNext::Internal => {
let _ = popen.detach();
let stdout = popen.stdout.take().unwrap();
let file = futures::io::AllowStdIo::new(stdout);
let stream = Framed::new(file, LinesCodec {});
let stream =
stream.map(move |line| Value::string(line.unwrap()).tagged(&name_tag));
Ok(ClassifiedInputStream::from_input_stream(
stream.boxed() as BoxStream<'static, Tagged<Value>>
))
}
println!("");
Ok(ClassifiedInputStream::new())
}
StreamNext::External => {
let _ = popen.detach();
let stdout = popen.stdout.take().unwrap();
Ok(ClassifiedInputStream::from_stdout(stdout))
}
StreamNext::Internal => {
let _ = popen.detach();
let stdout = popen.stdout.take().unwrap();
let file = futures::io::AllowStdIo::new(stdout);
let stream = Framed::new(file, LinesCodec {});
let stream = stream.map(move |line| Value::string(line.unwrap()).tagged(name_tag));
Ok(ClassifiedInputStream::from_input_stream(
stream.boxed() as BoxStream<'static, Tagged<Value>>
))
}
} else {
return Err(ShellError::labeled_error(
"Command not found",
"command not found",
name_tag,
));
}
}
}
@@ -5,7 +5,6 @@ pub mod clipboard {
use crate::errors::ShellError;
use crate::prelude::*;
use futures::stream::StreamExt;
use futures_async_stream::async_stream_block;

use clipboard::{ClipboardContext, ClipboardProvider};

@@ -40,10 +39,13 @@ pub mod clipboard {
ClipArgs {}: ClipArgs,
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream_block! {
let stream = async_stream! {
let values: Vec<Tagged<Value>> = input.values.collect().await;

inner_clip(values, name).await;
let mut clip_stream = inner_clip(values, name).await;
while let Some(value) = clip_stream.next().await {
yield value;
}
};

let stream: BoxStream<'static, ReturnValue> = stream.boxed();
@@ -1,4 +1,3 @@
use crate::context::{SourceMap, SpanSource};
use crate::data::Value;
use crate::errors::ShellError;
use crate::evaluate::Scope;
@@ -11,13 +10,12 @@ use serde::{Deserialize, Serialize};
use std::fmt;
use std::ops::Deref;
use std::path::PathBuf;
use uuid::Uuid;
use std::sync::atomic::AtomicBool;

#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct UnevaluatedCallInfo {
pub args: hir::Call,
pub source: Text,
pub source_map: SourceMap,
pub name_tag: Tag,
}

@@ -37,7 +35,6 @@ impl UnevaluatedCallInfo {

Ok(CallInfo {
args,
source_map: self.source_map,
name_tag: self.name_tag,
})
}
@@ -46,7 +43,6 @@ impl UnevaluatedCallInfo {
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct CallInfo {
pub args: registry::EvaluatedArgs,
pub source_map: SourceMap,
pub name_tag: Tag,
}

@@ -62,7 +58,7 @@ impl CallInfo {
args: T::deserialize(&mut deserializer)?,
context: RunnablePerItemContext {
shell_manager: shell_manager.clone(),
name: self.name_tag,
name: self.name_tag.clone(),
},
callback,
})
@@ -73,6 +69,7 @@ impl CallInfo {
#[get = "pub(crate)"]
pub struct CommandArgs {
pub host: Arc<Mutex<dyn Host>>,
pub ctrl_c: Arc<AtomicBool>,
pub shell_manager: ShellManager,
pub call_info: UnevaluatedCallInfo,
pub input: InputStream,
@@ -82,6 +79,7 @@ pub struct CommandArgs {
#[get = "pub(crate)"]
pub struct RawCommandArgs {
pub host: Arc<Mutex<dyn Host>>,
pub ctrl_c: Arc<AtomicBool>,
pub shell_manager: ShellManager,
pub call_info: UnevaluatedCallInfo,
}
@@ -90,6 +88,7 @@ impl RawCommandArgs {
pub fn with_input(self, input: Vec<Tagged<Value>>) -> CommandArgs {
CommandArgs {
host: self.host,
ctrl_c: self.ctrl_c,
shell_manager: self.shell_manager,
call_info: self.call_info,
input: input.into(),
@@ -109,12 +108,14 @@ impl CommandArgs {
registry: &registry::CommandRegistry,
) -> Result<EvaluatedWholeStreamCommandArgs, ShellError> {
let host = self.host.clone();
let ctrl_c = self.ctrl_c.clone();
let shell_manager = self.shell_manager.clone();
let input = self.input;
let call_info = self.call_info.evaluate(registry, &Scope::empty())?;

Ok(EvaluatedWholeStreamCommandArgs::new(
host,
ctrl_c,
shell_manager,
call_info,
input,
@@ -127,12 +128,13 @@ impl CommandArgs {
callback: fn(T, RunnableContext) -> Result<OutputStream, ShellError>,
) -> Result<RunnableArgs<T>, ShellError> {
let shell_manager = self.shell_manager.clone();
let source_map = self.call_info.source_map.clone();
let host = self.host.clone();
let ctrl_c = self.ctrl_c.clone();
let args = self.evaluate_once(registry)?;
let call_info = args.call_info.clone();
let (input, args) = args.split();
let name_tag = args.call_info.name_tag;
let mut deserializer = ConfigDeserializer::from_call_info(args.call_info);
let mut deserializer = ConfigDeserializer::from_call_info(call_info);

Ok(RunnableArgs {
args: T::deserialize(&mut deserializer)?,
@@ -141,8 +143,8 @@ impl CommandArgs {
commands: registry.clone(),
shell_manager,
name: name_tag,
source_map,
host,
ctrl_c,
},
callback,
})
@@ -155,17 +157,20 @@ impl CommandArgs {
) -> Result<RunnableRawArgs<T>, ShellError> {
let raw_args = RawCommandArgs {
host: self.host.clone(),
ctrl_c: self.ctrl_c.clone(),
shell_manager: self.shell_manager.clone(),
call_info: self.call_info.clone(),
};

let shell_manager = self.shell_manager.clone();
let source_map = self.call_info.source_map.clone();
let host = self.host.clone();
let ctrl_c = self.ctrl_c.clone();
let args = self.evaluate_once(registry)?;
let call_info = args.call_info.clone();

let (input, args) = args.split();
let name_tag = args.call_info.name_tag;
let mut deserializer = ConfigDeserializer::from_call_info(args.call_info);
let mut deserializer = ConfigDeserializer::from_call_info(call_info.clone());

Ok(RunnableRawArgs {
args: T::deserialize(&mut deserializer)?,
@@ -174,8 +179,8 @@ impl CommandArgs {
commands: registry.clone(),
shell_manager,
name: name_tag,
source_map,
host,
ctrl_c,
},
raw_args,
callback,
@@ -198,18 +203,12 @@ pub struct RunnableContext {
pub input: InputStream,
pub shell_manager: ShellManager,
pub host: Arc<Mutex<dyn Host>>,
pub ctrl_c: Arc<AtomicBool>,
pub commands: CommandRegistry,
pub source_map: SourceMap,
pub name: Tag,
}

impl RunnableContext {
pub fn expect_command(&self, name: &str) -> Arc<Command> {
self.commands
.get_command(name)
.expect(&format!("Expected command {}", name))
}

pub fn get_command(&self, name: &str) -> Option<Arc<Command>> {
self.commands.get_command(name)
}
@@ -270,6 +269,7 @@ impl Deref for EvaluatedWholeStreamCommandArgs {
impl EvaluatedWholeStreamCommandArgs {
pub fn new(
host: Arc<Mutex<dyn Host>>,
ctrl_c: Arc<AtomicBool>,
shell_manager: ShellManager,
call_info: CallInfo,
input: impl Into<InputStream>,
@@ -277,6 +277,7 @@ impl EvaluatedWholeStreamCommandArgs {
EvaluatedWholeStreamCommandArgs {
args: EvaluatedCommandArgs {
host,
ctrl_c,
shell_manager,
call_info,
},
@@ -285,7 +286,7 @@ impl EvaluatedWholeStreamCommandArgs {
}

pub fn name_tag(&self) -> Tag {
self.args.call_info.name_tag
self.args.call_info.name_tag.clone()
}

pub fn parts(self) -> (InputStream, registry::EvaluatedArgs) {
@@ -317,12 +318,14 @@ impl Deref for EvaluatedFilterCommandArgs {
impl EvaluatedFilterCommandArgs {
pub fn new(
host: Arc<Mutex<dyn Host>>,
ctrl_c: Arc<AtomicBool>,
shell_manager: ShellManager,
call_info: CallInfo,
) -> EvaluatedFilterCommandArgs {
EvaluatedFilterCommandArgs {
args: EvaluatedCommandArgs {
host,
ctrl_c,
shell_manager,
call_info,
},
@@ -334,6 +337,7 @@ impl EvaluatedFilterCommandArgs {
#[get = "pub(crate)"]
pub struct EvaluatedCommandArgs {
pub host: Arc<Mutex<dyn Host>>,
pub ctrl_c: Arc<AtomicBool>,
pub shell_manager: ShellManager,
pub call_info: CallInfo,
}
@@ -376,7 +380,6 @@ impl EvaluatedCommandArgs {
#[derive(Debug, Serialize, Deserialize)]
pub enum CommandAction {
ChangePath(String),
AddSpanSource(Uuid, SpanSource),
Exit,
EnterShell(String),
EnterValueShell(Tagged<Value>),
@@ -390,9 +393,6 @@ impl ToDebug for CommandAction {
fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result {
match self {
CommandAction::ChangePath(s) => write!(f, "action:change-path={}", s),
CommandAction::AddSpanSource(u, source) => {
write!(f, "action:add-span-source={}@{:?}", u, source)
}
CommandAction::Exit => write!(f, "action:exit"),
CommandAction::EnterShell(s) => write!(f, "action:enter-shell={}", s),
CommandAction::EnterValueShell(t) => {
@@ -507,6 +507,15 @@ pub enum Command {
PerItem(Arc<dyn PerItemCommand>),
}

impl std::fmt::Debug for Command {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Command::WholeStream(command) => write!(f, "WholeStream({})", command.name()),
Command::PerItem(command) => write!(f, "PerItem({})", command.name()),
}
}
}

impl Command {
pub fn name(&self) -> &str {
match self {
@@ -555,6 +564,7 @@ impl Command {
) -> OutputStream {
let raw_args = RawCommandArgs {
host: args.host,
ctrl_c: args.ctrl_c,
shell_manager: args.shell_manager,
call_info: args.call_info,
};
@@ -624,6 +634,7 @@ impl WholeStreamCommand for FnFilterCommand {
) -> Result<OutputStream, ShellError> {
let CommandArgs {
host,
ctrl_c,
shell_manager,
call_info,
input,
@@ -641,8 +652,12 @@ impl WholeStreamCommand for FnFilterCommand {
Ok(args) => args,
};

let args =
EvaluatedFilterCommandArgs::new(host.clone(), shell_manager.clone(), call_info);
let args = EvaluatedFilterCommandArgs::new(
host.clone(),
ctrl_c.clone(),
shell_manager.clone(),
call_info,
);

match func(args) {
Err(err) => return OutputStream::from(vec![Err(err)]).values,
|
@ -58,7 +58,7 @@ pub fn config(
|
||||
}: ConfigArgs,
|
||||
RunnableContext { name, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let name_span = name;
|
||||
let name_span = name.clone();
|
||||
|
||||
let configuration = if let Some(supplied) = load {
|
||||
Some(supplied.item().clone())
|
||||
@ -72,7 +72,7 @@ pub fn config(
|
||||
let key = v.to_string();
|
||||
let value = result
|
||||
.get(&key)
|
||||
.ok_or_else(|| ShellError::string(&format!("Missing key {} in config", key)))?;
|
||||
.ok_or_else(|| ShellError::labeled_error("Missing key in config", "key", v.tag()))?;
|
||||
|
||||
let mut results = VecDeque::new();
|
||||
|
||||
@ -120,10 +120,11 @@ pub fn config(
|
||||
result.swap_remove(&key);
|
||||
config::write(&result, &configuration)?;
|
||||
} else {
|
||||
return Err(ShellError::string(&format!(
|
||||
"{} does not exist in config",
|
||||
key
|
||||
)));
|
||||
return Err(ShellError::labeled_error(
|
||||
"Key does not exist in config",
|
||||
"key",
|
||||
v.tag(),
|
||||
));
|
||||
}
|
||||
|
||||
let obj = VecDeque::from_iter(vec![Value::Row(result.into()).tagged(v.tag())]);
|
||||
|
src/commands/count.rs (new file, 46 lines)
@@ -0,0 +1,46 @@
use crate::commands::WholeStreamCommand;
use crate::data::Value;
use crate::errors::ShellError;
use crate::parser::CommandRegistry;
use crate::prelude::*;
use futures::stream::StreamExt;

pub struct Count;

#[derive(Deserialize)]
pub struct CountArgs {}

impl WholeStreamCommand for Count {
fn name(&self) -> &str {
"count"
}

fn signature(&self) -> Signature {
Signature::build("count")
}

fn usage(&self) -> &str {
"Show the total number of rows."
}

fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, count)?.run()
}
}

pub fn count(
CountArgs {}: CountArgs,
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let rows: Vec<Tagged<Value>> = input.values.collect().await;

yield ReturnSuccess::value(Value::int(rows.len()).tagged(name))
};

Ok(stream.to_output_stream())
}
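
Note on the new count command: it is a whole-stream command, so it drains its entire input before yielding a single integer row. A minimal usage sketch (assuming the command is registered in the command registry as shown above):

    ls | count

This should print the number of rows produced by ls.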

@@ -39,27 +39,27 @@ where
{
let mut indexmap = IndexMap::new();

indexmap.insert("year".to_string(), Value::int(dt.year()).tagged(tag));
indexmap.insert("month".to_string(), Value::int(dt.month()).tagged(tag));
indexmap.insert("day".to_string(), Value::int(dt.day()).tagged(tag));
indexmap.insert("hour".to_string(), Value::int(dt.hour()).tagged(tag));
indexmap.insert("minute".to_string(), Value::int(dt.minute()).tagged(tag));
indexmap.insert("second".to_string(), Value::int(dt.second()).tagged(tag));
indexmap.insert("year".to_string(), Value::int(dt.year()).tagged(&tag));
indexmap.insert("month".to_string(), Value::int(dt.month()).tagged(&tag));
indexmap.insert("day".to_string(), Value::int(dt.day()).tagged(&tag));
indexmap.insert("hour".to_string(), Value::int(dt.hour()).tagged(&tag));
indexmap.insert("minute".to_string(), Value::int(dt.minute()).tagged(&tag));
indexmap.insert("second".to_string(), Value::int(dt.second()).tagged(&tag));

let tz = dt.offset();
indexmap.insert(
"timezone".to_string(),
Value::string(format!("{}", tz)).tagged(tag),
Value::string(format!("{}", tz)).tagged(&tag),
);

Value::Row(Dictionary::from(indexmap)).tagged(tag)
Value::Row(Dictionary::from(indexmap)).tagged(&tag)
}

pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;

let mut date_out = VecDeque::new();
let tag = args.call_info.name_tag;
let tag = args.call_info.name_tag.clone();

let value = if args.has("utc") {
let utc: DateTime<Utc> = Utc::now();
@@ -16,7 +16,7 @@ impl PerItemCommand for Echo {
}

fn usage(&self) -> &str {
"Echo the argments back to the user."
"Echo the arguments back to the user."
}

fn run(
@@ -35,7 +35,7 @@ fn run(
_registry: &CommandRegistry,
_raw_args: &RawCommandArgs,
) -> Result<OutputStream, ShellError> {
let name = call_info.name_tag;
let name = call_info.name_tag.clone();

let mut output = String::new();

@@ -54,11 +54,10 @@ fn run(
output.push_str(&s);
}
_ => {
return Err(ShellError::labeled_error(
"Expect a string from pipeline",
"not a string-compatible value",
i.tag(),
));
return Err(ShellError::type_error(
"a string-compatible value",
i.tagged_type_name(),
))
}
}
}
@@ -15,7 +15,7 @@ impl PerItemCommand for Enter {
}

fn signature(&self) -> registry::Signature {
Signature::build("enter").required("location", SyntaxShape::Block)
Signature::build("enter").required("location", SyntaxShape::Path)
}

fn usage(&self) -> &str {
@@ -33,14 +33,14 @@ impl PerItemCommand for Enter {
let raw_args = raw_args.clone();
match call_info.args.expect_nth(0)? {
Tagged {
item: Value::Primitive(Primitive::String(location)),
item: Value::Primitive(Primitive::Path(location)),
..
} => {
let location = location.to_string();
let location_clone = location.to_string();
let location_string = location.display().to_string();
let location_clone = location_string.clone();

if location.starts_with("help") {
let spec = location.split(":").collect::<Vec<&str>>();
let spec = location_string.split(":").collect::<Vec<&str>>();

let (_, command) = (spec[0], spec[1]);

@@ -61,13 +61,13 @@ impl PerItemCommand for Enter {
)))]
.into())
} else {
let stream = async_stream_block! {
let stream = async_stream! {
// If it's a file, attempt to open the file as a value and enter it
let cwd = raw_args.shell_manager.path();

let full_path = std::path::PathBuf::from(cwd);

let (file_extension, contents, contents_tag, span_source) =
let (file_extension, contents, contents_tag) =
crate::commands::open::fetch(
&full_path,
&location_clone,
@@ -75,18 +75,9 @@ impl PerItemCommand for Enter {
)
.await.unwrap();

if contents_tag.origin != uuid::Uuid::nil() {
// If we have loaded something, track its source
yield ReturnSuccess::action(CommandAction::AddSpanSource(
contents_tag.origin,
span_source,
));
}


match contents {
Value::Primitive(Primitive::String(_)) => {
let tagged_contents = contents.tagged(contents_tag);
let tagged_contents = contents.tagged(&contents_tag);

if let Some(extension) = file_extension {
let command_name = format!("from-{}", extension);
@@ -95,6 +86,7 @@ impl PerItemCommand for Enter {
{
let new_args = RawCommandArgs {
host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call {
@@ -103,7 +95,6 @@ impl PerItemCommand for Enter {
named: None,
},
source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag,
},
};
@@ -123,7 +114,7 @@ impl PerItemCommand for Enter {
yield Ok(ReturnSuccess::Action(CommandAction::EnterValueShell(
Tagged {
item,
tag: contents_tag,
tag: contents_tag.clone(),
})));
}
x => yield x,
@@ -37,22 +37,22 @@ pub fn get_environment(tag: Tag) -> Result<Tagged<Value>, Box<dyn std::error::Er
let mut indexmap = IndexMap::new();

let path = std::env::current_dir()?;
indexmap.insert("cwd".to_string(), Value::path(path).tagged(tag));
indexmap.insert("cwd".to_string(), Value::path(path).tagged(&tag));

if let Some(home) = dirs::home_dir() {
indexmap.insert("home".to_string(), Value::path(home).tagged(tag));
indexmap.insert("home".to_string(), Value::path(home).tagged(&tag));
}

let config = config::default_path()?;
indexmap.insert("config".to_string(), Value::path(config).tagged(tag));
indexmap.insert("config".to_string(), Value::path(config).tagged(&tag));

let history = History::path();
indexmap.insert("history".to_string(), Value::path(history).tagged(tag));
indexmap.insert("history".to_string(), Value::path(history).tagged(&tag));

let temp = std::env::temp_dir();
indexmap.insert("temp".to_string(), Value::path(temp).tagged(tag));
indexmap.insert("temp".to_string(), Value::path(temp).tagged(&tag));

let mut dict = TaggedDictBuilder::new(tag);
let mut dict = TaggedDictBuilder::new(&tag);
for v in std::env::vars() {
dict.insert(v.0, Value::string(v.1));
}
@@ -60,14 +60,14 @@ pub fn get_environment(tag: Tag) -> Result<Tagged<Value>, Box<dyn std::error::Er
indexmap.insert("vars".to_string(), dict.into_tagged_value());
}

Ok(Value::Row(Dictionary::from(indexmap)).tagged(tag))
Ok(Value::Row(Dictionary::from(indexmap)).tagged(&tag))
}

pub fn env(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;

let mut env_out = VecDeque::new();
let tag = args.call_info.name_tag;
let tag = args.call_info.name_tag.clone();

let value = get_environment(tag)?;
env_out.push_back(value);
@@ -1,5 +1,5 @@
use crate::commands::UnevaluatedCallInfo;
use crate::context::SpanSource;
use crate::context::AnchorLocation;
use crate::data::meta::Span;
use crate::data::Value;
use crate::errors::ShellError;
@@ -10,7 +10,6 @@ use mime::Mime;
use std::path::PathBuf;
use std::str::FromStr;
use surf::mime;
use uuid::Uuid;
pub struct Fetch;

impl PerItemCommand for Fetch {
@@ -44,21 +43,23 @@ fn run(
registry: &CommandRegistry,
raw_args: &RawCommandArgs,
) -> Result<OutputStream, ShellError> {
let path = match call_info
.args
.nth(0)
.ok_or_else(|| ShellError::string(&format!("No file or directory specified")))?
{
let path = match call_info.args.nth(0).ok_or_else(|| {
ShellError::labeled_error(
"No file or directory specified",
"for command",
&call_info.name_tag,
)
})? {
file => file,
};
let path_buf = path.as_path()?;
let path_str = path_buf.display().to_string();
let path_span = path.span();
let path_span = path.tag.span;
let has_raw = call_info.args.has("raw");
let registry = registry.clone();
let raw_args = raw_args.clone();

let stream = async_stream_block! {
let stream = async_stream! {

let result = fetch(&path_str, path_span).await;

@@ -66,7 +67,7 @@ fn run(
yield Err(e);
return;
}
let (file_extension, contents, contents_tag, span_source) = result.unwrap();
let (file_extension, contents, contents_tag) = result.unwrap();

let file_extension = if has_raw {
None
@@ -76,21 +77,14 @@ fn run(
file_extension.or(path_str.split('.').last().map(String::from))
};

if contents_tag.origin != uuid::Uuid::nil() {
// If we have loaded something, track its source
yield ReturnSuccess::action(CommandAction::AddSpanSource(
contents_tag.origin,
span_source,
));
}

let tagged_contents = contents.tagged(contents_tag);
let tagged_contents = contents.tagged(&contents_tag);

if let Some(extension) = file_extension {
let command_name = format!("from-{}", extension);
if let Some(converter) = registry.get_command(&command_name) {
let new_args = RawCommandArgs {
host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call {
@@ -99,7 +93,6 @@ fn run(
named: None
},
source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag,
}
};
@@ -113,7 +106,7 @@ fn run(
}
}
Ok(ReturnSuccess::Value(Tagged { item, .. })) => {
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag }));
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() }));
}
x => yield x,
}
@@ -129,10 +122,7 @@ fn run(
Ok(stream.to_output_stream())
}

pub async fn fetch(
location: &str,
span: Span,
) -> Result<(Option<String>, Value, Tag, SpanSource), ShellError> {
pub async fn fetch(location: &str, span: Span) -> Result<(Option<String>, Value, Tag), ShellError> {
if let Err(_) = url::Url::parse(location) {
return Err(ShellError::labeled_error(
"Incomplete or incorrect url",
@@ -158,9 +148,8 @@ pub async fn fetch(
})?),
Tag {
span,
origin: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
SpanSource::Url(location.to_string()),
)),
(mime::APPLICATION, mime::JSON) => Ok((
Some("json".to_string()),
@@ -173,9 +162,8 @@ pub async fn fetch(
})?),
Tag {
span,
origin: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
SpanSource::Url(location.to_string()),
)),
(mime::APPLICATION, mime::OCTET_STREAM) => {
let buf: Vec<u8> = r.body_bytes().await.map_err(|_| {
@@ -190,9 +178,8 @@ pub async fn fetch(
Value::binary(buf),
Tag {
span,
origin: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
SpanSource::Url(location.to_string()),
))
}
(mime::IMAGE, mime::SVG) => Ok((
@@ -206,9 +193,8 @@ pub async fn fetch(
})?),
Tag {
span,
origin: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
SpanSource::Url(location.to_string()),
)),
(mime::IMAGE, image_ty) => {
let buf: Vec<u8> = r.body_bytes().await.map_err(|_| {
@@ -223,9 +209,8 @@ pub async fn fetch(
Value::binary(buf),
Tag {
span,
origin: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
SpanSource::Url(location.to_string()),
))
}
(mime::TEXT, mime::HTML) => Ok((
@@ -239,9 +224,8 @@ pub async fn fetch(
})?),
Tag {
span,
origin: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
SpanSource::Url(location.to_string()),
)),
(mime::TEXT, mime::PLAIN) => {
let path_extension = url::Url::parse(location)
@@ -266,9 +250,8 @@ pub async fn fetch(
})?),
Tag {
span,
origin: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
SpanSource::Url(location.to_string()),
))
}
(ty, sub_ty) => Ok((
@@ -276,9 +259,8 @@ pub async fn fetch(
Value::string(format!("Not yet supported MIME type: {} {}", ty, sub_ty)),
Tag {
span,
origin: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
SpanSource::Url(location.to_string()),
)),
}
}
@@ -287,9 +269,8 @@ pub async fn fetch(
Value::string(format!("No content type found")),
Tag {
span,
origin: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
SpanSource::Url(location.to_string()),
)),
},
Err(_) => {
@@ -7,7 +7,7 @@ pub struct First;

#[derive(Deserialize)]
pub struct FirstArgs {
amount: Tagged<u64>,
rows: Option<Tagged<u64>>,
}

impl WholeStreamCommand for First {
@@ -16,7 +16,7 @@ impl WholeStreamCommand for First {
}

fn signature(&self) -> Signature {
Signature::build("first").required("amount", SyntaxShape::Literal)
Signature::build("first").optional("rows", SyntaxShape::Int)
}

fn usage(&self) -> &str {
@@ -33,8 +33,16 @@ impl WholeStreamCommand for First {
}

fn first(
FirstArgs { amount }: FirstArgs,
FirstArgs { rows }: FirstArgs,
context: RunnableContext,
) -> Result<OutputStream, ShellError> {
Ok(OutputStream::from_input(context.input.values.take(*amount)))
let rows_desired = if let Some(quantity) = rows {
*quantity
} else {
1
};

Ok(OutputStream::from_input(
context.input.values.take(rows_desired),
))
}
@@ -33,7 +33,7 @@ fn bson_array(input: &Vec<Bson>, tag: Tag) -> Result<Vec<Tagged<Value>>, ShellEr
let mut out = vec![];

for value in input {
out.push(convert_bson_value_to_nu_value(value, tag)?);
out.push(convert_bson_value_to_nu_value(value, &tag)?);
}

Ok(out)
@@ -46,100 +46,100 @@ fn convert_bson_value_to_nu_value(
let tag = tag.into();

Ok(match v {
Bson::FloatingPoint(n) => Value::Primitive(Primitive::from(*n)).tagged(tag),
Bson::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag),
Bson::Array(a) => Value::Table(bson_array(a, tag)?).tagged(tag),
Bson::FloatingPoint(n) => Value::Primitive(Primitive::from(*n)).tagged(&tag),
Bson::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(&tag),
Bson::Array(a) => Value::Table(bson_array(a, tag.clone())?).tagged(&tag),
Bson::Document(doc) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(tag.clone());
for (k, v) in doc.iter() {
collected.insert_tagged(k.clone(), convert_bson_value_to_nu_value(v, tag)?);
collected.insert_tagged(k.clone(), convert_bson_value_to_nu_value(v, &tag)?);
}

collected.into_tagged_value()
}
Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(tag),
Bson::Null => Value::Primitive(Primitive::Nothing).tagged(tag),
Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(&tag),
Bson::Null => Value::Primitive(Primitive::Nothing).tagged(&tag),
Bson::RegExp(r, opts) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
"$regex".to_string(),
Value::Primitive(Primitive::String(String::from(r))).tagged(tag),
Value::Primitive(Primitive::String(String::from(r))).tagged(&tag),
);
collected.insert_tagged(
"$options".to_string(),
Value::Primitive(Primitive::String(String::from(opts))).tagged(tag),
Value::Primitive(Primitive::String(String::from(opts))).tagged(&tag),
);
collected.into_tagged_value()
}
Bson::I32(n) => Value::number(n).tagged(tag),
Bson::I64(n) => Value::number(n).tagged(tag),
Bson::I32(n) => Value::number(n).tagged(&tag),
Bson::I64(n) => Value::number(n).tagged(&tag),
Bson::Decimal128(n) => {
// TODO: this really isn't great, and we should update this to do a higher
// fidelity translation
let decimal = BigDecimal::from_str(&format!("{}", n)).map_err(|_| {
ShellError::range_error(
ExpectedRange::BigDecimal,
&n.tagged(tag),
&n.tagged(&tag),
format!("converting BSON Decimal128 to BigDecimal"),
)
})?;
Value::Primitive(Primitive::Decimal(decimal)).tagged(tag)
Value::Primitive(Primitive::Decimal(decimal)).tagged(&tag)
}
Bson::JavaScriptCode(js) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
"$javascript".to_string(),
Value::Primitive(Primitive::String(String::from(js))).tagged(tag),
Value::Primitive(Primitive::String(String::from(js))).tagged(&tag),
);
collected.into_tagged_value()
}
Bson::JavaScriptCodeWithScope(js, doc) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
"$javascript".to_string(),
Value::Primitive(Primitive::String(String::from(js))).tagged(tag),
Value::Primitive(Primitive::String(String::from(js))).tagged(&tag),
);
collected.insert_tagged(
"$scope".to_string(),
convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag)?,
convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag.clone())?,
);
collected.into_tagged_value()
}
Bson::TimeStamp(ts) => {
let mut collected = TaggedDictBuilder::new(tag);
collected.insert_tagged("$timestamp".to_string(), Value::number(ts).tagged(tag));
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged("$timestamp".to_string(), Value::number(ts).tagged(&tag));
collected.into_tagged_value()
}
Bson::Binary(bst, bytes) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
"$binary_subtype".to_string(),
match bst {
BinarySubtype::UserDefined(u) => Value::number(u),
_ => Value::Primitive(Primitive::String(binary_subtype_to_string(*bst))),
}
.tagged(tag),
.tagged(&tag),
);
collected.insert_tagged(
"$binary".to_string(),
Value::Primitive(Primitive::Binary(bytes.to_owned())).tagged(tag),
Value::Primitive(Primitive::Binary(bytes.to_owned())).tagged(&tag),
);
collected.into_tagged_value()
}
Bson::ObjectId(obj_id) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
"$object_id".to_string(),
Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(tag),
Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(&tag),
);
collected.into_tagged_value()
}
Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(tag),
Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(&tag),
Bson::Symbol(s) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
"$symbol".to_string(),
Value::Primitive(Primitive::String(String::from(s))).tagged(tag),
Value::Primitive(Primitive::String(String::from(s))).tagged(&tag),
);
collected.into_tagged_value()
}
@@ -201,20 +201,20 @@ fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
let tag = args.name_tag();
let input = args.input;

let stream = async_stream_block! {
let stream = async_stream! {
let values: Vec<Tagged<Value>> = input.values.collect().await;

for value in values {
let value_tag = value.tag();
match value.item {
Value::Primitive(Primitive::Binary(vb)) =>
match from_bson_bytes_to_value(vb, tag) {
match from_bson_bytes_to_value(vb, tag.clone()) {
Ok(x) => yield ReturnSuccess::value(x),
Err(_) => {
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as BSON",
"input cannot be parsed as BSON",
tag,
tag.clone(),
"value originates from here",
value_tag,
))
@@ -223,7 +223,7 @@ fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
tag.clone(),
"value originates from here",
value_tag,
)),
@@ -62,12 +62,12 @@ pub fn from_csv_string_to_value(
if let Some(row_values) = iter.next() {
let row_values = row_values?;

let mut row = TaggedDictBuilder::new(tag);
let mut row = TaggedDictBuilder::new(tag.clone());

for (idx, entry) in row_values.iter().enumerate() {
row.insert_tagged(
fields.get(idx).unwrap(),
Value::Primitive(Primitive::String(String::from(entry))).tagged(tag),
Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag),
);
}

@@ -77,7 +77,7 @@ pub fn from_csv_string_to_value(
}
}

Ok(Tagged::from_item(Value::Table(rows), tag))
Ok(Value::Table(rows).tagged(&tag))
}

fn from_csv(
@@ -88,7 +88,7 @@ fn from_csv(
) -> Result<OutputStream, ShellError> {
let name_tag = name;

let stream = async_stream_block! {
let stream = async_stream! {
let values: Vec<Tagged<Value>> = input.values.collect().await;

let mut concat_string = String::new();
@@ -96,7 +96,7 @@ fn from_csv(

for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@@ -105,15 +105,15 @@ fn from_csv(
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
name_tag,
name_tag.clone(),
"value originates from here",
value_tag,
value_tag.clone(),
)),

}
}

match from_csv_string_to_value(concat_string, skip_headers, name_tag) {
match from_csv_string_to_value(concat_string, skip_headers, name_tag.clone()) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
@@ -126,9 +126,9 @@ fn from_csv(
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as CSV",
"input cannot be parsed as CSV",
name_tag,
name_tag.clone(),
"value originates from here",
last_tag,
last_tag.clone(),
))
} ,
}
@@ -45,10 +45,13 @@ fn convert_ini_top_to_nu_value(
tag: impl Into<Tag>,
) -> Tagged<Value> {
let tag = tag.into();
let mut top_level = TaggedDictBuilder::new(tag);
let mut top_level = TaggedDictBuilder::new(tag.clone());

for (key, value) in v.iter() {
top_level.insert_tagged(key.clone(), convert_ini_second_to_nu_value(value, tag));
top_level.insert_tagged(
key.clone(),
convert_ini_second_to_nu_value(value, tag.clone()),
);
}

top_level.into_tagged_value()
@@ -67,7 +70,7 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
let tag = args.name_tag();
let input = args.input;

let stream = async_stream_block! {
let stream = async_stream! {
let values: Vec<Tagged<Value>> = input.values.collect().await;

let mut concat_string = String::new();
@@ -75,7 +78,7 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea

for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@@ -84,15 +87,15 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
value_tag,
&value_tag,
)),

}
}

match from_ini_string_to_value(concat_string, tag) {
match from_ini_string_to_value(concat_string, tag.clone()) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
@@ -105,7 +108,7 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as INI",
"input cannot be parsed as INI",
tag,
&tag,
"value originates from here",
last_tag,
))
@@ -35,24 +35,24 @@ fn convert_json_value_to_nu_value(v: &serde_hjson::Value, tag: impl Into<Tag>) -
let tag = tag.into();

match v {
serde_hjson::Value::Null => Value::Primitive(Primitive::Nothing).tagged(tag),
serde_hjson::Value::Bool(b) => Value::boolean(*b).tagged(tag),
serde_hjson::Value::F64(n) => Value::number(n).tagged(tag),
serde_hjson::Value::U64(n) => Value::number(n).tagged(tag),
serde_hjson::Value::I64(n) => Value::number(n).tagged(tag),
serde_hjson::Value::Null => Value::Primitive(Primitive::Nothing).tagged(&tag),
serde_hjson::Value::Bool(b) => Value::boolean(*b).tagged(&tag),
serde_hjson::Value::F64(n) => Value::number(n).tagged(&tag),
serde_hjson::Value::U64(n) => Value::number(n).tagged(&tag),
serde_hjson::Value::I64(n) => Value::number(n).tagged(&tag),
serde_hjson::Value::String(s) => {
Value::Primitive(Primitive::String(String::from(s))).tagged(tag)
Value::Primitive(Primitive::String(String::from(s))).tagged(&tag)
}
serde_hjson::Value::Array(a) => Value::Table(
a.iter()
.map(|x| convert_json_value_to_nu_value(x, tag))
.map(|x| convert_json_value_to_nu_value(x, &tag))
.collect(),
)
.tagged(tag),
serde_hjson::Value::Object(o) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(&tag);
for (k, v) in o.iter() {
collected.insert_tagged(k.clone(), convert_json_value_to_nu_value(v, tag));
collected.insert_tagged(k.clone(), convert_json_value_to_nu_value(v, &tag));
}

collected.into_tagged_value()
@@ -74,7 +74,7 @@ fn from_json(
) -> Result<OutputStream, ShellError> {
let name_tag = name;

let stream = async_stream_block! {
let stream = async_stream! {
let values: Vec<Tagged<Value>> = input.values.collect().await;

let mut concat_string = String::new();
@@ -82,7 +82,7 @@ fn from_json(

for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@@ -91,9 +91,9 @@ fn from_json(
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
name_tag,
&name_tag,
"value originates from here",
value_tag,
&value_tag,
)),

}
@@ -106,15 +106,15 @@ fn from_json(
continue;
}

match from_json_string_to_value(json_str.to_string(), name_tag) {
match from_json_string_to_value(json_str.to_string(), &name_tag) {
Ok(x) =>
yield ReturnSuccess::value(x),
Err(_) => {
if let Some(last_tag) = latest_tag {
if let Some(ref last_tag) = latest_tag {
yield Err(ShellError::labeled_error_with_secondary(
"Could nnot parse as JSON",
"input cannot be parsed as JSON",
name_tag,
&name_tag,
"value originates from here",
last_tag))
}
@@ -122,7 +122,7 @@ fn from_json(
}
}
} else {
match from_json_string_to_value(concat_string, name_tag) {
match from_json_string_to_value(concat_string, name_tag.clone()) {
Ok(x) =>
match x {
Tagged { item: Value::Table(list), .. } => {
@@ -131,14 +131,14 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
let tag = args.name_tag();
let input = args.input;

let stream = async_stream_block! {
let stream = async_stream! {
let values: Vec<Tagged<Value>> = input.values.collect().await;

for value in values {
let value_tag = value.tag();
match value.item {
Value::Primitive(Primitive::Binary(vb)) =>
match from_sqlite_bytes_to_value(vb, tag) {
match from_sqlite_bytes_to_value(vb, tag.clone()) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
@@ -151,7 +151,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as SQLite",
"input cannot be parsed as SQLite",
tag,
&tag,
"value originates from here",
value_tag,
))
@@ -160,7 +160,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
value_tag,
)),
src/commands/from_ssv.rs (new file, 250 lines)
@@ -0,0 +1,250 @@
use crate::commands::WholeStreamCommand;
use crate::data::{Primitive, TaggedDictBuilder, Value};
use crate::prelude::*;

pub struct FromSSV;

#[derive(Deserialize)]
pub struct FromSSVArgs {
headerless: bool,
#[serde(rename(deserialize = "minimum-spaces"))]
minimum_spaces: Option<Tagged<usize>>,
}

const STRING_REPRESENTATION: &str = "from-ssv";
const DEFAULT_MINIMUM_SPACES: usize = 2;

impl WholeStreamCommand for FromSSV {
fn name(&self) -> &str {
STRING_REPRESENTATION
}

fn signature(&self) -> Signature {
Signature::build(STRING_REPRESENTATION)
.switch("headerless")
.named("minimum-spaces", SyntaxShape::Int)
}

fn usage(&self) -> &str {
"Parse text as space-separated values and create a table. The default minimum number of spaces counted as a separator is 2."
}

fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, from_ssv)?.run()
}
}

fn string_to_table(
s: &str,
headerless: bool,
split_at: usize,
) -> Option<Vec<Vec<(String, String)>>> {
let mut lines = s.lines().filter(|l| !l.trim().is_empty());
let separator = " ".repeat(std::cmp::max(split_at, 1));

let headers = lines
.next()?
.split(&separator)
.map(|s| s.trim())
.filter(|s| !s.is_empty())
.map(|s| s.to_owned())
.collect::<Vec<String>>();

let header_row = if headerless {
(1..=headers.len())
.map(|i| format!("Column{}", i))
.collect::<Vec<String>>()
} else {
headers
};

Some(
lines
.map(|l| {
header_row
.iter()
.zip(
l.split(&separator)
.map(|s| s.trim())
.filter(|s| !s.is_empty()),
)
.map(|(a, b)| (String::from(a), String::from(b)))
.collect()
})
.collect(),
)
}

fn from_ssv_string_to_value(
s: &str,
headerless: bool,
split_at: usize,
tag: impl Into<Tag>,
) -> Option<Tagged<Value>> {
let tag = tag.into();
let rows = string_to_table(s, headerless, split_at)?
.iter()
.map(|row| {
let mut tagged_dict = TaggedDictBuilder::new(&tag);
for (col, entry) in row {
tagged_dict.insert_tagged(
col,
Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag),
)
}
tagged_dict.into_tagged_value()
})
.collect();

Some(Value::Table(rows).tagged(&tag))
}

fn from_ssv(
FromSSVArgs {
headerless,
minimum_spaces,
}: FromSSVArgs,
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let values: Vec<Tagged<Value>> = input.values.collect().await;
let mut concat_string = String::new();
let mut latest_tag: Option<Tag> = None;
let split_at = match minimum_spaces {
Some(number) => number.item,
None => DEFAULT_MINIMUM_SPACES
};

for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
}
_ => yield Err(ShellError::labeled_error_with_secondary (
"Expected a string from pipeline",
"requires string input",
&name,
"value originates from here",
&value_tag
)),
}
}

match from_ssv_string_to_value(&concat_string, headerless, split_at, name.clone()) {
Some(x) => match x {
Tagged { item: Value::Table(list), ..} => {
for l in list { yield ReturnSuccess::value(l) }
}
x => yield ReturnSuccess::value(x)
},
None => if let Some(tag) = latest_tag {
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as SSV",
"input cannot be parsed ssv",
&name,
"value originates from here",
&tag,
))
},
}
};

Ok(stream.to_output_stream())
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
fn owned(x: &str, y: &str) -> (String, String) {
|
||||
(String::from(x), String::from(y))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_trims_empty_and_whitespace_only_lines() {
|
||||
let input = r#"
|
||||
|
||||
a b
|
||||
|
||||
1 2
|
||||
|
||||
3 4
|
||||
"#;
|
||||
let result = string_to_table(input, false, 1);
|
||||
assert_eq!(
|
||||
result,
|
||||
Some(vec![
|
||||
vec![owned("a", "1"), owned("b", "2")],
|
||||
vec![owned("a", "3"), owned("b", "4")]
|
||||
])
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_ignores_headers_when_headerless() {
|
||||
let input = r#"
|
||||
a b
|
||||
1 2
|
||||
3 4
|
||||
"#;
|
||||
let result = string_to_table(input, true, 1);
|
||||
assert_eq!(
|
||||
result,
|
||||
Some(vec![
|
||||
vec![owned("Column1", "1"), owned("Column2", "2")],
|
||||
vec![owned("Column1", "3"), owned("Column2", "4")]
|
||||
])
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_returns_none_given_an_empty_string() {
|
||||
let input = "";
|
||||
let result = string_to_table(input, true, 1);
|
||||
assert_eq!(result, None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_allows_a_predefined_number_of_spaces() {
|
||||
let input = r#"
|
||||
column a column b
|
||||
entry 1 entry number 2
|
||||
3 four
|
||||
"#;
|
||||
|
||||
let result = string_to_table(input, false, 3);
|
||||
assert_eq!(
|
||||
result,
|
||||
Some(vec![
|
||||
vec![
|
||||
owned("column a", "entry 1"),
|
||||
owned("column b", "entry number 2")
|
||||
],
|
||||
vec![owned("column a", "3"), owned("column b", "four")]
|
||||
])
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn it_trims_remaining_separator_space() {
|
||||
let input = r#"
|
||||
colA colB colC
|
||||
val1 val2 val3
|
||||
"#;
|
||||
|
||||
let trimmed = |s: &str| s.trim() == s;
|
||||
|
||||
let result = string_to_table(input, false, 2).unwrap();
|
||||
assert_eq!(
|
||||
true,
|
||||
result
|
||||
.iter()
|
||||
.all(|row| row.iter().all(|(a, b)| trimmed(a) && trimmed(b)))
|
||||
)
|
||||
}
|
||||
}
|
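The from-ssv usage string above pins down the parsing rule: a column separator is any run of at least minimum-spaces spaces (2 by default), every cell is trimmed, and empty cells are dropped, which is why single spaces inside a value survive. A minimal standalone sketch of that splitting rule for experimenting outside the shell; the function name and the ps-style sample rows are illustrative only, not part of the crate:

    // Standalone sketch of the from-ssv splitting rule: a separator is a run of
    // at least `min_spaces` spaces; each cell is trimmed and empty cells dropped.
    fn split_ssv_line(line: &str, min_spaces: usize) -> Vec<String> {
        let separator = " ".repeat(std::cmp::max(min_spaces, 1));
        line.split(&separator)
            .map(|s| s.trim())
            .filter(|s| !s.is_empty())
            .map(String::from)
            .collect()
    }

    fn main() {
        // With the default of 2 spaces, single spaces inside a value survive.
        let header = split_ssv_line("PID   COMMAND        CPU TIME", 2);
        let row = split_ssv_line("1203  /usr/bin/nu    00:01.04", 2);
        assert_eq!(header, vec!["PID", "COMMAND", "CPU TIME"]);
        assert_eq!(row, vec!["1203", "/usr/bin/nu", "00:01.04"]);
        println!("{:?}", header.iter().zip(row.iter()).collect::<Vec<_>>());
    }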
@ -36,7 +36,7 @@ pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into<Tag>) -> T
|
||||
toml::Value::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag),
|
||||
toml::Value::Array(a) => Value::Table(
|
||||
a.iter()
|
||||
.map(|x| convert_toml_value_to_nu_value(x, tag))
|
||||
.map(|x| convert_toml_value_to_nu_value(x, &tag))
|
||||
.collect(),
|
||||
)
|
||||
.tagged(tag),
|
||||
@ -44,10 +44,10 @@ pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into<Tag>) -> T
|
||||
Value::Primitive(Primitive::String(dt.to_string())).tagged(tag)
|
||||
}
|
||||
toml::Value::Table(t) => {
|
||||
let mut collected = TaggedDictBuilder::new(tag);
|
||||
let mut collected = TaggedDictBuilder::new(&tag);
|
||||
|
||||
for (k, v) in t.iter() {
|
||||
collected.insert_tagged(k.clone(), convert_toml_value_to_nu_value(v, tag));
|
||||
collected.insert_tagged(k.clone(), convert_toml_value_to_nu_value(v, &tag));
|
||||
}
|
||||
|
||||
collected.into_tagged_value()
|
||||
@ -71,7 +71,7 @@ pub fn from_toml(
|
||||
let tag = args.name_tag();
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream_block! {
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
|
||||
let mut concat_string = String::new();
|
||||
@ -79,7 +79,7 @@ pub fn from_toml(
|
||||
|
||||
for value in values {
|
||||
let value_tag = value.tag();
|
||||
latest_tag = Some(value_tag);
|
||||
latest_tag = Some(value_tag.clone());
|
||||
match value.item {
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
concat_string.push_str(&s);
|
||||
@ -88,15 +88,15 @@ pub fn from_toml(
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
tag,
|
||||
&tag,
|
||||
"value originates from here",
|
||||
value_tag,
|
||||
&value_tag,
|
||||
)),
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
match from_toml_string_to_value(concat_string, tag) {
|
||||
match from_toml_string_to_value(concat_string, tag.clone()) {
|
||||
Ok(x) => match x {
|
||||
Tagged { item: Value::Table(list), .. } => {
|
||||
for l in list {
|
||||
@ -109,7 +109,7 @@ pub fn from_toml(
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Could not parse as TOML",
|
||||
"input cannot be parsed as TOML",
|
||||
tag,
|
||||
&tag,
|
||||
"value originates from here",
|
||||
last_tag,
|
||||
))
|
||||
|
@ -63,12 +63,12 @@ pub fn from_tsv_string_to_value(
|
||||
if let Some(row_values) = iter.next() {
|
||||
let row_values = row_values?;
|
||||
|
||||
let mut row = TaggedDictBuilder::new(tag);
|
||||
let mut row = TaggedDictBuilder::new(&tag);
|
||||
|
||||
for (idx, entry) in row_values.iter().enumerate() {
|
||||
row.insert_tagged(
|
||||
fields.get(idx).unwrap(),
|
||||
Value::Primitive(Primitive::String(String::from(entry))).tagged(tag),
|
||||
Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag),
|
||||
);
|
||||
}
|
||||
|
||||
@ -78,7 +78,7 @@ pub fn from_tsv_string_to_value(
|
||||
}
|
||||
}
|
||||
|
||||
Ok(Tagged::from_item(Value::Table(rows), tag))
|
||||
Ok(Value::Table(rows).tagged(&tag))
|
||||
}
|
||||
|
||||
fn from_tsv(
|
||||
@ -89,7 +89,7 @@ fn from_tsv(
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let name_tag = name;
|
||||
|
||||
let stream = async_stream_block! {
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
|
||||
let mut concat_string = String::new();
|
||||
@ -97,7 +97,7 @@ fn from_tsv(
|
||||
|
||||
for value in values {
|
||||
let value_tag = value.tag();
|
||||
latest_tag = Some(value_tag);
|
||||
latest_tag = Some(value_tag.clone());
|
||||
match value.item {
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
concat_string.push_str(&s);
|
||||
@ -106,15 +106,15 @@ fn from_tsv(
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
name_tag,
|
||||
&name_tag,
|
||||
"value originates from here",
|
||||
value_tag,
|
||||
&value_tag,
|
||||
)),
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
match from_tsv_string_to_value(concat_string, skip_headers, name_tag) {
|
||||
match from_tsv_string_to_value(concat_string, skip_headers, name_tag.clone()) {
|
||||
Ok(x) => match x {
|
||||
Tagged { item: Value::Table(list), .. } => {
|
||||
for l in list {
|
||||
@ -127,9 +127,9 @@ fn from_tsv(
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Could not parse as TSV",
|
||||
"input cannot be parsed as TSV",
|
||||
name_tag,
|
||||
&name_tag,
|
||||
"value originates from here",
|
||||
last_tag,
|
||||
&last_tag,
|
||||
))
|
||||
} ,
|
||||
}
|
||||
|
@ -31,7 +31,7 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
|
||||
let tag = args.name_tag();
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream_block! {
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
|
||||
let mut concat_string = String::new();
|
||||
@ -39,7 +39,7 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
|
||||
|
||||
for value in values {
|
||||
let value_tag = value.tag();
|
||||
latest_tag = Some(value_tag);
|
||||
latest_tag = Some(value_tag.clone());
|
||||
match value.item {
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
concat_string.push_str(&s);
|
||||
@ -47,9 +47,9 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
tag,
|
||||
&tag,
|
||||
"value originates from here",
|
||||
value_tag,
|
||||
&value_tag,
|
||||
)),
|
||||
|
||||
}
|
||||
|
@ -34,7 +34,7 @@ fn from_node_to_value<'a, 'd>(n: &roxmltree::Node<'a, 'd>, tag: impl Into<Tag>)
|
||||
|
||||
let mut children_values = vec![];
|
||||
for c in n.children() {
|
||||
children_values.push(from_node_to_value(&c, tag));
|
||||
children_values.push(from_node_to_value(&c, &tag));
|
||||
}
|
||||
|
||||
let children_values: Vec<Tagged<Value>> = children_values
|
||||
@ -86,7 +86,7 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
|
||||
let tag = args.name_tag();
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream_block! {
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
|
||||
let mut concat_string = String::new();
|
||||
@ -94,7 +94,7 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
|
||||
|
||||
for value in values {
|
||||
let value_tag = value.tag();
|
||||
latest_tag = Some(value_tag);
|
||||
latest_tag = Some(value_tag.clone());
|
||||
match value.item {
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
concat_string.push_str(&s);
|
||||
@ -103,15 +103,15 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
tag,
|
||||
&tag,
|
||||
"value originates from here",
|
||||
value_tag,
|
||||
&value_tag,
|
||||
)),
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
match from_xml_string_to_value(concat_string, tag) {
|
||||
match from_xml_string_to_value(concat_string, tag.clone()) {
|
||||
Ok(x) => match x {
|
||||
Tagged { item: Value::Table(list), .. } => {
|
||||
for l in list {
|
||||
@ -124,9 +124,9 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Could not parse as XML",
|
||||
"input cannot be parsed as XML",
|
||||
tag,
|
||||
&tag,
|
||||
"value originates from here",
|
||||
last_tag,
|
||||
&last_tag,
|
||||
))
|
||||
} ,
|
||||
}
|
||||
|
@ -64,17 +64,17 @@ fn convert_yaml_value_to_nu_value(v: &serde_yaml::Value, tag: impl Into<Tag>) ->
|
||||
serde_yaml::Value::String(s) => Value::string(s).tagged(tag),
|
||||
serde_yaml::Value::Sequence(a) => Value::Table(
|
||||
a.iter()
|
||||
.map(|x| convert_yaml_value_to_nu_value(x, tag))
|
||||
.map(|x| convert_yaml_value_to_nu_value(x, &tag))
|
||||
.collect(),
|
||||
)
|
||||
.tagged(tag),
|
||||
serde_yaml::Value::Mapping(t) => {
|
||||
let mut collected = TaggedDictBuilder::new(tag);
|
||||
let mut collected = TaggedDictBuilder::new(&tag);
|
||||
|
||||
for (k, v) in t.iter() {
|
||||
match k {
|
||||
serde_yaml::Value::String(k) => {
|
||||
collected.insert_tagged(k.clone(), convert_yaml_value_to_nu_value(v, tag));
|
||||
collected.insert_tagged(k.clone(), convert_yaml_value_to_nu_value(v, &tag));
|
||||
}
|
||||
_ => unimplemented!("Unknown key type"),
|
||||
}
|
||||
@ -100,7 +100,7 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
|
||||
let tag = args.name_tag();
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream_block! {
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
|
||||
let mut concat_string = String::new();
|
||||
@ -108,7 +108,7 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
|
||||
|
||||
for value in values {
|
||||
let value_tag = value.tag();
|
||||
latest_tag = Some(value_tag);
|
||||
latest_tag = Some(value_tag.clone());
|
||||
match value.item {
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
concat_string.push_str(&s);
|
||||
@ -117,15 +117,15 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
tag,
|
||||
&tag,
|
||||
"value originates from here",
|
||||
value_tag,
|
||||
&value_tag,
|
||||
)),
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
match from_yaml_string_to_value(concat_string, tag) {
|
||||
match from_yaml_string_to_value(concat_string, tag.clone()) {
|
||||
Ok(x) => match x {
|
||||
Tagged { item: Value::Table(list), .. } => {
|
||||
for l in list {
|
||||
@ -138,9 +138,9 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Could not parse as YAML",
|
||||
"input cannot be parsed as YAML",
|
||||
tag,
|
||||
&tag,
|
||||
"value originates from here",
|
||||
last_tag,
|
||||
&last_tag,
|
||||
))
|
||||
} ,
|
||||
}
|
||||
|
@ -1,14 +1,16 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::data::meta::tag_for_tagged_list;
|
||||
use crate::data::Value;
|
||||
use crate::errors::ShellError;
|
||||
use crate::prelude::*;
|
||||
use log::trace;
|
||||
|
||||
pub struct Get;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct GetArgs {
|
||||
member: Tagged<String>,
|
||||
rest: Vec<Tagged<String>>,
|
||||
member: ColumnPath,
|
||||
rest: Vec<ColumnPath>,
|
||||
}
|
||||
|
||||
impl WholeStreamCommand for Get {
|
||||
@ -18,8 +20,8 @@ impl WholeStreamCommand for Get {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("get")
|
||||
.required("member", SyntaxShape::Member)
|
||||
.rest(SyntaxShape::Member)
|
||||
.required("member", SyntaxShape::ColumnPath)
|
||||
.rest(SyntaxShape::ColumnPath)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -35,35 +37,41 @@ impl WholeStreamCommand for Get {
|
||||
}
|
||||
}
|
||||
|
||||
fn get_member(path: &Tagged<String>, obj: &Tagged<Value>) -> Result<Tagged<Value>, ShellError> {
|
||||
pub type ColumnPath = Vec<Tagged<String>>;
|
||||
|
||||
pub fn get_column_path(
|
||||
path: &ColumnPath,
|
||||
obj: &Tagged<Value>,
|
||||
) -> Result<Tagged<Value>, ShellError> {
|
||||
let mut current = Some(obj);
|
||||
for p in path.split(".") {
|
||||
for p in path.iter() {
|
||||
if let Some(obj) = current {
|
||||
current = match obj.get_data_by_key(p) {
|
||||
current = match obj.get_data_by_key(&p) {
|
||||
Some(v) => Some(v),
|
||||
None =>
|
||||
// Before we give up, see if they gave us a path that matches a field name by itself
|
||||
{
|
||||
match obj.get_data_by_key(&path.item) {
|
||||
Some(v) => return Ok(v.clone()),
|
||||
None => {
|
||||
let possibilities = obj.data_descriptors();
|
||||
let possibilities = obj.data_descriptors();
|
||||
|
||||
let mut possible_matches: Vec<_> = possibilities
|
||||
.iter()
|
||||
.map(|x| {
|
||||
(natural::distance::levenshtein_distance(x, &path.item), x)
|
||||
})
|
||||
.collect();
|
||||
let mut possible_matches: Vec<_> = possibilities
|
||||
.iter()
|
||||
.map(|x| (natural::distance::levenshtein_distance(x, &p), x))
|
||||
.collect();
|
||||
|
||||
possible_matches.sort();
|
||||
possible_matches.sort();
|
||||
|
||||
return Err(ShellError::labeled_error(
|
||||
"Unknown column",
|
||||
format!("did you mean '{}'?", possible_matches[0].1),
|
||||
path.tag(),
|
||||
));
|
||||
}
|
||||
if possible_matches.len() > 0 {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Unknown column",
|
||||
format!("did you mean '{}'?", possible_matches[0].1),
|
||||
tag_for_tagged_list(path.iter().map(|p| p.tag())),
|
||||
));
|
||||
} else {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Unknown column",
|
||||
"row does not contain this column",
|
||||
tag_for_tagged_list(path.iter().map(|p| p.tag())),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -72,7 +80,18 @@ fn get_member(path: &Tagged<String>, obj: &Tagged<Value>) -> Result<Tagged<Value
|
||||
|
||||
match current {
|
||||
Some(v) => Ok(v.clone()),
|
||||
None => Ok(Value::nothing().tagged(obj.tag)),
|
||||
None => match obj {
|
||||
// If its None check for certain values.
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::String(_)),
|
||||
..
|
||||
} => Ok(obj.clone()),
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::Path(_)),
|
||||
..
|
||||
} => Ok(obj.clone()),
|
||||
_ => Ok(Value::nothing().tagged(&obj.tag)),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
@ -83,6 +102,8 @@ pub fn get(
|
||||
}: GetArgs,
|
||||
RunnableContext { input, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
trace!("get {:?} {:?}", member, fields);
|
||||
|
||||
let stream = input
|
||||
.values
|
||||
.map(move |item| {
|
||||
@ -93,10 +114,10 @@ pub fn get(
|
||||
let fields = vec![&member, &fields]
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect::<Vec<&Tagged<String>>>();
|
||||
.collect::<Vec<&ColumnPath>>();
|
||||
|
||||
for field in &fields {
|
||||
match get_member(field, &item) {
|
||||
for column_path in &fields {
|
||||
match get_column_path(column_path, &item) {
|
||||
Ok(Tagged {
|
||||
item: Value::Table(l),
|
||||
..
|
||||
|
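The rewritten get resolves a ColumnPath one segment at a time and, when a segment is unknown, ranks the row's columns by Levenshtein distance to offer a "did you mean '...'?" hint (the diff uses natural::distance::levenshtein_distance for this). A rough self-contained sketch of that suggestion step, with a hand-rolled distance function standing in for the natural crate; the names here are illustrative only:

    // Edit distance between two strings (classic dynamic-programming Levenshtein).
    fn levenshtein(a: &str, b: &str) -> usize {
        let a: Vec<char> = a.chars().collect();
        let b: Vec<char> = b.chars().collect();
        let mut prev: Vec<usize> = (0..=b.len()).collect();
        for (i, ca) in a.iter().enumerate() {
            let mut cur = vec![i + 1];
            for (j, cb) in b.iter().enumerate() {
                let cost = if ca == cb { 0 } else { 1 };
                cur.push((prev[j] + cost).min(prev[j + 1] + 1).min(cur[j] + 1));
            }
            prev = cur;
        }
        prev[b.len()]
    }

    // Pick the known column closest to the requested one, mirroring the
    // "did you mean ...?" branch of get_column_path.
    fn closest_column<'a>(wanted: &str, columns: &'a [&'a str]) -> Option<&'a str> {
        columns
            .iter()
            .copied()
            .min_by_key(|candidate| levenshtein(wanted, candidate))
    }

    fn main() {
        let columns = ["name", "size", "modified"];
        // A typo in the requested column still produces a useful suggestion.
        assert_eq!(closest_column("sise", &columns), Some("size"));
        println!("did you mean '{}'?", closest_column("sise", &columns).unwrap());
    }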
@@ -26,7 +26,7 @@ impl PerItemCommand for Help {
         _raw_args: &RawCommandArgs,
         _input: Tagged<Value>,
     ) -> Result<OutputStream, ShellError> {
-        let tag = call_info.name_tag;
+        let tag = &call_info.name_tag;

         match call_info.args.nth(0) {
             Some(Tagged {
@@ -116,7 +116,7 @@ Here are some tips to help you get started.
 * help commands - list all available commands
 * help <command name> - display help about a particular command

-You can also learn more at http://book.nushell.sh"#;
+You can also learn more at https://book.nushell.sh"#;

     let mut output_stream = VecDeque::new();

@@ -7,7 +7,7 @@ pub struct Last;

 #[derive(Deserialize)]
 pub struct LastArgs {
-    amount: Tagged<u64>,
+    rows: Option<Tagged<u64>>,
 }

 impl WholeStreamCommand for Last {
@@ -16,7 +16,7 @@ impl WholeStreamCommand for Last {
     }

     fn signature(&self) -> Signature {
-        Signature::build("last").required("amount", SyntaxShape::Number)
+        Signature::build("last").optional("rows", SyntaxShape::Number)
     }

     fn usage(&self) -> &str {
@@ -32,16 +32,23 @@ impl WholeStreamCommand for Last {
     }
 }

-fn last(
-    LastArgs { amount }: LastArgs,
-    context: RunnableContext,
-) -> Result<OutputStream, ShellError> {
-    let stream = async_stream_block! {
+fn last(LastArgs { rows }: LastArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
+    let stream = async_stream! {
         let v: Vec<_> = context.input.into_vec().await;
-        let k = v.len() - (*amount as usize);
-        for x in v[k..].iter() {
-            let y: Tagged<Value> = x.clone();
-            yield ReturnSuccess::value(y)
+
+        let rows_desired = if let Some(quantity) = rows {
+            *quantity
+        } else {
+            1
+        };
+
+        let count = (rows_desired as usize);
+        if count < v.len() {
+            let k = v.len() - count;
+            for x in v[k..].iter() {
+                let y: Tagged<Value> = x.clone();
+                yield ReturnSuccess::value(y)
+            }
         }
     };
     Ok(stream.to_output_stream())
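The reworked last above takes an optional row count (defaulting to 1) and only slices when that count is smaller than the input length, so the old v.len() - amount subtraction can no longer underflow. A small sketch of the same guard on a plain Vec, with illustrative names rather than the command's own stream types:

    // Yield the final `count` items only when the input is long enough,
    // matching the `count < v.len()` guard added to `last`.
    fn last_rows<T: Clone>(v: &[T], count: usize) -> Vec<T> {
        if count < v.len() {
            v[v.len() - count..].to_vec()
        } else {
            // The guarded version produces nothing rather than panicking
            // on `v.len() - count` underflow.
            Vec::new()
        }
    }

    fn main() {
        let data = vec![1, 2, 3, 4, 5];
        assert_eq!(last_rows(&data, 2), vec![4, 5]);
        assert_eq!(last_rows(&data, 10), Vec::<i32>::new());
    }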
@@ -58,7 +58,7 @@ fn lines(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
             result.push_back(Err(ShellError::labeled_error_with_secondary(
                 "Expected a string from pipeline",
                 "requires string input",
-                tag,
+                &tag,
                 "value originates from here",
                 v.tag(),
             )));
@@ -34,5 +34,5 @@ impl WholeStreamCommand for LS {
 }

 fn ls(LsArgs { path }: LsArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
-    context.shell_manager.ls(path, context.name)
+    context.shell_manager.ls(path, &context)
 }
@ -1,5 +1,5 @@
|
||||
use crate::commands::UnevaluatedCallInfo;
|
||||
use crate::context::SpanSource;
|
||||
use crate::context::AnchorLocation;
|
||||
use crate::data::meta::Span;
|
||||
use crate::data::Value;
|
||||
use crate::errors::ShellError;
|
||||
@ -7,7 +7,6 @@ use crate::parser::hir::SyntaxShape;
|
||||
use crate::parser::registry::Signature;
|
||||
use crate::prelude::*;
|
||||
use std::path::{Path, PathBuf};
|
||||
use uuid::Uuid;
|
||||
pub struct Open;
|
||||
|
||||
impl PerItemCommand for Open {
|
||||
@ -45,21 +44,23 @@ fn run(
|
||||
let cwd = PathBuf::from(shell_manager.path());
|
||||
let full_path = PathBuf::from(cwd);
|
||||
|
||||
let path = match call_info
|
||||
.args
|
||||
.nth(0)
|
||||
.ok_or_else(|| ShellError::string(&format!("No file or directory specified")))?
|
||||
{
|
||||
let path = match call_info.args.nth(0).ok_or_else(|| {
|
||||
ShellError::labeled_error(
|
||||
"No file or directory specified",
|
||||
"for command",
|
||||
&call_info.name_tag,
|
||||
)
|
||||
})? {
|
||||
file => file,
|
||||
};
|
||||
let path_buf = path.as_path()?;
|
||||
let path_str = path_buf.display().to_string();
|
||||
let path_span = path.span();
|
||||
let path_span = path.tag.span;
|
||||
let has_raw = call_info.args.has("raw");
|
||||
let registry = registry.clone();
|
||||
let raw_args = raw_args.clone();
|
||||
|
||||
let stream = async_stream_block! {
|
||||
let stream = async_stream! {
|
||||
|
||||
let result = fetch(&full_path, &path_str, path_span).await;
|
||||
|
||||
@ -67,7 +68,7 @@ fn run(
|
||||
yield Err(e);
|
||||
return;
|
||||
}
|
||||
let (file_extension, contents, contents_tag, span_source) = result.unwrap();
|
||||
let (file_extension, contents, contents_tag) = result.unwrap();
|
||||
|
||||
let file_extension = if has_raw {
|
||||
None
|
||||
@ -77,21 +78,14 @@ fn run(
|
||||
file_extension.or(path_str.split('.').last().map(String::from))
|
||||
};
|
||||
|
||||
if contents_tag.origin != uuid::Uuid::nil() {
|
||||
// If we have loaded something, track its source
|
||||
yield ReturnSuccess::action(CommandAction::AddSpanSource(
|
||||
contents_tag.origin,
|
||||
span_source,
|
||||
));
|
||||
}
|
||||
|
||||
let tagged_contents = contents.tagged(contents_tag);
|
||||
let tagged_contents = contents.tagged(&contents_tag);
|
||||
|
||||
if let Some(extension) = file_extension {
|
||||
let command_name = format!("from-{}", extension);
|
||||
if let Some(converter) = registry.get_command(&command_name) {
|
||||
let new_args = RawCommandArgs {
|
||||
host: raw_args.host,
|
||||
ctrl_c: raw_args.ctrl_c,
|
||||
shell_manager: raw_args.shell_manager,
|
||||
call_info: UnevaluatedCallInfo {
|
||||
args: crate::parser::hir::Call {
|
||||
@ -100,7 +94,6 @@ fn run(
|
||||
named: None
|
||||
},
|
||||
source: raw_args.call_info.source,
|
||||
source_map: raw_args.call_info.source_map,
|
||||
name_tag: raw_args.call_info.name_tag,
|
||||
}
|
||||
};
|
||||
@ -114,7 +107,7 @@ fn run(
|
||||
}
|
||||
}
|
||||
Ok(ReturnSuccess::Value(Tagged { item, .. })) => {
|
||||
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag }));
|
||||
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() }));
|
||||
}
|
||||
x => yield x,
|
||||
}
|
||||
@ -134,7 +127,7 @@ pub async fn fetch(
|
||||
cwd: &PathBuf,
|
||||
location: &str,
|
||||
span: Span,
|
||||
) -> Result<(Option<String>, Value, Tag, SpanSource), ShellError> {
|
||||
) -> Result<(Option<String>, Value, Tag), ShellError> {
|
||||
let mut cwd = cwd.clone();
|
||||
|
||||
cwd.push(Path::new(location));
|
||||
@ -147,9 +140,8 @@ pub async fn fetch(
|
||||
Value::string(s),
|
||||
Tag {
|
||||
span,
|
||||
origin: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::File(cwd.to_string_lossy().to_string())),
|
||||
},
|
||||
SpanSource::File(cwd.to_string_lossy().to_string()),
|
||||
)),
|
||||
Err(_) => {
|
||||
//Non utf8 data.
|
||||
@ -166,18 +158,20 @@ pub async fn fetch(
|
||||
Value::string(s),
|
||||
Tag {
|
||||
span,
|
||||
origin: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::File(
|
||||
cwd.to_string_lossy().to_string(),
|
||||
)),
|
||||
},
|
||||
SpanSource::File(cwd.to_string_lossy().to_string()),
|
||||
)),
|
||||
Err(_) => Ok((
|
||||
None,
|
||||
Value::binary(bytes),
|
||||
Tag {
|
||||
span,
|
||||
origin: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::File(
|
||||
cwd.to_string_lossy().to_string(),
|
||||
)),
|
||||
},
|
||||
SpanSource::File(cwd.to_string_lossy().to_string()),
|
||||
)),
|
||||
}
|
||||
} else {
|
||||
@ -186,9 +180,10 @@ pub async fn fetch(
|
||||
Value::binary(bytes),
|
||||
Tag {
|
||||
span,
|
||||
origin: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::File(
|
||||
cwd.to_string_lossy().to_string(),
|
||||
)),
|
||||
},
|
||||
SpanSource::File(cwd.to_string_lossy().to_string()),
|
||||
))
|
||||
}
|
||||
}
|
||||
@ -204,18 +199,20 @@ pub async fn fetch(
|
||||
Value::string(s),
|
||||
Tag {
|
||||
span,
|
||||
origin: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::File(
|
||||
cwd.to_string_lossy().to_string(),
|
||||
)),
|
||||
},
|
||||
SpanSource::File(cwd.to_string_lossy().to_string()),
|
||||
)),
|
||||
Err(_) => Ok((
|
||||
None,
|
||||
Value::binary(bytes),
|
||||
Tag {
|
||||
span,
|
||||
origin: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::File(
|
||||
cwd.to_string_lossy().to_string(),
|
||||
)),
|
||||
},
|
||||
SpanSource::File(cwd.to_string_lossy().to_string()),
|
||||
)),
|
||||
}
|
||||
} else {
|
||||
@ -224,9 +221,10 @@ pub async fn fetch(
|
||||
Value::binary(bytes),
|
||||
Tag {
|
||||
span,
|
||||
origin: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::File(
|
||||
cwd.to_string_lossy().to_string(),
|
||||
)),
|
||||
},
|
||||
SpanSource::File(cwd.to_string_lossy().to_string()),
|
||||
))
|
||||
}
|
||||
}
|
||||
@ -235,9 +233,10 @@ pub async fn fetch(
|
||||
Value::binary(bytes),
|
||||
Tag {
|
||||
span,
|
||||
origin: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::File(
|
||||
cwd.to_string_lossy().to_string(),
|
||||
)),
|
||||
},
|
||||
SpanSource::File(cwd.to_string_lossy().to_string()),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
@ -52,7 +52,7 @@ fn merge_descriptors(values: &[Tagged<Value>]) -> Vec<String> {
|
||||
}
|
||||
|
||||
pub fn pivot(args: PivotArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
|
||||
let stream = async_stream_block! {
|
||||
let stream = async_stream! {
|
||||
let input = context.input.into_vec().await;
|
||||
|
||||
let descs = merge_descriptors(&input);
|
||||
@ -104,7 +104,7 @@ pub fn pivot(args: PivotArgs, context: RunnableContext) -> Result<OutputStream,
|
||||
|
||||
for desc in descs {
|
||||
let mut column_num: usize = 0;
|
||||
let mut dict = TaggedDictBuilder::new(context.name);
|
||||
let mut dict = TaggedDictBuilder::new(&context.name);
|
||||
|
||||
if !args.ignore_titles && !args.header_row {
|
||||
dict.insert(headers[column_num].clone(), Value::string(desc.clone()));
|
||||
|
@ -128,7 +128,7 @@ pub fn filter_plugin(
|
||||
},
|
||||
Err(e) => {
|
||||
let mut result = VecDeque::new();
|
||||
result.push_back(Err(ShellError::string(format!(
|
||||
result.push_back(Err(ShellError::untagged_runtime_error(format!(
|
||||
"Error while processing begin_filter response: {:?} {}",
|
||||
e, input
|
||||
))));
|
||||
@ -138,7 +138,7 @@ pub fn filter_plugin(
|
||||
}
|
||||
Err(e) => {
|
||||
let mut result = VecDeque::new();
|
||||
result.push_back(Err(ShellError::string(format!(
|
||||
result.push_back(Err(ShellError::untagged_runtime_error(format!(
|
||||
"Error while reading begin_filter response: {:?}",
|
||||
e
|
||||
))));
|
||||
@ -189,7 +189,7 @@ pub fn filter_plugin(
|
||||
},
|
||||
Err(e) => {
|
||||
let mut result = VecDeque::new();
|
||||
result.push_back(Err(ShellError::string(format!(
|
||||
result.push_back(Err(ShellError::untagged_runtime_error(format!(
|
||||
"Error while processing end_filter response: {:?} {}",
|
||||
e, input
|
||||
))));
|
||||
@ -199,7 +199,7 @@ pub fn filter_plugin(
|
||||
}
|
||||
Err(e) => {
|
||||
let mut result = VecDeque::new();
|
||||
result.push_back(Err(ShellError::string(format!(
|
||||
result.push_back(Err(ShellError::untagged_runtime_error(format!(
|
||||
"Error while reading end_filter: {:?}",
|
||||
e
|
||||
))));
|
||||
@ -236,7 +236,7 @@ pub fn filter_plugin(
|
||||
},
|
||||
Err(e) => {
|
||||
let mut result = VecDeque::new();
|
||||
result.push_back(Err(ShellError::string(format!(
|
||||
result.push_back(Err(ShellError::untagged_runtime_error(format!(
|
||||
"Error while processing filter response: {:?} {}",
|
||||
e, input
|
||||
))));
|
||||
@ -246,7 +246,7 @@ pub fn filter_plugin(
|
||||
}
|
||||
Err(e) => {
|
||||
let mut result = VecDeque::new();
|
||||
result.push_back(Err(ShellError::string(format!(
|
||||
result.push_back(Err(ShellError::untagged_runtime_error(format!(
|
||||
"Error while reading filter response: {:?}",
|
||||
e
|
||||
))));
|
||||
@ -297,7 +297,7 @@ pub fn sink_plugin(
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let call_info = args.call_info.clone();
|
||||
|
||||
let stream = async_stream_block! {
|
||||
let stream = async_stream! {
|
||||
let input: Vec<Tagged<Value>> = args.input.values.collect().await;
|
||||
|
||||
let request = JsonRpc::new("sink", (call_info.clone(), input));
|
||||
@ -312,6 +312,11 @@ pub fn sink_plugin(
|
||||
.expect("Failed to spawn child process");
|
||||
|
||||
let _ = child.wait();
|
||||
|
||||
// Needed for async_stream to type check
|
||||
if false {
|
||||
yield ReturnSuccess::value(Value::nothing().tagged_unknown());
|
||||
}
|
||||
};
|
||||
Ok(OutputStream::new(stream))
|
||||
}
|
||||
|
@ -1,5 +1,5 @@
|
||||
use crate::commands::UnevaluatedCallInfo;
|
||||
use crate::context::SpanSource;
|
||||
use crate::context::AnchorLocation;
|
||||
use crate::data::Value;
|
||||
use crate::errors::ShellError;
|
||||
use crate::parser::hir::SyntaxShape;
|
||||
@ -11,6 +11,11 @@ use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
use surf::mime;
|
||||
|
||||
pub enum HeaderKind {
|
||||
ContentType(String),
|
||||
ContentLength(String),
|
||||
}
|
||||
|
||||
pub struct Post;
|
||||
|
||||
impl PerItemCommand for Post {
|
||||
@ -24,6 +29,8 @@ impl PerItemCommand for Post {
|
||||
.required("body", SyntaxShape::Any)
|
||||
.named("user", SyntaxShape::Any)
|
||||
.named("password", SyntaxShape::Any)
|
||||
.named("content-type", SyntaxShape::Any)
|
||||
.named("content-length", SyntaxShape::Any)
|
||||
.switch("raw")
|
||||
}
|
||||
|
||||
@ -47,21 +54,20 @@ fn run(
|
||||
registry: &CommandRegistry,
|
||||
raw_args: &RawCommandArgs,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let name_tag = call_info.name_tag.clone();
|
||||
let call_info = call_info.clone();
|
||||
let path = match call_info
|
||||
.args
|
||||
.nth(0)
|
||||
.ok_or_else(|| ShellError::string(&format!("No url specified")))?
|
||||
{
|
||||
file => file.clone(),
|
||||
};
|
||||
let body = match call_info
|
||||
.args
|
||||
.nth(1)
|
||||
.ok_or_else(|| ShellError::string(&format!("No body specified")))?
|
||||
{
|
||||
file => file.clone(),
|
||||
};
|
||||
let path =
|
||||
match call_info.args.nth(0).ok_or_else(|| {
|
||||
ShellError::labeled_error("No url specified", "for command", &name_tag)
|
||||
})? {
|
||||
file => file.clone(),
|
||||
};
|
||||
let body =
|
||||
match call_info.args.nth(1).ok_or_else(|| {
|
||||
ShellError::labeled_error("No body specified", "for command", &name_tag)
|
||||
})? {
|
||||
file => file.clone(),
|
||||
};
|
||||
let path_str = path.as_string()?;
|
||||
let path_span = path.tag();
|
||||
let has_raw = call_info.args.has("raw");
|
||||
@ -73,9 +79,11 @@ fn run(
|
||||
let registry = registry.clone();
|
||||
let raw_args = raw_args.clone();
|
||||
|
||||
let stream = async_stream_block! {
|
||||
let (file_extension, contents, contents_tag, span_source) =
|
||||
post(&path_str, &body, user, password, path_span, ®istry, &raw_args).await.unwrap();
|
||||
let headers = get_headers(&call_info)?;
|
||||
|
||||
let stream = async_stream! {
|
||||
let (file_extension, contents, contents_tag) =
|
||||
post(&path_str, &body, user, password, &headers, path_span, ®istry, &raw_args).await.unwrap();
|
||||
|
||||
let file_extension = if has_raw {
|
||||
None
|
||||
@ -85,21 +93,14 @@ fn run(
|
||||
file_extension.or(path_str.split('.').last().map(String::from))
|
||||
};
|
||||
|
||||
if contents_tag.origin != uuid::Uuid::nil() {
|
||||
// If we have loaded something, track its source
|
||||
yield ReturnSuccess::action(CommandAction::AddSpanSource(
|
||||
contents_tag.origin,
|
||||
span_source,
|
||||
));
|
||||
}
|
||||
|
||||
let tagged_contents = contents.tagged(contents_tag);
|
||||
let tagged_contents = contents.tagged(&contents_tag);
|
||||
|
||||
if let Some(extension) = file_extension {
|
||||
let command_name = format!("from-{}", extension);
|
||||
if let Some(converter) = registry.get_command(&command_name) {
|
||||
let new_args = RawCommandArgs {
|
||||
host: raw_args.host,
|
||||
ctrl_c: raw_args.ctrl_c,
|
||||
shell_manager: raw_args.shell_manager,
|
||||
call_info: UnevaluatedCallInfo {
|
||||
args: crate::parser::hir::Call {
|
||||
@ -108,7 +109,6 @@ fn run(
|
||||
named: None
|
||||
},
|
||||
source: raw_args.call_info.source,
|
||||
source_map: raw_args.call_info.source_map,
|
||||
name_tag: raw_args.call_info.name_tag,
|
||||
}
|
||||
};
|
||||
@ -122,7 +122,7 @@ fn run(
|
||||
}
|
||||
}
|
||||
Ok(ReturnSuccess::Value(Tagged { item, .. })) => {
|
||||
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag }));
|
||||
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() }));
|
||||
}
|
||||
x => yield x,
|
||||
}
|
||||
@ -138,15 +138,71 @@ fn run(
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
||||
|
||||
fn get_headers(call_info: &CallInfo) -> Result<Vec<HeaderKind>, ShellError> {
|
||||
let mut headers = vec![];
|
||||
|
||||
match extract_header_value(&call_info, "content-type") {
|
||||
Ok(h) => match h {
|
||||
Some(ct) => headers.push(HeaderKind::ContentType(ct)),
|
||||
None => {}
|
||||
},
|
||||
Err(e) => {
|
||||
return Err(e);
|
||||
}
|
||||
};
|
||||
|
||||
match extract_header_value(&call_info, "content-length") {
|
||||
Ok(h) => match h {
|
||||
Some(cl) => headers.push(HeaderKind::ContentLength(cl)),
|
||||
None => {}
|
||||
},
|
||||
Err(e) => {
|
||||
return Err(e);
|
||||
}
|
||||
};
|
||||
|
||||
Ok(headers)
|
||||
}
|
||||
|
||||
fn extract_header_value(call_info: &CallInfo, key: &str) -> Result<Option<String>, ShellError> {
|
||||
if call_info.args.has(key) {
|
||||
let tagged = call_info.args.get(key);
|
||||
let val = match tagged {
|
||||
Some(Tagged {
|
||||
item: Value::Primitive(Primitive::String(s)),
|
||||
..
|
||||
}) => s.clone(),
|
||||
Some(Tagged { tag, .. }) => {
|
||||
return Err(ShellError::labeled_error(
|
||||
format!("{} not in expected format. Expected string.", key),
|
||||
"post error",
|
||||
tag,
|
||||
));
|
||||
}
|
||||
_ => {
|
||||
return Err(ShellError::labeled_error(
|
||||
format!("{} not in expected format. Expected string.", key),
|
||||
"post error",
|
||||
Tag::unknown(),
|
||||
));
|
||||
}
|
||||
};
|
||||
return Ok(Some(val));
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
pub async fn post(
|
||||
location: &str,
|
||||
body: &Tagged<Value>,
|
||||
user: Option<String>,
|
||||
password: Option<String>,
|
||||
headers: &Vec<HeaderKind>,
|
||||
tag: Tag,
|
||||
registry: &CommandRegistry,
|
||||
raw_args: &RawCommandArgs,
|
||||
) -> Result<(Option<String>, Value, Tag, SpanSource), ShellError> {
|
||||
) -> Result<(Option<String>, Value, Tag), ShellError> {
|
||||
let registry = registry.clone();
|
||||
let raw_args = raw_args.clone();
|
||||
if location.starts_with("http:") || location.starts_with("https:") {
|
||||
@ -164,6 +220,13 @@ pub async fn post(
|
||||
if let Some(login) = login {
|
||||
s = s.set_header("Authorization", format!("Basic {}", login));
|
||||
}
|
||||
|
||||
for h in headers {
|
||||
s = match h {
|
||||
HeaderKind::ContentType(ct) => s.set_header("Content-Type", ct),
|
||||
HeaderKind::ContentLength(cl) => s.set_header("Content-Length", cl),
|
||||
};
|
||||
}
|
||||
s.await
|
||||
}
|
||||
Tagged {
|
||||
@ -180,6 +243,7 @@ pub async fn post(
|
||||
if let Some(converter) = registry.get_command("to-json") {
|
||||
let new_args = RawCommandArgs {
|
||||
host: raw_args.host,
|
||||
ctrl_c: raw_args.ctrl_c,
|
||||
shell_manager: raw_args.shell_manager,
|
||||
call_info: UnevaluatedCallInfo {
|
||||
args: crate::parser::hir::Call {
|
||||
@ -188,7 +252,6 @@ pub async fn post(
|
||||
named: None,
|
||||
},
|
||||
source: raw_args.call_info.source,
|
||||
source_map: raw_args.call_info.source_map,
|
||||
name_tag: raw_args.call_info.name_tag,
|
||||
},
|
||||
};
|
||||
@ -212,7 +275,7 @@ pub async fn post(
|
||||
return Err(ShellError::labeled_error(
|
||||
"Save could not successfully save",
|
||||
"unexpected data during save",
|
||||
*tag,
|
||||
tag,
|
||||
));
|
||||
}
|
||||
}
|
||||
@ -228,7 +291,7 @@ pub async fn post(
|
||||
return Err(ShellError::labeled_error(
|
||||
"Could not automatically convert table",
|
||||
"needs manual conversion",
|
||||
*tag,
|
||||
tag,
|
||||
));
|
||||
}
|
||||
}
|
||||
@ -244,11 +307,13 @@ pub async fn post(
|
||||
ShellError::labeled_error(
|
||||
"Could not load text from remote url",
|
||||
"could not load",
|
||||
tag,
|
||||
&tag,
|
||||
)
|
||||
})?),
|
||||
tag,
|
||||
SpanSource::Url(location.to_string()),
|
||||
Tag {
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
span: tag.span,
|
||||
},
|
||||
)),
|
||||
(mime::APPLICATION, mime::JSON) => Ok((
|
||||
Some("json".to_string()),
|
||||
@ -256,25 +321,29 @@ pub async fn post(
|
||||
ShellError::labeled_error(
|
||||
"Could not load text from remote url",
|
||||
"could not load",
|
||||
tag,
|
||||
&tag,
|
||||
)
|
||||
})?),
|
||||
tag,
|
||||
SpanSource::Url(location.to_string()),
|
||||
Tag {
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
span: tag.span,
|
||||
},
|
||||
)),
|
||||
(mime::APPLICATION, mime::OCTET_STREAM) => {
|
||||
let buf: Vec<u8> = r.body_bytes().await.map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not load binary file",
|
||||
"could not load",
|
||||
tag,
|
||||
&tag,
|
||||
)
|
||||
})?;
|
||||
Ok((
|
||||
None,
|
||||
Value::binary(buf),
|
||||
tag,
|
||||
SpanSource::Url(location.to_string()),
|
||||
Tag {
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
span: tag.span,
|
||||
},
|
||||
))
|
||||
}
|
||||
(mime::IMAGE, image_ty) => {
|
||||
@ -282,14 +351,16 @@ pub async fn post(
|
||||
ShellError::labeled_error(
|
||||
"Could not load image file",
|
||||
"could not load",
|
||||
tag,
|
||||
&tag,
|
||||
)
|
||||
})?;
|
||||
Ok((
|
||||
Some(image_ty.to_string()),
|
||||
Value::binary(buf),
|
||||
tag,
|
||||
SpanSource::Url(location.to_string()),
|
||||
Tag {
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
span: tag.span,
|
||||
},
|
||||
))
|
||||
}
|
||||
(mime::TEXT, mime::HTML) => Ok((
|
||||
@ -298,11 +369,13 @@ pub async fn post(
|
||||
ShellError::labeled_error(
|
||||
"Could not load text from remote url",
|
||||
"could not load",
|
||||
tag,
|
||||
&tag,
|
||||
)
|
||||
})?),
|
||||
tag,
|
||||
SpanSource::Url(location.to_string()),
|
||||
Tag {
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
span: tag.span,
|
||||
},
|
||||
)),
|
||||
(mime::TEXT, mime::PLAIN) => {
|
||||
let path_extension = url::Url::parse(location)
|
||||
@ -322,11 +395,13 @@ pub async fn post(
|
||||
ShellError::labeled_error(
|
||||
"Could not load text from remote url",
|
||||
"could not load",
|
||||
tag,
|
||||
&tag,
|
||||
)
|
||||
})?),
|
||||
tag,
|
||||
SpanSource::Url(location.to_string()),
|
||||
Tag {
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
span: tag.span,
|
||||
},
|
||||
))
|
||||
}
|
||||
(ty, sub_ty) => Ok((
|
||||
@ -335,16 +410,20 @@ pub async fn post(
|
||||
"Not yet supported MIME type: {} {}",
|
||||
ty, sub_ty
|
||||
)),
|
||||
tag,
|
||||
SpanSource::Url(location.to_string()),
|
||||
Tag {
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
span: tag.span,
|
||||
},
|
||||
)),
|
||||
}
|
||||
}
|
||||
None => Ok((
|
||||
None,
|
||||
Value::string(format!("No content type found")),
|
||||
tag,
|
||||
SpanSource::Url(location.to_string()),
|
||||
Tag {
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
span: tag.span,
|
||||
},
|
||||
)),
|
||||
},
|
||||
Err(_) => {
|
||||
|
@ -7,7 +7,7 @@ use std::path::{Path, PathBuf};
|
||||
pub struct Save;
|
||||
|
||||
macro_rules! process_string {
|
||||
($input:ident, $name_tag:ident) => {{
|
||||
($scope:tt, $input:ident, $name_tag:ident) => {{
|
||||
let mut result_string = String::new();
|
||||
for res in $input {
|
||||
match res {
|
||||
@ -18,11 +18,11 @@ macro_rules! process_string {
|
||||
result_string.push_str(&s);
|
||||
}
|
||||
_ => {
|
||||
yield core::task::Poll::Ready(Err(ShellError::labeled_error(
|
||||
break $scope Err(ShellError::labeled_error(
|
||||
"Save could not successfully save",
|
||||
"unexpected data during save",
|
||||
$name_tag,
|
||||
)));
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -31,7 +31,7 @@ macro_rules! process_string {
|
||||
}
|
||||
|
||||
macro_rules! process_string_return_success {
|
||||
($result_vec:ident, $name_tag:ident) => {{
|
||||
($scope:tt, $result_vec:ident, $name_tag:ident) => {{
|
||||
let mut result_string = String::new();
|
||||
for res in $result_vec {
|
||||
match res {
|
||||
@ -42,11 +42,11 @@ macro_rules! process_string_return_success {
|
||||
result_string.push_str(&s);
|
||||
}
|
||||
_ => {
|
||||
yield core::task::Poll::Ready(Err(ShellError::labeled_error(
|
||||
break $scope Err(ShellError::labeled_error(
|
||||
"Save could not successfully save",
|
||||
"unexpected data during text save",
|
||||
$name_tag,
|
||||
)));
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -55,7 +55,7 @@ macro_rules! process_string_return_success {
|
||||
}
|
||||
|
||||
macro_rules! process_binary_return_success {
|
||||
($result_vec:ident, $name_tag:ident) => {{
|
||||
($scope:tt, $result_vec:ident, $name_tag:ident) => {{
|
||||
let mut result_binary: Vec<u8> = Vec::new();
|
||||
for res in $result_vec {
|
||||
match res {
|
||||
@ -68,11 +68,11 @@ macro_rules! process_binary_return_success {
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
yield core::task::Poll::Ready(Err(ShellError::labeled_error(
|
||||
break $scope Err(ShellError::labeled_error(
|
||||
"Save could not successfully save",
|
||||
"unexpected data during binary save",
|
||||
$name_tag,
|
||||
)));
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -119,49 +119,48 @@ fn save(
|
||||
input,
|
||||
name,
|
||||
shell_manager,
|
||||
source_map,
|
||||
host,
|
||||
ctrl_c,
|
||||
commands: registry,
|
||||
..
|
||||
}: RunnableContext,
|
||||
raw_args: RawCommandArgs,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let mut full_path = PathBuf::from(shell_manager.path());
|
||||
let name_tag = name;
|
||||
let name_tag = name.clone();
|
||||
|
||||
let source_map = source_map.clone();
|
||||
let stream = async_stream_block! {
|
||||
let stream = async_stream! {
|
||||
let input: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
if path.is_none() {
|
||||
// If there is no filename, check the metadata for the origin filename
|
||||
// If there is no filename, check the metadata for the anchor filename
|
||||
if input.len() > 0 {
|
||||
let origin = input[0].origin();
|
||||
match source_map.get(&origin) {
|
||||
let anchor = input[0].anchor();
|
||||
match anchor {
|
||||
Some(path) => match path {
|
||||
SpanSource::File(file) => {
|
||||
full_path.push(Path::new(file));
|
||||
AnchorLocation::File(file) => {
|
||||
full_path.push(Path::new(&file));
|
||||
}
|
||||
_ => {
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Save requires a filepath",
|
||||
"Save requires a filepath (1)",
|
||||
"needs path",
|
||||
name_tag,
|
||||
name_tag.clone(),
|
||||
));
|
||||
}
|
||||
},
|
||||
None => {
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Save requires a filepath",
|
||||
"Save requires a filepath (2)",
|
||||
"needs path",
|
||||
name_tag,
|
||||
name_tag.clone(),
|
||||
));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Save requires a filepath",
|
||||
"Save requires a filepath (3)",
|
||||
"needs path",
|
||||
name_tag,
|
||||
name_tag.clone(),
|
||||
));
|
||||
}
|
||||
} else {
|
||||
@ -170,47 +169,51 @@ fn save(
|
||||
}
|
||||
}
|
||||
|
||||
let content : Result<Vec<u8>, ShellError> = if !save_raw {
|
||||
if let Some(extension) = full_path.extension() {
|
||||
let command_name = format!("to-{}", extension.to_str().unwrap());
|
||||
if let Some(converter) = registry.get_command(&command_name) {
|
||||
let new_args = RawCommandArgs {
|
||||
host,
|
||||
shell_manager,
|
||||
call_info: UnevaluatedCallInfo {
|
||||
args: crate::parser::hir::Call {
|
||||
head: raw_args.call_info.args.head,
|
||||
positional: None,
|
||||
named: None
|
||||
},
|
||||
source: raw_args.call_info.source,
|
||||
source_map: raw_args.call_info.source_map,
|
||||
name_tag: raw_args.call_info.name_tag,
|
||||
// TODO use label_break_value once it is stable:
|
||||
// https://github.com/rust-lang/rust/issues/48594
|
||||
let content : Result<Vec<u8>, ShellError> = 'scope: loop {
|
||||
break if !save_raw {
|
||||
if let Some(extension) = full_path.extension() {
|
||||
let command_name = format!("to-{}", extension.to_str().unwrap());
|
||||
if let Some(converter) = registry.get_command(&command_name) {
|
||||
let new_args = RawCommandArgs {
|
||||
host,
|
||||
ctrl_c,
|
||||
shell_manager,
|
||||
call_info: UnevaluatedCallInfo {
|
||||
args: crate::parser::hir::Call {
|
||||
head: raw_args.call_info.args.head,
|
||||
positional: None,
|
||||
named: None
|
||||
},
|
||||
source: raw_args.call_info.source,
|
||||
name_tag: raw_args.call_info.name_tag,
|
||||
}
|
||||
};
|
||||
let mut result = converter.run(new_args.with_input(input), ®istry, false);
|
||||
let result_vec: Vec<Result<ReturnSuccess, ShellError>> = result.drain_vec().await;
|
||||
if converter.is_binary() {
|
||||
process_binary_return_success!('scope, result_vec, name_tag)
|
||||
} else {
|
||||
process_string_return_success!('scope, result_vec, name_tag)
|
||||
}
|
||||
};
|
||||
let mut result = converter.run(new_args.with_input(input), ®istry, false);
|
||||
let result_vec: Vec<Result<ReturnSuccess, ShellError>> = result.drain_vec().await;
|
||||
if converter.is_binary() {
|
||||
process_binary_return_success!(result_vec, name_tag)
|
||||
} else {
|
||||
process_string_return_success!(result_vec, name_tag)
|
||||
process_string!('scope, input, name_tag)
|
||||
}
|
||||
} else {
|
||||
process_string!(input, name_tag)
|
||||
process_string!('scope, input, name_tag)
|
||||
}
|
||||
} else {
|
||||
process_string!(input, name_tag)
|
||||
}
|
||||
} else {
|
||||
Ok(string_from(&input).into_bytes())
|
||||
Ok(string_from(&input).into_bytes())
|
||||
};
|
||||
};
|
||||
|
||||
match content {
|
||||
Ok(save_data) => match std::fs::write(full_path, save_data) {
|
||||
Ok(o) => o,
|
||||
Err(e) => yield Err(ShellError::string(e.to_string())),
|
||||
Err(e) => yield Err(ShellError::labeled_error(e.to_string(), "for command", name)),
|
||||
},
|
||||
Err(e) => yield Err(ShellError::string(e.to_string())),
|
||||
Err(e) => yield Err(ShellError::labeled_error(e.to_string(), "for command", name)),
|
||||
}
|
||||
|
||||
};
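The 'scope: loop { break ... } wrapper in the save diff above emulates the still-unstable label_break_value feature referenced by the TODO (rust-lang/rust#48594): the macros bail out of the whole conversion block with break 'scope Err(...), while the loop's unconditional break supplies the normal result. A tiny self-contained sketch of the same pattern, assuming nothing beyond std; the function and flag names are made up for illustration:

    // `label_break_value` is unstable, so emulate it with a labelled `loop`
    // that always breaks: any `break 'scope <value>` inside ends the block early.
    fn convert(save_raw: bool, input: &str) -> Result<Vec<u8>, String> {
        let content: Result<Vec<u8>, String> = 'scope: loop {
            break if !save_raw {
                if input.is_empty() {
                    // Early error exit from deep inside the block, the same shape
                    // as `break $scope Err(...)` in the save macros.
                    break 'scope Err("unexpected data during save".to_string());
                }
                Ok(input.to_uppercase().into_bytes())
            } else {
                Ok(input.as_bytes().to_vec())
            };
        };
        content
    }

    fn main() {
        assert_eq!(convert(true, "abc"), Ok(b"abc".to_vec()));
        assert_eq!(convert(false, "abc"), Ok(b"ABC".to_vec()));
        assert!(convert(false, "").is_err());
    }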
|
||||
|
@ -2,6 +2,7 @@ use crate::commands::WholeStreamCommand;
|
||||
use crate::data::TaggedDictBuilder;
|
||||
use crate::errors::ShellError;
|
||||
use crate::prelude::*;
|
||||
use std::sync::atomic::Ordering;
|
||||
|
||||
pub struct Shells;
|
||||
|
||||
@ -32,14 +33,14 @@ fn shells(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream
|
||||
let tag = args.call_info.name_tag;
|
||||
|
||||
for (index, shell) in args.shell_manager.shells.lock().unwrap().iter().enumerate() {
|
||||
let mut dict = TaggedDictBuilder::new(tag);
|
||||
let mut dict = TaggedDictBuilder::new(&tag);
|
||||
|
||||
if index == args.shell_manager.current_shell {
|
||||
if index == (*args.shell_manager.current_shell).load(Ordering::SeqCst) {
|
||||
dict.insert(" ", "X".to_string());
|
||||
} else {
|
||||
dict.insert(" ", " ".to_string());
|
||||
}
|
||||
dict.insert("name", shell.name(&args.call_info.source_map));
|
||||
dict.insert("name", shell.name());
|
||||
dict.insert("path", shell.path());
|
||||
|
||||
shells_out.push_back(dict.into_tagged_value());
|
||||
|
@@ -37,7 +37,7 @@ fn size(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream,
         _ => Err(ShellError::labeled_error_with_secondary(
             "Expected a string from pipeline",
             "requires string input",
-            tag,
+            &tag,
             "value originates from here",
             v.tag(),
         )),
@ -1,6 +1,7 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::errors::ShellError;
|
||||
use crate::prelude::*;
|
||||
use log::trace;
|
||||
|
||||
pub struct SkipWhile;
|
||||
|
||||
@ -38,7 +39,9 @@ pub fn skip_while(
|
||||
RunnableContext { input, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let objects = input.values.skip_while(move |item| {
|
||||
trace!("ITEM = {:?}", item);
|
||||
let result = condition.invoke(&item);
|
||||
trace!("RESULT = {:?}", result);
|
||||
|
||||
let return_value = match result {
|
||||
Ok(ref v) if v.is_true() => true,
|
||||
|
@ -35,7 +35,7 @@ fn sort_by(
|
||||
SortByArgs { rest }: SortByArgs,
|
||||
mut context: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
Ok(OutputStream::new(async_stream_block! {
|
||||
Ok(OutputStream::new(async_stream! {
|
||||
let mut vec = context.input.drain_vec().await;
|
||||
|
||||
let calc_key = |item: &Tagged<Value>| {
|
||||
|
@@ -94,7 +94,7 @@ fn split_column(
         _ => Err(ShellError::labeled_error_with_secondary(
             "Expected a string from pipeline",
             "requires string input",
-            name,
+            &name,
             "value originates from here",
             v.tag(),
         )),
@@ -60,7 +60,7 @@ fn split_row(
             result.push_back(Err(ShellError::labeled_error_with_secondary(
                 "Expected a string from pipeline",
                 "requires string input",
-                name,
+                &name,
                 "value originates from here",
                 v.tag(),
             )));
@ -2,20 +2,16 @@ use crate::commands::WholeStreamCommand;
|
||||
use crate::errors::ShellError;
|
||||
use crate::format::TableView;
|
||||
use crate::prelude::*;
|
||||
use futures_async_stream::async_stream_block;
|
||||
|
||||
pub struct Table;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct TableArgs {}
|
||||
|
||||
impl WholeStreamCommand for Table {
|
||||
fn name(&self) -> &str {
|
||||
"table"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("table")
|
||||
Signature::build("table").named("start_number", SyntaxShape::Number)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -27,20 +23,37 @@ impl WholeStreamCommand for Table {
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
args.process(registry, table)?.run()
|
||||
table(args, registry)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn table(_args: TableArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
|
||||
let stream = async_stream_block! {
|
||||
let input: Vec<Tagged<Value>> = context.input.into_vec().await;
|
||||
fn table(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
|
||||
let stream = async_stream! {
|
||||
let host = args.host.clone();
|
||||
let start_number = match args.get("start_number") {
|
||||
Some(Tagged { item: Value::Primitive(Primitive::Int(i)), .. }) => {
|
||||
i.to_usize().unwrap()
|
||||
}
|
||||
_ => {
|
||||
0
|
||||
}
|
||||
};
|
||||
|
||||
let input: Vec<Tagged<Value>> = args.input.into_vec().await;
|
||||
if input.len() > 0 {
|
||||
let mut host = context.host.lock().unwrap();
|
||||
let view = TableView::from_list(&input);
|
||||
let mut host = host.lock().unwrap();
|
||||
let view = TableView::from_list(&input, start_number);
|
||||
|
||||
if let Some(view) = view {
|
||||
handle_unexpected(&mut *host, |host| crate::format::print_view(&view, host));
|
||||
}
|
||||
}
|
||||
// Needed for async_stream to type check
|
||||
if false {
|
||||
yield ReturnSuccess::value(Value::nothing().tagged_unknown());
|
||||
}
|
||||
};
|
||||
|
||||
Ok(OutputStream::new(stream))
|
||||
|
@ -28,26 +28,25 @@ impl WholeStreamCommand for Tags {
|
||||
}
|
||||
|
||||
fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let source_map = args.call_info.source_map.clone();
|
||||
Ok(args
|
||||
.input
|
||||
.values
|
||||
.map(move |v| {
|
||||
let mut tags = TaggedDictBuilder::new(v.tag());
|
||||
{
|
||||
let origin = v.origin();
|
||||
let anchor = v.anchor();
|
||||
let span = v.tag().span;
|
||||
let mut dict = TaggedDictBuilder::new(v.tag());
|
||||
dict.insert("start", Value::int(span.start as i64));
|
||||
dict.insert("end", Value::int(span.end as i64));
|
||||
dict.insert("start", Value::int(span.start() as i64));
|
||||
dict.insert("end", Value::int(span.end() as i64));
|
||||
tags.insert_tagged("span", dict.into_tagged_value());
|
||||
|
||||
match source_map.get(&origin) {
|
||||
Some(SpanSource::File(source)) => {
|
||||
tags.insert("origin", Value::string(source));
|
||||
match anchor {
|
||||
Some(AnchorLocation::File(source)) => {
|
||||
tags.insert("anchor", Value::string(source));
|
||||
}
|
||||
Some(SpanSource::Url(source)) => {
|
||||
tags.insert("origin", Value::string(source));
|
||||
Some(AnchorLocation::Url(source)) => {
|
||||
tags.insert("anchor", Value::string(source));
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
@ -46,7 +46,7 @@ pub fn value_to_bson_value(v: &Tagged<Value>) -> Result<Bson, ShellError> {
|
||||
Value::Primitive(Primitive::BeginningOfStream) => Bson::Null,
|
||||
Value::Primitive(Primitive::Decimal(d)) => Bson::FloatingPoint(d.to_f64().unwrap()),
|
||||
Value::Primitive(Primitive::Int(i)) => {
|
||||
Bson::I64(i.tagged(v.tag).coerce_into("converting to BSON")?)
|
||||
Bson::I64(i.tagged(&v.tag).coerce_into("converting to BSON")?)
|
||||
}
|
||||
Value::Primitive(Primitive::Nothing) => Bson::Null,
|
||||
Value::Primitive(Primitive::String(s)) => Bson::String(s.clone()),
|
||||
@ -58,6 +58,7 @@ pub fn value_to_bson_value(v: &Tagged<Value>) -> Result<Bson, ShellError> {
|
||||
.collect::<Result<_, _>>()?,
|
||||
),
|
||||
Value::Block(_) => Bson::Null,
|
||||
Value::Error(e) => return Err(e.clone()),
|
||||
Value::Primitive(Primitive::Binary(b)) => Bson::Binary(BinarySubtype::Generic, b.clone()),
|
||||
Value::Row(o) => object_value_to_bson(o)?,
|
||||
})
|
||||
@ -170,7 +171,7 @@ fn get_binary_subtype<'a>(tagged_value: &'a Tagged<Value>) -> Result<BinarySubty
|
||||
_ => unreachable!(),
|
||||
}),
|
||||
Value::Primitive(Primitive::Int(i)) => Ok(BinarySubtype::UserDefined(
|
||||
i.tagged(tagged_value.tag)
|
||||
i.tagged(&tagged_value.tag)
|
||||
.coerce_into("converting to BSON binary subtype")?,
|
||||
)),
|
||||
_ => Err(ShellError::type_error(
|
||||
@ -207,12 +208,12 @@ fn bson_value_to_bytes(bson: Bson, tag: Tag) -> Result<Vec<u8>, ShellError> {
|
||||
Bson::Array(a) => {
|
||||
for v in a.into_iter() {
|
||||
match v {
|
||||
Bson::Document(d) => shell_encode_document(&mut out, d, tag)?,
|
||||
Bson::Document(d) => shell_encode_document(&mut out, d, tag.clone())?,
|
||||
_ => {
|
||||
return Err(ShellError::labeled_error(
|
||||
format!("All top level values must be Documents, got {:?}", v),
|
||||
"requires BSON-compatible document",
|
||||
tag,
|
||||
&tag,
|
||||
))
|
||||
}
|
||||
}
|
||||
@ -233,11 +234,11 @@ fn bson_value_to_bytes(bson: Bson, tag: Tag) -> Result<Vec<u8>, ShellError> {
|
||||
fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let name_tag = args.name_tag();
|
||||
let stream = async_stream_block! {
|
||||
let stream = async_stream! {
|
||||
let input: Vec<Tagged<Value>> = args.input.values.collect().await;
|
||||
|
||||
let to_process_input = if input.len() > 1 {
|
||||
let tag = input[0].tag;
|
||||
let tag = input[0].tag.clone();
|
||||
vec![Tagged { item: Value::Table(input), tag } ]
|
||||
} else if input.len() == 1 {
|
||||
input
|
||||
@ -248,14 +249,14 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
|
||||
for value in to_process_input {
|
||||
match value_to_bson_value(&value) {
|
||||
Ok(bson_value) => {
|
||||
match bson_value_to_bytes(bson_value, name_tag) {
|
||||
match bson_value_to_bytes(bson_value, name_tag.clone()) {
|
||||
Ok(x) => yield ReturnSuccess::value(
|
||||
Value::binary(x).tagged(name_tag),
|
||||
Value::binary(x).tagged(&name_tag),
|
||||
),
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a table with BSON-compatible structure.tag() from pipeline",
|
||||
"requires BSON-compatible input",
|
||||
name_tag,
|
||||
&name_tag,
|
||||
"originates from here".to_string(),
|
||||
value.tag(),
|
||||
)),
|
||||
@ -264,7 +265,7 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
|
||||
_ => yield Err(ShellError::labeled_error(
|
||||
"Expected a table with BSON-compatible structure from pipeline",
|
||||
"requires BSON-compatible input",
|
||||
name_tag))
|
||||
&name_tag))
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -32,8 +32,8 @@ impl WholeStreamCommand for ToCSV {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn value_to_csv_value(v: &Value) -> Value {
|
||||
match v {
|
||||
pub fn value_to_csv_value(v: &Tagged<Value>) -> Tagged<Value> {
|
||||
match &v.item {
|
||||
Value::Primitive(Primitive::String(s)) => Value::Primitive(Primitive::String(s.clone())),
|
||||
Value::Primitive(Primitive::Nothing) => Value::Primitive(Primitive::Nothing),
|
||||
Value::Primitive(Primitive::Boolean(b)) => Value::Primitive(Primitive::Boolean(b.clone())),
|
||||
@ -47,10 +47,11 @@ pub fn value_to_csv_value(v: &Value) -> Value {
|
||||
Value::Block(_) => Value::Primitive(Primitive::Nothing),
|
||||
_ => Value::Primitive(Primitive::Nothing),
|
||||
}
|
||||
.tagged(v.tag.clone())
|
||||
}
|
||||
|
||||
fn to_string_helper(v: &Value) -> Result<String, ShellError> {
|
||||
match v {
|
||||
fn to_string_helper(v: &Tagged<Value>) -> Result<String, ShellError> {
|
||||
match &v.item {
|
||||
Value::Primitive(Primitive::Date(d)) => Ok(d.to_string()),
|
||||
Value::Primitive(Primitive::Bytes(b)) => Ok(format!("{}", b)),
|
||||
Value::Primitive(Primitive::Boolean(_)) => Ok(v.as_string()?),
|
||||
@ -60,7 +61,13 @@ fn to_string_helper(v: &Value) -> Result<String, ShellError> {
|
||||
Value::Table(_) => return Ok(String::from("[Table]")),
|
||||
Value::Row(_) => return Ok(String::from("[Row]")),
|
||||
Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()),
|
||||
_ => return Err(ShellError::string("Unexpected value")),
|
||||
_ => {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Unexpected value",
|
||||
"",
|
||||
v.tag.clone(),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -76,7 +83,9 @@ fn merge_descriptors(values: &[Tagged<Value>]) -> Vec<String> {
|
||||
ret
|
||||
}
|
||||
|
||||
pub fn to_string(v: &Value) -> Result<String, ShellError> {
|
||||
pub fn to_string(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
|
||||
let v = &tagged_value.item;
|
||||
|
||||
match v {
|
||||
Value::Row(o) => {
|
||||
let mut wtr = WriterBuilder::new().from_writer(vec![]);
|
||||
@ -92,11 +101,20 @@ pub fn to_string(v: &Value) -> Result<String, ShellError> {
|
||||
wtr.write_record(fields).expect("can not write.");
|
||||
wtr.write_record(values).expect("can not write.");
|
||||
|
||||
return Ok(String::from_utf8(
|
||||
wtr.into_inner()
|
||||
.map_err(|_| ShellError::string("Could not convert record"))?,
|
||||
)
|
||||
.map_err(|_| ShellError::string("Could not convert record"))?);
|
||||
return Ok(String::from_utf8(wtr.into_inner().map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
&tagged_value.tag,
|
||||
)
|
||||
})?)
|
||||
.map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
&tagged_value.tag,
|
||||
)
|
||||
})?);
|
||||
}
|
||||
Value::Table(list) => {
|
||||
let mut wtr = WriterBuilder::new().from_writer(vec![]);
|
||||
@ -120,13 +138,22 @@ pub fn to_string(v: &Value) -> Result<String, ShellError> {
|
||||
wtr.write_record(&row).expect("can not write");
|
||||
}
|
||||
|
||||
return Ok(String::from_utf8(
|
||||
wtr.into_inner()
|
||||
.map_err(|_| ShellError::string("Could not convert record"))?,
|
||||
)
|
||||
.map_err(|_| ShellError::string("Could not convert record"))?);
|
||||
return Ok(String::from_utf8(wtr.into_inner().map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
&tagged_value.tag,
|
||||
)
|
||||
})?)
|
||||
.map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
&tagged_value.tag,
|
||||
)
|
||||
})?);
|
||||
}
|
||||
_ => return to_string_helper(&v),
|
||||
_ => return to_string_helper(tagged_value),
|
||||
}
|
||||
}
|
||||
|
||||
@ -135,11 +162,11 @@ fn to_csv(
|
||||
RunnableContext { input, name, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let name_tag = name;
|
||||
let stream = async_stream_block! {
|
||||
let stream = async_stream! {
|
||||
let input: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
|
||||
let to_process_input = if input.len() > 1 {
|
||||
let tag = input[0].tag;
|
||||
let tag = input[0].tag.clone();
|
||||
vec![Tagged { item: Value::Table(input), tag } ]
|
||||
} else if input.len() == 1 {
|
||||
input
|
||||
@ -148,20 +175,20 @@ fn to_csv(
|
||||
};
|
||||
|
||||
for value in to_process_input {
|
||||
match to_string(&value_to_csv_value(&value.item)) {
|
||||
match to_string(&value_to_csv_value(&value)) {
|
||||
Ok(x) => {
|
||||
let converted = if headerless {
|
||||
x.lines().skip(1).collect()
|
||||
} else {
|
||||
x
|
||||
};
|
||||
yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(name_tag))
|
||||
yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(&name_tag))
|
||||
}
|
||||
_ => {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a table with CSV-compatible structure.tag() from pipeline",
|
||||
"requires CSV-compatible input",
|
||||
name_tag,
|
||||
&name_tag,
|
||||
"originates from here".to_string(),
|
||||
value.tag(),
|
||||
))
|
||||
|
@ -42,7 +42,7 @@ pub fn value_to_json_value(v: &Tagged<Value>) -> Result<serde_json::Value, Shell
|
||||
.unwrap(),
|
||||
),
|
||||
Value::Primitive(Primitive::Int(i)) => serde_json::Value::Number(serde_json::Number::from(
|
||||
CoerceInto::<i64>::coerce_into(i.tagged(v.tag), "converting to JSON number")?,
|
||||
CoerceInto::<i64>::coerce_into(i.tagged(&v.tag), "converting to JSON number")?,
|
||||
)),
|
||||
Value::Primitive(Primitive::Nothing) => serde_json::Value::Null,
|
||||
Value::Primitive(Primitive::Pattern(s)) => serde_json::Value::String(s.clone()),
|
||||
@ -50,6 +50,7 @@ pub fn value_to_json_value(v: &Tagged<Value>) -> Result<serde_json::Value, Shell
|
||||
Value::Primitive(Primitive::Path(s)) => serde_json::Value::String(s.display().to_string()),
|
||||
|
||||
Value::Table(l) => serde_json::Value::Array(json_list(l)?),
|
||||
Value::Error(e) => return Err(e.clone()),
|
||||
Value::Block(_) => serde_json::Value::Null,
|
||||
Value::Primitive(Primitive::Binary(b)) => serde_json::Value::Array(
|
||||
b.iter()
|
||||
@ -81,11 +82,11 @@ fn json_list(input: &Vec<Tagged<Value>>) -> Result<Vec<serde_json::Value>, Shell
|
||||
fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let name_tag = args.name_tag();
|
||||
let stream = async_stream_block! {
|
||||
let stream = async_stream! {
|
||||
let input: Vec<Tagged<Value>> = args.input.values.collect().await;
|
||||
|
||||
let to_process_input = if input.len() > 1 {
|
||||
let tag = input[0].tag;
|
||||
let tag = input[0].tag.clone();
|
||||
vec![Tagged { item: Value::Table(input), tag } ]
|
||||
} else if input.len() == 1 {
|
||||
input
|
||||
@ -98,12 +99,12 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
|
||||
Ok(json_value) => {
|
||||
match serde_json::to_string(&json_value) {
|
||||
Ok(x) => yield ReturnSuccess::value(
|
||||
Value::Primitive(Primitive::String(x)).tagged(name_tag),
|
||||
Value::Primitive(Primitive::String(x)).tagged(&name_tag),
|
||||
),
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a table with JSON-compatible structure.tag() from pipeline",
|
||||
"requires JSON-compatible input",
|
||||
name_tag,
|
||||
&name_tag,
|
||||
"originates from here".to_string(),
|
||||
value.tag(),
|
||||
)),
|
||||
@ -112,7 +113,7 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
|
||||
_ => yield Err(ShellError::labeled_error(
|
||||
"Expected a table with JSON-compatible structure from pipeline",
|
||||
"requires JSON-compatible input",
|
||||
name_tag))
|
||||
&name_tag))
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -201,7 +201,7 @@ fn sqlite_input_stream_to_bytes(
|
||||
fn to_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let name_tag = args.name_tag();
|
||||
let stream = async_stream_block! {
|
||||
let stream = async_stream! {
|
||||
let input: Vec<Tagged<Value>> = args.input.values.collect().await;
|
||||
|
||||
match sqlite_input_stream_to_bytes(input) {
|
||||
|
@ -38,10 +38,10 @@ pub fn value_to_toml_value(v: &Tagged<Value>) -> Result<toml::Value, ShellError>
|
||||
toml::Value::String("<Beginning of Stream>".to_string())
|
||||
}
|
||||
Value::Primitive(Primitive::Decimal(f)) => {
|
||||
toml::Value::Float(f.tagged(v.tag).coerce_into("converting to TOML float")?)
|
||||
toml::Value::Float(f.tagged(&v.tag).coerce_into("converting to TOML float")?)
|
||||
}
|
||||
Value::Primitive(Primitive::Int(i)) => {
|
||||
toml::Value::Integer(i.tagged(v.tag).coerce_into("converting to TOML integer")?)
|
||||
toml::Value::Integer(i.tagged(&v.tag).coerce_into("converting to TOML integer")?)
|
||||
}
|
||||
Value::Primitive(Primitive::Nothing) => toml::Value::String("<Nothing>".to_string()),
|
||||
Value::Primitive(Primitive::Pattern(s)) => toml::Value::String(s.clone()),
|
||||
@ -49,6 +49,7 @@ pub fn value_to_toml_value(v: &Tagged<Value>) -> Result<toml::Value, ShellError>
|
||||
Value::Primitive(Primitive::Path(s)) => toml::Value::String(s.display().to_string()),
|
||||
|
||||
Value::Table(l) => toml::Value::Array(collect_values(l)?),
|
||||
Value::Error(e) => return Err(e.clone()),
|
||||
Value::Block(_) => toml::Value::String("<Block>".to_string()),
|
||||
Value::Primitive(Primitive::Binary(b)) => {
|
||||
toml::Value::Array(b.iter().map(|x| toml::Value::Integer(*x as i64)).collect())
|
||||
@ -76,11 +77,11 @@ fn collect_values(input: &Vec<Tagged<Value>>) -> Result<Vec<toml::Value>, ShellE
|
||||
fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let name_tag = args.name_tag();
|
||||
let stream = async_stream_block! {
|
||||
let stream = async_stream! {
|
||||
let input: Vec<Tagged<Value>> = args.input.values.collect().await;
|
||||
|
||||
let to_process_input = if input.len() > 1 {
|
||||
let tag = input[0].tag;
|
||||
let tag = input[0].tag.clone();
|
||||
vec![Tagged { item: Value::Table(input), tag } ]
|
||||
} else if input.len() == 1 {
|
||||
input
|
||||
@ -93,12 +94,12 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
|
||||
Ok(toml_value) => {
|
||||
match toml::to_string(&toml_value) {
|
||||
Ok(x) => yield ReturnSuccess::value(
|
||||
Value::Primitive(Primitive::String(x)).tagged(name_tag),
|
||||
Value::Primitive(Primitive::String(x)).tagged(&name_tag),
|
||||
),
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a table with TOML-compatible structure.tag() from pipeline",
|
||||
"requires TOML-compatible input",
|
||||
name_tag,
|
||||
&name_tag,
|
||||
"originates from here".to_string(),
|
||||
value.tag(),
|
||||
)),
|
||||
@ -107,7 +108,7 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
|
||||
_ => yield Err(ShellError::labeled_error(
|
||||
"Expected a table with TOML-compatible structure from pipeline",
|
||||
"requires TOML-compatible input",
|
||||
name_tag))
|
||||
&name_tag))
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -32,7 +32,9 @@ impl WholeStreamCommand for ToTSV {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn value_to_tsv_value(v: &Value) -> Value {
|
||||
pub fn value_to_tsv_value(tagged_value: &Tagged<Value>) -> Tagged<Value> {
|
||||
let v = &tagged_value.item;
|
||||
|
||||
match v {
|
||||
Value::Primitive(Primitive::String(s)) => Value::Primitive(Primitive::String(s.clone())),
|
||||
Value::Primitive(Primitive::Nothing) => Value::Primitive(Primitive::Nothing),
|
||||
@ -47,20 +49,28 @@ pub fn value_to_tsv_value(v: &Value) -> Value {
|
||||
Value::Block(_) => Value::Primitive(Primitive::Nothing),
|
||||
_ => Value::Primitive(Primitive::Nothing),
|
||||
}
|
||||
.tagged(&tagged_value.tag)
|
||||
}
|
||||
|
||||
fn to_string_helper(v: &Value) -> Result<String, ShellError> {
|
||||
fn to_string_helper(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
|
||||
let v = &tagged_value.item;
|
||||
match v {
|
||||
Value::Primitive(Primitive::Date(d)) => Ok(d.to_string()),
|
||||
Value::Primitive(Primitive::Bytes(b)) => Ok(format!("{}", b)),
|
||||
Value::Primitive(Primitive::Boolean(_)) => Ok(v.as_string()?),
|
||||
Value::Primitive(Primitive::Decimal(_)) => Ok(v.as_string()?),
|
||||
Value::Primitive(Primitive::Int(_)) => Ok(v.as_string()?),
|
||||
Value::Primitive(Primitive::Path(_)) => Ok(v.as_string()?),
|
||||
Value::Primitive(Primitive::Boolean(_)) => Ok(tagged_value.as_string()?),
|
||||
Value::Primitive(Primitive::Decimal(_)) => Ok(tagged_value.as_string()?),
|
||||
Value::Primitive(Primitive::Int(_)) => Ok(tagged_value.as_string()?),
|
||||
Value::Primitive(Primitive::Path(_)) => Ok(tagged_value.as_string()?),
|
||||
Value::Table(_) => return Ok(String::from("[table]")),
|
||||
Value::Row(_) => return Ok(String::from("[row]")),
|
||||
Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()),
|
||||
_ => return Err(ShellError::string("Unexpected value")),
|
||||
_ => {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Unexpected value",
|
||||
"original value",
|
||||
&tagged_value.tag,
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -76,7 +86,9 @@ fn merge_descriptors(values: &[Tagged<Value>]) -> Vec<String> {
|
||||
ret
|
||||
}
|
||||
|
||||
pub fn to_string(v: &Value) -> Result<String, ShellError> {
|
||||
pub fn to_string(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
|
||||
let v = &tagged_value.item;
|
||||
|
||||
match v {
|
||||
Value::Row(o) => {
|
||||
let mut wtr = WriterBuilder::new().delimiter(b'\t').from_writer(vec![]);
|
||||
@ -91,11 +103,20 @@ pub fn to_string(v: &Value) -> Result<String, ShellError> {
|
||||
wtr.write_record(fields).expect("can not write.");
|
||||
wtr.write_record(values).expect("can not write.");
|
||||
|
||||
return Ok(String::from_utf8(
|
||||
wtr.into_inner()
|
||||
.map_err(|_| ShellError::string("Could not convert record"))?,
|
||||
)
|
||||
.map_err(|_| ShellError::string("Could not convert record"))?);
|
||||
return Ok(String::from_utf8(wtr.into_inner().map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
&tagged_value.tag,
|
||||
)
|
||||
})?)
|
||||
.map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
&tagged_value.tag,
|
||||
)
|
||||
})?);
|
||||
}
|
||||
Value::Table(list) => {
|
||||
let mut wtr = WriterBuilder::new().delimiter(b'\t').from_writer(vec![]);
|
||||
@ -119,13 +140,22 @@ pub fn to_string(v: &Value) -> Result<String, ShellError> {
|
||||
wtr.write_record(&row).expect("can not write");
|
||||
}
|
||||
|
||||
return Ok(String::from_utf8(
|
||||
wtr.into_inner()
|
||||
.map_err(|_| ShellError::string("Could not convert record"))?,
|
||||
)
|
||||
.map_err(|_| ShellError::string("Could not convert record"))?);
|
||||
return Ok(String::from_utf8(wtr.into_inner().map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
&tagged_value.tag,
|
||||
)
|
||||
})?)
|
||||
.map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
&tagged_value.tag,
|
||||
)
|
||||
})?);
|
||||
}
|
||||
_ => return to_string_helper(&v),
|
||||
_ => return to_string_helper(tagged_value),
|
||||
}
|
||||
}
|
||||
|
||||
@ -134,11 +164,11 @@ fn to_tsv(
|
||||
RunnableContext { input, name, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let name_tag = name;
|
||||
let stream = async_stream_block! {
|
||||
let stream = async_stream! {
|
||||
let input: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
|
||||
let to_process_input = if input.len() > 1 {
|
||||
let tag = input[0].tag;
|
||||
let tag = input[0].tag.clone();
|
||||
vec![Tagged { item: Value::Table(input), tag } ]
|
||||
} else if input.len() == 1 {
|
||||
input
|
||||
@ -147,20 +177,20 @@ fn to_tsv(
|
||||
};
|
||||
|
||||
for value in to_process_input {
|
||||
match to_string(&value_to_tsv_value(&value.item)) {
|
||||
match to_string(&value_to_tsv_value(&value)) {
|
||||
Ok(x) => {
|
||||
let converted = if headerless {
|
||||
x.lines().skip(1).collect()
|
||||
} else {
|
||||
x
|
||||
};
|
||||
yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(name_tag))
|
||||
yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(&name_tag))
|
||||
}
|
||||
_ => {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a table with TSV-compatible structure.tag() from pipeline",
|
||||
"requires TSV-compatible input",
|
||||
name_tag,
|
||||
&name_tag,
|
||||
"originates from here".to_string(),
|
||||
value.tag(),
|
||||
))
|
||||
|
@ -31,7 +31,7 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
|
||||
let tag = args.name_tag();
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream_block! {
|
||||
let stream = async_stream! {
|
||||
let input: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
|
||||
for value in input {
|
||||
@ -47,7 +47,7 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected table with string values",
|
||||
"requires table with strings",
|
||||
tag,
|
||||
&tag,
|
||||
"value originates from here",
|
||||
v.tag,
|
||||
))
|
||||
@ -57,13 +57,13 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
|
||||
|
||||
match serde_urlencoded::to_string(row_vec) {
|
||||
Ok(s) => {
|
||||
yield ReturnSuccess::value(Value::string(s).tagged(tag));
|
||||
yield ReturnSuccess::value(Value::string(s).tagged(&tag));
|
||||
}
|
||||
_ => {
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Failed to convert to url-encoded",
|
||||
"cannot url-encode",
|
||||
tag,
|
||||
&tag,
|
||||
))
|
||||
}
|
||||
}
|
||||
@ -72,7 +72,7 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a table from pipeline",
|
||||
"requires table input",
|
||||
tag,
|
||||
&tag,
|
||||
"value originates from here",
|
||||
value_tag,
|
||||
))
|
||||
|
@ -39,7 +39,7 @@ pub fn value_to_yaml_value(v: &Tagged<Value>) -> Result<serde_yaml::Value, Shell
|
||||
serde_yaml::Value::Number(serde_yaml::Number::from(f.to_f64().unwrap()))
|
||||
}
|
||||
Value::Primitive(Primitive::Int(i)) => serde_yaml::Value::Number(serde_yaml::Number::from(
|
||||
CoerceInto::<i64>::coerce_into(i.tagged(v.tag), "converting to YAML number")?,
|
||||
CoerceInto::<i64>::coerce_into(i.tagged(&v.tag), "converting to YAML number")?,
|
||||
)),
|
||||
Value::Primitive(Primitive::Nothing) => serde_yaml::Value::Null,
|
||||
Value::Primitive(Primitive::Pattern(s)) => serde_yaml::Value::String(s.clone()),
|
||||
@ -55,6 +55,7 @@ pub fn value_to_yaml_value(v: &Tagged<Value>) -> Result<serde_yaml::Value, Shell
|
||||
|
||||
serde_yaml::Value::Sequence(out)
|
||||
}
|
||||
Value::Error(e) => return Err(e.clone()),
|
||||
Value::Block(_) => serde_yaml::Value::Null,
|
||||
Value::Primitive(Primitive::Binary(b)) => serde_yaml::Value::Sequence(
|
||||
b.iter()
|
||||
@ -77,11 +78,11 @@ pub fn value_to_yaml_value(v: &Tagged<Value>) -> Result<serde_yaml::Value, Shell
|
||||
fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let name_tag = args.name_tag();
|
||||
let stream = async_stream_block! {
|
||||
let stream = async_stream! {
|
||||
let input: Vec<Tagged<Value>> = args.input.values.collect().await;
|
||||
|
||||
let to_process_input = if input.len() > 1 {
|
||||
let tag = input[0].tag;
|
||||
let tag = input[0].tag.clone();
|
||||
vec![Tagged { item: Value::Table(input), tag } ]
|
||||
} else if input.len() == 1 {
|
||||
input
|
||||
@ -94,12 +95,12 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
|
||||
Ok(yaml_value) => {
|
||||
match serde_yaml::to_string(&yaml_value) {
|
||||
Ok(x) => yield ReturnSuccess::value(
|
||||
Value::Primitive(Primitive::String(x)).tagged(name_tag),
|
||||
Value::Primitive(Primitive::String(x)).tagged(&name_tag),
|
||||
),
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a table with YAML-compatible structure.tag() from pipeline",
|
||||
"requires YAML-compatible input",
|
||||
name_tag,
|
||||
&name_tag,
|
||||
"originates from here".to_string(),
|
||||
value.tag(),
|
||||
)),
|
||||
@ -108,7 +109,7 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
|
||||
_ => yield Err(ShellError::labeled_error(
|
||||
"Expected a table with YAML-compatible structure from pipeline",
|
||||
"requires YAML-compatible input",
|
||||
name_tag))
|
||||
&name_tag))
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -31,14 +31,14 @@ impl WholeStreamCommand for Version {
|
||||
|
||||
pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let tag = args.call_info.name_tag;
|
||||
let tag = args.call_info.name_tag.clone();
|
||||
|
||||
let mut indexmap = IndexMap::new();
|
||||
indexmap.insert(
|
||||
"version".to_string(),
|
||||
Value::string(clap::crate_version!()).tagged(tag),
|
||||
Value::string(clap::crate_version!()).tagged(&tag),
|
||||
);
|
||||
|
||||
let value = Value::Row(Dictionary::from(indexmap)).tagged(tag);
|
||||
let value = Value::Row(Dictionary::from(indexmap)).tagged(&tag);
|
||||
Ok(OutputStream::one(value))
|
||||
}
|
||||
|
@ -49,7 +49,7 @@ impl PerItemCommand for Where {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Expected a condition",
|
||||
"where needs a condition",
|
||||
*tag,
|
||||
tag,
|
||||
))
|
||||
}
|
||||
};
|
||||
|
@ -33,7 +33,7 @@ pub fn which(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
|
||||
let args = args.evaluate_once(registry)?;
|
||||
|
||||
let mut which_out = VecDeque::new();
|
||||
let tag = args.call_info.name_tag;
|
||||
let tag = args.call_info.name_tag.clone();
|
||||
|
||||
if let Some(v) = &args.call_info.args.positional {
|
||||
if v.len() > 0 {
|
||||
@ -52,7 +52,7 @@ pub fn which(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
|
||||
return Err(ShellError::labeled_error(
|
||||
"Expected a filename to find",
|
||||
"needs a filename",
|
||||
*tag,
|
||||
tag,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
@ -1,39 +1,20 @@
|
||||
use crate::commands::{Command, UnevaluatedCallInfo};
|
||||
use crate::parser::hir;
|
||||
use crate::parser::{hir, hir::syntax_shape::ExpandContext};
|
||||
use crate::prelude::*;
|
||||
|
||||
use derive_new::new;
|
||||
use indexmap::IndexMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::error::Error;
|
||||
use std::sync::Arc;
|
||||
use uuid::Uuid;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub enum SpanSource {
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub enum AnchorLocation {
|
||||
Url(String),
|
||||
File(String),
|
||||
Source(Text),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct SourceMap(HashMap<Uuid, SpanSource>);
|
||||
|
||||
impl SourceMap {
|
||||
pub fn insert(&mut self, uuid: Uuid, span_source: SpanSource) {
|
||||
self.0.insert(uuid, span_source);
|
||||
}
|
||||
|
||||
pub fn get(&self, uuid: &Uuid) -> Option<&SpanSource> {
|
||||
self.0.get(uuid)
|
||||
}
|
||||
|
||||
pub fn new() -> SourceMap {
|
||||
SourceMap(HashMap::new())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, new)]
|
||||
pub struct CommandRegistry {
|
||||
#[new(value = "Arc::new(Mutex::new(IndexMap::default()))")]
|
||||
@ -53,13 +34,17 @@ impl CommandRegistry {
|
||||
registry.get(name).map(|c| c.clone())
|
||||
}
|
||||
|
||||
pub(crate) fn expect_command(&self, name: &str) -> Arc<Command> {
|
||||
self.get_command(name).unwrap()
|
||||
}
|
||||
|
||||
pub(crate) fn has(&self, name: &str) -> bool {
|
||||
let registry = self.registry.lock().unwrap();
|
||||
|
||||
registry.contains_key(name)
|
||||
}
|
||||
|
||||
fn insert(&mut self, name: impl Into<String>, command: Arc<Command>) {
|
||||
pub(crate) fn insert(&mut self, name: impl Into<String>, command: Arc<Command>) {
|
||||
let mut registry = self.registry.lock().unwrap();
|
||||
registry.insert(name.into(), command);
|
||||
}
|
||||
@ -73,8 +58,8 @@ impl CommandRegistry {
|
||||
#[derive(Clone)]
|
||||
pub struct Context {
|
||||
registry: CommandRegistry,
|
||||
pub(crate) source_map: SourceMap,
|
||||
host: Arc<Mutex<dyn Host + Send>>,
|
||||
pub ctrl_c: Arc<AtomicBool>,
|
||||
pub(crate) shell_manager: ShellManager,
|
||||
}
|
||||
|
||||
@ -83,12 +68,20 @@ impl Context {
|
||||
&self.registry
|
||||
}
|
||||
|
||||
pub(crate) fn expand_context<'context>(
|
||||
&'context self,
|
||||
source: &'context Text,
|
||||
span: Span,
|
||||
) -> ExpandContext<'context> {
|
||||
ExpandContext::new(&self.registry, span, source, self.shell_manager.homedir())
|
||||
}
|
||||
|
||||
pub(crate) fn basic() -> Result<Context, Box<dyn Error>> {
|
||||
let registry = CommandRegistry::new();
|
||||
Ok(Context {
|
||||
registry: registry.clone(),
|
||||
source_map: SourceMap::new(),
|
||||
host: Arc::new(Mutex::new(crate::env::host::BasicHost)),
|
||||
ctrl_c: Arc::new(AtomicBool::new(false)),
|
||||
shell_manager: ShellManager::basic(registry)?,
|
||||
})
|
||||
}
|
||||
@ -105,43 +98,31 @@ impl Context {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_span_source(&mut self, uuid: Uuid, span_source: SpanSource) {
|
||||
self.source_map.insert(uuid, span_source);
|
||||
pub(crate) fn get_command(&self, name: &str) -> Option<Arc<Command>> {
|
||||
self.registry.get_command(name)
|
||||
}
|
||||
|
||||
pub(crate) fn has_command(&self, name: &str) -> bool {
|
||||
self.registry.has(name)
|
||||
}
|
||||
|
||||
pub(crate) fn get_command(&self, name: &str) -> Arc<Command> {
|
||||
self.registry.get_command(name).unwrap()
|
||||
pub(crate) fn expect_command(&self, name: &str) -> Arc<Command> {
|
||||
self.registry.expect_command(name)
|
||||
}
|
||||
|
||||
pub(crate) fn run_command<'a>(
|
||||
&mut self,
|
||||
command: Arc<Command>,
|
||||
name_tag: Tag,
|
||||
source_map: SourceMap,
|
||||
args: hir::Call,
|
||||
source: &Text,
|
||||
input: InputStream,
|
||||
is_first_command: bool,
|
||||
) -> OutputStream {
|
||||
let command_args = self.command_args(args, input, source, source_map, name_tag);
|
||||
let command_args = self.command_args(args, input, source, name_tag);
|
||||
command.run(command_args, self.registry(), is_first_command)
|
||||
}
|
||||
|
||||
fn call_info(
|
||||
&self,
|
||||
args: hir::Call,
|
||||
source: &Text,
|
||||
source_map: SourceMap,
|
||||
name_tag: Tag,
|
||||
) -> UnevaluatedCallInfo {
|
||||
fn call_info(&self, args: hir::Call, source: &Text, name_tag: Tag) -> UnevaluatedCallInfo {
|
||||
UnevaluatedCallInfo {
|
||||
args,
|
||||
source: source.clone(),
|
||||
source_map,
|
||||
name_tag,
|
||||
}
|
||||
}
|
||||
@ -151,13 +132,13 @@ impl Context {
|
||||
args: hir::Call,
|
||||
input: InputStream,
|
||||
source: &Text,
|
||||
source_map: SourceMap,
|
||||
name_tag: Tag,
|
||||
) -> CommandArgs {
|
||||
CommandArgs {
|
||||
host: self.host.clone(),
|
||||
ctrl_c: self.ctrl_c.clone(),
|
||||
shell_manager: self.shell_manager.clone(),
|
||||
call_info: self.call_info(args, source, source_map, name_tag),
|
||||
call_info: self.call_info(args, source, name_tag),
|
||||
input,
|
||||
}
|
||||
}
|
||||
|
222
src/data/base.rs
222
src/data/base.rs
@ -8,6 +8,7 @@ use crate::Text;
|
||||
use chrono::{DateTime, Utc};
|
||||
use chrono_humanize::Humanize;
|
||||
use derive_new::new;
|
||||
use log::trace;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fmt;
|
||||
use std::path::PathBuf;
|
||||
@ -212,11 +213,19 @@ impl Block {
|
||||
let scope = Scope::new(value.clone());
|
||||
|
||||
if self.expressions.len() == 0 {
|
||||
return Ok(Value::nothing().tagged(self.tag));
|
||||
return Ok(Value::nothing().tagged(&self.tag));
|
||||
}
|
||||
|
||||
let mut last = None;
|
||||
|
||||
trace!(
|
||||
"EXPRS = {:?}",
|
||||
self.expressions
|
||||
.iter()
|
||||
.map(|e| format!("{}", e))
|
||||
.collect::<Vec<_>>()
|
||||
);
|
||||
|
||||
for expr in self.expressions.iter() {
|
||||
last = Some(evaluate_baseline_expr(
|
||||
&expr,
|
||||
@ -236,6 +245,9 @@ pub enum Value {
|
||||
Row(crate::data::Dictionary),
|
||||
Table(Vec<Tagged<Value>>),
|
||||
|
||||
// Errors are a type of value too
|
||||
Error(ShellError),
|
||||
|
||||
Block(Block),
|
||||
}
|
||||
|
||||
@ -284,14 +296,15 @@ impl fmt::Debug for ValueDebug<'_> {
|
||||
Value::Row(o) => o.debug(f),
|
||||
Value::Table(l) => debug_list(l).fmt(f),
|
||||
Value::Block(_) => write!(f, "[[block]]"),
|
||||
Value::Error(_) => write!(f, "[[error]]"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Tagged<Value> {
|
||||
pub(crate) fn tagged_type_name(&self) -> Tagged<String> {
|
||||
pub fn tagged_type_name(&self) -> Tagged<String> {
|
||||
let name = self.type_name();
|
||||
Tagged::from_item(name, self.tag())
|
||||
name.tagged(self.tag())
|
||||
}
|
||||
}
|
||||
|
||||
@ -303,7 +316,7 @@ impl std::convert::TryFrom<&Tagged<Value>> for Block {
|
||||
Value::Block(block) => Ok(block.clone()),
|
||||
v => Err(ShellError::type_error(
|
||||
"Block",
|
||||
value.copy_tag(v.type_name()),
|
||||
v.type_name().tagged(value.tag()),
|
||||
)),
|
||||
}
|
||||
}
|
||||
@ -315,11 +328,11 @@ impl std::convert::TryFrom<&Tagged<Value>> for i64 {
|
||||
fn try_from(value: &Tagged<Value>) -> Result<i64, ShellError> {
|
||||
match value.item() {
|
||||
Value::Primitive(Primitive::Int(int)) => {
|
||||
int.tagged(value.tag).coerce_into("converting to i64")
|
||||
int.tagged(&value.tag).coerce_into("converting to i64")
|
||||
}
|
||||
v => Err(ShellError::type_error(
|
||||
"Integer",
|
||||
value.copy_tag(v.type_name()),
|
||||
v.type_name().tagged(value.tag()),
|
||||
)),
|
||||
}
|
||||
}
|
||||
@ -333,7 +346,7 @@ impl std::convert::TryFrom<&Tagged<Value>> for String {
|
||||
Value::Primitive(Primitive::String(s)) => Ok(s.clone()),
|
||||
v => Err(ShellError::type_error(
|
||||
"String",
|
||||
value.copy_tag(v.type_name()),
|
||||
v.type_name().tagged(value.tag()),
|
||||
)),
|
||||
}
|
||||
}
|
||||
@ -347,7 +360,7 @@ impl std::convert::TryFrom<&Tagged<Value>> for Vec<u8> {
|
||||
Value::Primitive(Primitive::Binary(b)) => Ok(b.clone()),
|
||||
v => Err(ShellError::type_error(
|
||||
"Binary",
|
||||
value.copy_tag(v.type_name()),
|
||||
v.type_name().tagged(value.tag()),
|
||||
)),
|
||||
}
|
||||
}
|
||||
@ -361,7 +374,7 @@ impl<'a> std::convert::TryFrom<&'a Tagged<Value>> for &'a crate::data::Dictionar
|
||||
Value::Row(d) => Ok(d),
|
||||
v => Err(ShellError::type_error(
|
||||
"Dictionary",
|
||||
value.copy_tag(v.type_name()),
|
||||
v.type_name().tagged(value.tag()),
|
||||
)),
|
||||
}
|
||||
}
|
||||
@ -383,7 +396,7 @@ impl std::convert::TryFrom<Option<&Tagged<Value>>> for Switch {
|
||||
Value::Primitive(Primitive::Boolean(true)) => Ok(Switch::Present),
|
||||
v => Err(ShellError::type_error(
|
||||
"Boolean",
|
||||
value.copy_tag(v.type_name()),
|
||||
v.type_name().tagged(value.tag()),
|
||||
)),
|
||||
},
|
||||
}
|
||||
@ -394,15 +407,54 @@ impl Tagged<Value> {
|
||||
pub(crate) fn debug(&self) -> ValueDebug<'_> {
|
||||
ValueDebug { value: self }
|
||||
}
|
||||
|
||||
pub fn as_column_path(&self) -> Result<Tagged<Vec<Tagged<String>>>, ShellError> {
|
||||
let mut out: Vec<Tagged<String>> = vec![];
|
||||
|
||||
match &self.item {
|
||||
Value::Table(table) => {
|
||||
for item in table {
|
||||
out.push(item.as_string()?.tagged(&item.tag));
|
||||
}
|
||||
}
|
||||
|
||||
other => {
|
||||
return Err(ShellError::type_error(
|
||||
"column name",
|
||||
other.type_name().tagged(&self.tag),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
Ok(out.tagged(&self.tag))
|
||||
}
|
||||
|
||||
pub(crate) fn as_string(&self) -> Result<String, ShellError> {
|
||||
match &self.item {
|
||||
Value::Primitive(Primitive::String(s)) => Ok(s.clone()),
|
||||
Value::Primitive(Primitive::Boolean(x)) => Ok(format!("{}", x)),
|
||||
Value::Primitive(Primitive::Decimal(x)) => Ok(format!("{}", x)),
|
||||
Value::Primitive(Primitive::Int(x)) => Ok(format!("{}", x)),
|
||||
Value::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)),
|
||||
Value::Primitive(Primitive::Path(x)) => Ok(format!("{}", x.display())),
|
||||
// TODO: this should definitely be more general with better errors
|
||||
other => Err(ShellError::labeled_error(
|
||||
"Expected string",
|
||||
other.type_name(),
|
||||
&self.tag,
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Value {
|
||||
pub(crate) fn type_name(&self) -> String {
|
||||
pub fn type_name(&self) -> String {
|
||||
match self {
|
||||
Value::Primitive(p) => p.type_name(),
|
||||
Value::Row(_) => format!("object"),
|
||||
Value::Row(_) => format!("row"),
|
||||
Value::Table(_) => format!("list"),
|
||||
Value::Block(_) => format!("block"),
|
||||
Value::Error(_) => format!("error"),
|
||||
}
|
||||
}
|
||||
|
||||
@ -418,6 +470,7 @@ impl Value {
|
||||
.collect(),
|
||||
Value::Block(_) => vec![],
|
||||
Value::Table(_) => vec![],
|
||||
Value::Error(_) => vec![],
|
||||
}
|
||||
}
|
||||
|
||||
@ -443,6 +496,22 @@ impl Value {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_data_by_column_path(
|
||||
&self,
|
||||
tag: Tag,
|
||||
path: &Vec<Tagged<String>>,
|
||||
) -> Option<Tagged<&Value>> {
|
||||
let mut current = self;
|
||||
for p in path {
|
||||
match current.get_data_by_key(p) {
|
||||
Some(v) => current = v,
|
||||
None => return None,
|
||||
}
|
||||
}
|
||||
|
||||
Some(current.tagged(tag))
|
||||
}
|
||||
|
||||
pub fn get_data_by_path(&self, tag: Tag, path: &str) -> Option<Tagged<&Value>> {
|
||||
let mut current = self;
|
||||
for p in path.split(".") {
|
||||
@ -452,7 +521,7 @@ impl Value {
|
||||
}
|
||||
}
|
||||
|
||||
Some(Tagged::from_item(current, tag))
|
||||
Some(current.tagged(tag))
|
||||
}
|
||||
|
||||
pub fn insert_data_at_path(
|
||||
@ -472,8 +541,8 @@ impl Value {
|
||||
// Special case for inserting at the top level
|
||||
current
|
||||
.entries
|
||||
.insert(path.to_string(), Tagged::from_item(new_value, tag));
|
||||
return Some(Tagged::from_item(new_obj, tag));
|
||||
.insert(path.to_string(), new_value.tagged(&tag));
|
||||
return Some(new_obj.tagged(&tag));
|
||||
}
|
||||
|
||||
for idx in 0..split_path.len() {
|
||||
@ -484,13 +553,64 @@ impl Value {
|
||||
Value::Row(o) => {
|
||||
o.entries.insert(
|
||||
split_path[idx + 1].to_string(),
|
||||
Tagged::from_item(new_value, tag),
|
||||
new_value.tagged(&tag),
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
return Some(Tagged::from_item(new_obj, tag));
|
||||
return Some(new_obj.tagged(&tag));
|
||||
} else {
|
||||
match next.item {
|
||||
Value::Row(ref mut o) => {
|
||||
current = o;
|
||||
}
|
||||
_ => return None,
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => return None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn insert_data_at_column_path(
|
||||
&self,
|
||||
tag: Tag,
|
||||
split_path: &Vec<Tagged<String>>,
|
||||
new_value: Value,
|
||||
) -> Option<Tagged<Value>> {
|
||||
let mut new_obj = self.clone();
|
||||
|
||||
if let Value::Row(ref mut o) = new_obj {
|
||||
let mut current = o;
|
||||
|
||||
if split_path.len() == 1 {
|
||||
// Special case for inserting at the top level
|
||||
current
|
||||
.entries
|
||||
.insert(split_path[0].item.clone(), new_value.tagged(&tag));
|
||||
return Some(new_obj.tagged(&tag));
|
||||
}
|
||||
|
||||
for idx in 0..split_path.len() {
|
||||
match current.entries.get_mut(&split_path[idx].item) {
|
||||
Some(next) => {
|
||||
if idx == (split_path.len() - 2) {
|
||||
match &mut next.item {
|
||||
Value::Row(o) => {
|
||||
o.entries.insert(
|
||||
split_path[idx + 1].to_string(),
|
||||
new_value.tagged(&tag),
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
return Some(new_obj.tagged(&tag));
|
||||
} else {
|
||||
match next.item {
|
||||
Value::Row(ref mut o) => {
|
||||
@ -524,8 +644,41 @@ impl Value {
|
||||
match current.entries.get_mut(split_path[idx]) {
|
||||
Some(next) => {
|
||||
if idx == (split_path.len() - 1) {
|
||||
*next = Tagged::from_item(replaced_value, tag);
|
||||
return Some(Tagged::from_item(new_obj, tag));
|
||||
*next = replaced_value.tagged(&tag);
|
||||
return Some(new_obj.tagged(&tag));
|
||||
} else {
|
||||
match next.item {
|
||||
Value::Row(ref mut o) => {
|
||||
current = o;
|
||||
}
|
||||
_ => return None,
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => return None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn replace_data_at_column_path(
|
||||
&self,
|
||||
tag: Tag,
|
||||
split_path: &Vec<Tagged<String>>,
|
||||
replaced_value: Value,
|
||||
) -> Option<Tagged<Value>> {
|
||||
let mut new_obj = self.clone();
|
||||
|
||||
if let Value::Row(ref mut o) = new_obj {
|
||||
let mut current = o;
|
||||
for idx in 0..split_path.len() {
|
||||
match current.entries.get_mut(&split_path[idx].item) {
|
||||
Some(next) => {
|
||||
if idx == (split_path.len() - 1) {
|
||||
*next = replaced_value.tagged(&tag);
|
||||
return Some(new_obj.tagged(&tag));
|
||||
} else {
|
||||
match next.item {
|
||||
Value::Row(ref mut o) => {
|
||||
@ -549,6 +702,7 @@ impl Value {
|
||||
Value::Row(o) => o.get_data(desc),
|
||||
Value::Block(_) => MaybeOwned::Owned(Value::nothing()),
|
||||
Value::Table(_) => MaybeOwned::Owned(Value::nothing()),
|
||||
Value::Error(_) => MaybeOwned::Owned(Value::nothing()),
|
||||
}
|
||||
}
|
||||
|
||||
@ -558,7 +712,7 @@ impl Value {
|
||||
Value::Block(b) => itertools::join(
|
||||
b.expressions
|
||||
.iter()
|
||||
.map(|e| e.source(&b.source).to_string()),
|
||||
.map(|e| e.span.slice(&b.source).to_string()),
|
||||
"; ",
|
||||
),
|
||||
Value::Row(_) => format!("[table: 1 row]"),
|
||||
@ -567,6 +721,7 @@ impl Value {
|
||||
l.len(),
|
||||
if l.len() == 1 { "row" } else { "rows" }
|
||||
),
|
||||
Value::Error(_) => format!("[error]"),
|
||||
}
|
||||
}
|
||||
|
||||
@ -607,22 +762,6 @@ impl Value {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn as_string(&self) -> Result<String, ShellError> {
|
||||
match self {
|
||||
Value::Primitive(Primitive::String(s)) => Ok(s.clone()),
|
||||
Value::Primitive(Primitive::Boolean(x)) => Ok(format!("{}", x)),
|
||||
Value::Primitive(Primitive::Decimal(x)) => Ok(format!("{}", x)),
|
||||
Value::Primitive(Primitive::Int(x)) => Ok(format!("{}", x)),
|
||||
Value::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)),
|
||||
Value::Primitive(Primitive::Path(x)) => Ok(format!("{}", x.display())),
|
||||
// TODO: this should definitely be more general with better errors
|
||||
other => Err(ShellError::string(format!(
|
||||
"Expected string, got {:?}",
|
||||
other
|
||||
))),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn is_true(&self) -> bool {
|
||||
match self {
|
||||
Value::Primitive(Primitive::Boolean(true)) => true,
|
||||
@ -675,9 +814,14 @@ impl Value {
|
||||
Value::Primitive(Primitive::Date(s.into()))
|
||||
}
|
||||
|
||||
pub fn date_from_str(s: &str) -> Result<Value, ShellError> {
|
||||
let date = DateTime::parse_from_rfc3339(s)
|
||||
.map_err(|err| ShellError::string(&format!("Date parse error: {}", err)))?;
|
||||
pub fn date_from_str(s: Tagged<&str>) -> Result<Value, ShellError> {
|
||||
let date = DateTime::parse_from_rfc3339(s.item).map_err(|err| {
|
||||
ShellError::labeled_error(
|
||||
&format!("Date parse error: {}", err),
|
||||
"original value",
|
||||
s.tag,
|
||||
)
|
||||
})?;
|
||||
|
||||
let date = date.with_timezone(&chrono::offset::Utc);
|
||||
|
||||
|
@ -7,7 +7,7 @@ use std::ops::Deref;
|
||||
pub(crate) fn command_dict(command: Arc<Command>, tag: impl Into<Tag>) -> Tagged<Value> {
|
||||
let tag = tag.into();
|
||||
|
||||
let mut cmd_dict = TaggedDictBuilder::new(tag);
|
||||
let mut cmd_dict = TaggedDictBuilder::new(&tag);
|
||||
|
||||
cmd_dict.insert("name", Value::string(command.name()));
|
||||
|
||||
@ -42,7 +42,7 @@ fn for_spec(name: &str, ty: &str, required: bool, tag: impl Into<Tag>) -> Tagged
|
||||
|
||||
fn signature_dict(signature: Signature, tag: impl Into<Tag>) -> Tagged<Value> {
|
||||
let tag = tag.into();
|
||||
let mut sig = TaggedListBuilder::new(tag);
|
||||
let mut sig = TaggedListBuilder::new(&tag);
|
||||
|
||||
for arg in signature.positional.iter() {
|
||||
let is_required = match arg {
|
||||
@ -50,19 +50,19 @@ fn signature_dict(signature: Signature, tag: impl Into<Tag>) -> Tagged<Value> {
|
||||
PositionalType::Optional(_, _) => false,
|
||||
};
|
||||
|
||||
sig.insert_tagged(for_spec(arg.name(), "argument", is_required, tag));
|
||||
sig.insert_tagged(for_spec(arg.name(), "argument", is_required, &tag));
|
||||
}
|
||||
|
||||
if let Some(_) = signature.rest_positional {
|
||||
let is_required = false;
|
||||
sig.insert_tagged(for_spec("rest", "argument", is_required, tag));
|
||||
sig.insert_tagged(for_spec("rest", "argument", is_required, &tag));
|
||||
}
|
||||
|
||||
for (name, ty) in signature.named.iter() {
|
||||
match ty {
|
||||
NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, tag)),
|
||||
NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, tag)),
|
||||
NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, tag)),
|
||||
NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, &tag)),
|
||||
NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, &tag)),
|
||||
NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, &tag)),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -51,8 +51,9 @@ pub fn user_data() -> Result<PathBuf, ShellError> {
|
||||
}
|
||||
|
||||
pub fn app_path(app_data_type: AppDataType, display: &str) -> Result<PathBuf, ShellError> {
|
||||
let path = app_root(app_data_type, &APP_INFO)
|
||||
.map_err(|err| ShellError::string(&format!("Couldn't open {} path:\n{}", display, err)))?;
|
||||
let path = app_root(app_data_type, &APP_INFO).map_err(|err| {
|
||||
ShellError::untagged_runtime_error(&format!("Couldn't open {} path:\n{}", display, err))
|
||||
})?;
|
||||
|
||||
Ok(path)
|
||||
}
|
||||
@ -74,11 +75,22 @@ pub fn read(
|
||||
|
||||
let tag = tag.into();
|
||||
let contents = fs::read_to_string(filename)
|
||||
.map(|v| v.tagged(tag))
|
||||
.map_err(|err| ShellError::string(&format!("Couldn't read config file:\n{}", err)))?;
|
||||
.map(|v| v.tagged(&tag))
|
||||
.map_err(|err| {
|
||||
ShellError::labeled_error(
|
||||
&format!("Couldn't read config file:\n{}", err),
|
||||
"file name",
|
||||
&tag,
|
||||
)
|
||||
})?;
|
||||
|
||||
let parsed: toml::Value = toml::from_str(&contents)
|
||||
.map_err(|err| ShellError::string(&format!("Couldn't parse config file:\n{}", err)))?;
|
||||
let parsed: toml::Value = toml::from_str(&contents).map_err(|err| {
|
||||
ShellError::labeled_error(
|
||||
&format!("Couldn't parse config file:\n{}", err),
|
||||
"file name",
|
||||
&tag,
|
||||
)
|
||||
})?;
|
||||
|
||||
let value = convert_toml_value_to_nu_value(&parsed, tag);
|
||||
let tag = value.tag();
|
||||
@ -86,7 +98,7 @@ pub fn read(
|
||||
Value::Row(Dictionary { entries }) => Ok(entries),
|
||||
other => Err(ShellError::type_error(
|
||||
"Dictionary",
|
||||
other.type_name().tagged(tag),
|
||||
other.type_name().tagged(&tag),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user