Mirror of https://github.com/nushell/nushell.git (synced 2025-07-02 15:40:38 +02:00)

Compare commits
562 Commits
Author | SHA1 | Date (only the abbreviated SHA1 column is populated; the listing runs from a6f62e05ae through 057de06613)
.gitattributes (new file, 2 lines)

# Example of a `.gitattributes` file which reclassifies `.nu` files as Nushell:
*.nu linguist-language=Nushell
.typos.toml → .github/.typos.toml (renamed, 1 line added)
@@ -10,3 +10,4 @@ ba = "ba"
 Plasticos = "Plasticos"
 IIF = "IIF"
 numer = "numer"
+ratatui = "ratatui"
.github/AUTO_ISSUE_TEMPLATE/README.md (new file, 1 line)

This directory is intended for templates to automatically create issues with the [create-an-issue](https://github.com/JasonEtco/create-an-issue) action.
.github/AUTO_ISSUE_TEMPLATE/nightly-build-fail.md (new file, 16 lines)

---
name: Nightly build of release binaries failed
about: Used to submit issues related to binaries release workflow
title: 'Attention: Nightly build of release binaries failed'
labels: ['build-package', 'priority']
assignees: ''

---

**Nightly build of release binaries failed**

Hi there:

If you see me here that means there is a release failure for the nightly build

Please **click the status badge** to see more details: [](https://github.com/nushell/nushell/actions/workflows/nightly-build.yml)
.github/pull_request_template.md (14 lines changed)
@@ -1,3 +1,11 @@
+<!--
+if this PR closes one or more issues, you can automatically link the PR with
+them by using one of the [*linking keywords*](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword), e.g.
+- this PR should close #xxxx
+- fixes #xxxx
+
+you can also mention related issues, PRs or discussions!
+-->
+
 # Description
 <!--
@@ -16,9 +24,9 @@ Don't forget to add tests that cover your changes.
 Make sure you've run and fixed any issues with these commands:
 
 - `cargo fmt --all -- --check` to check standard code formatting (`cargo fmt --all` applies these changes)
-- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect -A clippy::result_large_err` to check that you're using the standard code style
+- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used` to check that you're using the standard code style
-- `cargo test --workspace` to check that all tests pass
+- `cargo test --workspace` to check that all tests pass (on Windows make sure to [enable developer mode](https://learn.microsoft.com/en-us/windows/apps/get-started/developer-mode-features-and-debugging))
-- `cargo run -- crates/nu-std/tests/run.nu` to run the tests for the standard library
+- `cargo run -- -c "use std testing; testing run-tests --path crates/nu-std"` to run the tests for the standard library
 
 > **Note**
 > from `nushell` you can also use the `toolkit` as follows
.github/workflows/audit.yml (new file, 25 lines)

name: Security audit
on:
  pull_request:
    paths:
      - '**/Cargo.toml'
      - '**/Cargo.lock'
  push:
    branches:
      - main

env:
  RUST_BACKTRACE: 1
  CARGO_TERM_COLOR: always
  CLICOLOR: 1

jobs:
  security_audit:
    runs-on: ubuntu-latest
    # Prevent sudden announcement of a new advisory from failing ci:
    continue-on-error: true
    steps:
      - uses: actions/checkout@v4
      - uses: rustsec/audit-check@v1.4.1
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/ci.yml (161 lines changed)
@@ -1,13 +1,18 @@
 on:
   pull_request:
-  push: # Run CI on the main branch after every merge. This is important to fill the GitHub Actions cache in a way that pull requests can see it
+  push:
     branches:
       - main
 
 name: continuous-integration
 
+env:
+  NUSHELL_CARGO_PROFILE: ci
+  NU_LOG_LEVEL: DEBUG
+  CLIPPY_OPTIONS: "-D warnings -D clippy::unwrap_used"
+
 jobs:
-  nu-fmt-clippy:
+  fmt-clippy:
     strategy:
       fail-fast: true
       matrix:
@@ -15,193 +20,131 @@ jobs:
         # builds to link against a too-new-for-many-Linux-installs glibc version. Consider
         # revisiting this when 20.04 is closer to EOL (April 2025)
         platform: [windows-latest, macos-latest, ubuntu-20.04]
-        style: [default, dataframe]
-        rust:
-          - stable
+        feature: [default, dataframe, extra]
         include:
-          - style: default
+          - feature: default
             flags: ""
-          - style: dataframe
-            flags: "--features=dataframe "
+          - feature: dataframe
+            flags: "--features=dataframe"
+          - feature: extra
+            flags: "--features=extra"
         exclude:
-          # only test dataframes on Ubuntu (the fastest platform)
           - platform: windows-latest
-            style: dataframe
+            feature: dataframe
           - platform: macos-latest
-            style: dataframe
+            feature: dataframe
 
     runs-on: ${{ matrix.platform }}
-    env:
-      NUSHELL_CARGO_TARGET: ci
 
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
 
       - name: Setup Rust toolchain and cache
-        uses: actions-rust-lang/setup-rust-toolchain@v1.4.4
+        uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
+        with:
+          rustflags: ""
 
       - name: cargo fmt
         run: cargo fmt --all -- --check
 
       - name: Clippy
-        run: cargo clippy --workspace ${{ matrix.flags }}--exclude nu_plugin_* -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect -A clippy::result_large_err
+        run: cargo clippy --workspace ${{ matrix.flags }} --exclude nu_plugin_* -- $CLIPPY_OPTIONS
 
-  nu-tests:
-    env:
-      NUSHELL_CARGO_TARGET: ci
+      # In tests we don't have to deny unwrap
+      - name: Clippy of tests
+        run: cargo clippy --tests --workspace ${{ matrix.flags }} --exclude nu_plugin_* -- -D warnings
 
+  tests:
     strategy:
       fail-fast: true
       matrix:
         platform: [windows-latest, macos-latest, ubuntu-20.04]
-        style: [default, dataframe]
-        rust:
-          - stable
+        feature: [default, dataframe, extra]
         include:
-          - style: default
+          - feature: default
             flags: ""
-          - style: dataframe
+          - feature: dataframe
             flags: "--features=dataframe"
+          - feature: extra
+            flags: "--features=extra"
         exclude:
-          # only test dataframes on Ubuntu (the fastest platform)
           - platform: windows-latest
-            style: dataframe
+            feature: dataframe
           - platform: macos-latest
-            style: dataframe
+            feature: dataframe
+          - platform: windows-latest
+            feature: extra
+          - platform: macos-latest
+            feature: extra
 
     runs-on: ${{ matrix.platform }}
 
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
 
       - name: Setup Rust toolchain and cache
-        uses: actions-rust-lang/setup-rust-toolchain@v1.4.4
+        uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
+        with:
+          rustflags: ""
 
       - name: Tests
         run: cargo test --workspace --profile ci --exclude nu_plugin_* ${{ matrix.flags }}
 
   std-lib-and-python-virtualenv:
-    env:
-      NU_LOG_LEVEL: DEBUG
-
     strategy:
       fail-fast: true
       matrix:
         platform: [ubuntu-20.04, macos-latest, windows-latest]
-        rust:
-          - stable
         py:
           - py
 
     runs-on: ${{ matrix.platform }}
 
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
 
       - name: Setup Rust toolchain and cache
-        uses: actions-rust-lang/setup-rust-toolchain@v1.4.4
+        uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
+        with:
+          rustflags: ""
 
       - name: Install Nushell
-        # prior to [*standard library: bring the tests into the main CI*](#8525)
-        # there was a `--profile ci` here in the `cargo install`, as well as
-        # `NUSHELL_CARGO_TARGET: ci` in the prelude above.
-        #
-        # this caused a "stackoverflow" error in the CI on windows,
-        # see [this failing job](https://github.com/nushell/nushell/actions/runs/4512034615/jobs/7944945590)
-        #
-        # the CI profile has been removed in 00b820de9021227d1910a9ea388297ee7aee308e
-        # as part of #8525.
         run: cargo install --path . --locked --no-default-features
 
       - name: Standard library tests
-        run: nu -c 'use std; std run-tests --path crates/nu-std'
+        run: nu -c 'use std testing; testing run-tests --path crates/nu-std'
 
       - name: Setup Python
         uses: actions/setup-python@v4
         with:
           python-version: "3.10"
 
-      - run: python -m pip install tox
-
-      # Get only the latest tagged version for stability reasons
       - name: Install virtualenv
-        run: git clone https://github.com/pypa/virtualenv.git
+        run: pip install virtualenv
         shell: bash
 
       - name: Test Nushell in virtualenv
-        run: |
-          cd virtualenv
-          # if we encounter problems with bleeding edge tests pin to the latest tag
-          # git checkout $(git describe --tags | cut -d - -f 1)
-          # We need to disable failing on coverage levels.
-          nu -c "open pyproject.toml | upsert tool.coverage.report.fail_under 1 | save patchproject.toml"
-          mv patchproject.toml pyproject.toml
-          tox -e ${{ matrix.py }} -- -k nushell
+        run: nu scripts/test_virtualenv.nu
         shell: bash
 
-  # Build+test plugins on their own, without the rest of Nu. This helps with CI parallelization and
-  # also helps test that the plugins build without any feature unification shenanigans
   plugins:
-    env:
-      NUSHELL_CARGO_TARGET: ci
-
     strategy:
       fail-fast: true
       matrix:
         platform: [windows-latest, macos-latest, ubuntu-20.04]
-        rust:
-          - stable
 
     runs-on: ${{ matrix.platform }}
 
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
 
       - name: Setup Rust toolchain and cache
-        uses: actions-rust-lang/setup-rust-toolchain@v1.4.4
+        uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
+        with:
+          rustflags: ""
 
       - name: Clippy
-        run: cargo clippy --package nu_plugin_* ${{ matrix.flags }} -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect -A clippy::result_large_err
+        run: cargo clippy --package nu_plugin_* ${{ matrix.flags }} -- $CLIPPY_OPTIONS
 
       - name: Tests
         run: cargo test --profile ci --package nu_plugin_*
-
-  nu-coverage:
-    env:
-      NUSHELL_CARGO_TARGET: ci
-
-    strategy:
-      fail-fast: true
-      matrix:
-        # disabled mac due to problems with merging coverage and similarity to linux
-        # disabled windows due to running out of disk space when having too many crates or tests
-        platform: [ubuntu-20.04] # windows-latest
-        rust:
-          - stable
-
-    runs-on: ${{ matrix.platform }}
-
-    steps:
-      - uses: actions/checkout@v3
-
-      - name: Setup Rust toolchain and cache
-        uses: actions-rust-lang/setup-rust-toolchain@v1.4.4
-      - name: Install cargo-llvm-cov
-        uses: taiki-e/install-action@cargo-llvm-cov
-
-      - name: Tests
-        shell: bash
-        run: |
-          source <(cargo llvm-cov show-env --export-prefix) # Set the environment variables needed to get coverage.
-          cargo llvm-cov clean --workspace # Remove artifacts that may affect the coverage results.
-          cargo build --workspace --profile ci
-          cargo test --workspace --profile ci
-          cargo llvm-cov report --profile ci --lcov --output-path lcov.info
-
-      - name: Upload coverage reports to Codecov with GitHub Action
-        uses: codecov/codecov-action@v3
-        with:
-          files: lcov.info
.github/workflows/nightly-build.yml (new file, 229 lines)

#
# REF:
# 1. https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstrategymatrixinclude
# 2. https://github.com/JasonEtco/create-an-issue
# 3. https://docs.github.com/en/actions/learn-github-actions/variables
# 4. https://github.com/actions/github-script
#
name: Nightly Build

on:
  push:
    branches:
      - nightly # Just for test purpose only with the nightly repo
  # This schedule will run only from the default branch
  schedule:
    - cron: '15 0 * * *' # run at 00:15 AM UTC

defaults:
  run:
    shell: bash

jobs:
  prepare:
    name: Prepare
    runs-on: ubuntu-latest
    # This job is required by the release job, so we should make it run both from Nushell repo and nightly repo
    # if: github.repository == 'nushell/nightly'
    steps:
      - name: Checkout
        uses: actions/checkout@v4
        if: github.repository == 'nushell/nightly'
        with:
          ref: main
          fetch-depth: 0
          # Configure PAT here: https://github.com/settings/tokens for the push operation in the following steps
          token: ${{ secrets.WORKFLOW_TOKEN }}

      - name: Setup Nushell
        uses: hustcer/setup-nu@v3.6
        if: github.repository == 'nushell/nightly'
        with:
          version: 0.84.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      # Synchronize the main branch of nightly repo with the main branch of Nushell official repo
      - name: Prepare for Nightly Release
        shell: nu {0}
        if: github.repository == 'nushell/nightly'
        run: |
          cd $env.GITHUB_WORKSPACE
          git checkout main
          # We can't push if no user name and email are configured
          git config user.name 'hustcer'
          git config user.email 'hustcer@outlook.com'
          git pull origin main
          git remote add src https://github.com/nushell/nushell.git
          git fetch src main
          # All the changes will be overwritten by the upstream main branch
          git reset --hard src/main
          git push origin main -f
          let sha_short = (git rev-parse --short origin/main | str trim | str substring 0..7)
          let tag_name = $'nightly-($sha_short)'
          if (git ls-remote --tags origin $tag_name | is-empty) {
            git tag -a $tag_name -m $'Nightly build from ($sha_short)'
            git push origin --tags
          }

  release:
    name: Release
    needs: prepare
    strategy:
      fail-fast: false
      matrix:
        target:
          - aarch64-apple-darwin
          - x86_64-apple-darwin
          - x86_64-pc-windows-msvc
          - aarch64-pc-windows-msvc
          - x86_64-unknown-linux-gnu
          - x86_64-unknown-linux-musl
          - aarch64-unknown-linux-gnu
          - armv7-unknown-linux-gnueabihf
          - riscv64gc-unknown-linux-gnu
        extra: ['bin']
        include:
          - target: aarch64-apple-darwin
            os: macos-latest
            target_rustflags: ''
          - target: x86_64-apple-darwin
            os: macos-latest
            target_rustflags: ''
          - target: x86_64-pc-windows-msvc
            extra: 'bin'
            os: windows-latest
            target_rustflags: ''
          - target: x86_64-pc-windows-msvc
            extra: msi
            os: windows-latest
            target_rustflags: ''
          - target: aarch64-pc-windows-msvc
            extra: 'bin'
            os: windows-latest
            target_rustflags: ''
          - target: aarch64-pc-windows-msvc
            extra: msi
            os: windows-latest
            target_rustflags: ''
          - target: x86_64-unknown-linux-gnu
            os: ubuntu-20.04
            target_rustflags: ''
          - target: x86_64-unknown-linux-musl
            os: ubuntu-20.04
            target_rustflags: ''
          - target: aarch64-unknown-linux-gnu
            os: ubuntu-20.04
            target_rustflags: ''
          - target: armv7-unknown-linux-gnueabihf
            os: ubuntu-20.04
            target_rustflags: '--exclude=nu-cmd-dataframe'
          - target: riscv64gc-unknown-linux-gnu
            os: ubuntu-20.04
            target_rustflags: '--exclude=nu-cmd-dataframe'

    runs-on: ${{matrix.os}}

    steps:
      - uses: actions/checkout@v4
        with:
          ref: main

      - name: Update Rust Toolchain Target
        run: |
          echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
        with:
          rustflags: ''

      - name: Setup Nushell
        uses: hustcer/setup-nu@v3.6
        with:
          version: 0.84.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Release Nu Binary
        id: nu
        run: nu .github/workflows/release-pkg.nu
        env:
          OS: ${{ matrix.os }}
          REF: ${{ github.ref }}
          TARGET: ${{ matrix.target }}
          _EXTRA_: ${{ matrix.extra }}
          TARGET_RUSTFLAGS: ${{ matrix.target_rustflags }}

      - name: Create an Issue for Release Failure
        if: ${{ failure() }}
        uses: JasonEtco/create-an-issue@v2.9.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          update_existing: true
          search_existing: open
          filename: .github/AUTO_ISSUE_TEMPLATE/nightly-build-fail.md

      - name: Set Outputs of Short SHA
        id: vars
        run: |
          echo "date=$(date -u +'%Y-%m-%d')" >> $GITHUB_OUTPUT
          sha_short=$(git rev-parse --short HEAD)
          echo "sha_short=${sha_short:0:7}" >> $GITHUB_OUTPUT

      # REF: https://github.com/marketplace/actions/gh-release
      # Create a release only in nushell/nightly repo
      - name: Publish Archive
        uses: softprops/action-gh-release@v0.1.15
        if: ${{ startsWith(github.repository, 'nushell/nightly') }}
        with:
          draft: false
          prerelease: true
          name: Nu-nightly-${{ steps.vars.outputs.date }}-${{ steps.vars.outputs.sha_short }}
          tag_name: nightly-${{ steps.vars.outputs.sha_short }}
          body: |
            This is a NIGHTLY build of Nushell.
            It is NOT recommended for production use.
          files: ${{ steps.nu.outputs.archive }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  cleanup:
    name: Cleanup
    # Should only run in nushell/nightly repo
    if: github.repository == 'nushell/nightly'
    runs-on: ubuntu-latest
    steps:
      # Sleep for 30 minutes, waiting for the release to be published
      - name: Waiting for Release
        run: sleep 1800

      - uses: actions/checkout@v4
        with:
          ref: main

      - name: Setup Nushell
        uses: hustcer/setup-nu@v3.6
        with:
          version: 0.84.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      # Keep the last a few releases
      - name: Delete Older Releases
        shell: nu {0}
        run: |
          let KEEP_COUNT = 10
          let deprecated = (http get https://api.github.com/repos/nushell/nightly/releases | sort-by -r created_at | select tag_name id | range $KEEP_COUNT..)
          for release in $deprecated {
            print $'Deleting tag ($release.tag_name)'
            git push origin --delete $release.tag_name
            print $'Deleting release ($release.tag_name)'
            let delete_url = $'https://api.github.com/repos/nushell/nightly/releases/($release.id)'
            let version = "X-GitHub-Api-Version: 2022-11-28"
            let accept = "Accept: application/vnd.github+json"
            let auth = "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}"
            # http delete $delete_url -H $version -H $auth -H $accept
            curl -L -X DELETE -H $accept -H $auth -H $version $delete_url
          }
.github/workflows/release-pkg.nu (62 lines changed)
@@ -15,23 +15,23 @@
 # unset CARGO_TARGET_DIR if set (I have to do this in the parent shell to get it to work)
 # 2. $env:CARGO_TARGET_DIR = ""
 # 2. hide-env CARGO_TARGET_DIR
-# 3. let-env TARGET = 'x86_64-pc-windows-msvc'
+# 3. $env.TARGET = 'x86_64-pc-windows-msvc'
-# 4. let-env TARGET_RUSTFLAGS = ''
+# 4. $env.TARGET_RUSTFLAGS = ''
-# 5. let-env GITHUB_WORKSPACE = 'C:\Users\dschroeder\source\repos\forks\nushell'
+# 5. $env.GITHUB_WORKSPACE = 'C:\Users\dschroeder\source\repos\forks\nushell'
-# 6. let-env GITHUB_OUTPUT = 'C:\Users\dschroeder\source\repos\forks\nushell\output\out.txt'
+# 6. $env.GITHUB_OUTPUT = 'C:\Users\dschroeder\source\repos\forks\nushell\output\out.txt'
-# 7. let-env OS = 'windows-latest'
+# 7. $env.OS = 'windows-latest'
 # make sure 7z.exe is in your path https://www.7-zip.org/download.html
-# 8. let-env Path = ($env.Path | append 'c:\apps\7-zip')
+# 8. $env.Path = ($env.Path | append 'c:\apps\7-zip')
 # make sure aria2c.exe is in your path https://github.com/aria2/aria2
-# 9. let-env Path = ($env.Path | append 'c:\path\to\aria2c')
+# 9. $env.Path = ($env.Path | append 'c:\path\to\aria2c')
 # make sure you have the wixtools installed https://wixtoolset.org/
-# 10. let-env Path = ($env.Path | append 'C:\Users\dschroeder\AppData\Local\tauri\WixTools')
+# 10. $env.Path = ($env.Path | append 'C:\Users\dschroeder\AppData\Local\tauri\WixTools')
 # You need to run the release-pkg twice. The first pass, with _EXTRA_ as 'bin', makes the output
 # folder and builds everything. The second pass, that generates the msi file, with _EXTRA_ as 'msi'
-# 11. let-env _EXTRA_ = 'bin'
+# 11. $env._EXTRA_ = 'bin'
 # 12. source .github\workflows\release-pkg.nu
 # 13. cd ..
-# 14. let-env _EXTRA_ = 'msi'
+# 14. $env._EXTRA_ = 'msi'
 # 15. source .github\workflows\release-pkg.nu
 # After msi is generated, you have to update winget-pkgs repo, you'll need to patch the release
 # by deleting the existing msi and uploading this new msi. Then you'll need to update the hash
@@ -73,17 +73,17 @@ if $os in [$USE_UBUNTU, 'macos-latest'] {
     match $target {
         'aarch64-unknown-linux-gnu' => {
             sudo apt-get install gcc-aarch64-linux-gnu -y
-            let-env CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER = 'aarch64-linux-gnu-gcc'
+            $env.CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER = 'aarch64-linux-gnu-gcc'
             cargo-build-nu $flags
         }
         'riscv64gc-unknown-linux-gnu' => {
             sudo apt-get install gcc-riscv64-linux-gnu -y
-            let-env CARGO_TARGET_RISCV64GC_UNKNOWN_LINUX_GNU_LINKER = 'riscv64-linux-gnu-gcc'
+            $env.CARGO_TARGET_RISCV64GC_UNKNOWN_LINUX_GNU_LINKER = 'riscv64-linux-gnu-gcc'
             cargo-build-nu $flags
         }
         'armv7-unknown-linux-gnueabihf' => {
             sudo apt-get install pkg-config gcc-arm-linux-gnueabihf -y
-            let-env CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER = 'arm-linux-gnueabihf-gcc'
+            $env.CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER = 'arm-linux-gnueabihf-gcc'
             cargo-build-nu $flags
         }
         _ => {
@@ -99,11 +99,7 @@ if $os in [$USE_UBUNTU, 'macos-latest'] {
 # Build for Windows without static-link-openssl feature
 # ----------------------------------------------------------------------------
 if $os in ['windows-latest'] {
-    if ($flags | str trim | is-empty) {
-        cargo build --release --all --target $target
-    } else {
-        cargo build --release --all --target $target $flags
-    }
+    cargo-build-nu $flags
 }
 
 # ----------------------------------------------------------------------------
@@ -121,14 +117,16 @@ print $'(char nl)All executable files:'; hr-line
 print (ls -f $executable); sleep 1sec
 
 print $'(char nl)Copying release files...'; hr-line
-cp -v README.release.txt $'($dist)/README.txt'
+"To use Nu plugins, use the register command to tell Nu where to find the plugin. For example:
+
+> register ./nu_plugin_query" | save $'($dist)/README.txt'
 [LICENSE $executable] | each {|it| cp -rv $it $dist } | flatten
 # Sleep a few seconds to make sure the cp process finished successfully
 sleep 3sec
 
 print $'(char nl)Check binary release version detail:'; hr-line
 let ver = if $os == 'windows-latest' {
-    (do -i { ./output/nu.exe -c 'version' }) | str join
+    (do -i { .\output\nu.exe -c 'version' }) | str join
 } else {
     (do -i { ./output/nu -c 'version' }) | str join
 }
@@ -174,27 +172,39 @@ if $os in [$USE_UBUNTU, 'macos-latest'] {
         cp -r $'($dist)/*' target/release/
         cargo install cargo-wix --version 0.3.4
         cargo wix --no-build --nocapture --package nu --output $wixRelease
-        print $'archive: ---> ($wixRelease)';
-        echo $"archive=($wixRelease)" | save --append $env.GITHUB_OUTPUT
+        # Workaround for https://github.com/softprops/action-gh-release/issues/280
+        let archive = ($wixRelease | str replace -a '\' '/')
+        print $'archive: ---> ($archive)';
+        echo $"archive=($archive)" | save --append $env.GITHUB_OUTPUT
+
     } else {
+
         print $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls
         let archive = $'($dist)/($releaseStem).zip'
         7z a $archive *
-        print $'archive: ---> ($archive)';
         let pkg = (ls -f $archive | get name)
         if not ($pkg | is-empty) {
-            echo $"archive=($pkg | get 0)" | save --append $env.GITHUB_OUTPUT
+            # Workaround for https://github.com/softprops/action-gh-release/issues/280
+            let archive = ($pkg | get 0 | str replace -a '\' '/')
+            print $'archive: ---> ($archive)'
+            echo $"archive=($archive)" | save --append $env.GITHUB_OUTPUT
         }
     }
 }
 
 def 'cargo-build-nu' [ options: string ] {
     if ($options | str trim | is-empty) {
-        cargo build --release --all --target $target --features=static-link-openssl
+        if $os == 'windows-latest' {
+            cargo build --release --all --target $target
+        } else {
+            cargo build --release --all --target $target --features=static-link-openssl
+        }
     } else {
-        cargo build --release --all --target $target --features=static-link-openssl $options
+        if $os == 'windows-latest' {
+            cargo build --release --all --target $target $options
+        } else {
+            cargo build --release --all --target $target --features=static-link-openssl $options
+        }
    }
 }
.github/workflows/release.yml (25 lines changed)
@@ -23,6 +23,7 @@ jobs:
         - aarch64-apple-darwin
         - x86_64-apple-darwin
         - x86_64-pc-windows-msvc
+        - aarch64-pc-windows-msvc
         - x86_64-unknown-linux-gnu
         - x86_64-unknown-linux-musl
         - aarch64-unknown-linux-gnu
@@ -44,6 +45,14 @@ jobs:
         extra: msi
         os: windows-latest
         target_rustflags: ''
+      - target: aarch64-pc-windows-msvc
+        extra: 'bin'
+        os: windows-latest
+        target_rustflags: ''
+      - target: aarch64-pc-windows-msvc
+        extra: msi
+        os: windows-latest
+        target_rustflags: ''
       - target: x86_64-unknown-linux-gnu
         os: ubuntu-20.04
         target_rustflags: ''
@@ -55,27 +64,29 @@ jobs:
         target_rustflags: ''
       - target: armv7-unknown-linux-gnueabihf
         os: ubuntu-20.04
-        target_rustflags: ''
+        target_rustflags: '--exclude=nu-cmd-dataframe'
       - target: riscv64gc-unknown-linux-gnu
         os: ubuntu-20.04
-        target_rustflags: ''
+        target_rustflags: '--exclude=nu-cmd-dataframe'
 
     runs-on: ${{matrix.os}}
 
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
 
       - name: Update Rust Toolchain Target
         run: |
           echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
 
       - name: Setup Rust toolchain and cache
-        uses: actions-rust-lang/setup-rust-toolchain@v1.4.4
+        uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
+        with:
+          rustflags: ''
 
       - name: Setup Nushell
-        uses: hustcer/setup-nu@v3
+        uses: hustcer/setup-nu@v3.6
         with:
-          version: 0.78.0
+          version: 0.84.0
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
 
@@ -91,7 +102,7 @@ jobs:
 
       # REF: https://github.com/marketplace/actions/gh-release
       - name: Publish Archive
-        uses: softprops/action-gh-release@v0.1.13
+        uses: softprops/action-gh-release@v0.1.15
         if: ${{ startsWith(github.ref, 'refs/tags/') }}
         with:
           draft: true
.github/workflows/typos.yml (6 lines changed)
@@ -7,7 +7,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout Actions Repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
 
       - name: Check spelling
-        uses: crate-ci/typos@master
+        uses: crate-ci/typos@v1.16.11
+        with:
+          config: ./.github/.typos.toml
.github/workflows/winget-submission.yml (2 lines changed)
@@ -14,7 +14,7 @@ jobs:
 
   winget:
     name: Publish winget package
-    runs-on: windows-latest
+    runs-on: ubuntu-latest
     steps:
       - name: Submit package to Windows Package Manager Community Repository
         uses: vedantmgoyal2009/winget-releaser@v2
.gitignore
vendored
1
.gitignore
vendored
@ -42,6 +42,7 @@ tarpaulin-report.html
|
|||||||
*.rsproj
|
*.rsproj
|
||||||
*.rsproj.user
|
*.rsproj.user
|
||||||
*.sln
|
*.sln
|
||||||
|
*.code-workspace
|
||||||
|
|
||||||
# direnv
|
# direnv
|
||||||
.direnv/
|
.direnv/
|
||||||
|
@ -2,7 +2,20 @@
|
|||||||
|
|
||||||
Welcome to Nushell and thank you for considering contributing!
|
Welcome to Nushell and thank you for considering contributing!
|
||||||
|
|
||||||
## Review Process
|
## Table of contents
|
||||||
|
- [Proposing design changes](#proposing-design-changes)
|
||||||
|
- [Developing](#developing)
|
||||||
|
- [Setup](#setup)
|
||||||
|
- [Tests](#tests)
|
||||||
|
- [Useful commands](#useful-commands)
|
||||||
|
- [Debugging tips](#debugging-tips)
|
||||||
|
- [Git etiquette](#git-etiquette)
|
||||||
|
- [Our Rust style](#our-rust-style)
|
||||||
|
- [Generally discouraged](#generally-discouraged)
|
||||||
|
- [Things we want to get better at](#things-we-want-to-get-better-at)
|
||||||
|
- [License](#license)
|
||||||
|
|
||||||
|
## Proposing design changes
|
||||||
|
|
||||||
First of all, before diving into the code, if you want to create a new feature, change something significantly, and especially if the change is user-facing, it is a good practice to first get an approval from the core team before starting to work on it.
|
First of all, before diving into the code, if you want to create a new feature, change something significantly, and especially if the change is user-facing, it is a good practice to first get an approval from the core team before starting to work on it.
|
||||||
This saves both your and our time if we realize the change needs to go another direction before spending time on it.
|
This saves both your and our time if we realize the change needs to go another direction before spending time on it.
|
||||||
@ -41,10 +54,13 @@ Tests can be found in different places:
|
|||||||
* command examples
|
* command examples
|
||||||
* crate-specific tests
|
* crate-specific tests
|
||||||
|
|
||||||
The most comprehensive test suite we have is the `nu-test-support` crate. For testing specific features, such as running Nushell in a REPL mode, we have so called "testbins". For simple tests, you can find `run_test()` and `fail_test()` functions.
|
Most of the tests are built upon the `nu-test-support` crate. For testing specific features, such as running Nushell in a REPL mode, we have so called "testbins". For simple tests, you can find `run_test()` and `fail_test()` functions.
|
||||||
|
|
||||||
### Useful Commands
|
### Useful Commands
|
||||||
|
|
||||||
|
As Nushell is build using a cargo workspace consisting of multiple crates keep in mind that you may need to pass additional flags compared to how you may be used to it from a single crate project.
|
||||||
|
Read cargo's documentation for more details: https://doc.rust-lang.org/cargo/reference/workspaces.html
|
||||||
|
|
||||||
- Build and run Nushell:
|
- Build and run Nushell:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
@ -59,7 +75,7 @@ The most comprehensive test suite we have is the `nu-test-support` crate. For te
|
|||||||
- Run Clippy on Nushell:
|
- Run Clippy on Nushell:
|
||||||
|
|
||||||
```shell
|
```shell
|
||||||
cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect -A clippy::result_large_err
|
cargo clippy --workspace -- -D warnings -D clippy::unwrap_used
|
||||||
```
|
```
|
||||||
or via the `toolkit.nu` command:
|
or via the `toolkit.nu` command:
|
||||||
```shell
|
```shell
|
||||||
@ -220,3 +236,52 @@ You can help us to make the review process a smooth experience:
|
|||||||
- Choose what simplifies having confidence in the conflict resolution and the review. **Merge commits in your branch are OK** in the squash model.
|
- Choose what simplifies having confidence in the conflict resolution and the review. **Merge commits in your branch are OK** in the squash model.
|
||||||
- Feel free to notify your reviewers or affected PR authors if your change might cause larger conflicts with another change.
|
- Feel free to notify your reviewers or affected PR authors if your change might cause larger conflicts with another change.
|
||||||
- During the rollup of multiple PRs, we may choose to resolve merge conflicts and CI failures ourselves. (Allow maintainers to push to your branch to enable us to do this quickly.)
|
- During the rollup of multiple PRs, we may choose to resolve merge conflicts and CI failures ourselves. (Allow maintainers to push to your branch to enable us to do this quickly.)
|
||||||
|
|
||||||
|
## Our Rust style
|
||||||
|
To make the collaboration on a project the scale of Nushell easy, we want to work towards a style of Rust code that can easily be understood by all of our contributors. We conservatively rely on most of [`clippy`s suggestions](https://github.com/rust-lang/rust-clippy) to get to the holy grail of "idiomatic" code. Good code in our eyes is not the most clever use of all available language features or with the most unique personal touch but readable and strikes a balance between being concise, and also unsurprising and explicit in the places where it matters.
|
||||||
|
One example of this philosophy is that we generally avoid to fight the borrow-checker in our data model but rather try to get to a correct and simple solution first and then figure out where we should reuse data to achieve the necessary performance. As we are still pre-1.0 this served us well to be able to quickly refactor or change larger parts of the code base.
|
||||||
|
|
||||||
|
### Generally discouraged
|
||||||
|
#### `+nightly` language features or things only available in the most recent `+stable`
|
||||||
|
To make life for the people easier that maintain the Nushell packages in various distributions with their own release cycle of `rustc` we typically rely on slightly older Rust versions. We do not make explicit guarantees how far back in the past we live but you can find out in our [`rust-toolchain.toml`](https://github.com/nushell/nushell/blob/main/rust-toolchain.toml)
|
||||||
|
(As a rule of thumb this has been typically been approximately 2 releases behind the newest stable compiler.)
|
||||||
|
The use of nightly features is prohibited.
|
||||||
|
|
||||||
|
#### Panicking
|
||||||
|
As Nushell aims to provide a reliable foundational way for folks to interact with their computer, we cannot carelessly crash the execution of their work by panicking Nushell.
|
||||||
|
Thus panicking is not an allowed error handling strategy for anything that could be triggered by user input OR behavior of the outside system. If Nushell panics this is a bug or we are against all odds already in an unrecoverable state (The system stopped cooperating, we went out of memory). The use of `.unwrap()` is thus outright banned and any uses of `.expect()` or related panicking macros like `unreachable!` should include a helpful description which assumptions have been violated.
|
||||||
|
|
||||||
|
#### `unsafe` code
|
||||||
|
For any use of `unsafe` code we need to require even higher standards and additional review. If you add or alter `unsafe` blocks you have to be familiar with the promises you need to uphold as found in the [Rustonomicon](https://doc.rust-lang.org/nomicon/intro.html). All `unsafe` uses should include `// SAFETY:` comments explaining how the invariants are upheld and thus alerting you what to watch out for when making a change.
|
||||||
|
##### FFI with system calls and the outside world
|
||||||
|
As a shell Nushell needs to interact with system APIs in several places, for which FFI code with unsafe blocks may be necessary. In some cases this can be handled by safe API wrapper crates but in some cases we may choose to directly do those calls.
|
||||||
|
If you do so you need to document the system behavior on top of the Rust memory model guarantees that you uphold. This means documenting whether using a particular system call is safe to use in a particular context and all failure cases are properly recovered.
|
||||||
|
##### Implementing self-contained data structures
|
||||||
|
Another motivation for reaching to `unsafe` code might be to try to implement a particular data structure that is not expressible on safe `std` library APIs. Doing so in the Nushell code base would have to clear a high bar for need based on profiling results. Also you should first do a survey of the [crate ecosystem](https://crates.io) that there doesn't exist a usable well vetted crate that already provides safe APIs to the desired datastructure.
|
||||||
|
##### Make things go faster by removing checks
|
||||||
|
This is probably a bad idea if you feel tempted to do so. Don't
|
||||||
|
#### Macros
|
||||||
|
Another advanced feature people feel tempted to use to work around perceived limitations of Rusts syntax and we are not particularly fans of are custom macros.
|
||||||
|
They have clear downsides not only in terms of readability if they locally introduce a different syntax. Most tooling apart from the compiler will struggle more with them. This limits for example consistent automatic formatting or automated refactors with `rust-analyzer`.
|
||||||
|
You are also less likely to be able to read `macro_rules!` definitions fluently than regular code. This can lead people to introduce funky behavior when using a macro, be it because the macro does not follow proper hygiene rules or because it introduces excessive work at compile time.

So we generally discourage the addition of macros. In a lot of cases your macro may start to do something that can be expressed with functions or generics in a much more reusable fashion.

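For instance, a helper one might be tempted to write as a `macro_rules!` macro can often be a plain generic function instead; the `describe` function below is invented purely for illustration and stays fully visible to `rustfmt` and `rust-analyzer`:

```rust
// A generic function instead of a hypothetical `describe!(label, value)` macro.
fn describe<T: std::fmt::Debug>(label: &str, value: &T) -> String {
    format!("{label}: {value:?}")
}

fn main() {
    println!("{}", describe("columns", &vec!["name", "size"]));
    println!("{}", describe("max depth", &3));
}
```
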
The only exceptions we may allow need to demonstrate that the macro can fix something that is otherwise extremely unreadable, error-prone, or consistently worse at compile time.
### Things we want to get better at
These are things we did pretty liberally to get Nushell off the ground, but they make things harder for a high-quality, stable product. You may run across them, but you shouldn't take them as an endorsed example.

#### Liberal use of third-party dependencies
The amazing variety of crates on [crates.io](https://crates.io) allowed us to quickly get Nushell into a feature-rich state, but it left us with a bunch of baggage to clean up.

Each dependency introduces a compile-time cost, and duplicated code can add to the overall binary size. Vetting more dependencies for correct and secure implementations also takes unreasonably more time, as this is a continuous process of reacting to updates and potential vulnerabilities.

Thus we only want to accept dependencies that are essential, well-tested implementations of a particular requirement of Nushell's codebase.

Also, as part of the move to 1.0, we will try to unify sets of dependencies that implement similar things in an area. We don't need three different crates, each a potentially perfect fit for one of three problems, but rather one reliable crate with maximal overlap between what it provides and what we need.

We will favor crates that are well tested, widely used, stable, and still actively maintained.

#### Deeply nested code
As Nushell uses a lot of enums in its internal data representation, there are a lot of `match` expressions. Combined with the need to handle many edge cases and to be defensive about any errors, this has led to some deeply nested code that is absolutely hard to read (e.g. in the parser, but also in the implementation of several commands).

This can be observed both as a "rightward drift", where the main part of the code is found after many levels of indentation, and as long function bodies with several layers of branching, with seemingly repeated branching inside the higher branch levels.

This can also be exacerbated by "quick" bugfixes or enhancements that just try to add a special case to catch a previously unexpected condition. The likelihood of introducing a bug in a sea of code duplication is high.

To combat this, consider using the early-`return` pattern to reject invalid data early, in one place, instead of building a tree through Rust's expression constructs with a lot of duplicated paths, as in the sketch below. Unpacking data into a type that expresses that the necessary things have already been checked, and using functions to properly separate common and special behavior, can also help.

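A small illustrative sketch (the `Row` type and `describe_row` function are made up and not Nushell types) of how early returns via `?` flatten what would otherwise be two nested `match` levels:

```rust
struct Row {
    name: Option<String>,
    size: Option<i64>,
}

// Missing fields are rejected up front instead of nesting a `match` per field.
fn describe_row(row: &Row) -> Option<String> {
    let name = row.name.as_ref()?;
    let size = row.size?;
    Some(format!("{name}: {size} bytes"))
}

fn main() {
    let row = Row { name: Some("Cargo.toml".into()), size: Some(112) };
    assert_eq!(describe_row(&row), Some("Cargo.toml: 112 bytes".into()));

    let incomplete = Row { name: None, size: Some(1) };
    assert_eq!(describe_row(&incomplete), None);
}
```
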
## License
We use the [MIT License](https://github.com/nushell/nushell/blob/main/LICENSE) in all of our Nushell projects. If you are including or referencing a crate that uses the [GPL License](https://www.gnu.org/licenses/gpl-3.0.en.html#license-text), we unfortunately will not be able to accept your PR.

Changed files in this comparison:

**`Cargo.lock`** (generated, 2692 lines changed) — diff suppressed because it is too large.

**`Cargo.toml`** (112 lines changed):

- `[package]`: `version` bumped from `0.80.0` to `0.85.0`; `build = "scripts/build.rs"` added; `rust-version = "1.60"` unchanged.
- Workspace members: `crates/nu-cmd-base`, `crates/nu-cmd-extra`, and `crates/nu-cmd-dataframe` added.
- `[dependencies]`: the `nu-*` path dependencies are bumped to `0.85.0`, with new entries for `nu-cmd-base`, `nu-cmd-extra` (optional), `nu-cmd-dataframe` (optional, with the `dataframe` feature), `nu-explore`, and `mimalloc` (optional); `nu-ansi-term` 0.47.0 → 0.49.0, `reedline` 0.19.1 → 0.24.0, `crossterm` 0.26 → 0.27, `ctrlc` 3.2.1 → 3.4, `miette` 5.7.0 → 5.10; `chrono`, `rayon`, and `is_executable` no longer appear in the root dependency list.
- Target-specific dependencies: `openssl` and `signal-hook` requirements relaxed to `0.10` and `0.3`; `nix` 0.26 → 0.27; `atty` removed.
- `[dev-dependencies]`: `nu-test-support` bumped to `0.85.0`; `criterion` 0.4 → 0.5, `rstest` 0.17 → 0.18, `serial_test` 1.0 → 2.0, `pretty_assertions`, `assert_cmd`, and `tempfile` updated; `hamcrest2` and `itertools` removed.
- `[features]`: `default` now includes `mimalloc`; new `mimalloc` feature; `extra` changes from an alias of `default` to `["dep:nu-cmd-extra", "nu-cmd-lang/extra"]`; `dataframe` now pulls `nu-cmd-dataframe` instead of `nu-command/dataframe`; `which-support`, `trash-support`, `sqlite`, `wasi`, and `static-link-openssl` additionally forward to `nu-cmd-lang`, which uses the individual features to generate the feature matrix for the `version` command.
- `[profile.release]` keeps `opt-level = "s"` (optimize for size).

**`Cross.toml`** (15 lines changed):

- The example invocation now targets `aarch64-unknown-linux-gnu`, with an additional musl example using `--features=static-link-openssl`.
- Both the `aarch64-unknown-linux-gnu` and `aarch64-unknown-linux-musl` targets drop their custom dockerfiles in favor of `pre-build` steps (`dpkg --add-architecture $CROSS_DEB_ARCH` followed by an `apt-get install` of `clang`, plus `libssl-dev:$CROSS_DEB_ARCH` for the gnu target).
- A note is added that musl builds need `--features=static-link-openssl`.

README.md
47
README.md
@ -1,28 +1,28 @@
|
|||||||
# Nushell <!-- omit in toc -->
|
# Nushell <!-- omit in toc -->
|
||||||
[](https://crates.io/crates/nu)
|
[](https://crates.io/crates/nu)
|
||||||
[](https://github.com/nushell/nushell/actions)
|
[](https://github.com/nushell/nushell/actions)
|
||||||
|
[](https://github.com/nushell/nushell/actions/workflows/nightly-build.yml)
|
||||||
[](https://discord.gg/NtAbbGn)
|
[](https://discord.gg/NtAbbGn)
|
||||||
[](https://changelog.com/podcast/363)
|
[](https://changelog.com/podcast/363)
|
||||||
[](https://twitter.com/nu_shell)
|
[](https://twitter.com/nu_shell)
|
||||||
[](https://github.com/nushell/nushell/graphs/commit-activity)
|
[](https://github.com/nushell/nushell/graphs/commit-activity)
|
||||||
[](https://github.com/nushell/nushell/graphs/contributors)
|
[](https://github.com/nushell/nushell/graphs/contributors)
|
||||||
[](https://codecov.io/gh/nushell/nushell)
|
|
||||||
|
|
||||||
A new type of shell.
|
A new type of shell.
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
## Table of Contents <!-- omit in toc -->
|
## Table of Contents <!-- omit in toc -->
|
||||||
|
|
||||||
- [Status](#status)
|
- [Status](#status)
|
||||||
- [Learning About Nu](#learning-about-nu)
|
- [Learning About Nu](#learning-about-nu)
|
||||||
- [Installation](#installation)
|
- [Installation](#installation)
|
||||||
|
- [Configuration](#configuration)
|
||||||
- [Philosophy](#philosophy)
|
- [Philosophy](#philosophy)
|
||||||
- [Pipelines](#pipelines)
|
- [Pipelines](#pipelines)
|
||||||
- [Opening files](#opening-files)
|
- [Opening files](#opening-files)
|
||||||
- [Plugins](#plugins)
|
- [Plugins](#plugins)
|
||||||
- [Goals](#goals)
|
- [Goals](#goals)
|
||||||
- [Progress](#progress)
|
|
||||||
- [Officially Supported By](#officially-supported-by)
|
- [Officially Supported By](#officially-supported-by)
|
||||||
- [Contributing](#contributing)
|
- [Contributing](#contributing)
|
||||||
- [License](#license)
|
- [License](#license)
|
||||||
@ -55,6 +55,22 @@ Detailed installation instructions can be found in the [installation chapter of
|
|||||||
[](https://repology.org/project/nushell/versions)
|
[](https://repology.org/project/nushell/versions)
|
||||||
|
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
The default configurations can be found at [sample_config](crates/nu-utils/src/sample_config)
|
||||||
|
which are the configuration files one gets when they startup Nushell for the first time.
|
||||||
|
|
||||||
|
It sets all of the default configuration to run Nushell. From here one can
|
||||||
|
then customize this file for their specific needs.
|
||||||
|
|
||||||
|
To see where *config.nu* is located on your system simply type this command.
|
||||||
|
|
||||||
|
```rust
|
||||||
|
$nu.config-path
|
||||||
|
```
|
||||||
|
|
||||||
|
Please see our [book](https://www.nushell.sh) for all of the Nushell documentation.
|
||||||
|
|
||||||
|
|
||||||
## Philosophy
|
## Philosophy
|
||||||
|
|
||||||
@ -192,27 +208,6 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat
|
|||||||
|
|
||||||
- Finally, Nu views data functionally. Rather than using mutation, pipelines act as a means to load, change, and save data without mutable state.
|
- Finally, Nu views data functionally. Rather than using mutation, pipelines act as a means to load, change, and save data without mutable state.
|
||||||
|
|
||||||
## Progress
|
|
||||||
|
|
||||||
Nu is under heavy development and will naturally change as it matures. The chart below isn't meant to be exhaustive, but it helps give an idea for some of the areas of development and their relative maturity:
|
|
||||||
|
|
||||||
| Features | Not started | Prototype | MVP | Preview | Mature | Notes |
|
|
||||||
| ------------- | :---------: | :-------: | :-: | :-----: | :----: | -------------------------------------------------------------------- |
|
|
||||||
| Aliases | | | | X | | Aliases allow for shortening large commands, while passing flags |
|
|
||||||
| Notebook | | X | | | | Initial jupyter support, but it loses state and lacks features |
|
|
||||||
| File ops | | | | X | | cp, mv, rm, mkdir have some support, but lacking others |
|
|
||||||
| Environment | | | | X | | Temporary environment and scoped environment variables |
|
|
||||||
| Shells | | | | X | | Basic value and file shells, but no opt-in/opt-out for commands |
|
|
||||||
| Protocol | | | | X | | Streaming protocol is serviceable |
|
|
||||||
| Plugins | | | X | | | Plugins work on one row at a time, lack batching and expression eval |
|
|
||||||
| Errors | | | | X | | Error reporting works, but could use usability polish |
|
|
||||||
| Documentation | | | X | | | Book updated to latest release, including usage examples |
|
|
||||||
| Paging | | | | X | | Textview has paging, but we'd like paging for tables |
|
|
||||||
| Functions | | | | X | | Functions and aliases are supported |
|
|
||||||
| Variables | | | | X | | Nu supports variables and environment variables |
|
|
||||||
| Completions | | | | X | | Completions for filepaths |
|
|
||||||
| Type-checking | | | | x | | Commands check basic types, and input/output types |
|
|
||||||
|
|
||||||
## Officially Supported By
|
## Officially Supported By
|
||||||
|
|
||||||
Please submit an issue or PR to be added to this list.
|
Please submit an issue or PR to be added to this list.
|
||||||
@ -223,13 +218,15 @@ Please submit an issue or PR to be added to this list.
|
|||||||
- [Couchbase Shell](https://couchbase.sh)
|
- [Couchbase Shell](https://couchbase.sh)
|
||||||
- [virtualenv](https://github.com/pypa/virtualenv)
|
- [virtualenv](https://github.com/pypa/virtualenv)
|
||||||
- [atuin](https://github.com/ellie/atuin)
|
- [atuin](https://github.com/ellie/atuin)
|
||||||
|
- [clap](https://github.com/clap-rs/clap/tree/master/clap_complete_nushell)
|
||||||
|
- [Dorothy](http://github.com/bevry/dorothy)
|
||||||
|
|
||||||
## Contributing
|
## Contributing
|
||||||
|
|
||||||
See [Contributing](CONTRIBUTING.md) for details. Thanks to all the people who already contributed!
|
See [Contributing](CONTRIBUTING.md) for details. Thanks to all the people who already contributed!
|
||||||
|
|
||||||
<a href="https://github.com/nushell/nushell/graphs/contributors">
|
<a href="https://github.com/nushell/nushell/graphs/contributors">
|
||||||
<img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=500" />
|
<img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=600" />
|
||||||
</a>
|
</a>
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
A three-line file describing plugin registration ("To use Nu plugins, use the register command to tell Nu where to find the plugin. For example: `> register ./nu_plugin_query`") is deleted, and an image asset is replaced (1.2 MiB before and after, unchanged dimensions).

The Criterion benchmarks file (parser, eval, encoding, and decoding benchmarks):

- A new `load_bench_commands()` helper builds the engine state via `nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())` and replaces every call to `nu_command::create_default_context()`; the imports gain `nu_protocol::engine::EngineState`.
- `encoding_test_data` now builds a single `Value::test_record` from `(col_{x}, int)` pairs and repeats it with `Value::list(vec![record; row_cnt], Span::test_data())`, instead of constructing a `Value::List` struct literal by hand.
- The encoding and decoding size matrix is trimmed from nine `(row, col)` pairs down to `(100, 5)`, `(100, 15)`, `(10000, 5)`, and `(10000, 15)`.

**`build-all.nu`** (25 lines) is deleted. The script built Nushell with `cargo build --features=dataframe` and then looped over the plugin crates (`nu_plugin_inc`, `nu_plugin_gstat`, `nu_plugin_query`, `nu_plugin_example`, `nu_plugin_custom_values`, `nu_plugin_formats`), running `cargo build` inside each.

**`codecov.yml`** (17 lines) is deleted. It configured a project coverage target of 55% with a 2% threshold, informational patch status, and a default comment layout (`reach, diff, files`) requiring base and head reports after one build.

A 54-line `#!/usr/bin/env nu` coverage script is deleted. It generated a local `lcov.info` report with `cargo-llvm-cov`: setting `NUSHELL_CARGO_TARGET = "ci"`, loading the LLVM coverage environment via `cargo llvm-cov show-env | str replace (char dq) (char sq) -a | from toml | load-env`, running `cargo llvm-cov clean --workspace`, building and testing with `--workspace --profile=ci`, producing the report with `cargo llvm-cov report --lcov --output-path lcov.info --profile=ci`, printing the elapsed time, and listing editor integrations for displaying coverage.

**`crates/nu-cli/Cargo.toml`**:

- `version` bumped from `0.80.0` to `0.85.0`, and all `nu-*` dependencies follow.
- `nu-command` moves from regular dependencies to dev-dependencies (joined by `nu-cmd-lang`), and `nu-cmd-base` is added as a dependency.
- `nu-ansi-term` 0.47.0 → 0.49.0, `reedline` 0.19.1 → 0.24.0, `crossterm` 0.26 → 0.27, `miette` 5.7.0 → 5.10, `once_cell` 1.17 → 1.18, `sysinfo` 0.28.2 → 0.29, `rstest` 0.17.0 → 0.18.1; `atty` and `thiserror` are removed; several requirements are relaxed to their minor version (`chrono`, `fancy-regex`, `fuzzy-matcher`, `is_executable`, `unicode-segmentation`).

The `commandline` command implementation:

- Imports are consolidated into a single `use nu_protocol::{...}` statement.
- The signature now declares `(Type::String, Type::String)` in addition to `(Type::Nothing, Type::Nothing)` and gains a `--cursor-end` (`-e`) switch that sets the cursor position to the end of the buffer.
- The separate `repl_buffer_state` and `repl_cursor_pos` mutexes are replaced by a single `engine_state.repl_state` lock whose guard exposes `buffer` and `cursor_pos`.
- `cmd.span()?` becomes `cmd.span()`, and `Value::Nothing`/`Value::String` struct literals are replaced by the `Value::nothing(...)` and `Value::string(...)` constructors.

In `add_cli_context`, the `merge_delta` error message changes from "Error creating default context" to "Error creating CLI command context".

The `history` command implementation:

- `SqliteBackedHistory::with_file(history_path)` gains two extra `None` arguments.
- History entries and the long/short records in `create_history_record` are now built with the `record!` macro and the `Value::record`/`Value::string`/`Value::int`/`Value::duration` constructors instead of `Value::Record { cols, vals, span }` and other struct literals.
- The example changes from `history | wrap cmd | where cmd =~ cargo` to `history | where command =~ cargo | get command`.

The `keybindings` command: `extra_usage` now also points to https://www.nushell.sh/book/line_editor.html for more information on input and keybindings, and the help output is built with `Value::string(...)` instead of a `Value::String` struct literal.

The `keybindings default` command: the per-keybinding records (`mode`, `modifier`, `code`, `event`) are built with the `record!` macro and `Value::record`, and the result is returned via `Value::list(records, call.head)` instead of a `Value::List` struct literal.

The `keybindings list` command: `get_records` and `convert_to_record` now take `Span` by value instead of `&Span`, `all_options` becomes a fixed-size array, and the `type`/`name` records and the final list are built with `record!`, `Value::record`, and `Value::list`.

The `keybindings listen` command: an `extra_usage` note is added ("This is an internal debugging tool. For better output, try `input listen --types [key]`"), the record printing is adjusted to the new `Value::Record { val, .. }` representation, and the key-event records (`char`, `code`, `modifier`, `flags`, `kind`, `state`) are built with the `record!` macro and `Value::record(record, Span::unknown())`.
Ok(record)
|
Ok(Value::record(record, Span::unknown()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
let record = Value::Record {
|
let record = record! { "event" => Value::string(format!("{event:?}"), Span::unknown()) };
|
||||||
cols: vec!["event".into()],
|
Ok(Value::record(record, Span::unknown()))
|
||||||
vals: vec![Value::string(format!("{event:?}"), Span::unknown())],
|
|
||||||
span: Span::unknown(),
|
|
||||||
};
|
|
||||||
Ok(record)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -89,7 +89,7 @@ impl CommandCompletion {
 let filter_predicate = |command: &[u8]| match_algorithm.matches_u8(command, partial);

 let mut results = working_set
-.find_commands_by_predicate(filter_predicate)
+.find_commands_by_predicate(filter_predicate, true)
 .into_iter()
 .map(move |x| Suggestion {
 value: String::from_utf8_lossy(&x.0).to_string(),
@@ -205,10 +205,7 @@ impl Completer for CommandCompletion {
 vec![]
 };

-subcommands
-.into_iter()
-.chain(commands.into_iter())
-.collect::<Vec<_>>()
+subcommands.into_iter().chain(commands).collect::<Vec<_>>()
 }

 fn get_sort_by(&self) -> SortBy {
@@ -266,7 +263,7 @@ mod command_completions_tests {
 (" hello sud", 1),
 ];
 for (idx, ele) in commands.iter().enumerate() {
-let index = find_non_whitespace_index(&Vec::from(ele.0.as_bytes()), 0);
+let index = find_non_whitespace_index(ele.0.as_bytes(), 0);
 assert_eq!(index, ele.1, "Failed on index {}", idx);
 }
 }
@@ -67,20 +67,20 @@ impl NuCompleter {
 ) -> Option<Vec<Suggestion>> {
 let stack = self.stack.clone();
 let block = self.engine_state.get_block(block_id);
-let mut callee_stack = stack.gather_captures(&block.captures);
+let mut callee_stack = stack.gather_captures(&self.engine_state, &block.captures);

 // Line
 if let Some(pos_arg) = block.signature.required_positional.get(0) {
 if let Some(var_id) = pos_arg.var_id {
 callee_stack.add_var(
 var_id,
-Value::List {
-vals: spans
+Value::list(
+spans
 .iter()
 .map(|it| Value::string(it, Span::unknown()))
 .collect(),
-span: Span::unknown(),
-},
+Span::unknown(),
+),
 );
 }
 }
@@ -97,7 +97,7 @@ impl NuCompleter {
 match result {
 Ok(pd) => {
 let value = pd.into_value(span);
-if let Value::List { vals, span: _ } = value {
+if let Value::List { vals, .. } = value {
 let result =
 map_value_completions(vals.iter(), Span::new(span.start, span.end), offset);

@@ -128,6 +128,7 @@ impl NuCompleter {
 | PipelineElement::Redirection(_, _, expr)
 | PipelineElement::And(_, expr)
 | PipelineElement::Or(_, expr)
+| PipelineElement::SameTargetRedirection { cmd: (_, expr), .. }
 | PipelineElement::SeparateRedirection { out: (_, expr), .. } => {
 let flattened: Vec<_> = flatten_expression(&working_set, &expr);
 let mut spans: Vec<String> = vec![];
@@ -135,7 +136,7 @@ impl NuCompleter {
 for (flat_idx, flat) in flattened.iter().enumerate() {
 let is_passthrough_command = spans
 .first()
-.filter(|content| *content == &String::from("sudo"))
+.filter(|content| content.as_str() == "sudo")
 .is_some();
 // Read the current spam to string
 let current_span = working_set.get_span_contents(flat.0).to_vec();
@@ -453,7 +454,7 @@ pub fn map_value_completions<'a>(
 }

 // Match for record values
-if let Ok((cols, vals)) = x.as_record() {
+if let Ok(record) = x.as_record() {
 let mut suggestion = Suggestion {
 value: String::from(""), // Initialize with empty string
 description: None,
@@ -466,7 +467,7 @@ pub fn map_value_completions<'a>(
 };

 // Iterate the cols looking for `value` and `description`
-cols.iter().zip(vals).for_each(|it| {
+record.iter().for_each(|it| {
 // Match `value` column
 if it.0 == "value" {
 // Convert the value to string
@@ -500,7 +501,8 @@ mod completer_tests {

 #[test]
 fn test_completion_helper() {
-let mut engine_state = nu_command::create_default_context();
+let mut engine_state =
+nu_command::add_shell_command_context(nu_cmd_lang::create_default_context());

 // Custom additions
 let delta = {
@@ -6,7 +6,8 @@ use nu_protocol::{
 PipelineData, Span, Type, Value,
 };
 use reedline::Suggestion;
-use std::{collections::HashMap, sync::Arc};
+use std::collections::HashMap;
+use std::sync::Arc;

 use super::completer::map_value_completions;

@@ -63,7 +63,12 @@ impl Completer for DirectoryCompletion {

 match self.get_sort_by() {
 SortBy::Ascending => {
-sorted_items.sort_by(|a, b| a.value.cmp(&b.value));
+sorted_items.sort_by(|a, b| {
+// Ignore trailing slashes in folder names when sorting
+a.value
+.trim_end_matches(SEP)
+.cmp(b.value.trim_end_matches(SEP))
+});
 }
 SortBy::LevenshteinDistance => {
 sorted_items.sort_by(|a, b| {
@@ -60,7 +60,12 @@ impl Completer for FileCompletion {

 match self.get_sort_by() {
 SortBy::Ascending => {
-sorted_items.sort_by(|a, b| a.value.cmp(&b.value));
+sorted_items.sort_by(|a, b| {
+// Ignore trailing slashes in folder names when sorting
+a.value
+.trim_end_matches(SEP)
+.cmp(b.value.trim_end_matches(SEP))
+});
 }
 SortBy::LevenshteinDistance => {
 sorted_items.sort_by(|a, b| {
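
Both completer hunks above switch to a comparator that ignores a trailing path separator when ordering suggestions. A self-contained sketch of that sort using only the standard library; `SEP` here stands in for the separator constant the completers use:

// Sort path suggestions so that "src" and "src/" sort together.
const SEP: char = std::path::MAIN_SEPARATOR;

fn sort_ignoring_trailing_sep(items: &mut Vec<String>) {
    items.sort_by(|a, b| {
        // Strip the trailing separator from both sides before comparing.
        a.trim_end_matches(SEP).cmp(b.trim_end_matches(SEP))
    });
}

fn main() {
    let mut items = vec!["src/".to_string(), "src-old/".to_string(), "src".to_string()];
    sort_ignoring_trailing_sep(&mut items);
    // "src/" and "src" now sort next to each other instead of around "src-old/".
    println!("{items:?}");
}
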
@@ -235,13 +235,9 @@ fn nested_suggestions(
 let value = recursive_value(val, sublevels);

 match value {
-Value::Record {
-cols,
-vals: _,
-span: _,
-} => {
+Value::Record { val, .. } => {
 // Add all the columns as completion
-for item in cols {
+for item in val.cols {
 output.push(Suggestion {
 value: item,
 description: None,
@@ -267,7 +263,7 @@ fn nested_suggestions(

 output
 }
-Value::List { vals, span: _ } => {
+Value::List { vals, .. } => {
 for column_name in get_columns(vals.as_slice()) {
 output.push(Suggestion {
 value: column_name,
@@ -288,13 +284,10 @@ fn nested_suggestions(
 fn recursive_value(val: Value, sublevels: Vec<Vec<u8>>) -> Value {
 // Go to next sublevel
 if let Some(next_sublevel) = sublevels.clone().into_iter().next() {
+let span = val.span();
 match val {
-Value::Record {
-cols,
-vals,
-span: _,
-} => {
-for item in cols.into_iter().zip(vals.into_iter()) {
+Value::Record { val, .. } => {
+for item in val {
 // Check if index matches with sublevel
 if item.0.as_bytes().to_vec() == next_sublevel {
 // If matches try to fetch recursively the next
@@ -303,11 +296,9 @@ fn recursive_value(val: Value, sublevels: Vec<Vec<u8>>) -> Value {
 }

 // Current sublevel value not found
-return Value::Nothing {
-span: Span::unknown(),
-};
+return Value::nothing(span);
 }
-Value::LazyRecord { val, span: _ } => {
+Value::LazyRecord { val, .. } => {
 for col in val.column_names() {
 if col.as_bytes().to_vec() == next_sublevel {
 return recursive_value(
@@ -318,15 +309,13 @@ fn recursive_value(val: Value, sublevels: Vec<Vec<u8>>) -> Value {
 }

 // Current sublevel value not found
-return Value::Nothing {
-span: Span::unknown(),
-};
+return Value::nothing(span);
 }
-Value::List { vals, span } => {
+Value::List { vals, .. } => {
 for col in get_columns(vals.as_slice()) {
 if col.as_bytes().to_vec() == next_sublevel {
 return recursive_value(
-Value::List { vals, span }
+Value::list(vals, span)
 .get_data_by_key(&col)
 .unwrap_or_default(),
 sublevels.into_iter().skip(1).collect(),
@@ -335,9 +324,7 @@ fn recursive_value(val: Value, sublevels: Vec<Vec<u8>>) -> Value {
 }

 // Current sublevel value not found
-return Value::Nothing {
-span: Span::unknown(),
-};
+return Value::nothing(span);
 }
 _ => return val,
 }
@@ -2,6 +2,7 @@ use crate::util::eval_source
 use log::info;
 use log::trace;
 use miette::{IntoDiagnostic, Result};
+use nu_engine::eval_block_with_early_return;
 use nu_engine::{convert_env_values, current_dir};
 use nu_parser::parse;
 use nu_path::canonicalize_with;
@@ -9,7 +10,7 @@ use nu_protocol::report_error;
 use nu_protocol::{
 ast::Call,
 engine::{EngineState, Stack, StateWorkingSet},
-Config, PipelineData, ShellError, Span, Type, Value,
+Config, PipelineData, ShellError, Span, Value,
 };
 use nu_utils::stdout_write_all_and_flush;

@@ -98,23 +99,63 @@ pub fn evaluate_file(
 Value::string(file_path.to_string_lossy(), Span::unknown()),
 );

+let source_filename = file_path
+.file_name()
+.expect("internal error: script missing filename");
+
 let mut working_set = StateWorkingSet::new(engine_state);
 trace!("parsing file: {}", file_path_str);
-let _ = parse(&mut working_set, Some(file_path_str), &file, false);
+let block = parse(&mut working_set, Some(file_path_str), &file, false);

-if working_set.find_decl(b"main", &Type::Any).is_some() {
+if let Some(err) = working_set.parse_errors.first() {
+report_error(&working_set, err);
+std::process::exit(1);
+}
+
+for block in &mut working_set.delta.blocks {
+if block.signature.name == "main" {
+block.signature.name = source_filename.to_string_lossy().to_string();
+} else if block.signature.name.starts_with("main ") {
+block.signature.name =
+source_filename.to_string_lossy().to_string() + " " + &block.signature.name[5..];
+}
+}
+
+let _ = engine_state.merge_delta(working_set.delta);
+
+if engine_state.find_decl(b"main", &[]).is_some() {
 let args = format!("main {}", args.join(" "));

-if !eval_source(
+let pipeline_data = eval_block_with_early_return(
 engine_state,
 stack,
-&file,
-file_path_str,
+&block,
 PipelineData::empty(),
-true,
-) {
+false,
+false,
+)
+.unwrap_or_else(|e| {
+let working_set = StateWorkingSet::new(engine_state);
+report_error(&working_set, &e);
 std::process::exit(1);
+});
+
+let result = pipeline_data.print(engine_state, stack, true, false);
+
+match result {
+Err(err) => {
+let working_set = StateWorkingSet::new(engine_state);
+
+report_error(&working_set, &err);
+std::process::exit(1);
+}
+Ok(exit_code) => {
+if exit_code != 0 {
+std::process::exit(exit_code as i32);
+}
+}
 }

 if !eval_source(
 engine_state,
 stack,
@@ -148,7 +189,7 @@ pub(crate) fn print_table_or_error(
 // Change the engine_state config to use the passed in configuration
 engine_state.set_config(config);

-if let PipelineData::Value(Value::Error { error }, ..) = &pipeline_data {
+if let PipelineData::Value(Value::Error { error, .. }, ..) = &pipeline_data {
 let working_set = StateWorkingSet::new(engine_state);
 report_error(&working_set, &**error);
 std::process::exit(1);
@@ -195,7 +236,7 @@ pub(crate) fn print_table_or_error(

 fn print_or_exit(pipeline_data: PipelineData, engine_state: &mut EngineState, config: &Config) {
 for item in pipeline_data {
-if let Value::Error { error } = item {
+if let Value::Error { error, .. } = item {
 let working_set = StateWorkingSet::new(engine_state);

 report_error(&working_set, &*error);
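
The rewritten evaluate_file body above ends with a report-and-exit pattern: a failed evaluation is reported and the process exits with status 1, while a successful run forwards any non-zero exit code from the script. A rough, dependency-free sketch of that control flow; the closure and error type are placeholders, not the real nushell types:

fn finish_script(eval: impl FnOnce() -> Result<i64, String>) {
    match eval() {
        Err(err) => {
            // Report the evaluation error, then exit with a failure status.
            eprintln!("error: {err}");
            std::process::exit(1);
        }
        Ok(exit_code) => {
            // Forward a non-zero exit code from the script to the caller.
            if exit_code != 0 {
                std::process::exit(exit_code as i32);
            }
        }
    }
}

fn main() {
    // Example: a script that "succeeds" with exit code 0 falls through here.
    finish_script(|| Ok(0));
}
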
@@ -20,7 +20,7 @@ pub use config_files::eval_config_contents
 pub use eval_cmds::evaluate_commands;
 pub use eval_file::evaluate_file;
 pub use menus::{DescriptionMenu, NuHelpCompleter};
-pub use nu_command::util::get_init_cwd;
+pub use nu_cmd_base::util::get_init_cwd;
 pub use nu_highlight::NuHighlight;
 pub use print::Print;
 pub use prompt::NushellPrompt;
@@ -646,7 +646,10 @@ impl Menu for DescriptionMenu {
 |lb| {
 lb.replace_range(start..end, replacement);
 let mut offset = lb.insertion_point();
-offset += lb.len().saturating_sub(end.saturating_sub(start));
+offset += lb
+.len()
+.saturating_sub(end.saturating_sub(start))
+.saturating_sub(start);
 lb.set_insertion_point(offset);
 },
 UndoBehavior::CreateUndoPoint,
@@ -94,10 +94,10 @@ fn convert_to_suggestions(
 Some(span @ Value::Record { .. }) => {
 let start = span
 .get_data_by_key("start")
-.and_then(|val| val.as_integer().ok());
+.and_then(|val| val.as_int().ok());
 let end = span
 .get_data_by_key("end")
-.and_then(|val| val.as_integer().ok());
+.and_then(|val| val.as_int().ok());
 match (start, end) {
 (Some(start), Some(end)) => {
 let start = start.min(end);
@@ -48,14 +48,9 @@ impl Command for NuHighlight {
 Ok(line) => {
 let highlights = highlighter.highlight(&line, line.len());

-Value::String {
-val: highlights.render_simple(),
-span: head,
-}
+Value::string(highlights.render_simple(), head)
 }
-Err(err) => Value::Error {
-error: Box::new(err),
-},
+Err(err) => Value::error(err, head),
 },
 ctrlc,
 )
@@ -16,7 +16,11 @@ impl Command for Print {

 fn signature(&self) -> Signature {
 Signature::build("print")
-.input_output_types(vec![(Type::Nothing, Type::Nothing)])
+.input_output_types(vec![
+(Type::Nothing, Type::Nothing),
+(Type::Any, Type::Nothing),
+])
+.allow_variants_without_examples(true)
 .rest("rest", SyntaxShape::Any, "the values to print")
 .switch(
 "no-newline",
@@ -109,8 +109,7 @@ impl Prompt for NushellPrompt {
 let prompt = default
 .render_prompt_left()
 .to_string()
-.replace('\n', "\r\n")
-+ " ";
+.replace('\n', "\r\n");

 prompt.into()
 }
@@ -144,11 +143,11 @@ impl Prompt for NushellPrompt {
 PromptEditMode::Vi(vi_mode) => match vi_mode {
 PromptViMode::Normal => match &self.default_vi_normal_prompt_indicator {
 Some(indicator) => indicator,
-None => ": ",
+None => "> ",
 },
 PromptViMode::Insert => match &self.default_vi_insert_prompt_indicator {
 Some(indicator) => indicator,
-None => "> ",
+None => ": ",
 },
 }
 .into(),
@@ -7,7 +7,8 @@ use nu_parser::parse;
 use nu_protocol::{
 create_menus,
 engine::{EngineState, Stack, StateWorkingSet},
-extract_value, Config, ParsedKeybinding, ParsedMenu, PipelineData, ShellError, Span, Value,
+extract_value, Config, ParsedKeybinding, ParsedMenu, PipelineData, Record, ShellError, Span,
+Value,
 };
 use reedline::{
 default_emacs_keybindings, default_vi_insert_keybindings, default_vi_normal_keybindings,
@@ -130,8 +131,9 @@ fn add_menu(
 stack: &Stack,
 config: &Config,
 ) -> Result<Reedline, ShellError> {
-if let Value::Record { cols, vals, span } = &menu.menu_type {
-let layout = extract_value("layout", cols, vals, span)?.into_string("", config);
+let span = menu.menu_type.span();
+if let Value::Record { val, .. } = &menu.menu_type {
+let layout = extract_value("layout", val, span)?.into_string("", config);

 match layout.as_str() {
 "columnar" => add_columnar_menu(line_editor, menu, engine_state, stack, config),
@@ -140,22 +142,22 @@ fn add_menu(
 _ => Err(ShellError::UnsupportedConfigValue(
 "columnar, list or description".to_string(),
 menu.menu_type.into_abbreviated_string(config),
-menu.menu_type.span()?,
+menu.menu_type.span(),
 )),
 }
 } else {
 Err(ShellError::UnsupportedConfigValue(
 "only record type".to_string(),
 menu.menu_type.into_abbreviated_string(config),
-menu.menu_type.span()?,
+menu.menu_type.span(),
 ))
 }
 }

 macro_rules! add_style {
 // first arm match add!(1,2), add!(2,3) etc
-($name:expr, $cols: expr, $vals:expr, $span:expr, $config: expr, $menu:expr, $f:expr) => {
-$menu = match extract_value($name, $cols, $vals, $span) {
+($name:expr, $record: expr, $span:expr, $config: expr, $menu:expr, $f:expr) => {
+$menu = match extract_value($name, $record, $span) {
 Ok(text) => {
 let style = match text {
 Value::String { val, .. } => lookup_ansi_color_style(&val),
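
The remaining reedline-config hunks below repeat one lookup pattern: take the span once with Value::span() (now infallible), borrow the Record payload out of Value::Record { val, .. }, and read typed fields with extract_value followed by as_int. A hedged sketch of that pattern, assuming the nu-protocol signatures visible in this diff; the function name and field are illustrative only:

use nu_protocol::{extract_value, Record, ShellError, Span, Value};

// Read an optional integer field such as "columns" out of a record-valued
// config entry, falling back to None when the field is absent.
fn optional_columns(menu_type: &Value) -> Result<Option<u16>, ShellError> {
    let span: Span = menu_type.span();
    if let Value::Record { val, .. } = menu_type {
        let record: &Record = val;
        // extract_value now takes the Record directly instead of parallel
        // cols/vals slices, and the span is passed by value.
        if let Ok(columns) = extract_value("columns", record, span) {
            return Ok(Some(columns.as_int()? as u16));
        }
    }
    Ok(None)
}
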
@@ -177,40 +179,41 @@ pub(crate) fn add_columnar_menu(
 stack: &Stack,
 config: &Config,
 ) -> Result<Reedline, ShellError> {
+let span = menu.menu_type.span();
 let name = menu.name.into_string("", config);
 let mut columnar_menu = ColumnarMenu::default().with_name(&name);

-if let Value::Record { cols, vals, span } = &menu.menu_type {
-columnar_menu = match extract_value("columns", cols, vals, span) {
+if let Value::Record { val, .. } = &menu.menu_type {
+columnar_menu = match extract_value("columns", val, span) {
 Ok(columns) => {
-let columns = columns.as_integer()?;
+let columns = columns.as_int()?;
 columnar_menu.with_columns(columns as u16)
 }
 Err(_) => columnar_menu,
 };

-columnar_menu = match extract_value("col_width", cols, vals, span) {
+columnar_menu = match extract_value("col_width", val, span) {
 Ok(col_width) => {
-let col_width = col_width.as_integer()?;
+let col_width = col_width.as_int()?;
 columnar_menu.with_column_width(Some(col_width as usize))
 }
 Err(_) => columnar_menu.with_column_width(None),
 };

-columnar_menu = match extract_value("col_padding", cols, vals, span) {
+columnar_menu = match extract_value("col_padding", val, span) {
 Ok(col_padding) => {
-let col_padding = col_padding.as_integer()?;
+let col_padding = col_padding.as_int()?;
 columnar_menu.with_column_padding(col_padding as usize)
 }
 Err(_) => columnar_menu,
 };
 }

-if let Value::Record { cols, vals, span } = &menu.style {
+let span = menu.style.span();
+if let Value::Record { val, .. } = &menu.style {
 add_style!(
 "text",
-cols,
-vals,
+val,
 span,
 config,
 columnar_menu,
@@ -218,8 +221,7 @@ pub(crate) fn add_columnar_menu(
 );
 add_style!(
 "selected_text",
-cols,
-vals,
+val,
 span,
 config,
 columnar_menu,
@@ -227,8 +229,7 @@ pub(crate) fn add_columnar_menu(
 );
 add_style!(
 "description_text",
-cols,
-vals,
+val,
 span,
 config,
 columnar_menu,
@@ -242,18 +243,15 @@ pub(crate) fn add_columnar_menu(
 let only_buffer_difference = menu.only_buffer_difference.as_bool()?;
 columnar_menu = columnar_menu.with_only_buffer_difference(only_buffer_difference);

+let span = menu.source.span();
 match &menu.source {
 Value::Nothing { .. } => {
 Ok(line_editor.with_menu(ReedlineMenu::EngineCompleter(Box::new(columnar_menu))))
 }
-Value::Closure {
-val,
-captures,
-span,
-} => {
+Value::Closure { val, captures, .. } => {
 let menu_completer = NuMenuCompleter::new(
 *val,
-*span,
+span,
 stack.captures_to_stack(captures),
 engine_state,
 only_buffer_difference,
@@ -266,7 +264,7 @@ pub(crate) fn add_columnar_menu(
 _ => Err(ShellError::UnsupportedConfigValue(
 "block or omitted value".to_string(),
 menu.source.into_abbreviated_string(config),
-menu.source.span()?,
+span,
 )),
 }
 }
@@ -282,21 +280,22 @@ pub(crate) fn add_list_menu(
 let name = menu.name.into_string("", config);
 let mut list_menu = ListMenu::default().with_name(&name);

-if let Value::Record { cols, vals, span } = &menu.menu_type {
-list_menu = match extract_value("page_size", cols, vals, span) {
+let span = menu.menu_type.span();
+if let Value::Record { val, .. } = &menu.menu_type {
+list_menu = match extract_value("page_size", val, span) {
 Ok(page_size) => {
-let page_size = page_size.as_integer()?;
+let page_size = page_size.as_int()?;
 list_menu.with_page_size(page_size as usize)
 }
 Err(_) => list_menu,
 };
 }

-if let Value::Record { cols, vals, span } = &menu.style {
+let span = menu.style.span();
+if let Value::Record { val, .. } = &menu.style {
 add_style!(
 "text",
-cols,
-vals,
+val,
 span,
 config,
 list_menu,
@@ -304,8 +303,7 @@ pub(crate) fn add_list_menu(
 );
 add_style!(
 "selected_text",
-cols,
-vals,
+val,
 span,
 config,
 list_menu,
@@ -313,8 +311,7 @@ pub(crate) fn add_list_menu(
 );
 add_style!(
 "description_text",
-cols,
-vals,
+val,
 span,
 config,
 list_menu,
@@ -328,18 +325,15 @@ pub(crate) fn add_list_menu(
 let only_buffer_difference = menu.only_buffer_difference.as_bool()?;
 list_menu = list_menu.with_only_buffer_difference(only_buffer_difference);

+let span = menu.source.span();
 match &menu.source {
 Value::Nothing { .. } => {
 Ok(line_editor.with_menu(ReedlineMenu::HistoryMenu(Box::new(list_menu))))
 }
-Value::Closure {
-val,
-captures,
-span,
-} => {
+Value::Closure { val, captures, .. } => {
 let menu_completer = NuMenuCompleter::new(
 *val,
-*span,
+span,
 stack.captures_to_stack(captures),
 engine_state,
 only_buffer_difference,
@@ -352,7 +346,7 @@ pub(crate) fn add_list_menu(
 _ => Err(ShellError::UnsupportedConfigValue(
 "block or omitted value".to_string(),
 menu.source.into_abbreviated_string(config),
-menu.source.span()?,
+menu.source.span(),
 )),
 }
 }
@@ -368,53 +362,54 @@ pub(crate) fn add_description_menu(
 let name = menu.name.into_string("", config);
 let mut description_menu = DescriptionMenu::default().with_name(&name);

-if let Value::Record { cols, vals, span } = &menu.menu_type {
-description_menu = match extract_value("columns", cols, vals, span) {
+let span = menu.menu_type.span();
+if let Value::Record { val, .. } = &menu.menu_type {
+description_menu = match extract_value("columns", val, span) {
 Ok(columns) => {
-let columns = columns.as_integer()?;
+let columns = columns.as_int()?;
 description_menu.with_columns(columns as u16)
 }
 Err(_) => description_menu,
 };

-description_menu = match extract_value("col_width", cols, vals, span) {
+description_menu = match extract_value("col_width", val, span) {
 Ok(col_width) => {
-let col_width = col_width.as_integer()?;
+let col_width = col_width.as_int()?;
 description_menu.with_column_width(Some(col_width as usize))
 }
 Err(_) => description_menu.with_column_width(None),
 };

-description_menu = match extract_value("col_padding", cols, vals, span) {
+description_menu = match extract_value("col_padding", val, span) {
 Ok(col_padding) => {
-let col_padding = col_padding.as_integer()?;
+let col_padding = col_padding.as_int()?;
 description_menu.with_column_padding(col_padding as usize)
 }
 Err(_) => description_menu,
 };

-description_menu = match extract_value("selection_rows", cols, vals, span) {
+description_menu = match extract_value("selection_rows", val, span) {
 Ok(selection_rows) => {
-let selection_rows = selection_rows.as_integer()?;
+let selection_rows = selection_rows.as_int()?;
 description_menu.with_selection_rows(selection_rows as u16)
 }
 Err(_) => description_menu,
 };

-description_menu = match extract_value("description_rows", cols, vals, span) {
+description_menu = match extract_value("description_rows", val, span) {
 Ok(description_rows) => {
-let description_rows = description_rows.as_integer()?;
+let description_rows = description_rows.as_int()?;
 description_menu.with_description_rows(description_rows as usize)
 }
 Err(_) => description_menu,
 };
 }

-if let Value::Record { cols, vals, span } = &menu.style {
+let span = menu.style.span();
+if let Value::Record { val, .. } = &menu.style {
 add_style!(
 "text",
-cols,
-vals,
+val,
 span,
 config,
 description_menu,
@@ -422,8 +417,7 @@ pub(crate) fn add_description_menu(
 );
 add_style!(
 "selected_text",
-cols,
-vals,
+val,
 span,
 config,
 description_menu,
@@ -431,8 +425,7 @@ pub(crate) fn add_description_menu(
 );
 add_style!(
 "description_text",
-cols,
-vals,
+val,
 span,
 config,
 description_menu,
@@ -446,6 +439,7 @@ pub(crate) fn add_description_menu(
 let only_buffer_difference = menu.only_buffer_difference.as_bool()?;
 description_menu = description_menu.with_only_buffer_difference(only_buffer_difference);

+let span = menu.source.span();
 match &menu.source {
 Value::Nothing { .. } => {
 let completer = Box::new(NuHelpCompleter::new(engine_state));
@@ -454,14 +448,10 @@ pub(crate) fn add_description_menu(
 completer,
 }))
 }
-Value::Closure {
-val,
-captures,
-span,
-} => {
+Value::Closure { val, captures, .. } => {
 let menu_completer = NuMenuCompleter::new(
 *val,
-*span,
+span,
 stack.captures_to_stack(captures),
 engine_state,
 only_buffer_difference,
@@ -474,7 +464,7 @@ pub(crate) fn add_description_menu(
 _ => Err(ShellError::UnsupportedConfigValue(
 "closure or omitted value".to_string(),
 menu.source.into_abbreviated_string(config),
-menu.source.span()?,
+menu.source.span(),
 )),
 }
 }
@@ -486,6 +476,7 @@ fn add_menu_keybindings(keybindings: &mut Keybindings) {
 KeyCode::Tab,
 ReedlineEvent::UntilFound(vec![
 ReedlineEvent::Menu("completion_menu".to_string()),
+ReedlineEvent::MenuNext,
 ReedlineEvent::Edit(vec![EditCommand::Complete]),
 ]),
 );
@@ -523,6 +514,12 @@ fn add_menu_keybindings(keybindings: &mut Keybindings) {
 KeyCode::F(1),
 ReedlineEvent::Menu("help_menu".to_string()),
 );
+
+keybindings.add_binding(
+KeyModifiers::CONTROL,
+KeyCode::Char('q'),
+ReedlineEvent::SearchHistory,
+);
 }

 pub enum KeybindingsMode {
@@ -577,15 +574,16 @@ fn add_keybinding(
 insert_keybindings: &mut Keybindings,
 normal_keybindings: &mut Keybindings,
 ) -> Result<(), ShellError> {
+let span = mode.span();
 match &mode {
-Value::String { val, span } => match val.as_str() {
+Value::String { val, .. } => match val.as_str() {
 "emacs" => add_parsed_keybinding(emacs_keybindings, keybinding, config),
 "vi_insert" => add_parsed_keybinding(insert_keybindings, keybinding, config),
 "vi_normal" => add_parsed_keybinding(normal_keybindings, keybinding, config),
 m => Err(ShellError::UnsupportedConfigValue(
 "emacs, vi_insert or vi_normal".to_string(),
 m.to_string(),
-*span,
+span,
 )),
 },
 Value::List { vals, .. } => {
@@ -605,7 +603,7 @@ fn add_keybinding(
 v => Err(ShellError::UnsupportedConfigValue(
 "string or list of strings".to_string(),
 v.into_abbreviated_string(config),
-v.span()?,
+v.span(),
 )),
 }
 }
@@ -635,7 +633,7 @@ fn add_parsed_keybinding(
 return Err(ShellError::UnsupportedConfigValue(
 "CONTROL, SHIFT, ALT or NONE".to_string(),
 keybinding.modifier.into_abbreviated_string(config),
-keybinding.modifier.span()?,
+keybinding.modifier.span(),
 ))
 }
 };
@@ -659,7 +657,7 @@ fn add_parsed_keybinding(
 return Err(ShellError::UnsupportedConfigValue(
 "char_<CHAR: unicode codepoint>".to_string(),
 c.to_string(),
-keybinding.keycode.span()?,
+keybinding.keycode.span(),
 ));
 };

@@ -686,7 +684,7 @@ fn add_parsed_keybinding(
 .ok_or(ShellError::UnsupportedConfigValue(
 "(f1|f2|...|f20)".to_string(),
 format!("unknown function key: {c}"),
-keybinding.keycode.span()?,
+keybinding.keycode.span(),
 ))?;
 KeyCode::F(fn_num)
 }
@@ -696,7 +694,7 @@ fn add_parsed_keybinding(
 return Err(ShellError::UnsupportedConfigValue(
 "crossterm KeyCode".to_string(),
 keybinding.keycode.into_abbreviated_string(config),
-keybinding.keycode.span()?,
+keybinding.keycode.span(),
 ))
 }
 };
@@ -716,68 +714,61 @@ enum EventType<'config> {
 }

 impl<'config> EventType<'config> {
-fn try_from_columns(
-cols: &'config [String],
-vals: &'config [Value],
-span: &'config Span,
-) -> Result<Self, ShellError> {
-extract_value("send", cols, vals, span)
+fn try_from_record(record: &'config Record, span: Span) -> Result<Self, ShellError> {
+extract_value("send", record, span)
 .map(Self::Send)
-.or_else(|_| extract_value("edit", cols, vals, span).map(Self::Edit))
+.or_else(|_| extract_value("edit", record, span).map(Self::Edit))
-.or_else(|_| extract_value("until", cols, vals, span).map(Self::Until))
+.or_else(|_| extract_value("until", record, span).map(Self::Until))
-.map_err(|_| ShellError::MissingConfigValue("send, edit or until".to_string(), *span))
+.map_err(|_| ShellError::MissingConfigValue("send, edit or until".to_string(), span))
 }
 }

 fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>, ShellError> {
+let span = value.span();
 match value {
-Value::Record { cols, vals, span } => {
-match EventType::try_from_columns(cols, vals, span)? {
-EventType::Send(value) => event_from_record(
+Value::Record { val: record, .. } => match EventType::try_from_record(record, span)? {
+EventType::Send(value) => event_from_record(
+value.into_string("", config).to_lowercase().as_str(),
+record,
+config,
+span,
+)
+.map(Some),
+EventType::Edit(value) => {
+let edit = edit_from_record(
 value.into_string("", config).to_lowercase().as_str(),
-cols,
-vals,
+record,
 config,
 span,
-)
-.map(Some),
-EventType::Edit(value) => {
-let edit = edit_from_record(
-value.into_string("", config).to_lowercase().as_str(),
-cols,
-vals,
-config,
-span,
-)?;
-Ok(Some(ReedlineEvent::Edit(vec![edit])))
-}
-EventType::Until(value) => match value {
-Value::List { vals, .. } => {
-let events = vals
-.iter()
-.map(|value| match parse_event(value, config) {
-Ok(inner) => match inner {
-None => Err(ShellError::UnsupportedConfigValue(
-"List containing valid events".to_string(),
-"Nothing value (null)".to_string(),
-value.span()?,
-)),
-Some(event) => Ok(event),
-},
-Err(e) => Err(e),
-})
-.collect::<Result<Vec<ReedlineEvent>, ShellError>>()?;
-
-Ok(Some(ReedlineEvent::UntilFound(events)))
-}
-v => Err(ShellError::UnsupportedConfigValue(
-"list of events".to_string(),
-v.into_abbreviated_string(config),
-v.span()?,
-)),
-},
+)?;
+Ok(Some(ReedlineEvent::Edit(vec![edit])))
 }
-}
+EventType::Until(value) => match value {
+Value::List { vals, .. } => {
+let events = vals
+.iter()
+.map(|value| match parse_event(value, config) {
+Ok(inner) => match inner {
+None => Err(ShellError::UnsupportedConfigValue(
+"List containing valid events".to_string(),
+"Nothing value (null)".to_string(),
+value.span(),
+)),
+Some(event) => Ok(event),
+},
+Err(e) => Err(e),
+})
+.collect::<Result<Vec<ReedlineEvent>, ShellError>>()?;
+
+Ok(Some(ReedlineEvent::UntilFound(events)))
+}
+v => Err(ShellError::UnsupportedConfigValue(
+"list of events".to_string(),
+v.into_abbreviated_string(config),
+v.span(),
+)),
+},
+},
 Value::List { vals, .. } => {
 let events = vals
 .iter()
@@ -786,7 +777,7 @@ fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>,
 None => Err(ShellError::UnsupportedConfigValue(
 "List containing valid events".to_string(),
 "Nothing value (null)".to_string(),
-value.span()?,
+value.span(),
 )),
 Some(event) => Ok(event),
 },
@@ -800,17 +791,16 @@ fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>,
 v => Err(ShellError::UnsupportedConfigValue(
 "record or list of records, null to unbind key".to_string(),
 v.into_abbreviated_string(config),
-v.span()?,
+v.span(),
 )),
 }
 }

 fn event_from_record(
 name: &str,
-cols: &[String],
-vals: &[Value],
+record: &Record,
 config: &Config,
-span: &Span,
+span: Span,
 ) -> Result<ReedlineEvent, ShellError> {
 let event = match name {
 "none" => ReedlineEvent::None,
@@ -842,18 +832,18 @@ fn event_from_record(
 "menupageprevious" => ReedlineEvent::MenuPagePrevious,
 "openeditor" => ReedlineEvent::OpenEditor,
 "menu" => {
-let menu = extract_value("name", cols, vals, span)?;
+let menu = extract_value("name", record, span)?;
 ReedlineEvent::Menu(menu.into_string("", config))
 }
 "executehostcommand" => {
-let cmd = extract_value("cmd", cols, vals, span)?;
+let cmd = extract_value("cmd", record, span)?;
 ReedlineEvent::ExecuteHostCommand(cmd.into_string("", config))
 }
 v => {
 return Err(ShellError::UnsupportedConfigValue(
 "Reedline event".to_string(),
 v.to_string(),
-*span,
+span,
 ))
 }
 };
@@ -863,10 +853,9 @@ fn event_from_record(

 fn edit_from_record(
 name: &str,
-cols: &[String],
-vals: &[Value],
+record: &Record,
 config: &Config,
-span: &Span,
+span: Span,
 ) -> Result<EditCommand, ShellError> {
 let edit = match name {
 "movetostart" => EditCommand::MoveToStart,
@@ -883,16 +872,16 @@ fn edit_from_record(
 "movewordrightstart" => EditCommand::MoveWordRightStart,
 "movebigwordrightstart" => EditCommand::MoveBigWordRightStart,
 "movetoposition" => {
-let value = extract_value("value", cols, vals, span)?;
-EditCommand::MoveToPosition(value.as_integer()? as usize)
+let value = extract_value("value", record, span)?;
+EditCommand::MoveToPosition(value.as_int()? as usize)
 }
 "insertchar" => {
-let value = extract_value("value", cols, vals, span)?;
+let value = extract_value("value", record, span)?;
 let char = extract_char(value, config)?;
 EditCommand::InsertChar(char)
 }
 "insertstring" => {
-let value = extract_value("value", cols, vals, span)?;
+let value = extract_value("value", record, span)?;
 EditCommand::InsertString(value.into_string("", config))
 }
 "insertnewline" => EditCommand::InsertNewline,
@@ -924,42 +913,42 @@ fn edit_from_record(
 "undo" => EditCommand::Undo,
 "redo" => EditCommand::Redo,
 "cutrightuntil" => {
-let value = extract_value("value", cols, vals, span)?;
+let value = extract_value("value", record, span)?;
 let char = extract_char(value, config)?;
 EditCommand::CutRightUntil(char)
 }
 "cutrightbefore" => {
-let value = extract_value("value", cols, vals, span)?;
+let value = extract_value("value", record, span)?;
 let char = extract_char(value, config)?;
 EditCommand::CutRightBefore(char)
 }
 "moverightuntil" => {
-let value = extract_value("value", cols, vals, span)?;
+let value = extract_value("value", record, span)?;
 let char = extract_char(value, config)?;
 EditCommand::MoveRightUntil(char)
 }
 "moverightbefore" => {
-let value = extract_value("value", cols, vals, span)?;
+let value = extract_value("value", record, span)?;
 let char = extract_char(value, config)?;
 EditCommand::MoveRightBefore(char)
 }
 "cutleftuntil" => {
-let value = extract_value("value", cols, vals, span)?;
+let value = extract_value("value", record, span)?;
 let char = extract_char(value, config)?;
 EditCommand::CutLeftUntil(char)
 }
 "cutleftbefore" => {
-let value = extract_value("value", cols, vals, span)?;
+let value = extract_value("value", record, span)?;
 let char = extract_char(value, config)?;
 EditCommand::CutLeftBefore(char)
 }
 "moveleftuntil" => {
-let value = extract_value("value", cols, vals, span)?;
+let value = extract_value("value", record, span)?;
 let char = extract_char(value, config)?;
 EditCommand::MoveLeftUntil(char)
 }
 "moveleftbefore" => {
-let value = extract_value("value", cols, vals, span)?;
+let value = extract_value("value", record, span)?;
 let char = extract_char(value, config)?;
 EditCommand::MoveLeftBefore(char)
 }
@@ -968,7 +957,7 @@ fn edit_from_record(
 return Err(ShellError::UnsupportedConfigValue(
 "reedline EditCommand".to_string(),
 e.to_string(),
-*span,
+span,
 ))
 }
 };
@@ -977,7 +966,7 @@ fn edit_from_record(
 }

 fn extract_char(value: &Value, config: &Config) -> Result<char, ShellError> {
-let span = value.span()?;
+let span = value.span();
 value
 .into_string("", config)
 .chars()
@@ -993,16 +982,13 @@ mod test {
 fn test_send_event() {
 let cols = vec!["send".to_string()];
 let vals = vec![Value::test_string("Enter")];
|
let event = Record { vals, cols };
|
||||||
|
|
||||||
let span = Span::test_data();
|
let span = Span::test_data();
|
||||||
let b = EventType::try_from_columns(&cols, &vals, &span).unwrap();
|
let b = EventType::try_from_record(&event, span).unwrap();
|
||||||
assert!(matches!(b, EventType::Send(_)));
|
assert!(matches!(b, EventType::Send(_)));
|
||||||
|
|
||||||
let event = Value::Record {
|
let event = Value::test_record(event);
|
||||||
vals,
|
|
||||||
cols,
|
|
||||||
span: Span::test_data(),
|
|
||||||
};
|
|
||||||
let config = Config::default();
|
let config = Config::default();
|
||||||
|
|
||||||
let parsed_event = parse_event(&event, &config).unwrap();
|
let parsed_event = parse_event(&event, &config).unwrap();
|
||||||
@ -1013,16 +999,13 @@ mod test {
|
|||||||
fn test_edit_event() {
|
fn test_edit_event() {
|
||||||
let cols = vec!["edit".to_string()];
|
let cols = vec!["edit".to_string()];
|
||||||
let vals = vec![Value::test_string("Clear")];
|
let vals = vec![Value::test_string("Clear")];
|
||||||
|
let event = Record { vals, cols };
|
||||||
|
|
||||||
let span = Span::test_data();
|
let span = Span::test_data();
|
||||||
let b = EventType::try_from_columns(&cols, &vals, &span).unwrap();
|
let b = EventType::try_from_record(&event, span).unwrap();
|
||||||
assert!(matches!(b, EventType::Edit(_)));
|
assert!(matches!(b, EventType::Edit(_)));
|
||||||
|
|
||||||
let event = Value::Record {
|
let event = Value::test_record(event);
|
||||||
vals,
|
|
||||||
cols,
|
|
||||||
span: Span::test_data(),
|
|
||||||
};
|
|
||||||
let config = Config::default();
|
let config = Config::default();
|
||||||
|
|
||||||
let parsed_event = parse_event(&event, &config).unwrap();
|
let parsed_event = parse_event(&event, &config).unwrap();
|
||||||
@ -1039,16 +1022,13 @@ mod test {
|
|||||||
Value::test_string("Menu"),
|
Value::test_string("Menu"),
|
||||||
Value::test_string("history_menu"),
|
Value::test_string("history_menu"),
|
||||||
];
|
];
|
||||||
|
let event = Record { vals, cols };
|
||||||
|
|
||||||
let span = Span::test_data();
|
let span = Span::test_data();
|
||||||
let b = EventType::try_from_columns(&cols, &vals, &span).unwrap();
|
let b = EventType::try_from_record(&event, span).unwrap();
|
||||||
assert!(matches!(b, EventType::Send(_)));
|
assert!(matches!(b, EventType::Send(_)));
|
||||||
|
|
||||||
let event = Value::Record {
|
let event = Value::test_record(event);
|
||||||
vals,
|
|
||||||
cols,
|
|
||||||
span: Span::test_data(),
|
|
||||||
};
|
|
||||||
let config = Config::default();
|
let config = Config::default();
|
||||||
|
|
||||||
let parsed_event = parse_event(&event, &config).unwrap();
|
let parsed_event = parse_event(&event, &config).unwrap();
|
||||||
@ -1067,38 +1047,27 @@ mod test {
|
|||||||
Value::test_string("history_menu"),
|
Value::test_string("history_menu"),
|
||||||
];
|
];
|
||||||
|
|
||||||
let menu_event = Value::Record {
|
let menu_event = Value::test_record(Record { cols, vals });
|
||||||
cols,
|
|
||||||
vals,
|
|
||||||
span: Span::test_data(),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Enter event
|
// Enter event
|
||||||
let cols = vec!["send".to_string()];
|
let cols = vec!["send".to_string()];
|
||||||
let vals = vec![Value::test_string("Enter")];
|
let vals = vec![Value::test_string("Enter")];
|
||||||
|
|
||||||
let enter_event = Value::Record {
|
let enter_event = Value::test_record(Record { cols, vals });
|
||||||
cols,
|
|
||||||
vals,
|
|
||||||
span: Span::test_data(),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Until event
|
// Until event
|
||||||
let cols = vec!["until".to_string()];
|
let cols = vec!["until".to_string()];
|
||||||
let vals = vec![Value::List {
|
let vals = vec![Value::list(
|
||||||
vals: vec![menu_event, enter_event],
|
vec![menu_event, enter_event],
|
||||||
span: Span::test_data(),
|
Span::test_data(),
|
||||||
}];
|
)];
|
||||||
|
let event = Record { cols, vals };
|
||||||
|
|
||||||
let span = Span::test_data();
|
let span = Span::test_data();
|
||||||
let b = EventType::try_from_columns(&cols, &vals, &span).unwrap();
|
let b = EventType::try_from_record(&event, span).unwrap();
|
||||||
assert!(matches!(b, EventType::Until(_)));
|
assert!(matches!(b, EventType::Until(_)));
|
||||||
|
|
||||||
let event = Value::Record {
|
let event = Value::test_record(event);
|
||||||
cols,
|
|
||||||
vals,
|
|
||||||
span: Span::test_data(),
|
|
||||||
};
|
|
||||||
let config = Config::default();
|
let config = Config::default();
|
||||||
|
|
||||||
let parsed_event = parse_event(&event, &config).unwrap();
|
let parsed_event = parse_event(&event, &config).unwrap();
|
||||||
@ -1120,27 +1089,16 @@ mod test {
|
|||||||
Value::test_string("history_menu"),
|
Value::test_string("history_menu"),
|
||||||
];
|
];
|
||||||
|
|
||||||
let menu_event = Value::Record {
|
let menu_event = Value::test_record(Record { cols, vals });
|
||||||
cols,
|
|
||||||
vals,
|
|
||||||
span: Span::test_data(),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Enter event
|
// Enter event
|
||||||
let cols = vec!["send".to_string()];
|
let cols = vec!["send".to_string()];
|
||||||
let vals = vec![Value::test_string("Enter")];
|
let vals = vec![Value::test_string("Enter")];
|
||||||
|
|
||||||
let enter_event = Value::Record {
|
let enter_event = Value::test_record(Record { cols, vals });
|
||||||
cols,
|
|
||||||
vals,
|
|
||||||
span: Span::test_data(),
|
|
||||||
};
|
|
||||||
|
|
||||||
// Multiple event
|
// Multiple event
|
||||||
let event = Value::List {
|
let event = Value::list(vec![menu_event, enter_event], Span::test_data());
|
||||||
vals: vec![menu_event, enter_event],
|
|
||||||
span: Span::test_data(),
|
|
||||||
};
|
|
||||||
|
|
||||||
let config = Config::default();
|
let config = Config::default();
|
||||||
let parsed_event = parse_event(&event, &config).unwrap();
|
let parsed_event = parse_event(&event, &config).unwrap();
|
||||||
@ -1157,9 +1115,10 @@ mod test {
|
|||||||
fn test_error() {
|
fn test_error() {
|
||||||
let cols = vec!["not_exist".to_string()];
|
let cols = vec!["not_exist".to_string()];
|
||||||
let vals = vec![Value::test_string("Enter")];
|
let vals = vec![Value::test_string("Enter")];
|
||||||
|
let event = Record { cols, vals };
|
||||||
|
|
||||||
let span = Span::test_data();
|
let span = Span::test_data();
|
||||||
let b = EventType::try_from_columns(&cols, &vals, &span);
|
let b = EventType::try_from_record(&event, span);
|
||||||
assert!(matches!(b, Err(ShellError::MissingConfigValue(_, _))));
|
assert!(matches!(b, Err(ShellError::MissingConfigValue(_, _))));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -7,22 +7,27 @@ use crate::{
 };
 use crossterm::cursor::SetCursorStyle;
 use log::{trace, warn};
-use miette::{IntoDiagnostic, Result};
+use miette::{ErrReport, IntoDiagnostic, Result};
+use nu_cmd_base::hook::eval_hook;
+use nu_cmd_base::util::get_guaranteed_cwd;
 use nu_color_config::StyleComputer;
-use nu_command::hook::eval_hook;
-use nu_command::util::get_guaranteed_cwd;
 use nu_engine::convert_env_values;
-use nu_parser::{lex, trim_quotes_str};
+use nu_parser::{lex, parse, trim_quotes_str};
 use nu_protocol::{
 config::NuCursorShape,
 engine::{EngineState, Stack, StateWorkingSet},
+eval_const::create_nu_constant,
 report_error, report_error_new, HistoryFileFormat, PipelineData, ShellError, Span, Spanned,
-Value,
+Value, NU_VARIABLE_ID,
 };
 use nu_utils::utils::perf;
-use reedline::{CursorConfig, DefaultHinter, EditCommand, Emacs, SqliteBackedHistory, Vi};
+use reedline::{
+CursorConfig, DefaultHinter, EditCommand, Emacs, FileBackedHistory, HistorySessionId, Reedline,
+SqliteBackedHistory, Vi,
+};
 use std::{
-io::{self, Write},
+io::{self, IsTerminal, Write},
+path::Path,
 sync::atomic::Ordering,
 time::Instant,
 };
@@ -46,13 +51,13 @@ pub fn evaluate_repl(
 load_std_lib: Option<Spanned<String>>,
 entire_start_time: Instant,
 ) -> Result<()> {
-use nu_command::hook;
-use reedline::{FileBackedHistory, Reedline, Signal};
+use nu_cmd_base::hook;
+use reedline::Signal;
 let use_color = engine_state.get_config().use_ansi_coloring;

 // Guard against invocation without a connected terminal.
 // reedline / crossterm event polling will fail without a connected tty
-if !atty::is(atty::Stream::Stdin) {
+if !std::io::stdin().is_terminal() {
 return Err(std::io::Error::new(
 std::io::ErrorKind::NotFound,
 "Nushell launched as a REPL, but STDIN is not a TTY; either launch in a valid terminal or provide arguments to invoke a script!",
@@ -91,11 +96,7 @@ pub fn evaluate_repl(
 let mut line_editor = Reedline::create();

 // Now that reedline is created, get the history session id and store it in engine_state
-let hist_sesh = line_editor
-.get_history_session_id()
-.map(i64::from)
-.unwrap_or(0);
-engine_state.history_session_id = hist_sesh;
+store_history_id_in_engine(engine_state, &line_editor);
 perf(
 "setup reedline",
 start_time,
@@ -127,21 +128,8 @@ pub fn evaluate_repl(
 engine_state.config.history_file_format,
 );
 if let Some(history_path) = history_path.as_deref() {
-let history: Box<dyn reedline::History> = match engine_state.config.history_file_format {
-HistoryFileFormat::PlainText => Box::new(
-FileBackedHistory::with_file(
-config.max_history_size as usize,
-history_path.to_path_buf(),
-)
-.into_diagnostic()?,
-),
-HistoryFileFormat::Sqlite => Box::new(
-SqliteBackedHistory::with_file(history_path.to_path_buf()).into_diagnostic()?,
-),
-};
-line_editor = line_editor
-.with_history_session_id(history_session_id)
-.with_history(history);
+line_editor =
+update_line_editor_history(engine_state, history_path, line_editor, history_session_id)?
 };
 perf(
 "setup history",
@@ -177,6 +165,10 @@ pub fn evaluate_repl(

 engine_state.set_startup_time(entire_start_time.elapsed().as_nanos() as i64);

+// Regenerate the $nu constant to contain the startup time and any other potential updates
+let nu_const = create_nu_constant(engine_state, Span::unknown())?;
+engine_state.set_variable_const_val(NU_VARIABLE_ID, nu_const);

 if load_std_lib.is_none() && engine_state.get_config().show_banner {
 eval_source(
 engine_state,
@@ -243,13 +235,15 @@ pub fn evaluate_repl(

 // Find the configured cursor shapes for each mode
 let cursor_config = CursorConfig {
-vi_insert: Some(map_nucursorshape_to_cursorshape(
-config.cursor_shape_vi_insert,
-)),
-vi_normal: Some(map_nucursorshape_to_cursorshape(
-config.cursor_shape_vi_normal,
-)),
-emacs: Some(map_nucursorshape_to_cursorshape(config.cursor_shape_emacs)),
+vi_insert: config
+.cursor_shape_vi_insert
+.map(map_nucursorshape_to_cursorshape),
+vi_normal: config
+.cursor_shape_vi_normal
+.map(map_nucursorshape_to_cursorshape),
+emacs: config
+.cursor_shape_emacs
+.map(map_nucursorshape_to_cursorshape),
 };
 perf(
 "get config/cursor config",
@@ -403,7 +397,7 @@ pub fn evaluate_repl(
 // Right before we start our prompt and take input from the user,
 // fire the "pre_prompt" hook
 if let Some(hook) = config.hooks.pre_prompt.clone() {
-if let Err(err) = eval_hook(engine_state, stack, None, vec![], &hook) {
+if let Err(err) = eval_hook(engine_state, stack, None, vec![], &hook, "pre_prompt") {
 report_error_new(engine_state, &err);
 }
 }
@@ -474,30 +468,21 @@ pub fn evaluate_repl(
 // hook
 if let Some(hook) = config.hooks.pre_execution.clone() {
 // Set the REPL buffer to the current command for the "pre_execution" hook
-let mut repl_buffer = engine_state
-.repl_buffer_state
-.lock()
-.expect("repl buffer state mutex");
-*repl_buffer = s.to_string();
-drop(repl_buffer);
+let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
+repl.buffer = s.to_string();
+drop(repl);

-if let Err(err) = eval_hook(engine_state, stack, None, vec![], &hook) {
+if let Err(err) =
+eval_hook(engine_state, stack, None, vec![], &hook, "pre_execution")
+{
 report_error_new(engine_state, &err);
 }
 }

-let mut repl_cursor = engine_state
-.repl_cursor_pos
-.lock()
-.expect("repl cursor pos mutex");
-*repl_cursor = line_editor.current_insertion_point();
-drop(repl_cursor);
-let mut repl_buffer = engine_state
-.repl_buffer_state
-.lock()
-.expect("repl buffer state mutex");
-*repl_buffer = line_editor.current_buffer_contents().to_string();
-drop(repl_buffer);
+let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
+repl.cursor_pos = line_editor.current_insertion_point();
+repl.buffer = line_editor.current_buffer_contents().to_string();
+drop(repl);

 if shell_integration {
 run_ansi_sequence(PRE_EXECUTE_MARKER)?;
@@ -531,24 +516,12 @@ pub fn evaluate_repl(
 (path.to_string_lossy().to_string(), tokens.0[0].span)
 };

-stack.add_env_var(
-"OLDPWD".into(),
-Value::String {
-val: cwd.clone(),
-span: Span::unknown(),
-},
-);
+stack.add_env_var("OLDPWD".into(), Value::string(cwd.clone(), Span::unknown()));

 //FIXME: this only changes the current scope, but instead this environment variable
 //should probably be a block that loads the information from the state in the overlay
-stack.add_env_var(
-"PWD".into(),
-Value::String {
-val: path.clone(),
-span: Span::unknown(),
-},
-);
-let cwd = Value::String { val: cwd, span };
+stack.add_env_var("PWD".into(), Value::string(path.clone(), Span::unknown()));
+let cwd = Value::string(cwd, span);

 let shells = stack.get_env_var(engine_state, "NUSHELL_SHELLS");
 let mut shells = if let Some(v) = shells {
@@ -561,31 +534,49 @@ pub fn evaluate_repl(

 let current_shell = stack.get_env_var(engine_state, "NUSHELL_CURRENT_SHELL");
 let current_shell = if let Some(v) = current_shell {
-v.as_integer().unwrap_or_default() as usize
+v.as_int().unwrap_or_default() as usize
 } else {
 0
 };

 let last_shell = stack.get_env_var(engine_state, "NUSHELL_LAST_SHELL");
 let last_shell = if let Some(v) = last_shell {
-v.as_integer().unwrap_or_default() as usize
+v.as_int().unwrap_or_default() as usize
 } else {
 0
 };

-shells[current_shell] = Value::String { val: path, span };
+shells[current_shell] = Value::string(path, span);

-stack.add_env_var("NUSHELL_SHELLS".into(), Value::List { vals: shells, span });
+stack.add_env_var("NUSHELL_SHELLS".into(), Value::list(shells, span));
 stack.add_env_var(
 "NUSHELL_LAST_SHELL".into(),
-Value::Int {
-val: last_shell as i64,
-span,
-},
+Value::int(last_shell as i64, span),
 );
 } else if !s.trim().is_empty() {
 trace!("eval source: {}", s);

+let mut cmds = s.split_whitespace();
+if let Some("exit") = cmds.next() {
+let mut working_set = StateWorkingSet::new(engine_state);
+let _ = parse(&mut working_set, None, s.as_bytes(), false);

+if working_set.parse_errors.is_empty() {
+match cmds.next() {
+Some(s) => {
+if let Ok(n) = s.parse::<i32>() {
+drop(line_editor);
+std::process::exit(n);
+}
+}
+None => {
+drop(line_editor);
+std::process::exit(0);
+}
+}
+}
+}

 eval_source(
 engine_state,
 stack,
@@ -594,15 +585,16 @@ pub fn evaluate_repl(
 PipelineData::empty(),
 false,
 );
+if engine_state.get_config().bracketed_paste {
+#[cfg(not(target_os = "windows"))]
+let _ = line_editor.enable_bracketed_paste();
+}
 }
 let cmd_duration = start_time.elapsed();

 stack.add_env_var(
 "CMD_DURATION_MS".into(),
-Value::String {
-val: format!("{}", cmd_duration.as_millis()),
-span: Span::unknown(),
-},
+Value::string(format!("{}", cmd_duration.as_millis()), Span::unknown()),
 );

 if history_supports_meta && !s.is_empty() && line_editor.has_last_command_context()
@@ -654,23 +646,15 @@ pub fn evaluate_repl(
 run_ansi_sequence(RESET_APPLICATION_MODE)?;
 }

-let mut repl_buffer = engine_state
-.repl_buffer_state
-.lock()
-.expect("repl buffer state mutex");
-let mut repl_cursor_pos = engine_state
-.repl_cursor_pos
-.lock()
-.expect("repl cursor pos mutex");
+let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
 line_editor.run_edit_commands(&[
 EditCommand::Clear,
-EditCommand::InsertString(repl_buffer.to_string()),
-EditCommand::MoveToPosition(*repl_cursor_pos),
+EditCommand::InsertString(repl.buffer.to_string()),
+EditCommand::MoveToPosition(repl.cursor_pos),
 ]);
-*repl_buffer = "".to_string();
-drop(repl_buffer);
-*repl_cursor_pos = 0;
-drop(repl_cursor_pos);
+repl.buffer = "".to_string();
+repl.cursor_pos = 0;
+drop(repl);
 }
 Ok(Signal::CtrlC) => {
 // `Reedline` clears the line content. New prompt is shown
@@ -722,6 +706,49 @@ pub fn evaluate_repl(
 Ok(())
 }

+fn store_history_id_in_engine(engine_state: &mut EngineState, line_editor: &Reedline) {
+let session_id = line_editor
+.get_history_session_id()
+.map(i64::from)
+.unwrap_or(0);

+engine_state.history_session_id = session_id;
+}

+fn update_line_editor_history(
+engine_state: &mut EngineState,
+history_path: &Path,
+line_editor: Reedline,
+history_session_id: Option<HistorySessionId>,
+) -> Result<Reedline, ErrReport> {
+let config = engine_state.get_config();
+let history: Box<dyn reedline::History> = match engine_state.config.history_file_format {
+HistoryFileFormat::PlainText => Box::new(
+FileBackedHistory::with_file(
+config.max_history_size as usize,
+history_path.to_path_buf(),
+)
+.into_diagnostic()?,
+),
+HistoryFileFormat::Sqlite => Box::new(
+SqliteBackedHistory::with_file(
+history_path.to_path_buf(),
+history_session_id,
+Some(chrono::Utc::now()),
+)
+.into_diagnostic()?,
+),
+};
+let line_editor = line_editor
+.with_history_session_id(history_session_id)
+.with_history_exclusion_prefix(Some(" ".into()))
+.with_history(history);

+store_history_id_in_engine(engine_state, &line_editor);

+Ok(line_editor)
+}

 fn map_nucursorshape_to_cursorshape(shape: NuCursorShape) -> SetCursorStyle {
 match shape {
 NuCursorShape::Block => SetCursorStyle::SteadyBlock,
@@ -793,3 +820,20 @@ fn looks_like_path_windows_drive_path_works() {
 assert_eq!(looks_like_path("F:\\some_dir"), on_windows);
 assert_eq!(looks_like_path("G:/some_dir"), on_windows);
 }

+#[test]
+fn are_session_ids_in_sync() {
+let engine_state = &mut EngineState::new();
+let history_path_o =
+crate::config_files::get_history_path("nushell", engine_state.config.history_file_format);
+assert!(history_path_o.is_some());
+let history_path = history_path_o.as_deref().unwrap();
+let line_editor = reedline::Reedline::create();
+let history_session_id = reedline::Reedline::create_history_session_id();
+let line_editor =
+update_line_editor_history(engine_state, history_path, line_editor, history_session_id);
+assert_eq!(
+i64::from(line_editor.unwrap().get_history_session_id().unwrap()),
+engine_state.history_session_id
+);
+}

@@ -59,7 +59,7 @@ impl Highlighter for NuHighlighter {
 ($shape:expr, $span:expr, $text:expr) => {{
 let spans = split_span_by_highlight_positions(
 line,
-&$span,
+$span,
 &matching_brackets_pos,
 global_span_offset,
 );
@@ -143,8 +143,8 @@ impl Highlighter for NuHighlighter {

 fn split_span_by_highlight_positions(
 line: &str,
-span: &Span,
-highlight_positions: &Vec<usize>,
+span: Span,
+highlight_positions: &[usize],
 global_span_offset: usize,
 ) -> Vec<(Span, bool)> {
 let mut start = span.start;
@@ -237,6 +237,7 @@ fn find_matching_block_end_in_block(
 | PipelineElement::Redirection(_, _, e)
 | PipelineElement::And(_, e)
 | PipelineElement::Or(_, e)
+| PipelineElement::SameTargetRedirection { cmd: (_, e), .. }
 | PipelineElement::SeparateRedirection { out: (_, e), .. } => {
 if e.span.contains(global_cursor_offset) {
 if let Some(pos) = find_matching_block_end_in_expr(
@@ -1,4 +1,4 @@
-use nu_command::hook::eval_hook;
+use nu_cmd_base::hook::eval_hook;
 use nu_engine::{eval_block, eval_block_with_early_return};
 use nu_parser::{escape_quote_string, lex, parse, unescape_unquote_string, Token, TokenContents};
 use nu_protocol::engine::StateWorkingSet;
@@ -105,7 +105,7 @@ fn gather_env_vars(
 span: full_span,
 } = token
 {
-let contents = engine_state.get_span_contents(&full_span);
+let contents = engine_state.get_span_contents(full_span);
 let (parts, _) = lex(contents, full_span.start, &[], &[b'='], true);

 let name = if let Some(Token {
@@ -185,10 +185,7 @@ fn gather_env_vars(
 continue;
 }

-Value::String {
-val: bytes,
-span: *span,
-}
+Value::string(bytes, *span)
 } else {
 report_capture_error(
 engine_state,
@@ -257,7 +254,14 @@ pub fn eval_source(
 {
 result = print_if_stream(stream, stderr_stream, false, exit_code);
 } else if let Some(hook) = config.hooks.display_output.clone() {
-match eval_hook(engine_state, stack, Some(pipeline_data), vec![], &hook) {
+match eval_hook(
+engine_state,
+stack,
+Some(pipeline_data),
+vec![],
+&hook,
+"display_output",
+) {
 Err(err) => {
 result = Err(err);
 }

@@ -143,7 +143,7 @@ fn external_completer_trailing_space() {

 #[test]
 fn external_completer_no_trailing_space() {
-let block = "let external_completer = {|spans| $spans}";
+let block = "{|spans| $spans}";
 let input = "gh alias".to_string();

 let suggestions = run_external_completion(block, &input);
@@ -154,7 +154,7 @@ fn external_completer_no_trailing_space() {

 #[test]
 fn external_completer_pass_flags() {
-let block = "let external_completer = {|spans| $spans}";
+let block = "{|spans| $spans}";
 let input = "gh api --".to_string();

 let suggestions = run_external_completion(block, &input);
@@ -538,7 +538,7 @@ fn variables_completions() {
 "loginshell-path".into(),
 "os-info".into(),
 "pid".into(),
-"scope".into(),
+"plugin-path".into(),
 "startup-time".into(),
 "temp-path".into(),
 ];
@@ -568,88 +568,6 @@ fn variables_completions() {
 // Match results
 match_suggestions(expected, suggestions);

-// Test completions for $nu.scope
-let suggestions = completer.complete("$nu.scope.", 10);
-assert_eq!(5, suggestions.len());
-let expected: Vec<String> = vec![
-"aliases".into(),
-"commands".into(),
-"engine_state".into(),
-"modules".into(),
-"vars".into(),
-];
-// Match results
-match_suggestions(expected, suggestions);
-
-// Test completions for $nu.scope.commands
-let suggestions = completer.complete("$nu.scope.commands.", 19);
-assert_eq!(15, suggestions.len());
-let expected: Vec<String> = vec![
-"category".into(),
-"creates_scope".into(),
-"examples".into(),
-"extra_usage".into(),
-"is_builtin".into(),
-"is_custom".into(),
-"is_extern".into(),
-"is_keyword".into(),
-"is_plugin".into(),
-"is_sub".into(),
-"module_name".into(),
-"name".into(),
-"search_terms".into(),
-"signatures".into(),
-"usage".into(),
-];
-// Match results
-match_suggestions(expected, suggestions);
-
-// Test completions for $nu.scope.commands.signatures
-let suggestions = completer.complete("$nu.scope.commands.signatures.", 30);
-assert_eq!(17, suggestions.len());
-let expected: Vec<String> = vec![
-"any".into(),
-"binary".into(),
-"bool".into(),
-"datetime".into(),
-"duration".into(),
-"filesize".into(),
-"int".into(),
-"list<any>".into(),
-"list<binary>".into(),
-"list<number>".into(),
-"list<string>".into(),
-"nothing".into(),
-"number".into(),
-"range".into(),
-"record".into(),
-"string".into(),
-"table".into(),
-];
-// Match results
-match_suggestions(expected, suggestions);
-
-// Test completions for $nu.scope.engine_state
-let suggestions = completer.complete("$nu.scope.engine_state.", 23);
-assert_eq!(6, suggestions.len());
-let expected: Vec<String> = vec![
-"num_blocks".into(),
-"num_decls".into(),
-"num_env_vars".into(),
-"num_modules".into(),
-"num_vars".into(),
-"source_bytes".into(),
-];
-// Match results
-match_suggestions(expected, suggestions);
-
-// Test completions for $nu.scope.vars
-let suggestions = completer.complete("$nu.scope.vars.", 15);
-assert_eq!(3, suggestions.len());
-let expected: Vec<String> = vec!["name".into(), "type".into(), "value".into()];
-// Match results
-match_suggestions(expected, suggestions);
-
 // Test completions for custom var
 let suggestions = completer.complete("$actor.", 7);
@@ -1,16 +1,20 @@
 use std::path::PathBuf;

-use nu_command::create_default_context;
 use nu_engine::eval_block;
 use nu_parser::parse;
 use nu_protocol::{
 engine::{EngineState, Stack, StateWorkingSet},
-PipelineData, ShellError, Span, Value,
+eval_const::create_nu_constant,
+PipelineData, ShellError, Span, Value, NU_VARIABLE_ID,
 };
 use nu_test_support::fs;
 use reedline::Suggestion;
 const SEP: char = std::path::MAIN_SEPARATOR;

+fn create_default_context() -> EngineState {
+nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
+}

 // creates a new engine with the current path into the completions fixtures folder
 pub fn new_engine() -> (PathBuf, String, EngineState, Stack) {
 // Target folder inside assets
@@ -25,39 +29,41 @@ pub fn new_engine() -> (PathBuf, String, EngineState, Stack) {
 // Create a new engine with default context
 let mut engine_state = create_default_context();

+// Add $nu
+let nu_const =
+create_nu_constant(&engine_state, Span::test_data()).expect("Failed creating $nu");
+engine_state.set_variable_const_val(NU_VARIABLE_ID, nu_const);

 // New stack
 let mut stack = Stack::new();

 // Add pwd as env var
 stack.add_env_var(
 "PWD".to_string(),
-Value::String {
-val: dir_str.clone(),
-span: nu_protocol::Span::new(0, dir_str.len()),
-},
+Value::string(dir_str.clone(), nu_protocol::Span::new(0, dir_str.len())),
 );
 stack.add_env_var(
 "TEST".to_string(),
-Value::String {
-val: "NUSHELL".to_string(),
-span: nu_protocol::Span::new(0, dir_str.len()),
-},
+Value::string(
+"NUSHELL".to_string(),
+nu_protocol::Span::new(0, dir_str.len()),
+),
 );
 #[cfg(windows)]
 stack.add_env_var(
 "Path".to_string(),
-Value::String {
-val: "c:\\some\\path;c:\\some\\other\\path".to_string(),
-span: nu_protocol::Span::new(0, dir_str.len()),
-},
+Value::string(
+"c:\\some\\path;c:\\some\\other\\path".to_string(),
+nu_protocol::Span::new(0, dir_str.len()),
+),
 );
 #[cfg(not(windows))]
 stack.add_env_var(
 "PATH".to_string(),
-Value::String {
-val: "/some/path:/some/other/path".to_string(),
-span: nu_protocol::Span::new(0, dir_str.len()),
-},
+Value::string(
+"/some/path:/some/other/path".to_string(),
+nu_protocol::Span::new(0, dir_str.len()),
+),
 );

 // Merge environment into the permanent state
@@ -86,17 +92,14 @@ pub fn new_quote_engine() -> (PathBuf, String, EngineState, Stack) {
 // Add pwd as env var
 stack.add_env_var(
 "PWD".to_string(),
-Value::String {
-val: dir_str.clone(),
-span: nu_protocol::Span::new(0, dir_str.len()),
-},
+Value::string(dir_str.clone(), nu_protocol::Span::new(0, dir_str.len())),
 );
 stack.add_env_var(
 "TEST".to_string(),
-Value::String {
-val: "NUSHELL".to_string(),
-span: nu_protocol::Span::new(0, dir_str.len()),
-},
+Value::string(
+"NUSHELL".to_string(),
+nu_protocol::Span::new(0, dir_str.len()),
+),
 );

 // Merge environment into the permanent state
@@ -159,12 +162,7 @@ pub fn merge_input(
 engine_state,
 stack,
 &block,
-PipelineData::Value(
-Value::Nothing {
-span: Span::unknown(),
-},
-None
-),
+PipelineData::Value(Value::nothing(Span::unknown(),), None),
 false,
 false
 )
18
crates/nu-cmd-base/Cargo.toml
Normal file
18
crates/nu-cmd-base/Cargo.toml
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
[package]
|
||||||
|
authors = ["The Nushell Project Developers"]
|
||||||
|
description = "The foundation tools to build Nushell commands."
|
||||||
|
edition = "2021"
|
||||||
|
license = "MIT"
|
||||||
|
name = "nu-cmd-base"
|
||||||
|
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-base"
|
||||||
|
version = "0.85.0"
|
||||||
|
|
||||||
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
|
[dependencies]
|
||||||
|
nu-engine = { path = "../nu-engine", version = "0.85.0" }
|
||||||
|
nu-parser = { path = "../nu-parser", version = "0.85.0" }
|
||||||
|
nu-path = { path = "../nu-path", version = "0.85.0" }
|
||||||
|
nu-protocol = { version = "0.85.0", path = "../nu-protocol" }
|
||||||
|
indexmap = { version = "2.0" }
|
||||||
|
miette = { version = "5.10", features = ["fancy-no-backtrace"] }
|
1
crates/nu-cmd-base/src/formats/mod.rs
Normal file
1
crates/nu-cmd-base/src/formats/mod.rs
Normal file
@ -0,0 +1 @@
|
|||||||
|
pub mod to;
|
20
crates/nu-cmd-base/src/formats/to/delimited.rs
Normal file
20
crates/nu-cmd-base/src/formats/to/delimited.rs
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
use indexmap::{indexset, IndexSet};
|
||||||
|
use nu_protocol::Value;
|
||||||
|
|
||||||
|
pub fn merge_descriptors(values: &[Value]) -> Vec<String> {
|
||||||
|
let mut ret: Vec<String> = vec![];
|
||||||
|
let mut seen: IndexSet<String> = indexset! {};
|
||||||
|
for value in values {
|
||||||
|
let data_descriptors = match value {
|
||||||
|
Value::Record { val, .. } => val.cols.clone(),
|
||||||
|
_ => vec!["".to_string()],
|
||||||
|
};
|
||||||
|
for desc in data_descriptors {
|
||||||
|
if !desc.is_empty() && !seen.contains(&desc) {
|
||||||
|
seen.insert(desc.to_string());
|
||||||
|
ret.push(desc.to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ret
|
||||||
|
}
|
1
crates/nu-cmd-base/src/formats/to/mod.rs
Normal file
1
crates/nu-cmd-base/src/formats/to/mod.rs
Normal file
@ -0,0 +1 @@
|
|||||||
|
pub mod delimited;
|
@ -14,12 +14,8 @@ pub fn eval_env_change_hook(
|
|||||||
) -> Result<(), ShellError> {
|
) -> Result<(), ShellError> {
|
||||||
if let Some(hook) = env_change_hook {
|
if let Some(hook) = env_change_hook {
|
||||||
match hook {
|
match hook {
|
||||||
Value::Record {
|
Value::Record { val, .. } => {
|
||||||
cols: env_names,
|
for (env_name, hook_value) in &val {
|
||||||
vals: hook_values,
|
|
||||||
..
|
|
||||||
} => {
|
|
||||||
for (env_name, hook_value) in env_names.iter().zip(hook_values.iter()) {
|
|
||||||
let before = engine_state
|
let before = engine_state
|
||||||
.previous_env_vars
|
.previous_env_vars
|
||||||
.get(env_name)
|
.get(env_name)
|
||||||
@ -37,6 +33,7 @@ pub fn eval_env_change_hook(
|
|||||||
None,
|
None,
|
||||||
vec![("$before".into(), before), ("$after".into(), after.clone())],
|
vec![("$before".into(), before), ("$after".into(), after.clone())],
|
||||||
hook_value,
|
hook_value,
|
||||||
|
"env_change",
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
engine_state
|
engine_state
|
||||||
@ -48,7 +45,7 @@ pub fn eval_env_change_hook(
|
|||||||
x => {
|
x => {
|
||||||
return Err(ShellError::TypeMismatch {
|
return Err(ShellError::TypeMismatch {
|
||||||
err_message: "record for the 'env_change' hook".to_string(),
|
err_message: "record for the 'env_change' hook".to_string(),
|
||||||
span: x.span()?,
|
span: x.span(),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -63,8 +60,9 @@ pub fn eval_hook(
|
|||||||
input: Option<PipelineData>,
|
input: Option<PipelineData>,
|
||||||
arguments: Vec<(String, Value)>,
|
arguments: Vec<(String, Value)>,
|
||||||
value: &Value,
|
value: &Value,
|
||||||
|
hook_name: &str,
|
||||||
) -> Result<PipelineData, ShellError> {
|
) -> Result<PipelineData, ShellError> {
|
||||||
let value_span = value.span()?;
|
let value_span = value.span();
|
||||||
|
|
||||||
// Hooks can optionally be a record in this form:
|
// Hooks can optionally be a record in this form:
|
||||||
// {
|
// {
|
||||||
@ -86,71 +84,134 @@ pub fn eval_hook(
|
|||||||
optional: false,
|
optional: false,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
let span = value.span();
|
||||||
match value {
|
match value {
|
||||||
|
Value::String { val, .. } => {
|
||||||
|
let (block, delta, vars) = {
|
||||||
|
let mut working_set = StateWorkingSet::new(engine_state);
|
||||||
|
|
||||||
|
let mut vars: Vec<(VarId, Value)> = vec![];
|
||||||
|
|
||||||
|
for (name, val) in arguments {
|
||||||
|
let var_id = working_set.add_variable(
|
||||||
|
name.as_bytes().to_vec(),
|
||||||
|
val.span(),
|
||||||
|
Type::Any,
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
vars.push((var_id, val));
|
||||||
|
}
|
||||||
|
|
||||||
|
let output = parse(
|
||||||
|
&mut working_set,
|
||||||
|
Some(&format!("{hook_name} hook")),
|
||||||
|
val.as_bytes(),
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
if let Some(err) = working_set.parse_errors.first() {
|
||||||
|
report_error(&working_set, err);
|
||||||
|
|
||||||
|
return Err(ShellError::UnsupportedConfigValue(
|
||||||
|
"valid source code".into(),
|
||||||
|
"source code with syntax errors".into(),
|
||||||
|
span,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
|
||||||
|
(output, working_set.render(), vars)
|
||||||
|
};
|
||||||
|
|
||||||
|
engine_state.merge_delta(delta)?;
|
||||||
|
let input = if let Some(input) = input {
|
||||||
|
input
|
||||||
|
} else {
|
||||||
|
PipelineData::empty()
|
||||||
|
};
|
||||||
|
|
||||||
|
let var_ids: Vec<VarId> = vars
|
||||||
|
.into_iter()
|
||||||
|
.map(|(var_id, val)| {
|
||||||
|
stack.add_var(var_id, val);
|
||||||
|
var_id
|
||||||
|
})
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
match eval_block(engine_state, stack, &block, input, false, false) {
|
||||||
|
Ok(pipeline_data) => {
|
||||||
|
output = pipeline_data;
|
||||||
|
}
|
||||||
|
Err(err) => {
|
||||||
|
report_error_new(engine_state, &err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
for var_id in var_ids.iter() {
|
||||||
|
stack.remove_var(*var_id);
|
||||||
|
}
|
||||||
|
}
|
||||||
Value::List { vals, .. } => {
|
Value::List { vals, .. } => {
|
||||||
for val in vals {
|
for val in vals {
|
||||||
eval_hook(engine_state, stack, None, arguments.clone(), val)?;
|
eval_hook(
|
||||||
|
engine_state,
|
||||||
|
stack,
|
||||||
|
None,
|
||||||
|
arguments.clone(),
|
||||||
|
val,
|
||||||
|
&format!("{hook_name} list, recursive"),
|
||||||
|
)?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Value::Record { .. } => {
|
Value::Record { .. } => {
|
||||||
let do_run_hook =
|
let do_run_hook = if let Ok(condition) =
|
||||||
if let Ok(condition) = value.clone().follow_cell_path(&[condition_path], false) {
|
value.clone().follow_cell_path(&[condition_path], false)
|
||||||
match condition {
|
{
|
||||||
Value::Block {
|
let other_span = condition.span();
|
||||||
val: block_id,
|
match condition {
|
||||||
span: block_span,
|
Value::Block { val: block_id, .. } | Value::Closure { val: block_id, .. } => {
|
||||||
..
|
match run_hook_block(
|
||||||
}
|
engine_state,
|
||||||
| Value::Closure {
|
stack,
|
||||||
val: block_id,
|
block_id,
|
||||||
span: block_span,
|
None,
|
||||||
..
|
arguments.clone(),
|
||||||
} => {
|
other_span,
|
||||||
match run_hook_block(
|
) {
|
||||||
engine_state,
|
Ok(pipeline_data) => {
|
||||||
stack,
|
if let PipelineData::Value(Value::Bool { val, .. }, ..) =
|
||||||
block_id,
|
pipeline_data
|
||||||
None,
|
{
|
||||||
arguments.clone(),
|
val
|
||||||
block_span,
|
} else {
|
||||||
) {
|
return Err(ShellError::UnsupportedConfigValue(
|
||||||
Ok(pipeline_data) => {
|
"boolean output".to_string(),
|
||||||
if let PipelineData::Value(Value::Bool { val, .. }, ..) =
|
"other PipelineData variant".to_string(),
|
||||||
pipeline_data
|
other_span,
|
||||||
{
|
));
|
||||||
val
|
|
||||||
} else {
|
|
||||||
return Err(ShellError::UnsupportedConfigValue(
|
|
||||||
"boolean output".to_string(),
|
|
||||||
"other PipelineData variant".to_string(),
|
|
||||||
block_span,
|
|
||||||
));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Err(err) => {
|
|
||||||
return Err(err);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
Err(err) => {
|
||||||
other => {
|
return Err(err);
|
||||||
return Err(ShellError::UnsupportedConfigValue(
|
}
|
||||||
"block".to_string(),
|
|
||||||
format!("{}", other.get_type()),
|
|
||||||
other.span()?,
|
|
||||||
));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
other => {
|
||||||
// always run the hook
|
return Err(ShellError::UnsupportedConfigValue(
|
||||||
true
|
"block".to_string(),
|
||||||
};
|
format!("{}", other.get_type()),
|
||||||
|
other_span,
|
||||||
|
));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// always run the hook
|
||||||
|
true
|
||||||
|
};
|
||||||
|
|
||||||
if do_run_hook {
|
if do_run_hook {
|
||||||
match value.clone().follow_cell_path(&[code_path], false)? {
|
let follow = value.clone().follow_cell_path(&[code_path], false)?;
|
||||||
Value::String {
|
let source_span = follow.span();
|
||||||
val,
|
match follow {
|
||||||
span: source_span,
|
Value::String { val, .. } => {
|
||||||
} => {
|
|
||||||
let (block, delta, vars) = {
|
let (block, delta, vars) = {
|
||||||
let mut working_set = StateWorkingSet::new(engine_state);
|
let mut working_set = StateWorkingSet::new(engine_state);
|
||||||
|
|
||||||
@ -159,16 +220,19 @@ pub fn eval_hook(
|
|||||||
for (name, val) in arguments {
|
for (name, val) in arguments {
|
||||||
let var_id = working_set.add_variable(
|
let var_id = working_set.add_variable(
|
||||||
name.as_bytes().to_vec(),
|
name.as_bytes().to_vec(),
|
||||||
val.span()?,
|
val.span(),
|
||||||
Type::Any,
|
Type::Any,
|
||||||
false,
|
false,
|
||||||
);
|
);
|
||||||
|
|
||||||
vars.push((var_id, val));
|
vars.push((var_id, val));
|
||||||
}
|
}
|
||||||
|
|
||||||
let output =
|
let output = parse(
|
||||||
parse(&mut working_set, Some("hook"), val.as_bytes(), false);
|
&mut working_set,
|
||||||
|
Some(&format!("{hook_name} hook")),
|
||||||
|
val.as_bytes(),
|
||||||
|
false,
|
||||||
|
);
|
||||||
if let Some(err) = working_set.parse_errors.first() {
|
if let Some(err) = working_set.parse_errors.first() {
|
||||||
report_error(&working_set, err);
|
report_error(&working_set, err);
|
||||||
|
|
||||||
@ -206,77 +270,47 @@ pub fn eval_hook(
|
|||||||
stack.remove_var(*var_id);
|
stack.remove_var(*var_id);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Value::Block {
|
Value::Block { val: block_id, .. } => {
|
||||||
val: block_id,
|
|
||||||
span: block_span,
|
|
||||||
..
|
|
||||||
} => {
|
|
||||||
run_hook_block(
|
run_hook_block(
|
||||||
engine_state,
|
engine_state,
|
||||||
stack,
|
stack,
|
||||||
block_id,
|
block_id,
|
||||||
input,
|
input,
|
||||||
arguments,
|
arguments,
|
||||||
block_span,
|
source_span,
|
||||||
)?;
|
)?;
|
||||||
}
|
}
|
||||||
Value::Closure {
|
Value::Closure { val: block_id, .. } => {
|
||||||
val: block_id,
|
|
||||||
span: block_span,
|
|
||||||
..
|
|
||||||
} => {
|
|
||||||
run_hook_block(
|
run_hook_block(
|
||||||
engine_state,
|
engine_state,
|
||||||
stack,
|
stack,
|
||||||
block_id,
|
block_id,
|
||||||
input,
|
input,
|
||||||
arguments,
|
arguments,
|
||||||
block_span,
|
source_span,
|
||||||
)?;
|
)?;
|
||||||
}
|
}
|
||||||
other => {
|
other => {
|
||||||
return Err(ShellError::UnsupportedConfigValue(
|
return Err(ShellError::UnsupportedConfigValue(
|
||||||
"block or string".to_string(),
|
"block or string".to_string(),
|
||||||
format!("{}", other.get_type()),
|
format!("{}", other.get_type()),
|
||||||
other.span()?,
|
source_span,
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Value::Block {
|
Value::Block { val: block_id, .. } => {
|
||||||
val: block_id,
|
output = run_hook_block(engine_state, stack, *block_id, input, arguments, span)?;
|
||||||
span: block_span,
|
|
||||||
..
|
|
||||||
} => {
|
|
||||||
output = run_hook_block(
|
|
||||||
engine_state,
|
|
||||||
stack,
|
|
||||||
*block_id,
|
|
||||||
input,
|
|
||||||
arguments,
|
|
||||||
*block_span,
|
|
||||||
)?;
|
|
||||||
}
|
}
|
||||||
Value::Closure {
|
Value::Closure { val: block_id, .. } => {
|
||||||
val: block_id,
|
output = run_hook_block(engine_state, stack, *block_id, input, arguments, span)?;
|
||||||
span: block_span,
|
|
||||||
..
|
|
||||||
} => {
|
|
||||||
output = run_hook_block(
|
|
||||||
engine_state,
|
|
||||||
stack,
|
|
||||||
*block_id,
|
|
||||||
input,
|
|
||||||
arguments,
|
|
||||||
*block_span,
|
|
||||||
)?;
|
|
||||||
}
|
}
|
||||||
other => {
|
other => {
|
||||||
return Err(ShellError::UnsupportedConfigValue(
|
return Err(ShellError::UnsupportedConfigValue(
|
||||||
"block, record, or list of records".into(),
|
"string, block, record, or list of commands".into(),
|
||||||
format!("{}", other.get_type()),
|
format!("{}", other.get_type()),
|
||||||
other.span()?,
|
other.span(),
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -299,7 +333,7 @@ fn run_hook_block(
|
|||||||
|
|
||||||
let input = optional_input.unwrap_or_else(PipelineData::empty);
|
let input = optional_input.unwrap_or_else(PipelineData::empty);
|
||||||
|
|
||||||
let mut callee_stack = stack.gather_captures(&block.captures);
|
let mut callee_stack = stack.gather_captures(engine_state, &block.captures);
|
||||||
|
|
||||||
for (idx, PositionalArg { var_id, .. }) in
|
for (idx, PositionalArg { var_id, .. }) in
|
||||||
block.signature.required_positional.iter().enumerate()
|
block.signature.required_positional.iter().enumerate()
|
||||||
@ -319,7 +353,7 @@ fn run_hook_block(
|
|||||||
let pipeline_data =
|
let pipeline_data =
|
||||||
eval_block_with_early_return(engine_state, &mut callee_stack, block, input, false, false)?;
|
eval_block_with_early_return(engine_state, &mut callee_stack, block, input, false, false)?;
|
||||||
|
|
||||||
if let PipelineData::Value(Value::Error { error }, _) = pipeline_data {
|
if let PipelineData::Value(Value::Error { error, .. }, _) = pipeline_data {
|
||||||
return Err(*error);
|
return Err(*error);
|
||||||
}
|
}
|
||||||
|
|
@ -76,9 +76,7 @@ where
|
|||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
if let Err(error) = r {
|
if let Err(error) = r {
|
||||||
return Value::Error {
|
return Value::error(error, span);
|
||||||
error: Box::new(error),
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
v
|
v
|
crates/nu-cmd-base/src/lib.rs (new file, 4 lines)
@@ -0,0 +1,4 @@
+pub mod formats;
+pub mod hook;
+pub mod input_handler;
+pub mod util;
crates/nu-cmd-dataframe/Cargo.toml (new file, 65 lines)
@@ -0,0 +1,65 @@
+[package]
+authors = ["The Nushell Project Developers"]
+description = "Nushell's dataframe commands based on polars."
+edition = "2021"
+license = "MIT"
+name = "nu-cmd-dataframe"
+repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-dataframe"
+version = "0.85.0"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[lib]
+bench = false
+
+[dependencies]
+nu-engine = { path = "../nu-engine", version = "0.85.0" }
+nu-parser = { path = "../nu-parser", version = "0.85.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.85.0" }
+
+# Potential dependencies for extras
+chrono = { version = "0.4", features = ["std", "unstable-locales"], default-features = false }
+fancy-regex = "0.11"
+indexmap = { version = "2.0" }
+num = { version = "0.4", optional = true }
+serde = { version = "1.0", features = ["derive"] }
+sqlparser = { version = "0.36.1", optional = true }
+polars-io = { version = "0.32", features = ["avro"], optional = true }
+
+[dependencies.polars]
+features = [
+"arg_where",
+"checked_arithmetic",
+"concat_str",
+"cross_join",
+"csv",
+"cum_agg",
+"default",
+"dtype-categorical",
+"dtype-datetime",
+"dtype-struct",
+"dynamic_groupby",
+"ipc",
+"is_in",
+"json",
+"lazy",
+"object",
+"parquet",
+"random",
+"rolling_window",
+"rows",
+"serde",
+"serde-lazy",
+"strings",
+"to_dummies",
+]
+optional = true
+version = "0.32"
+
+[features]
+dataframe = ["num", "polars", "polars-io", "sqlparser"]
+default = []
+
+[dev-dependencies]
+nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.85.0" }
+nu-test-support = { path = "../nu-test-support", version = "0.85.0" }
crates/nu-cmd-dataframe/LICENSE (new file, 21 lines)
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019 - 2023 The Nushell Project Developers
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
@@ -23,8 +23,10 @@ impl Command for AppendDF
 Signature::build(self.name())
 .required("other", SyntaxShape::Any, "dataframe to be appended")
 .switch("col", "appends in col orientation", Some('c'))
-.input_type(Type::Custom("dataframe".into()))
-.output_type(Type::Custom("dataframe".into()))
+.input_output_type(
+Type::Custom("dataframe".into()),
+Type::Custom("dataframe".into()),
+)
 .category(Category::Custom("dataframe".into()))
 }
 
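The same signature change recurs across nearly every dataframe command in this compare view: the separate `.input_type(..)` and `.output_type(..)` builder calls are merged into a single `.input_output_type(input, output)` call, or `.input_output_types(vec![..])` when a command accepts several pairings. As a minimal sketch of the migrated builder, assuming the `Signature`, `Type`, and `Category` items from `nu_protocol` that these hunks already import:

// Illustrative only - mirrors the builder pattern used throughout this diff.
fn signature_sketch(name: &str) -> Signature {
    Signature::build(name)
        .input_output_type(
            Type::Custom("dataframe".into()), // accepted input type
            Type::Custom("dataframe".into()), // produced output type
        )
        .category(Category::Custom("dataframe".into()))
}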
@@ -19,8 +19,7 @@ impl Command for ColumnsDF
 
 fn signature(&self) -> Signature {
 Signature::build(self.name())
-.input_type(Type::Custom("dataframe".into()))
-.output_type(Type::Any)
+.input_output_type(Type::Custom("dataframe".into()), Type::Any)
 .category(Category::Custom("dataframe".into()))
 }
 
@@ -28,10 +27,10 @@ impl Command for ColumnsDF
 vec![Example {
 description: "Dataframe columns",
 example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr columns",
-result: Some(Value::List {
-vals: vec![Value::test_string("a"), Value::test_string("b")],
-span: Span::test_data(),
-}),
+result: Some(Value::list(
+vec![Value::test_string("a"), Value::test_string("b")],
+Span::test_data(),
+)),
 }]
 }
 
@@ -61,10 +60,7 @@ fn command(
 .map(|v| Value::string(*v, call.head))
 .collect();
 
-let names = Value::List {
-vals: names,
-span: call.head,
-};
+let names = Value::list(names, call.head);
 
 Ok(PipelineData::Value(names, None))
 }
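A second recurring pattern in this series: `Value::List { vals, span }`, `Value::String { val, span }`, and similar struct literals become constructor calls such as `Value::list(vals, span)` and `Value::string(val, span)`. A small hedged sketch of the constructor style used in the ColumnsDF hunk above, where `call.head` stands for the span of the invoking call as in the surrounding code:

// Sketch of the constructor-style Value API adopted in this series.
let name = Value::string("a", call.head);          // was: Value::String { val, span }
let names = Value::list(vec![name], call.head);    // was: Value::List { vals, span }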
@@ -23,8 +23,10 @@ impl Command for DropDF
 fn signature(&self) -> Signature {
 Signature::build(self.name())
 .rest("rest", SyntaxShape::Any, "column names to be dropped")
-.input_type(Type::Custom("dataframe".into()))
-.output_type(Type::Custom("dataframe".into()))
+.input_output_type(
+Type::Custom("dataframe".into()),
+Type::Custom("dataframe".into()),
+)
 .category(Category::Custom("dataframe".into()))
 }
 
@@ -25,7 +25,7 @@ impl Command for DropDuplicates
 Signature::build(self.name())
 .optional(
 "subset",
-SyntaxShape::Table,
+SyntaxShape::Table(vec![]),
 "subset of columns to drop duplicates",
 )
 .switch("maintain", "maintain order", Some('m'))
@@ -34,8 +34,10 @@ impl Command for DropDuplicates
 "keeps last duplicate value (by default keeps first)",
 Some('l'),
 )
-.input_type(Type::Custom("dataframe".into()))
-.output_type(Type::Custom("dataframe".into()))
+.input_output_type(
+Type::Custom("dataframe".into()),
+Type::Custom("dataframe".into()),
+)
 .category(Category::Custom("dataframe".into()))
 }
 
@@ -24,11 +24,13 @@ impl Command for DropNulls
 Signature::build(self.name())
 .optional(
 "subset",
-SyntaxShape::Table,
+SyntaxShape::Table(vec![]),
 "subset of columns to drop nulls",
 )
-.input_type(Type::Custom("dataframe".into()))
-.output_type(Type::Custom("dataframe".into()))
+.input_output_type(
+Type::Custom("dataframe".into()),
+Type::Custom("dataframe".into()),
+)
 .category(Category::Custom("dataframe".into()))
 }
 
@@ -19,8 +19,10 @@ impl Command for DataTypes
 
 fn signature(&self) -> Signature {
 Signature::build(self.name())
-.input_type(Type::Custom("dataframe".into()))
-.output_type(Type::Custom("dataframe".into()))
+.input_output_type(
+Type::Custom("dataframe".into()),
+Type::Custom("dataframe".into()),
+)
 .category(Category::Custom("dataframe".into()))
 }
 
@@ -77,10 +79,7 @@ fn command(
 .dtype();
 
 let dtype_str = dtype.to_string();
-dtypes.push(Value::String {
-val: dtype_str,
-span: call.head,
-});
+dtypes.push(Value::string(dtype_str, call.head));
 
 Value::string(*v, call.head)
 })
@@ -20,8 +20,11 @@ impl Command for Dummies
 
 fn signature(&self) -> Signature {
 Signature::build(self.name())
-.input_type(Type::Custom("dataframe".into()))
-.output_type(Type::Custom("dataframe".into()))
+.switch("drop-first", "Drop first row", Some('d'))
+.input_output_type(
+Type::Custom("dataframe".into()),
+Type::Custom("dataframe".into()),
+)
 .category(Category::Custom("dataframe".into()))
 }
 
@@ -113,10 +116,11 @@ fn command(
 call: &Call,
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
+let drop_first: bool = call.has_flag("drop-first");
 let df = NuDataFrame::try_from_pipeline(input, call.head)?;
 
 df.as_ref()
-.to_dummies(None)
+.to_dummies(None, drop_first)
 .map_err(|e| {
 ShellError::GenericError(
 "Error calculating dummies".into(),
@@ -29,8 +29,10 @@ impl Command for FilterWith
 SyntaxShape::Any,
 "boolean mask used to filter data",
 )
-.input_type(Type::Custom("dataframe".into()))
-.output_type(Type::Custom("dataframe".into()))
+.input_output_type(
+Type::Custom("dataframe".into()),
+Type::Custom("dataframe".into()),
+)
 .category(Category::Custom("dataframe or lazyframe".into()))
 }
 
@@ -90,7 +92,7 @@ fn command_eager(
 df: NuDataFrame,
 ) -> Result<PipelineData, ShellError> {
 let mask_value: Value = call.req(engine_state, stack, 0)?;
-let mask_span = mask_value.span()?;
+let mask_span = mask_value.span();
 
 if NuExpression::can_downcast(&mask_value) {
 let expression = NuExpression::try_from_value(mask_value)?;
@@ -1,4 +1,4 @@
-use super::super::values::{Column, NuDataFrame};
+use super::super::values::{Column, NuDataFrame, NuExpression};
 use nu_engine::CallExt;
 use nu_protocol::{
 ast::Call,
@@ -15,7 +15,7 @@ impl Command for FirstDF
 }
 
 fn usage(&self) -> &str {
-"Show only the first number of rows."
+"Show only the first number of rows or create a first expression"
 }
 
 fn signature(&self) -> Signature {
@@ -25,8 +25,16 @@ impl Command for FirstDF
 SyntaxShape::Int,
 "starting from the front, the number of rows to return",
 )
-.input_type(Type::Custom("dataframe".into()))
-.output_type(Type::Custom("dataframe".into()))
+.input_output_types(vec![
+(
+Type::Custom("expression".into()),
+Type::Custom("expression".into()),
+),
+(
+Type::Custom("dataframe".into()),
+Type::Custom("dataframe".into()),
+),
+])
 .category(Category::Custom("dataframe".into()))
 }
 
@@ -62,6 +70,11 @@ impl Command for FirstDF
 .into_value(Span::test_data()),
 ),
 },
+Example {
+description: "Creates a first expression from a column",
+example: "dfr col a | dfr first",
+result: None,
+},
 ]
 }
 
@@ -72,8 +85,19 @@ impl Command for FirstDF
 call: &Call,
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
-let df = NuDataFrame::try_from_pipeline(input, call.head)?;
-command(engine_state, stack, call, df)
+let value = input.into_value(call.head);
+if NuDataFrame::can_downcast(&value) {
+let df = NuDataFrame::try_from_value(value)?;
+command(engine_state, stack, call, df)
+} else {
+let expr = NuExpression::try_from_value(value)?;
+let expr: NuExpression = expr.into_polars().first().into();
+
+Ok(PipelineData::Value(
+NuExpression::into_value(expr, call.head),
+None,
+))
+}
 }
 }
 
@@ -95,11 +119,25 @@ fn command(
 
 #[cfg(test)]
 mod test {
-use super::super::super::test_dataframe::test_dataframe;
+use super::super::super::test_dataframe::{build_test_engine_state, test_dataframe_example};
 use super::*;
+use crate::dataframe::lazy::aggregate::LazyAggregate;
+use crate::dataframe::lazy::groupby::ToLazyGroupBy;
 
 #[test]
-fn test_examples() {
-test_dataframe(vec![Box::new(FirstDF {})])
+fn test_examples_dataframe() {
+let mut engine_state = build_test_engine_state(vec![Box::new(FirstDF {})]);
+test_dataframe_example(&mut engine_state, &FirstDF.examples()[0]);
+test_dataframe_example(&mut engine_state, &FirstDF.examples()[1]);
+}
+
+#[test]
+fn test_examples_expression() {
+let mut engine_state = build_test_engine_state(vec![
+Box::new(FirstDF {}),
+Box::new(LazyAggregate {}),
+Box::new(ToLazyGroupBy {}),
+]);
+test_dataframe_example(&mut engine_state, &FirstDF.examples()[2]);
 }
 }
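`dfr first` now inspects its input and dispatches: a dataframe keeps the old row-taking path, while anything else is treated as an expression and wrapped with polars' `first()`. The same shape is used by the new `dfr last` command added below. A condensed sketch of that `run` body, with error handling and helper signatures as in the hunk above (this is not the verbatim method):

// Condensed from the FirstDF::run hunk above.
let value = input.into_value(call.head);
if NuDataFrame::can_downcast(&value) {
    let df = NuDataFrame::try_from_value(value)?;
    command(engine_state, stack, call, df)
} else {
    let expr: NuExpression = NuExpression::try_from_value(value)?.into_polars().first().into();
    Ok(PipelineData::Value(NuExpression::into_value(expr, call.head), None))
}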
@@ -24,8 +24,10 @@ impl Command for GetDF
 fn signature(&self) -> Signature {
 Signature::build(self.name())
 .rest("rest", SyntaxShape::Any, "column names to sort dataframe")
-.input_type(Type::Custom("dataframe".into()))
-.output_type(Type::Custom("dataframe".into()))
+.input_output_type(
+Type::Custom("dataframe".into()),
+Type::Custom("dataframe".into()),
+)
 .category(Category::Custom("dataframe".into()))
 }
 
crates/nu-cmd-dataframe/src/dataframe/eager/last.rs (new file, 120 lines)
@@ -0,0 +1,120 @@
+use super::super::values::{utils::DEFAULT_ROWS, Column, NuDataFrame, NuExpression};
+use nu_engine::CallExt;
+use nu_protocol::{
+ast::Call,
+engine::{Command, EngineState, Stack},
+Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
+};
+
+#[derive(Clone)]
+pub struct LastDF;
+
+impl Command for LastDF {
+fn name(&self) -> &str {
+"dfr last"
+}
+
+fn usage(&self) -> &str {
+"Creates new dataframe with tail rows or creates a last expression."
+}
+
+fn signature(&self) -> Signature {
+Signature::build(self.name())
+.optional("rows", SyntaxShape::Int, "Number of rows for tail")
+.input_output_types(vec![
+(
+Type::Custom("expression".into()),
+Type::Custom("expression".into()),
+),
+(
+Type::Custom("dataframe".into()),
+Type::Custom("dataframe".into()),
+),
+])
+.category(Category::Custom("dataframe".into()))
+}
+
+fn examples(&self) -> Vec<Example> {
+vec![
+Example {
+description: "Create new dataframe with last rows",
+example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr last 1",
+result: Some(
+NuDataFrame::try_from_columns(vec![
+Column::new("a".to_string(), vec![Value::test_int(3)]),
+Column::new("b".to_string(), vec![Value::test_int(4)]),
+])
+.expect("simple df for test should not fail")
+.into_value(Span::test_data()),
+),
+},
+Example {
+description: "Creates a last expression from a column",
+example: "dfr col a | dfr last",
+result: None,
+},
+]
+}
+
+fn run(
+&self,
+engine_state: &EngineState,
+stack: &mut Stack,
+call: &Call,
+input: PipelineData,
+) -> Result<PipelineData, ShellError> {
+let value = input.into_value(call.head);
+if NuDataFrame::can_downcast(&value) {
+let df = NuDataFrame::try_from_value(value)?;
+command(engine_state, stack, call, df)
+} else {
+let expr = NuExpression::try_from_value(value)?;
+let expr: NuExpression = expr.into_polars().last().into();
+
+Ok(PipelineData::Value(
+NuExpression::into_value(expr, call.head),
+None,
+))
+}
+}
+}
+
+fn command(
+engine_state: &EngineState,
+stack: &mut Stack,
+call: &Call,
+df: NuDataFrame,
+) -> Result<PipelineData, ShellError> {
+let rows: Option<usize> = call.opt(engine_state, stack, 0)?;
+let rows = rows.unwrap_or(DEFAULT_ROWS);
+
+let res = df.as_ref().tail(Some(rows));
+Ok(PipelineData::Value(
+NuDataFrame::dataframe_into_value(res, call.head),
+None,
+))
+}
+
+#[cfg(test)]
+mod test {
+use super::super::super::test_dataframe::{build_test_engine_state, test_dataframe_example};
+use super::*;
+use crate::dataframe::lazy::aggregate::LazyAggregate;
+use crate::dataframe::lazy::groupby::ToLazyGroupBy;
+
+#[test]
+fn test_examples_dataframe() {
+let mut engine_state = build_test_engine_state(vec![Box::new(LastDF {})]);
+test_dataframe_example(&mut engine_state, &LastDF.examples()[0]);
+}
+
+#[test]
+fn test_examples_expression() {
+let mut engine_state = build_test_engine_state(vec![
+Box::new(LastDF {}),
+Box::new(LazyAggregate {}),
+Box::new(ToLazyGroupBy {}),
+]);
+test_dataframe_example(&mut engine_state, &LastDF.examples()[1]);
+}
+}
@@ -1,7 +1,7 @@
 use nu_protocol::{
 ast::Call,
 engine::{Command, EngineState, Stack},
-Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Value,
+record, Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Value,
 };
 
 use crate::dataframe::values::NuDataFrame;
@@ -55,34 +55,18 @@ impl Command for ListDF
 NuDataFrame::try_from_value(value).ok().map(|df| (name, df))
 })
 .map(|(name, df)| {
-let name = Value::String {
-val: name,
-span: call.head,
-};
-
-let columns = Value::int(df.as_ref().width() as i64, call.head);
-
-let rows = Value::int(df.as_ref().height() as i64, call.head);
-
-let cols = vec![
-"name".to_string(),
-"columns".to_string(),
-"rows".to_string(),
-];
-let vals = vec![name, columns, rows];
-
-Value::Record {
-cols,
-vals,
-span: call.head,
-}
+Value::record(
+record! {
+"name" => Value::string(name, call.head),
+"columns" => Value::int(df.as_ref().width() as i64, call.head),
+"rows" => Value::int(df.as_ref().height() as i64, call.head),
+},
+call.head,
+)
 })
 .collect::<Vec<Value>>();
 
-let list = Value::List {
-vals,
-span: call.head,
-};
+let list = Value::list(vals, call.head);
 
 Ok(list.into_pipeline_data())
 }
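`dfr ls` now builds each row with the `record!` macro plus `Value::record`, instead of keeping parallel `cols`/`vals` vectors in sync by hand. A minimal sketch of that construction, with the width/height calls taken from the hunk; `name`, `df`, and `call.head` are assumed to be in scope as in the diff:

// Sketch of the record construction used in the ListDF hunk above.
let row = Value::record(
    record! {
        "name" => Value::string(name, call.head),
        "columns" => Value::int(df.as_ref().width() as i64, call.head),
        "rows" => Value::int(df.as_ref().height() as i64, call.head),
    },
    call.head,
);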
@@ -26,13 +26,13 @@ impl Command for MeltDF
 Signature::build(self.name())
 .required_named(
 "columns",
-SyntaxShape::Table,
+SyntaxShape::Table(vec![]),
 "column names for melting",
 Some('c'),
 )
 .required_named(
 "values",
-SyntaxShape::Table,
+SyntaxShape::Table(vec![]),
 "column names used as value columns",
 Some('v'),
 )
@@ -48,8 +48,10 @@ impl Command for MeltDF
 "optional name for value column",
 Some('l'),
 )
-.input_type(Type::Custom("dataframe".into()))
-.output_type(Type::Custom("dataframe".into()))
+.input_output_type(
+Type::Custom("dataframe".into()),
+Type::Custom("dataframe".into()),
+)
 .category(Category::Custom("dataframe".into()))
 }
 
@@ -22,8 +22,10 @@ mod sql_expr;
 mod summary;
 mod take;
 mod to_arrow;
+mod to_avro;
 mod to_csv;
 mod to_df;
+mod to_json_lines;
 mod to_nu;
 mod to_parquet;
 mod with_column;
@@ -54,8 +56,10 @@ pub use sql_expr::parse_sql_expr;
 pub use summary::Summary;
 pub use take::TakeDF;
 pub use to_arrow::ToArrow;
+pub use to_avro::ToAvro;
 pub use to_csv::ToCSV;
 pub use to_df::ToDataFrame;
+pub use to_json_lines::ToJsonLines;
 pub use to_nu::ToNu;
 pub use to_parquet::ToParquet;
 pub use with_column::WithColumn;
@@ -94,10 +98,12 @@ pub fn add_eager_decls(working_set: &mut StateWorkingSet) {
 SliceDF,
 TakeDF,
 ToArrow,
+ToAvro,
 ToCSV,
 ToDataFrame,
 ToNu,
 ToParquet,
+ToJsonLines,
 WithColumn
 );
 }
@@ -9,10 +9,12 @@ use nu_protocol::{
 use std::{fs::File, io::BufReader, path::PathBuf};
 
 use polars::prelude::{
-CsvEncoding, CsvReader, IpcReader, JsonReader, LazyCsvReader, LazyFileListReader, LazyFrame,
-ParallelStrategy, ParquetReader, ScanArgsIpc, ScanArgsParquet, SerReader,
+CsvEncoding, CsvReader, IpcReader, JsonFormat, JsonReader, LazyCsvReader, LazyFileListReader,
+LazyFrame, ParallelStrategy, ParquetReader, ScanArgsIpc, ScanArgsParquet, SerReader,
 };
 
+use polars_io::avro::AvroReader;
+
 #[derive(Clone)]
 pub struct OpenDataFrame;
 
@@ -22,7 +24,7 @@ impl Command for OpenDataFrame
 }
 
 fn usage(&self) -> &str {
-"Opens CSV, JSON, arrow, or parquet file to create dataframe."
+"Opens CSV, JSON, JSON lines, arrow, avro, or parquet file to create dataframe."
 }
 
 fn signature(&self) -> Signature {
@@ -36,7 +38,7 @@ impl Command for OpenDataFrame
 .named(
 "type",
 SyntaxShape::String,
-"File type: csv, tsv, json, parquet, arrow. If omitted, derive from file extension",
+"File type: csv, tsv, json, parquet, arrow, avro. If omitted, derive from file extension",
 Some('t'),
 )
 .named(
@@ -68,8 +70,7 @@ impl Command for OpenDataFrame
 "Columns to be selected from csv file. CSV and Parquet file",
 None,
 )
-.input_type(Type::Any)
-.output_type(Type::Custom("dataframe".into()))
+.input_output_type(Type::Any, Type::Custom("dataframe".into()))
 .category(Category::Custom("dataframe".into()))
 }
 
@@ -115,9 +116,11 @@ fn command(
 match type_id {
 Some((e, msg, blamed)) => match e.as_str() {
 "csv" | "tsv" => from_csv(engine_state, stack, call),
-"parquet" => from_parquet(engine_state, stack, call),
+"parquet" | "parq" => from_parquet(engine_state, stack, call),
 "ipc" | "arrow" => from_ipc(engine_state, stack, call),
 "json" => from_json(engine_state, stack, call),
+"jsonl" => from_jsonl(engine_state, stack, call),
+"avro" => from_avro(engine_state, stack, call),
 _ => Err(ShellError::FileNotFoundCustom(
 format!("{msg}. Supported values: csv, tsv, parquet, ipc, arrow, json"),
 blamed,
@@ -199,6 +202,46 @@ fn from_parquet(
 }
 }
 
+fn from_avro(
+engine_state: &EngineState,
+stack: &mut Stack,
+call: &Call,
+) -> Result<Value, ShellError> {
+let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
+let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?;
+
+let r = File::open(&file.item).map_err(|e| {
+ShellError::GenericError(
+"Error opening file".into(),
+e.to_string(),
+Some(file.span),
+None,
+Vec::new(),
+)
+})?;
+let reader = AvroReader::new(r);
+
+let reader = match columns {
+None => reader,
+Some(columns) => reader.with_columns(Some(columns)),
+};
+
+let df: NuDataFrame = reader
+.finish()
+.map_err(|e| {
+ShellError::GenericError(
+"Avro reader error".into(),
+format!("{e:?}"),
+Some(call.head),
+None,
+Vec::new(),
+)
+})?
+.into();
+
+Ok(df.into_value(call.head))
+}
+
 fn from_ipc(
 engine_state: &EngineState,
 stack: &mut Stack,
@@ -299,6 +342,44 @@ fn from_json(
 Ok(df.into_value(call.head))
 }
 
+fn from_jsonl(
+engine_state: &EngineState,
+stack: &mut Stack,
+call: &Call,
+) -> Result<Value, ShellError> {
+let infer_schema: Option<usize> = call.get_flag(engine_state, stack, "infer-schema")?;
+let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
+let file = File::open(&file.item).map_err(|e| {
+ShellError::GenericError(
+"Error opening file".into(),
+e.to_string(),
+Some(file.span),
+None,
+Vec::new(),
+)
+})?;
+
+let buf_reader = BufReader::new(file);
+let reader = JsonReader::new(buf_reader)
+.with_json_format(JsonFormat::JsonLines)
+.infer_schema_len(infer_schema);
+
+let df: NuDataFrame = reader
+.finish()
+.map_err(|e| {
+ShellError::GenericError(
+"Json lines reader error".into(),
+format!("{e:?}"),
+Some(call.head),
+None,
+Vec::new(),
+)
+})?
+.into();
+
+Ok(df.into_value(call.head))
+}
+
 fn from_csv(
 engine_state: &EngineState,
 stack: &mut Stack,
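The new `from_avro` and `from_jsonl` readers follow the same shape as the existing loaders: open the file, configure a polars reader, `finish()` it into a DataFrame, and wrap the result as a `NuDataFrame`. A stripped-down sketch of the Avro path, assuming polars-io is built with the `avro` feature as in the Cargo.toml above (error reporting omitted; `read_avro_sketch` is an illustrative name, not a function in this diff):

// Sketch only - condensed from the from_avro helper added in this diff.
use polars::prelude::DataFrame;
use polars_io::avro::AvroReader;
use polars_io::SerReader;
use std::fs::File;

fn read_avro_sketch(path: &str) -> Option<DataFrame> {
    let file = File::open(path).ok()?;   // open the .avro file
    AvroReader::new(file).finish().ok()  // finish() materializes the DataFrame
}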
@@ -28,8 +28,10 @@ impl Command for QueryDf
 fn signature(&self) -> Signature {
 Signature::build(self.name())
 .required("sql", SyntaxShape::String, "sql query")
-.input_type(Type::Custom("dataframe".into()))
-.output_type(Type::Custom("dataframe".into()))
+.input_output_type(
+Type::Custom("dataframe".into()),
+Type::Custom("dataframe".into()),
+)
 .category(Category::Custom("dataframe".into()))
 }
 
@@ -86,10 +88,7 @@ fn command(
 let lazy = NuLazyFrame::new(false, df_sql);
 
 let eager = lazy.collect(call.head)?;
-let value = Value::CustomValue {
-val: Box::new(eager),
-span: call.head,
-};
+let value = Value::custom_value(Box::new(eager), call.head);
 
 Ok(PipelineData::Value(value, None))
 }
@@ -33,8 +33,10 @@ impl Command for RenameDF
 SyntaxShape::Any,
 "New names for the selected column(s). A string or list of strings",
 )
-.input_type(Type::Custom("dataframe".into()))
-.output_type(Type::Custom("dataframe".into()))
+.input_output_type(
+Type::Custom("dataframe".into()),
+Type::Custom("dataframe".into()),
+)
 .category(Category::Custom("dataframe or lazyframe".into()))
 }
 
@@ -158,7 +160,7 @@ fn command_lazy(
 let value: Value = call.req(engine_state, stack, 1)?;
 return Err(ShellError::IncompatibleParametersSingle {
 msg: "New name list has different size to column list".into(),
-span: value.span()?,
+span: value.span(),
 });
 }
 
@@ -41,8 +41,10 @@ impl Command for SampleDF
 )
 .switch("replace", "sample with replace", Some('e'))
 .switch("shuffle", "shuffle sample", Some('u'))
-.input_type(Type::Custom("dataframe".into()))
-.output_type(Type::Custom("dataframe".into()))
+.input_output_type(
+Type::Custom("dataframe".into()),
+Type::Custom("dataframe".into()),
+)
 .category(Category::Custom("dataframe".into()))
 }
 
@@ -22,8 +22,10 @@ impl Command for ShapeDF
 
 fn signature(&self) -> Signature {
 Signature::build(self.name())
-.input_type(Type::Custom("dataframe".into()))
-.output_type(Type::Custom("dataframe".into()))
+.input_output_type(
+Type::Custom("dataframe".into()),
+Type::Custom("dataframe".into()),
+)
 .category(Category::Custom("dataframe".into()))
 }
 
@@ -25,8 +25,10 @@ impl Command for SliceDF
 Signature::build(self.name())
 .required("offset", SyntaxShape::Int, "start of slice")
 .required("size", SyntaxShape::Int, "size of slice")
-.input_type(Type::Custom("dataframe".into()))
-.output_type(Type::Custom("dataframe".into()))
+.input_output_type(
+Type::Custom("dataframe".into()),
+Type::Custom("dataframe".into()),
+)
 .category(Category::Custom("dataframe".into()))
 }
 
@@ -18,7 +18,7 @@ impl SQLContext {
 pub fn new() -> Self {
 Self {
 table_map: HashMap::new(),
-dialect: GenericDialect::default(),
+dialect: GenericDialect,
 }
 }
 
@@ -150,7 +150,7 @@ impl SQLContext {
 let agg_df = df.groupby(group_by).agg(agg_projection);
 let mut final_proj_pos = groupby_pos
 .into_iter()
-.chain(agg_proj_pos.into_iter())
+.chain(agg_proj_pos)
 .collect::<Vec<_>>();
 
 final_proj_pos.sort_by(|(proj_pa, _), (proj_pb, _)| proj_pa.cmp(proj_pb));
@@ -3,7 +3,7 @@ use polars::prelude::{col, lit, DataType, Expr, LiteralValue, PolarsResult as Re
 
 use sqlparser::ast::{
 BinaryOperator as SQLBinaryOperator, DataType as SQLDataType, Expr as SqlExpr,
-Function as SQLFunction, Value as SqlValue, WindowSpec,
+Function as SQLFunction, Value as SqlValue, WindowType,
 };
 
 fn map_sql_polars_datatype(data_type: &SQLDataType) -> Result<DataType> {
@@ -29,8 +29,8 @@ fn map_sql_polars_datatype(data_type: &SQLDataType) -> Result<DataType> {
 SQLDataType::Boolean => DataType::Boolean,
 SQLDataType::Date => DataType::Date,
 SQLDataType::Time(_, _) => DataType::Time,
-SQLDataType::Timestamp(_, _) => DataType::Datetime(TimeUnit::Milliseconds, None),
-SQLDataType::Interval => DataType::Duration(TimeUnit::Milliseconds),
+SQLDataType::Timestamp(_, _) => DataType::Datetime(TimeUnit::Microseconds, None),
+SQLDataType::Interval => DataType::Duration(TimeUnit::Microseconds),
 SQLDataType::Array(inner_type) => match inner_type {
 Some(inner_type) => DataType::List(Box::new(map_sql_polars_datatype(inner_type)?)),
 None => {
@@ -125,18 +125,26 @@ pub fn parse_sql_expr(expr: &SqlExpr) -> Result<Expr> {
 })
 }
 
-fn apply_window_spec(expr: Expr, window_spec: &Option<WindowSpec>) -> Result<Expr> {
-Ok(match &window_spec {
-Some(window_spec) => {
-// Process for simple window specification, partition by first
-let partition_by = window_spec
-.partition_by
-.iter()
-.map(parse_sql_expr)
-.collect::<Result<Vec<_>>>()?;
-expr.over(partition_by)
-// Order by and Row range may not be supported at the moment
-}
+fn apply_window_spec(expr: Expr, window_type: Option<&WindowType>) -> Result<Expr> {
+Ok(match &window_type {
+Some(wtype) => match wtype {
+WindowType::WindowSpec(window_spec) => {
+// Process for simple window specification, partition by first
+let partition_by = window_spec
+.partition_by
+.iter()
+.map(parse_sql_expr)
+.collect::<Result<Vec<_>>>()?;
+expr.over(partition_by)
+// Order by and Row range may not be supported at the moment
+}
+// TODO: make NamedWindow work
+WindowType::NamedWindow(_named) => {
+return Err(PolarsError::ComputeError(
+format!("Expression: {expr:?} was not supported in polars-sql yet!").into(),
+))
+}
+},
 None => expr,
 })
 }
@@ -160,13 +168,13 @@ fn parse_sql_function(sql_function: &SQLFunction) -> Result<Expr> {
 sql_function.distinct,
 ) {
 ("sum", [FunctionArgExpr::Expr(expr)], false) => {
-apply_window_spec(parse_sql_expr(expr)?, &sql_function.over)?.sum()
+apply_window_spec(parse_sql_expr(expr)?, sql_function.over.as_ref())?.sum()
 }
 ("count", [FunctionArgExpr::Expr(expr)], false) => {
-apply_window_spec(parse_sql_expr(expr)?, &sql_function.over)?.count()
+apply_window_spec(parse_sql_expr(expr)?, sql_function.over.as_ref())?.count()
 }
 ("count", [FunctionArgExpr::Expr(expr)], true) => {
-apply_window_spec(parse_sql_expr(expr)?, &sql_function.over)?.n_unique()
+apply_window_spec(parse_sql_expr(expr)?, sql_function.over.as_ref())?.n_unique()
 }
 // Special case for wildcard args to count function.
 ("count", [FunctionArgExpr::Wildcard], false) => lit(1i32).count(),
@@ -29,11 +29,13 @@ impl Command for Summary
 fn signature(&self) -> Signature {
 Signature::build(self.name())
 .category(Category::Custom("dataframe".into()))
-.input_type(Type::Custom("dataframe".into()))
-.output_type(Type::Custom("dataframe".into()))
+.input_output_type(
+Type::Custom("dataframe".into()),
+Type::Custom("dataframe".into()),
+)
 .named(
 "quantiles",
-SyntaxShape::Table,
+SyntaxShape::Table(vec![]),
 "provide optional quantiles",
 Some('q'),
 )
@@ -118,30 +120,31 @@ fn command(
 let quantiles = quantiles.map(|values| {
 values
 .iter()
-.map(|value| match value {
-Value::Float { val, span } => {
-if (&0.0..=&1.0).contains(&val) {
-Ok(*val)
-} else {
-Err(ShellError::GenericError(
-"Incorrect value for quantile".to_string(),
-"value should be between 0 and 1".to_string(),
-Some(*span),
-None,
-Vec::new(),
-))
-}
-}
-_ => match value.span() {
-Ok(span) => Err(ShellError::GenericError(
-"Incorrect value for quantile".to_string(),
-"value should be a float".to_string(),
-Some(span),
-None,
-Vec::new(),
-)),
-Err(e) => Err(e),
-},
+.map(|value| {
+let span = value.span();
+match value {
+Value::Float { val, .. } => {
+if (&0.0..=&1.0).contains(&val) {
+Ok(*val)
+} else {
+Err(ShellError::GenericError(
+"Incorrect value for quantile".to_string(),
+"value should be between 0 and 1".to_string(),
+Some(span),
+None,
+Vec::new(),
+))
+}
+}
+Value::Error { error, .. } => Err(*error.clone()),
+_ => Err(ShellError::GenericError(
+"Incorrect value for quantile".to_string(),
+"value should be a float".to_string(),
+Some(span),
+None,
+Vec::new(),
+)),
+}
 })
 .collect::<Result<Vec<f64>, ShellError>>()
 });
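The quantile validation above also shows the broader `Value::span()` change in this series: `span()` no longer returns a `Result`, so callers grab the span up front and then match on the value. A small hedged sketch of that shape; `check_quantile` is an illustrative helper name, and the `ShellError::GenericError` arguments follow the tuple layout used in the hunk:

// Sketch of the span-first validation pattern from the Summary hunk.
fn check_quantile(value: &Value) -> Result<f64, ShellError> {
    let span = value.span(); // span() now returns Span directly
    match value {
        Value::Float { val, .. } if (0.0..=1.0).contains(val) => Ok(*val),
        _ => Err(ShellError::GenericError(
            "Incorrect value for quantile".to_string(),
            "value should be a float between 0 and 1".to_string(),
            Some(span),
            None,
            Vec::new(),
        )),
    }
}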
@@ -29,8 +29,10 @@ impl Command for TakeDF
 SyntaxShape::Any,
 "list of indices used to take data",
 )
-.input_type(Type::Custom("dataframe".into()))
-.output_type(Type::Custom("dataframe".into()))
+.input_output_type(
+Type::Custom("dataframe".into()),
+Type::Custom("dataframe".into()),
+)
 .category(Category::Custom("dataframe".into()))
 }
 
@@ -91,7 +93,7 @@ fn command(
 input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
 let index_value: Value = call.req(engine_state, stack, 0)?;
-let index_span = index_value.span()?;
+let index_span = index_value.span();
 let index = NuDataFrame::try_from_value(index_value)?.as_series(index_span)?;
 
 let casted = match index.dtype() {
@@ -25,8 +25,7 @@ impl Command for ToArrow
 fn signature(&self) -> Signature {
 Signature::build(self.name())
 .required("file", SyntaxShape::Filepath, "file path to save dataframe")
-.input_type(Type::Custom("dataframe".into()))
-.output_type(Type::Any)
+.input_output_type(Type::Custom("dataframe".into()), Type::Any)
 .category(Category::Custom("dataframe".into()))
 }
 
@@ -79,16 +78,10 @@ fn command(
 )
 })?;
 
-let file_value = Value::String {
-val: format!("saved {:?}", &file_name.item),
-span: file_name.span,
-};
+let file_value = Value::string(format!("saved {:?}", &file_name.item), file_name.span);
 
 Ok(PipelineData::Value(
-Value::List {
-vals: vec![file_value],
-span: call.head,
-},
+Value::list(vec![file_value], call.head),
 None,
 ))
 }
crates/nu-cmd-dataframe/src/dataframe/eager/to_avro.rs (new file, 117 lines)
@@ -0,0 +1,117 @@
+use std::{fs::File, path::PathBuf};
+
+use nu_engine::CallExt;
+use nu_protocol::{
+ast::Call,
+engine::{Command, EngineState, Stack},
+Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Type, Value,
+};
+use polars_io::avro::{AvroCompression, AvroWriter};
+use polars_io::SerWriter;
+
+use super::super::values::NuDataFrame;
+
+#[derive(Clone)]
+pub struct ToAvro;
+
+impl Command for ToAvro {
+fn name(&self) -> &str {
+"dfr to-avro"
+}
+
+fn usage(&self) -> &str {
+"Saves dataframe to avro file."
+}
+
+fn signature(&self) -> Signature {
+Signature::build(self.name())
+.named(
+"compression",
+SyntaxShape::String,
+"use compression, supports deflate or snappy",
+Some('c'),
+)
+.required("file", SyntaxShape::Filepath, "file path to save dataframe")
+.input_output_type(Type::Custom("dataframe".into()), Type::Any)
+.category(Category::Custom("dataframe".into()))
+}
+
+fn examples(&self) -> Vec<Example> {
+vec![Example {
+description: "Saves dataframe to avro file",
+example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr to-avro test.avro",
+result: None,
+}]
+}
+
+fn run(
+&self,
+engine_state: &EngineState,
+stack: &mut Stack,
+call: &Call,
+input: PipelineData,
+) -> Result<PipelineData, ShellError> {
+command(engine_state, stack, call, input)
+}
+}
+
+fn get_compression(call: &Call) -> Result<Option<AvroCompression>, ShellError> {
+if let Some((compression, span)) = call
+.get_flag_expr("compression")
+.and_then(|e| e.as_string().map(|s| (s, e.span)))
+{
+match compression.as_ref() {
+"snappy" => Ok(Some(AvroCompression::Snappy)),
+"deflate" => Ok(Some(AvroCompression::Deflate)),
+_ => Err(ShellError::IncorrectValue {
+msg: "compression must be one of deflate or snappy".to_string(),
+val_span: span,
+call_span: span,
+}),
+}
+} else {
+Ok(None)
+}
+}
+
+fn command(
+engine_state: &EngineState,
+stack: &mut Stack,
+call: &Call,
+input: PipelineData,
+) -> Result<PipelineData, ShellError> {
+let file_name: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
+let compression = get_compression(call)?;
+
+let mut df = NuDataFrame::try_from_pipeline(input, call.head)?;
+
+let file = File::create(&file_name.item).map_err(|e| {
+ShellError::GenericError(
+"Error with file name".into(),
+e.to_string(),
+Some(file_name.span),
+None,
+Vec::new(),
+)
+})?;
+
+AvroWriter::new(file)
+.with_compression(compression)
+.finish(df.as_mut())
+.map_err(|e| {
+ShellError::GenericError(
+"Error saving file".into(),
+e.to_string(),
+Some(file_name.span),
+None,
+Vec::new(),
+)
+})?;
+
+let file_value = Value::string(format!("saved {:?}", &file_name.item), file_name.span);
+
+Ok(PipelineData::Value(
+Value::list(vec![file_value], call.head),
+None,
+))
+}
@@ -32,8 +32,7 @@ impl Command for ToCSV
 Some('d'),
 )
 .switch("no-header", "Indicates if file doesn't have header", None)
-.input_type(Type::Custom("dataframe".into()))
-.output_type(Type::Any)
+.input_output_type(Type::Custom("dataframe".into()), Type::Any)
 .category(Category::Custom("dataframe".into()))
 }
 
@@ -125,16 +124,10 @@ fn command(
 )
 })?;
 
-let file_value = Value::String {
-val: format!("saved {:?}", &file_name.item),
-span: file_name.span,
-};
+let file_value = Value::string(format!("saved {:?}", &file_name.item), file_name.span);
 
 Ok(PipelineData::Value(
-Value::List {
-vals: vec![file_value],
-span: call.head,
-},
+Value::list(vec![file_value], call.head),
 None,
 ))
 }
@@ -20,8 +20,7 @@ impl Command for ToDataFrame
 
 fn signature(&self) -> Signature {
 Signature::build(self.name())
-.input_type(Type::Any)
-.output_type(Type::Custom("dataframe".into()))
+.input_output_type(Type::Any, Type::Custom("dataframe".into()))
 .category(Category::Custom("dataframe".into()))
 }
 
crates/nu-cmd-dataframe/src/dataframe/eager/to_json_lines.rs (new file, 90 lines)
@@ -0,0 +1,90 @@
+use std::{fs::File, io::BufWriter, path::PathBuf};
+
+use nu_engine::CallExt;
+use nu_protocol::{
+ast::Call,
+engine::{Command, EngineState, Stack},
+Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Type, Value,
+};
+use polars::prelude::{JsonWriter, SerWriter};
+
+use super::super::values::NuDataFrame;
+
+#[derive(Clone)]
+pub struct ToJsonLines;
+
+impl Command for ToJsonLines {
+fn name(&self) -> &str {
+"dfr to-jsonl"
+}
+
+fn usage(&self) -> &str {
+"Saves dataframe to a JSON lines file."
+}
+
+fn signature(&self) -> Signature {
+Signature::build(self.name())
+.required("file", SyntaxShape::Filepath, "file path to save dataframe")
+.input_output_type(Type::Custom("dataframe".into()), Type::Any)
+.category(Category::Custom("dataframe".into()))
+}
+
+fn examples(&self) -> Vec<Example> {
+vec![Example {
+description: "Saves dataframe to JSON lines file",
+example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr to-jsonl test.jsonl",
+result: None,
+}]
+}
+
+fn run(
+&self,
+engine_state: &EngineState,
+stack: &mut Stack,
+call: &Call,
+input: PipelineData,
+) -> Result<PipelineData, ShellError> {
+command(engine_state, stack, call, input)
+}
+}
+
+fn command(
+engine_state: &EngineState,
+stack: &mut Stack,
+call: &Call,
+input: PipelineData,
+) -> Result<PipelineData, ShellError> {
+let file_name: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
+
+let mut df = NuDataFrame::try_from_pipeline(input, call.head)?;
+
+let file = File::create(&file_name.item).map_err(|e| {
+ShellError::GenericError(
+"Error with file name".into(),
+e.to_string(),
+Some(file_name.span),
+None,
+Vec::new(),
+)
+})?;
+let buf_writer = BufWriter::new(file);
+
+JsonWriter::new(buf_writer)
+.finish(df.as_mut())
+.map_err(|e| {
+ShellError::GenericError(
+"Error saving file".into(),
+e.to_string(),
+Some(file_name.span),
+None,
+Vec::new(),
+)
+})?;
+
+let file_value = Value::string(format!("saved {:?}", &file_name.item), file_name.span);
+
+Ok(PipelineData::Value(
+Value::list(vec![file_value], call.head),
+None,
+))
+}
@@ -2,9 +2,11 @@ use nu_engine::CallExt;
 use nu_protocol::{
     ast::Call,
     engine::{Command, EngineState, Stack},
-    Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
+    Category, Example, PipelineData, Record, ShellError, Signature, Span, SyntaxShape, Type, Value,
 };

+use crate::dataframe::values::NuExpression;
+
 use super::super::values::NuDataFrame;

 #[derive(Clone)]
@@ -16,7 +18,7 @@ impl Command for ToNu {
     }

     fn usage(&self) -> &str {
-        "Converts a section of the dataframe into nushell Table."
+        "Converts a dataframe or an expression into into nushell value for access and exploration."
     }

     fn signature(&self) -> Signature {
@@ -28,45 +30,47 @@ impl Command for ToNu {
                 Some('n'),
             )
             .switch("tail", "shows tail rows", Some('t'))
-            .input_type(Type::Custom("dataframe".into()))
-            .output_type(Type::Any)
+            .input_output_types(vec![
+                (Type::Custom("expression".into()), Type::Any),
+                (Type::Custom("dataframe".into()), Type::Table(vec![])),
+            ])
+            //.input_output_type(Type::Any, Type::Any)
             .category(Category::Custom("dataframe".into()))
     }

     fn examples(&self) -> Vec<Example> {
         let cols = vec!["index".into(), "a".into(), "b".into()];
-        let rec_1 = Value::Record {
+        let rec_1 = Value::test_record(Record {
             cols: cols.clone(),
             vals: vec![Value::test_int(0), Value::test_int(1), Value::test_int(2)],
-            span: Span::test_data(),
-        };
-        let rec_2 = Value::Record {
+        });
+        let rec_2 = Value::test_record(Record {
             cols: cols.clone(),
             vals: vec![Value::test_int(1), Value::test_int(3), Value::test_int(4)],
-            span: Span::test_data(),
-        };
-        let rec_3 = Value::Record {
+        });
+        let rec_3 = Value::test_record(Record {
             cols,
             vals: vec![Value::test_int(2), Value::test_int(3), Value::test_int(4)],
-            span: Span::test_data(),
-        };
+        });

         vec![
             Example {
                 description: "Shows head rows from dataframe",
                 example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr into-nu",
-                result: Some(Value::List {
-                    vals: vec![rec_1, rec_2],
-                    span: Span::test_data(),
-                }),
+                result: Some(Value::list(vec![rec_1, rec_2], Span::test_data())),
             },
             Example {
                 description: "Shows tail rows from dataframe",
                 example: "[[a b]; [1 2] [5 6] [3 4]] | dfr into-df | dfr into-nu -t -n 1",
-                result: Some(Value::List {
-                    vals: vec![rec_3],
-                    span: Span::test_data(),
-                }),
+                result: Some(Value::list(vec![rec_3], Span::test_data())),
+            },
+            Example {
+                description: "Convert a col expression into a nushell value",
+                example: "dfr col a | dfr into-nu",
+                result: Some(Value::test_record(Record {
+                    cols: vec!["expr".into(), "value".into()],
+                    vals: vec![Value::test_string("column"), Value::test_string("a")],
+                })),
             },
         ]
     }
@@ -78,20 +82,25 @@ impl Command for ToNu {
         call: &Call,
         input: PipelineData,
     ) -> Result<PipelineData, ShellError> {
-        command(engine_state, stack, call, input)
+        let value = input.into_value(call.head);
+        if NuDataFrame::can_downcast(&value) {
+            dataframe_command(engine_state, stack, call, value)
+        } else {
+            expression_command(call, value)
+        }
     }
 }

-fn command(
+fn dataframe_command(
     engine_state: &EngineState,
     stack: &mut Stack,
     call: &Call,
-    input: PipelineData,
+    input: Value,
 ) -> Result<PipelineData, ShellError> {
     let rows: Option<usize> = call.get_flag(engine_state, stack, "rows")?;
     let tail: bool = call.has_flag("tail");

-    let df = NuDataFrame::try_from_pipeline(input, call.head)?;
+    let df = NuDataFrame::try_from_value(input)?;

     let values = if tail {
         df.tail(rows, call.head)?
@@ -104,21 +113,30 @@ fn command(
         }
     };

-    let value = Value::List {
-        vals: values,
-        span: call.head,
-    };
+    let value = Value::list(values, call.head);

     Ok(PipelineData::Value(value, None))
 }

+fn expression_command(call: &Call, input: Value) -> Result<PipelineData, ShellError> {
+    let expr = NuExpression::try_from_value(input)?;
+    let value = expr.to_value(call.head)?;
+
+    Ok(PipelineData::Value(value, None))
+}
+
 #[cfg(test)]
 mod test {
+    use super::super::super::expressions::ExprCol;
     use super::super::super::test_dataframe::test_dataframe;
     use super::*;

     #[test]
-    fn test_examples() {
+    fn test_examples_dataframe_input() {
         test_dataframe(vec![Box::new(ToNu {})])
     }
+
+    #[test]
+    fn test_examples_expression_input() {
+        test_dataframe(vec![Box::new(ToNu {}), Box::new(ExprCol {})])
+    }
 }
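Reviewer note: the hunks above show the Value-construction pattern that repeats through the rest of this diff, so a condensed sketch may help while reading the remaining files. Struct-literal variants that carried an explicit span field (Value::Record { cols, vals, span } and Value::List { vals, span }) give way to constructor helpers (Value::test_record(Record { .. }), Value::list(..), Value::string(..)). The snippet below is a minimal sketch, assuming a nu-protocol version that exposes these helpers exactly as they are used in this diff; the function name is only illustrative.

    use nu_protocol::{Record, Span, Value};

    // Sketch: builds the same kind of test values the new code builds.
    fn sample_values() -> (Value, Value) {
        // Before: Value::List { vals: vec![Value::test_int(1)], span: Span::test_data() }
        let list = Value::list(vec![Value::test_int(1)], Span::test_data());
        // Before: Value::Record { cols, vals, span: Span::test_data() }
        let record = Value::test_record(Record {
            cols: vec!["expr".into(), "value".into()],
            vals: vec![Value::test_string("column"), Value::test_string("a")],
        });
        (list, record)
    }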
@@ -25,8 +25,7 @@ impl Command for ToParquet {
     fn signature(&self) -> Signature {
         Signature::build(self.name())
             .required("file", SyntaxShape::Filepath, "file path to save dataframe")
-            .input_type(Type::Custom("dataframe".into()))
-            .output_type(Type::Any)
+            .input_output_type(Type::Custom("dataframe".into()), Type::Any)
             .category(Category::Custom("dataframe".into()))
     }

@@ -79,16 +78,10 @@ fn command(
         )
     })?;

-    let file_value = Value::String {
-        val: format!("saved {:?}", &file_name.item),
-        span: file_name.span,
-    };
+    let file_value = Value::string(format!("saved {:?}", &file_name.item), file_name.span);

     Ok(PipelineData::Value(
-        Value::List {
-            vals: vec![file_value],
-            span: call.head,
-        },
+        Value::list(vec![file_value], call.head),
         None,
     ))
 }
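The same signature migration appears in most of the files below: the paired .input_type(..) and .output_type(..) builder calls are folded into a single .input_output_type(input, output), and commands that accept more than one pairing use .input_output_types(vec![..]), as the dfr into-nu hunk above shows. A minimal sketch, assuming the Signature builder methods as they are used in this diff; the command name is a placeholder:

    use nu_protocol::{Signature, Type};

    // Sketch: one input/output pairing declared the new way.
    fn sample_signature() -> Signature {
        Signature::build("dfr example")
            // before: .input_type(Type::Custom("dataframe".into())).output_type(Type::Any)
            .input_output_type(Type::Custom("dataframe".into()), Type::Any)
        // a command that accepts several pairings would instead call:
        // .input_output_types(vec![
        //     (Type::Custom("expression".into()), Type::Any),
        //     (Type::Custom("dataframe".into()), Type::Table(vec![])),
        // ])
    }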
@@ -27,8 +27,10 @@ impl Command for WithColumn {
                 SyntaxShape::Any,
                 "series to be added or expressions used to define the new columns",
             )
-            .input_type(Type::Custom("dataframe".into()))
-            .output_type(Type::Custom("dataframe".into()))
+            .input_output_type(
+                Type::Custom("dataframe".into()),
+                Type::Custom("dataframe".into()),
+            )
             .category(Category::Custom("dataframe or lazyframe".into()))
     }

@@ -112,7 +114,7 @@ impl Command for WithColumn {
             Err(ShellError::CantConvert {
                 to_type: "lazy or eager dataframe".into(),
                 from_type: value.get_type().to_string(),
-                span: value.span()?,
+                span: value.span(),
                 help: None,
             })
         }
@@ -126,14 +128,11 @@ fn command_eager(
     mut df: NuDataFrame,
 ) -> Result<PipelineData, ShellError> {
     let new_column: Value = call.req(engine_state, stack, 0)?;
-    let column_span = new_column.span()?;
+    let column_span = new_column.span();

     if NuExpression::can_downcast(&new_column) {
         let vals: Vec<Value> = call.rest(engine_state, stack, 0)?;
-        let value = Value::List {
-            vals,
-            span: call.head,
-        };
+        let value = Value::list(vals, call.head);
         let expressions = NuExpression::extract_exprs(value)?;
         let lazy = NuLazyFrame::new(true, df.lazy().with_columns(&expressions));

@@ -177,10 +176,7 @@ fn command_lazy(
     lazy: NuLazyFrame,
 ) -> Result<PipelineData, ShellError> {
     let vals: Vec<Value> = call.rest(engine_state, stack, 0)?;
-    let value = Value::List {
-        vals,
-        span: call.head,
-    };
+    let value = Value::list(vals, call.head);
     let expressions = NuExpression::extract_exprs(value)?;

     let lazy: NuLazyFrame = lazy.into_polars().with_columns(&expressions).into();
@@ -4,7 +4,7 @@ use nu_engine::CallExt;
 use nu_protocol::{
     ast::Call,
     engine::{Command, EngineState, Stack},
-    Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
+    Category, Example, PipelineData, Record, ShellError, Signature, SyntaxShape, Type, Value,
 };

 #[derive(Clone)]
@@ -26,8 +26,10 @@ impl Command for ExprAlias {
                 SyntaxShape::String,
                 "Alias name for the expression",
             )
-            .input_type(Type::Custom("expression".into()))
-            .output_type(Type::Custom("expression".into()))
+            .input_output_type(
+                Type::Custom("expression".into()),
+                Type::Custom("expression".into()),
+            )
             .category(Category::Custom("expression".into()))
     }

@@ -39,20 +41,18 @@ impl Command for ExprAlias {
                 let cols = vec!["expr".into(), "value".into()];
                 let expr = Value::test_string("column");
                 let value = Value::test_string("a");
-                let expr = Value::Record {
+                let expr = Value::test_record(Record {
                     cols,
                     vals: vec![expr, value],
-                    span: Span::test_data(),
-                };
+                });

                 let cols = vec!["expr".into(), "alias".into()];
                 let value = Value::test_string("new_a");

-                let record = Value::Record {
+                let record = Value::test_record(Record {
                     cols,
                     vals: vec![expr, value],
-                    span: Span::test_data(),
-                };
+                });

                 Some(record)
             },
@@ -86,7 +86,7 @@ impl Command for ExprAlias {
 mod test {
     use super::super::super::test_dataframe::test_dataframe;
     use super::*;
-    use crate::dataframe::expressions::ExprAsNu;
+    use crate::dataframe::eager::ToNu;
     use crate::dataframe::expressions::ExprCol;

     #[test]
@@ -94,7 +94,7 @@ mod test {
         test_dataframe(vec![
             Box::new(ExprAlias {}),
             Box::new(ExprCol {}),
-            Box::new(ExprAsNu {}),
+            Box::new(ToNu {}),
         ])
     }
 }
@@ -22,8 +22,7 @@ impl Command for ExprArgWhere {
     fn signature(&self) -> Signature {
         Signature::build(self.name())
             .required("column name", SyntaxShape::Any, "Expression to evaluate")
-            .input_type(Type::Any)
-            .output_type(Type::Custom("expression".into()))
+            .input_output_type(Type::Any, Type::Custom("expression".into()))
             .category(Category::Custom("expression".into()))
     }

@@ -3,7 +3,7 @@ use nu_engine::CallExt;
 use nu_protocol::{
     ast::Call,
     engine::{Command, EngineState, Stack},
-    Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
+    Category, Example, PipelineData, Record, ShellError, Signature, SyntaxShape, Type, Value,
 };
 use polars::prelude::col;

@@ -26,8 +26,7 @@ impl Command for ExprCol {
                 SyntaxShape::String,
                 "Name of column to be used",
             )
-            .input_type(Type::Any)
-            .output_type(Type::Custom("expression".into()))
+            .input_output_type(Type::Any, Type::Custom("expression".into()))
             .category(Category::Custom("expression".into()))
     }

@@ -35,11 +34,10 @@ impl Command for ExprCol {
         vec![Example {
             description: "Creates a named column expression and converts it to a nu object",
             example: "dfr col a | dfr into-nu",
-            result: Some(Value::Record {
+            result: Some(Value::test_record(Record {
                 cols: vec!["expr".into(), "value".into()],
                 vals: vec![Value::test_string("column"), Value::test_string("a")],
-                span: Span::test_data(),
-            }),
+            })),
         }]
     }

@@ -65,10 +63,10 @@ impl Command for ExprCol {
 mod test {
     use super::super::super::test_dataframe::test_dataframe;
     use super::*;
-    use crate::dataframe::expressions::as_nu::ExprAsNu;
+    use crate::dataframe::eager::ToNu;

     #[test]
     fn test_examples() {
-        test_dataframe(vec![Box::new(ExprCol {}), Box::new(ExprAsNu {})])
+        test_dataframe(vec![Box::new(ExprCol {}), Box::new(ToNu {})])
     }
 }
@@ -31,8 +31,7 @@ impl Command for ExprConcatStr {
                 SyntaxShape::List(Box::new(SyntaxShape::Any)),
                 "Expression(s) that define the string concatenation",
             )
-            .input_type(Type::Any)
-            .output_type(Type::Custom("expression".into()))
+            .input_output_type(Type::Any, Type::Custom("expression".into()))
             .category(Category::Custom("expression".into()))
     }

crates/nu-cmd-dataframe/src/dataframe/expressions/datepart.rs (new file, 164 lines)
@@ -0,0 +1,164 @@
use super::super::values::NuExpression;

use crate::dataframe::values::{Column, NuDataFrame};
use chrono::{DateTime, FixedOffset};
use nu_engine::CallExt;
use nu_protocol::{
    ast::Call,
    engine::{Command, EngineState, Stack},
    Category, Example, PipelineData, ShellError, Signature, Span, Spanned, SyntaxShape, Type,
    Value,
};

#[derive(Clone)]
pub struct ExprDatePart;

impl Command for ExprDatePart {
    fn name(&self) -> &str {
        "dfr datepart"
    }

    fn usage(&self) -> &str {
        "Creates an expression for capturing the specified datepart in a column."
    }

    fn signature(&self) -> Signature {
        Signature::build(self.name())
            .required(
                "Datepart name",
                SyntaxShape::String,
                "Part of the date to capture. Possible values are year, quarter, month, week, weekday, day, hour, minute, second, millisecond, microsecond, nanosecond",
            )
            .input_output_type(
                Type::Custom("expression".into()),
                Type::Custom("expression".into()),
            )
            .category(Category::Custom("expression".into()))
    }

    fn examples(&self) -> Vec<Example> {
        let dt = DateTime::<FixedOffset>::parse_from_str(
            "2021-12-30T01:02:03.123456789 +0000",
            "%Y-%m-%dT%H:%M:%S.%9f %z",
        )
        .expect("date calculation should not fail in test");
        vec![
            Example {
                description: "Creates an expression to capture the year date part",
                example: r#"[["2021-12-30T01:02:03.123456789"]] | dfr into-df | dfr as-datetime "%Y-%m-%dT%H:%M:%S.%9f" | dfr with-column [(dfr col datetime | dfr datepart year | dfr as datetime_year )]"#,
                result: Some(
                    NuDataFrame::try_from_columns(vec![
                        Column::new("datetime".to_string(), vec![Value::test_date(dt)]),
                        Column::new("datetime_year".to_string(), vec![Value::test_int(2021)]),
                    ])
                    .expect("simple df for test should not fail")
                    .into_value(Span::test_data()),
                ),
            },
            Example {
                description: "Creates an expression to capture multiple date parts",
                example: r#"[["2021-12-30T01:02:03.123456789"]] | dfr into-df | dfr as-datetime "%Y-%m-%dT%H:%M:%S.%9f" |
                dfr with-column [ (dfr col datetime | dfr datepart year | dfr as datetime_year ),
                (dfr col datetime | dfr datepart month | dfr as datetime_month ),
                (dfr col datetime | dfr datepart day | dfr as datetime_day ),
                (dfr col datetime | dfr datepart hour | dfr as datetime_hour ),
                (dfr col datetime | dfr datepart minute | dfr as datetime_minute ),
                (dfr col datetime | dfr datepart second | dfr as datetime_second ),
                (dfr col datetime | dfr datepart nanosecond | dfr as datetime_ns ) ]"#,
                result: Some(
                    NuDataFrame::try_from_columns(vec![
                        Column::new("datetime".to_string(), vec![Value::test_date(dt)]),
                        Column::new("datetime_year".to_string(), vec![Value::test_int(2021)]),
                        Column::new("datetime_month".to_string(), vec![Value::test_int(12)]),
                        Column::new("datetime_day".to_string(), vec![Value::test_int(30)]),
                        Column::new("datetime_hour".to_string(), vec![Value::test_int(1)]),
                        Column::new("datetime_minute".to_string(), vec![Value::test_int(2)]),
                        Column::new("datetime_second".to_string(), vec![Value::test_int(3)]),
                        Column::new("datetime_ns".to_string(), vec![Value::test_int(123456789)]),
                    ])
                    .expect("simple df for test should not fail")
                    .into_value(Span::test_data()),
                ),
            },
        ]
    }

    fn search_terms(&self) -> Vec<&str> {
        vec![
            "year",
            "month",
            "week",
            "weekday",
            "quarter",
            "day",
            "hour",
            "minute",
            "second",
            "millisecond",
            "microsecond",
            "nanosecond",
        ]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        let part: Spanned<String> = call.req(engine_state, stack, 0)?;

        let expr = NuExpression::try_from_pipeline(input, call.head)?;
        let expr_dt = expr.into_polars().dt();
        let expr = match part.item.as_str() {
            "year" => expr_dt.year(),
            "quarter" => expr_dt.quarter(),
            "month" => expr_dt.month(),
            "week" => expr_dt.week(),
            "day" => expr_dt.day(),
            "hour" => expr_dt.hour(),
            "minute" => expr_dt.minute(),
            "second" => expr_dt.second(),
            "millisecond" => expr_dt.millisecond(),
            "microsecond" => expr_dt.microsecond(),
            "nanosecond" => expr_dt.nanosecond(),
            _ => {
                return Err(ShellError::UnsupportedInput(
                    format!("{} is not a valid datepart, expected one of year, month, day, hour, minute, second, millisecond, microsecond, nanosecond", part.item),
                    "value originates from here".to_string(),
                    call.head,
                    part.span,
                ));
            }
        }.into();

        Ok(PipelineData::Value(
            NuExpression::into_value(expr, call.head),
            None,
        ))
    }
}

#[cfg(test)]
mod test {
    use super::super::super::test_dataframe::test_dataframe;
    use super::*;
    use crate::dataframe::eager::ToNu;
    use crate::dataframe::eager::WithColumn;
    use crate::dataframe::expressions::ExprAlias;
    use crate::dataframe::expressions::ExprCol;
    use crate::dataframe::series::AsDateTime;

    #[test]
    fn test_examples() {
        test_dataframe(vec![
            Box::new(ExprDatePart {}),
            Box::new(ExprCol {}),
            Box::new(ToNu {}),
            Box::new(AsDateTime {}),
            Box::new(WithColumn {}),
            Box::new(ExprAlias {}),
        ])
    }
}
@@ -0,0 +1,679 @@
/// Definition of multiple Expression commands using a macro rule
/// All of these expressions have an identical body and only require
/// to have a change in the name, description and expression function
use crate::dataframe::values::{Column, NuDataFrame, NuExpression, NuLazyFrame};
use nu_protocol::{
    ast::Call,
    engine::{Command, EngineState, Stack},
    Category, Example, PipelineData, ShellError, Signature, Span, Type, Value,
};

// The structs defined in this file are structs that form part of other commands
// since they share a similar name
macro_rules! expr_command {
    ($command: ident, $name: expr, $desc: expr, $examples: expr, $func: ident, $test: ident) => {
        #[derive(Clone)]
        pub struct $command;

        impl Command for $command {
            fn name(&self) -> &str {
                $name
            }

            fn usage(&self) -> &str {
                $desc
            }

            fn signature(&self) -> Signature {
                Signature::build(self.name())
                    .input_output_type(
                        Type::Custom("expression".into()),
                        Type::Custom("expression".into()),
                    )
                    .category(Category::Custom("expression".into()))
            }

            fn examples(&self) -> Vec<Example> {
                $examples
            }

            fn run(
                &self,
                _engine_state: &EngineState,
                _stack: &mut Stack,
                call: &Call,
                input: PipelineData,
            ) -> Result<PipelineData, ShellError> {
                let expr = NuExpression::try_from_pipeline(input, call.head)?;
                let expr: NuExpression = expr.into_polars().$func().into();

                Ok(PipelineData::Value(
                    NuExpression::into_value(expr, call.head),
                    None,
                ))
            }
        }

        #[cfg(test)]
        mod $test {
            use super::super::super::test_dataframe::test_dataframe;
            use super::*;
            use crate::dataframe::lazy::aggregate::LazyAggregate;
            use crate::dataframe::lazy::groupby::ToLazyGroupBy;

            #[test]
            fn test_examples() {
                test_dataframe(vec![
                    Box::new($command {}),
                    Box::new(LazyAggregate {}),
                    Box::new(ToLazyGroupBy {}),
                ])
            }
        }
    };

    ($command: ident, $name: expr, $desc: expr, $examples: expr, $func: ident, $test: ident, $ddof: expr) => {
        #[derive(Clone)]
        pub struct $command;

        impl Command for $command {
            fn name(&self) -> &str {
                $name
            }

            fn usage(&self) -> &str {
                $desc
            }

            fn signature(&self) -> Signature {
                Signature::build(self.name())
                    .input_output_type(
                        Type::Custom("expression".into()),
                        Type::Custom("expression".into()),
                    )
                    .category(Category::Custom("expression".into()))
            }

            fn examples(&self) -> Vec<Example> {
                $examples
            }

            fn run(
                &self,
                _engine_state: &EngineState,
                _stack: &mut Stack,
                call: &Call,
                input: PipelineData,
            ) -> Result<PipelineData, ShellError> {
                let expr = NuExpression::try_from_pipeline(input, call.head)?;
                let expr: NuExpression = expr.into_polars().$func($ddof).into();

                Ok(PipelineData::Value(
                    NuExpression::into_value(expr, call.head),
                    None,
                ))
            }
        }

        #[cfg(test)]
        mod $test {
            use super::super::super::test_dataframe::test_dataframe;
            use super::*;
            use crate::dataframe::lazy::aggregate::LazyAggregate;
            use crate::dataframe::lazy::groupby::ToLazyGroupBy;

            #[test]
            fn test_examples() {
                test_dataframe(vec![
                    Box::new($command {}),
                    Box::new(LazyAggregate {}),
                    Box::new(ToLazyGroupBy {}),
                ])
            }
        }
    };
}

// The structs defined in this file are structs that form part of other commands
// since they share a similar name
macro_rules! lazy_expr_command {
    ($command: ident, $name: expr, $desc: expr, $examples: expr, $func: ident, $test: ident) => {
        #[derive(Clone)]
        pub struct $command;

        impl Command for $command {
            fn name(&self) -> &str {
                $name
            }

            fn usage(&self) -> &str {
                $desc
            }

            fn signature(&self) -> Signature {
                Signature::build(self.name())
                    .input_output_types(vec![
                        (
                            Type::Custom("expression".into()),
                            Type::Custom("expression".into()),
                        ),
                        (
                            Type::Custom("dataframe".into()),
                            Type::Custom("dataframe".into()),
                        ),
                    ])
                    .category(Category::Custom("expression".into()))
            }

            fn examples(&self) -> Vec<Example> {
                $examples
            }

            fn run(
                &self,
                _engine_state: &EngineState,
                _stack: &mut Stack,
                call: &Call,
                input: PipelineData,
            ) -> Result<PipelineData, ShellError> {
                let value = input.into_value(call.head);
                if NuDataFrame::can_downcast(&value) {
                    let lazy = NuLazyFrame::try_from_value(value)?;
                    let lazy = NuLazyFrame::new(lazy.from_eager, lazy.into_polars().$func());

                    Ok(PipelineData::Value(lazy.into_value(call.head)?, None))
                } else {
                    let expr = NuExpression::try_from_value(value)?;
                    let expr: NuExpression = expr.into_polars().$func().into();

                    Ok(PipelineData::Value(
                        NuExpression::into_value(expr, call.head),
                        None,
                    ))
                }
            }
        }

        #[cfg(test)]
        mod $test {
            use super::super::super::test_dataframe::{
                build_test_engine_state, test_dataframe_example,
            };
            use super::*;
            use crate::dataframe::lazy::aggregate::LazyAggregate;
            use crate::dataframe::lazy::groupby::ToLazyGroupBy;

            #[test]
            fn test_examples_dataframe() {
                // the first example should be a for the dataframe case
                let example = &$command.examples()[0];
                let mut engine_state = build_test_engine_state(vec![Box::new($command {})]);
                test_dataframe_example(&mut engine_state, &example)
            }

            #[test]
            fn test_examples_expressions() {
                // the second example should be a for the dataframe case
                let example = &$command.examples()[1];
                let mut engine_state = build_test_engine_state(vec![
                    Box::new($command {}),
                    Box::new(LazyAggregate {}),
                    Box::new(ToLazyGroupBy {}),
                ]);
                test_dataframe_example(&mut engine_state, &example)
            }
        }
    };

    ($command: ident, $name: expr, $desc: expr, $examples: expr, $func: ident, $test: ident, $ddof: expr) => {
        #[derive(Clone)]
        pub struct $command;

        impl Command for $command {
            fn name(&self) -> &str {
                $name
            }

            fn usage(&self) -> &str {
                $desc
            }

            fn signature(&self) -> Signature {
                Signature::build(self.name())
                    .input_output_types(vec![
                        (
                            Type::Custom("expression".into()),
                            Type::Custom("expression".into()),
                        ),
                        (
                            Type::Custom("dataframe".into()),
                            Type::Custom("dataframe".into()),
                        ),
                    ])
                    .category(Category::Custom("expression".into()))
            }

            fn examples(&self) -> Vec<Example> {
                $examples
            }

            fn run(
                &self,
                _engine_state: &EngineState,
                _stack: &mut Stack,
                call: &Call,
                input: PipelineData,
            ) -> Result<PipelineData, ShellError> {
                let value = input.into_value(call.head);
                if NuDataFrame::can_downcast(&value) {
                    let lazy = NuLazyFrame::try_from_value(value)?;
                    let lazy = NuLazyFrame::new(lazy.from_eager, lazy.into_polars().$func($ddof));

                    Ok(PipelineData::Value(lazy.into_value(call.head)?, None))
                } else {
                    let expr = NuExpression::try_from_value(value)?;
                    let expr: NuExpression = expr.into_polars().$func($ddof).into();

                    Ok(PipelineData::Value(
                        NuExpression::into_value(expr, call.head),
                        None,
                    ))
                }
            }
        }

        #[cfg(test)]
        mod $test {
            use super::super::super::test_dataframe::{
                build_test_engine_state, test_dataframe_example,
            };
            use super::*;
            use crate::dataframe::lazy::aggregate::LazyAggregate;
            use crate::dataframe::lazy::groupby::ToLazyGroupBy;

            #[test]
            fn test_examples_dataframe() {
                // the first example should be a for the dataframe case
                let example = &$command.examples()[0];
                let mut engine_state = build_test_engine_state(vec![Box::new($command {})]);
                test_dataframe_example(&mut engine_state, &example)
            }

            #[test]
            fn test_examples_expressions() {
                // the second example should be a for the dataframe case
                let example = &$command.examples()[1];
                let mut engine_state = build_test_engine_state(vec![
                    Box::new($command {}),
                    Box::new(LazyAggregate {}),
                    Box::new(ToLazyGroupBy {}),
                ]);
                test_dataframe_example(&mut engine_state, &example)
            }
        }
    };
}

// ExprList command
// Expands to a command definition for a list expression
expr_command!(
    ExprList,
    "dfr implode",
    "Aggregates a group to a Series",
    vec![Example {
        description: "",
        example: "",
        result: None,
    }],
    implode,
    test_implode
);

// ExprAggGroups command
// Expands to a command definition for a agg groups expression
expr_command!(
    ExprAggGroups,
    "dfr agg-groups",
    "creates an agg_groups expression",
    vec![Example {
        description: "",
        example: "",
        result: None,
    }],
    agg_groups,
    test_groups
);

// ExprCount command
// Expands to a command definition for a count expression
expr_command!(
    ExprCount,
    "dfr count",
    "creates a count expression",
    vec![Example {
        description: "",
        example: "",
        result: None,
    }],
    count,
    test_count
);

// ExprNot command
// Expands to a command definition for a not expression
expr_command!(
    ExprNot,
    "dfr expr-not",
    "creates a not expression",
    vec![Example {
        description: "Creates a not expression",
        example: "(dfr col a) > 2) | dfr expr-not",
        result: None,
    },],
    not,
    test_not
);

// ExprMax command
// Expands to a command definition for max aggregation
lazy_expr_command!(
    ExprMax,
    "dfr max",
    "Creates a max expression or aggregates columns to their max value",
    vec![
        Example {
            description: "Max value from columns in a dataframe",
            example: "[[a b]; [6 2] [1 4] [4 1]] | dfr into-df | dfr max",
            result: Some(
                NuDataFrame::try_from_columns(vec![
                    Column::new("a".to_string(), vec![Value::test_int(6)],),
                    Column::new("b".to_string(), vec![Value::test_int(4)],),
                ])
                .expect("simple df for test should not fail")
                .into_value(Span::test_data()),
            ),
        },
        Example {
            description: "Max aggregation for a group-by",
            example: r#"[[a b]; [one 2] [one 4] [two 1]]
    | dfr into-df
    | dfr group-by a
    | dfr agg (dfr col b | dfr max)"#,
            result: Some(
                NuDataFrame::try_from_columns(vec![
                    Column::new(
                        "a".to_string(),
                        vec![Value::test_string("one"), Value::test_string("two")],
                    ),
                    Column::new(
                        "b".to_string(),
                        vec![Value::test_int(4), Value::test_int(1)],
                    ),
                ])
                .expect("simple df for test should not fail")
                .into_value(Span::test_data()),
            ),
        },
    ],
    max,
    test_max
);

// ExprMin command
// Expands to a command definition for min aggregation
lazy_expr_command!(
    ExprMin,
    "dfr min",
    "Creates a min expression or aggregates columns to their min value",
    vec![
        Example {
            description: "Min value from columns in a dataframe",
            example: "[[a b]; [6 2] [1 4] [4 1]] | dfr into-df | dfr min",
            result: Some(
                NuDataFrame::try_from_columns(vec![
                    Column::new("a".to_string(), vec![Value::test_int(1)],),
                    Column::new("b".to_string(), vec![Value::test_int(1)],),
                ])
                .expect("simple df for test should not fail")
                .into_value(Span::test_data()),
            ),
        },
        Example {
            description: "Min aggregation for a group-by",
            example: r#"[[a b]; [one 2] [one 4] [two 1]]
    | dfr into-df
    | dfr group-by a
    | dfr agg (dfr col b | dfr min)"#,
            result: Some(
                NuDataFrame::try_from_columns(vec![
                    Column::new(
                        "a".to_string(),
                        vec![Value::test_string("one"), Value::test_string("two")],
                    ),
                    Column::new(
                        "b".to_string(),
                        vec![Value::test_int(2), Value::test_int(1)],
                    ),
                ])
                .expect("simple df for test should not fail")
                .into_value(Span::test_data()),
            ),
        },
    ],
    min,
    test_min
);

// ExprSum command
// Expands to a command definition for sum aggregation
lazy_expr_command!(
    ExprSum,
    "dfr sum",
    "Creates a sum expression for an aggregation or aggregates columns to their sum value",
    vec![
        Example {
            description: "Sums all columns in a dataframe",
            example: "[[a b]; [6 2] [1 4] [4 1]] | dfr into-df | dfr sum",
            result: Some(
                NuDataFrame::try_from_columns(vec![
                    Column::new("a".to_string(), vec![Value::test_int(11)],),
                    Column::new("b".to_string(), vec![Value::test_int(7)],),
                ])
                .expect("simple df for test should not fail")
                .into_value(Span::test_data()),
            ),
        },
        Example {
            description: "Sum aggregation for a group-by",
            example: r#"[[a b]; [one 2] [one 4] [two 1]]
    | dfr into-df
    | dfr group-by a
    | dfr agg (dfr col b | dfr sum)"#,
            result: Some(
                NuDataFrame::try_from_columns(vec![
                    Column::new(
                        "a".to_string(),
                        vec![Value::test_string("one"), Value::test_string("two")],
                    ),
                    Column::new(
                        "b".to_string(),
                        vec![Value::test_int(6), Value::test_int(1)],
                    ),
                ])
                .expect("simple df for test should not fail")
                .into_value(Span::test_data()),
            ),
        },
    ],
    sum,
    test_sum
);

// ExprMean command
// Expands to a command definition for mean aggregation
lazy_expr_command!(
    ExprMean,
    "dfr mean",
    "Creates a mean expression for an aggregation or aggregates columns to their mean value",
    vec![
        Example {
            description: "Mean value from columns in a dataframe",
            example: "[[a b]; [6 2] [4 2] [2 2]] | dfr into-df | dfr mean",
            result: Some(
                NuDataFrame::try_from_columns(vec![
                    Column::new("a".to_string(), vec![Value::test_float(4.0)],),
                    Column::new("b".to_string(), vec![Value::test_float(2.0)],),
                ])
                .expect("simple df for test should not fail")
                .into_value(Span::test_data()),
            ),
        },
        Example {
            description: "Mean aggregation for a group-by",
            example: r#"[[a b]; [one 2] [one 4] [two 1]]
    | dfr into-df
    | dfr group-by a
    | dfr agg (dfr col b | dfr mean)"#,
            result: Some(
                NuDataFrame::try_from_columns(vec![
                    Column::new(
                        "a".to_string(),
                        vec![Value::test_string("one"), Value::test_string("two")],
                    ),
                    Column::new(
                        "b".to_string(),
                        vec![Value::test_float(3.0), Value::test_float(1.0)],
                    ),
                ])
                .expect("simple df for test should not fail")
                .into_value(Span::test_data()),
            ),
        },
    ],
    mean,
    test_mean
);

// ExprMedian command
// Expands to a command definition for median aggregation
expr_command!(
    ExprMedian,
    "dfr median",
    "Creates a median expression for an aggregation",
    vec![Example {
        description: "Median aggregation for a group-by",
        example: r#"[[a b]; [one 2] [one 4] [two 1]]
    | dfr into-df
    | dfr group-by a
    | dfr agg (dfr col b | dfr median)"#,
        result: Some(
            NuDataFrame::try_from_columns(vec![
                Column::new(
                    "a".to_string(),
                    vec![Value::test_string("one"), Value::test_string("two")],
                ),
                Column::new(
                    "b".to_string(),
                    vec![Value::test_float(3.0), Value::test_float(1.0)],
                ),
            ])
            .expect("simple df for test should not fail")
            .into_value(Span::test_data()),
        ),
    },],
    median,
    test_median
);

// ExprStd command
// Expands to a command definition for std aggregation
lazy_expr_command!(
    ExprStd,
    "dfr std",
    "Creates a std expression for an aggregation of std value from columns in a dataframe",
    vec![
        Example {
            description: "Std value from columns in a dataframe",
            example: "[[a b]; [6 2] [4 2] [2 2]] | dfr into-df | dfr std",
            result: Some(
                NuDataFrame::try_from_columns(vec![
                    Column::new("a".to_string(), vec![Value::test_float(2.0)],),
                    Column::new("b".to_string(), vec![Value::test_float(0.0)],),
                ])
                .expect("simple df for test should not fail")
                .into_value(Span::test_data()),
            ),
        },
        Example {
            description: "Std aggregation for a group-by",
            example: r#"[[a b]; [one 2] [one 2] [two 1] [two 1]]
    | dfr into-df
    | dfr group-by a
    | dfr agg (dfr col b | dfr std)"#,
            result: Some(
                NuDataFrame::try_from_columns(vec![
                    Column::new(
                        "a".to_string(),
                        vec![Value::test_string("one"), Value::test_string("two")],
                    ),
                    Column::new(
                        "b".to_string(),
                        vec![Value::test_float(0.0), Value::test_float(0.0)],
                    ),
                ])
                .expect("simple df for test should not fail")
                .into_value(Span::test_data()),
            ),
        },
    ],
    std,
    test_std,
    1
);

// ExprVar command
// Expands to a command definition for var aggregation
lazy_expr_command!(
    ExprVar,
    "dfr var",
    "Create a var expression for an aggregation",
    vec![
        Example {
            description:
                "Var value from columns in a dataframe or aggregates columns to their var value",
            example: "[[a b]; [6 2] [4 2] [2 2]] | dfr into-df | dfr var",
            result: Some(
                NuDataFrame::try_from_columns(vec![
                    Column::new("a".to_string(), vec![Value::test_float(4.0)],),
                    Column::new("b".to_string(), vec![Value::test_float(0.0)],),
                ])
                .expect("simple df for test should not fail")
                .into_value(Span::test_data()),
            ),
        },
        Example {
            description: "Var aggregation for a group-by",
            example: r#"[[a b]; [one 2] [one 2] [two 1] [two 1]]
    | dfr into-df
    | dfr group-by a
    | dfr agg (dfr col b | dfr var)"#,
            result: Some(
                NuDataFrame::try_from_columns(vec![
                    Column::new(
                        "a".to_string(),
                        vec![Value::test_string("one"), Value::test_string("two")],
                    ),
                    Column::new(
                        "b".to_string(),
                        vec![Value::test_float(0.0), Value::test_float(0.0)],
                    ),
                ])
                .expect("simple df for test should not fail")
                .into_value(Span::test_data()),
            ),
        },
    ],
    var,
    test_var,
    1
);
Some files were not shown because too many files have changed in this diff.