mirror of
https://github.com/nushell/nushell.git
synced 2025-08-15 23:28:19 +02:00
Compare commits
627 Commits
Author | SHA1 | Date | |
---|---|---|---|
36427a7434 | |||
daf52ba5c8 | |||
2f7f00001d | |||
ee7334a772 | |||
3fe9c7c00c | |||
43992f5b6f | |||
91e72ae8b4 | |||
da54ed8ea1 | |||
3eabc83c61 | |||
cc4a4a11f0 | |||
d53e16748d | |||
4ead4ce4d6 | |||
31606a8fe1 | |||
7133a04e2f | |||
79a6c78032 | |||
5478bdff0e | |||
a4711af952 | |||
751ef6e8da | |||
038f8f85ed | |||
4245c67ce3 | |||
e56879e588 | |||
c75e7bfbd3 | |||
e8579a9268 | |||
3dead9a001 | |||
0b106789a7 | |||
bf83756562 | |||
06fa1784c1 | |||
e6d673c39e | |||
c4fcd54573 | |||
4e56cd5fc4 | |||
0e3ca7b355 | |||
2b69bd9b6d | |||
3a82c6c88d | |||
61a89c1834 | |||
fcdc7f3d83 | |||
2b70d27cdf | |||
8c2af9941c | |||
f015409253 | |||
f33d952adf | |||
9f4c3a1d10 | |||
4f9c0775d9 | |||
d528bb713b | |||
7cc1a86459 | |||
dfbd98013d | |||
2c9f6acc03 | |||
007d15ed9f | |||
3e37922537 | |||
1274d1f7e3 | |||
da9615f971 | |||
eb8d2d3206 | |||
ee5b5bd39e | |||
d565c9ed01 | |||
18d5d8aae1 | |||
89c0e325fa | |||
7f2beb49db | |||
7203138880 | |||
459f3c0c28 | |||
2e4900f085 | |||
c921eadc6a | |||
00ac34d716 | |||
28a796d5cb | |||
f8698a6c24 | |||
48bca0a058 | |||
f3d92e3fa1 | |||
57dce8a386 | |||
aeb517867e | |||
71baeff287 | |||
1b01625e1e | |||
51265b262d | |||
889fe7f97b | |||
93b407fa88 | |||
b0687606f7 | |||
324aaef0af | |||
30c38b8c49 | |||
16167a25ec | |||
38009c714c | |||
e263991448 | |||
0b7c246bf4 | |||
7ce66a9b77 | |||
c2622589ef | |||
1ba4fe0aac | |||
5aa1ccea10 | |||
00e9e0e6a9 | |||
db1ffe57d3 | |||
2df00ff498 | |||
c4e8e040ce | |||
59ad605e22 | |||
5569f5beff | |||
4ed522db93 | |||
beb3ec6a49 | |||
a506d3f9b5 | |||
316d2d6af2 | |||
202d3b2d11 | |||
288f419f7b | |||
c272fa2b0a | |||
bfe9d8699d | |||
60ca889443 | |||
f48656e18b | |||
172a0c44bd | |||
8979e3f5bf | |||
1fcce4cffc | |||
8cf9efafbb | |||
8943fcf78d | |||
33a2b98f66 | |||
05e570aa71 | |||
d8255040f1 | |||
4da755895d | |||
a674ce2dbc | |||
71d78b41c4 | |||
647a740c11 | |||
a317284db6 | |||
a4bd51a11d | |||
f6d807bf36 | |||
020d1b17c5 | |||
118857aedc | |||
a340e965e8 | |||
25a5e8d8e8 | |||
2fe84bd197 | |||
c95c1e845c | |||
105ec0c89f | |||
2c1b787db5 | |||
fdb677e932 | |||
a18ff1d3a2 | |||
a86a0dd16e | |||
f4136aa3f4 | |||
082e8d0de8 | |||
9da0f41ebb | |||
372d576846 | |||
c795f16143 | |||
a4a3c514ba | |||
5478ec44bb | |||
6902bbe547 | |||
4e5da8cd91 | |||
d248451428 | |||
3e758e899f | |||
f69a812055 | |||
6fba4b409e | |||
cb7ac9199d | |||
a6b8e2f95c | |||
0b202d55f0 | |||
e88a6bff60 | |||
a234e6ff51 | |||
ae0cf8780d | |||
680a2fa2aa | |||
70277cc2ba | |||
574106bc03 | |||
2a8364d259 | |||
760c9ef2e9 | |||
c3079a14d9 | |||
4f7e9aac62 | |||
7ee8aa78cc | |||
d9d022733f | |||
1d032ce80c | |||
975a89269e | |||
db5b6c790f | |||
2bed202b82 | |||
8a0f2ca9f9 | |||
24ab294cda | |||
bfa95bbd24 | |||
3f700f03ad | |||
f0e90a3733 | |||
cde8a629c5 | |||
70aa7ad993 | |||
29b3512494 | |||
d961ea19cc | |||
3db9c81958 | |||
55240d98a5 | |||
fda181d566 | |||
2e484156e0 | |||
52604f8b00 | |||
2fed1f5967 | |||
5be8717fe8 | |||
091d14f085 | |||
4c19242c0d | |||
3df0177ba5 | |||
f7888fce83 | |||
cf1a53143c | |||
28a94048c5 | |||
fb691c0da5 | |||
7972aea530 | |||
aa710eeb9a | |||
91e843a6d4 | |||
ebcb26f9d5 | |||
f8b0af70ff | |||
12465193a4 | |||
bd3930d00d | |||
81e86c40e1 | |||
2fe25d6299 | |||
4aeede2dd5 | |||
0e46ef9769 | |||
962467fdfd | |||
d27232df6e | |||
d7cec2088a | |||
22d1fdcdf6 | |||
ba59f71f20 | |||
2352548467 | |||
3efbda63b8 | |||
1fe62ee613 | |||
126d11fcb7 | |||
ea4f8ff400 | |||
ebcdf5a8b1 | |||
440b9c8e1f | |||
96a886eb84 | |||
61d59f13fa | |||
5da7dcdbdb | |||
f92f11c0cf | |||
3bf96523a4 | |||
8d46398e13 | |||
461d558983 | |||
65c9160170 | |||
e3124d3561 | |||
b886fd364c | |||
21d949207f | |||
4a9e2ac37b | |||
9cc74e7a9f | |||
4adcf079e2 | |||
81cec2e50f | |||
ed7b2615c1 | |||
74e0e4f092 | |||
42fc9f52a1 | |||
c563e0cfb0 | |||
8671a3dbbd | |||
fc813af4c8 | |||
b83aa17c96 | |||
c7e10c3c57 | |||
e7d2717424 | |||
222c307648 | |||
eb9eb09ac5 | |||
6eacbabe17 | |||
33303f083c | |||
483974311d | |||
179ea5ae87 | |||
bdc7cdbcc4 | |||
2b524cd861 | |||
ad9f051d61 | |||
cfbe835910 | |||
8896ba80a4 | |||
803c24f9ce | |||
2f74574e35 | |||
8b9f02246f | |||
d9ecb7da93 | |||
18ce5de500 | |||
fbde02370a | |||
13452a7aa2 | |||
a8c49857d9 | |||
90afb65329 | |||
ff4907ed3b | |||
cbd7608898 | |||
adc9bbdc18 | |||
37bc922a67 | |||
ae51f6d722 | |||
1b2079ffdb | |||
9a968c4bdd | |||
89df01f829 | |||
dbb30cc9e0 | |||
02d63705cc | |||
ea97229688 | |||
6bf955a5a5 | |||
f90035e084 | |||
cc8b623ff8 | |||
60cb13c493 | |||
c10e483683 | |||
2d0c7b2214 | |||
88d421dcb6 | |||
7c50f7c714 | |||
bc043dcaeb | |||
10be753ab7 | |||
6906a0ca50 | |||
833471241a | |||
c4dcfdb77b | |||
1e8876b076 | |||
5483519c7d | |||
457f162fd9 | |||
58a8f30a25 | |||
70ba5d9d68 | |||
7b88bda9a1 | |||
bb37306d07 | |||
505cc014ac | |||
ff79959fdf | |||
8c2b1a22d4 | |||
3d62753e80 | |||
36c30ade3a | |||
e0eb29f161 | |||
c2ac8f730e | |||
1a0986903f | |||
0f25641722 | |||
7d6d48f3f7 | |||
6a8c183c1a | |||
0beb28e827 | |||
8352a09117 | |||
a9252c5075 | |||
73fbe26ef9 | |||
52fa9a978b | |||
d4357ad981 | |||
a0d7c1a4fd | |||
a340511e95 | |||
426e64501d | |||
b0d68c31e8 | |||
583cb96cff | |||
ff8831318d | |||
ce308ee461 | |||
21388175b8 | |||
520f11fb8f | |||
39b95fc59e | |||
63e68934f6 | |||
acc152564c | |||
8f63db4c95 | |||
cb133ed387 | |||
a7547a54bc | |||
d1969a3c9a | |||
ce582cdafb | |||
55de232a1c | |||
deca337a56 | |||
60e9f469af | |||
b500ac57c2 | |||
eadb8da9f7 | |||
cda15d91dd | |||
651a8716fb | |||
a1b7574306 | |||
09f12b9c4a | |||
9ae74e3941 | |||
d8bec8668f | |||
12ccaf5e33 | |||
5fecf59f54 | |||
a3aae2d26c | |||
d1d6518ece | |||
2d868323b6 | |||
0389815137 | |||
11cdb94699 | |||
0ca5c2f135 | |||
715b0d90a9 | |||
05c36d1bc7 | |||
208ebeefab | |||
b33f4b7f55 | |||
f41b1460aa | |||
220858d641 | |||
db261e3ed9 | |||
82eb1c5584 | |||
6be291b00a | |||
7add38fe32 | |||
78903724f5 | |||
cb57f0a539 | |||
717081bd2f | |||
e1ffaf2548 | |||
1db4be12d1 | |||
6193679dfc | |||
a9657e17ad | |||
03d455a688 | |||
bae04352ca | |||
a1497716f1 | |||
b5b63d2bf9 | |||
5c59611083 | |||
1503ee09ba | |||
24dba9dc53 | |||
a2dc3e3b33 | |||
95998bdd53 | |||
bd5de023a1 | |||
38e761493d | |||
7fcebf37ec | |||
0e9927ea4d | |||
d273ce89df | |||
2dc5c19b71 | |||
669b44ad7d | |||
eff063822a | |||
9a5c4d36be | |||
cd4560e97a | |||
24cc2f9d87 | |||
8f81812ef9 | |||
2229370b13 | |||
a33650a69e | |||
56d7e4bb89 | |||
e5f589ccdd | |||
8c4d3eaa7e | |||
89322f59f2 | |||
4e307480e4 | |||
d601abaee0 | |||
ceaa0f9375 | |||
d31b7024d8 | |||
9dd30d7756 | |||
eff9305eb3 | |||
885b87a842 | |||
017daeed18 | |||
c8c018452f | |||
1a0778d77e | |||
d75aa7ed1b | |||
39edd7e080 | |||
61dbcf3de6 | |||
f8ed4b45fd | |||
7b57f132bb | |||
dfca117551 | |||
29eb109b1e | |||
70d8163181 | |||
e4cef8a154 | |||
15146e68ad | |||
b0f9cda9b5 | |||
173162df2e | |||
c0b944edb6 | |||
26699d96eb | |||
08940ba4f8 | |||
ecb9799b6a | |||
a886e30e04 | |||
147009a161 | |||
12a1eefe73 | |||
0f8f3bcf9a | |||
639f4bd499 | |||
e82df7c1c9 | |||
41f4d0dcbc | |||
eb2a91ea7c | |||
b81d46574c | |||
1c6c85d35d | |||
67ea25afca | |||
f25525be6c | |||
a72f94f452 | |||
210c6f1c43 | |||
0cd90e2388 | |||
7ca2a6f8ac | |||
237a685605 | |||
2bf0397d80 | |||
5ec823996a | |||
67b6188b19 | |||
df74a0c961 | |||
af6c4bdc9c | |||
d7f26b177a | |||
470d130289 | |||
a23e96c945 | |||
9ba16dbdaf | |||
43f9ec295f | |||
f39e5b3f37 | |||
6c0b65b570 | |||
1dcaffb792 | |||
ca4222277e | |||
5c2bcd068b | |||
9aba96604b | |||
7be90c2644 | |||
7e9e93cf82 | |||
6d1f7cb3e3 | |||
334cf1862a | |||
49d86855ce | |||
5fe97b8d59 | |||
2bad1371f0 | |||
3030608de0 | |||
5d32cd2c40 | |||
07be33c119 | |||
eaf522b41f | |||
e76586ede4 | |||
1979b61a92 | |||
02fcc485fb | |||
55e05be0d8 | |||
e10ac2ede6 | |||
bf1f2d5ebd | |||
6aed1b42ae | |||
f33a26123c | |||
7c160725ed | |||
5832823dff | |||
3fe355c4a6 | |||
dd56c813f9 | |||
7a6cfa24fc | |||
2ea2a904e8 | |||
dfba62da00 | |||
b241e9edd5 | |||
946cef77f1 | |||
c99c8119fe | |||
2b4914608e | |||
8b80ceac32 | |||
e89bb2ee96 | |||
862d53bb6e | |||
820d0c0959 | |||
968eb45fb2 | |||
2c1d261cca | |||
69d1c8e948 | |||
2c7ab6e898 | |||
c986426478 | |||
09674a0026 | |||
9cca4ec18b | |||
90c86e6cbf | |||
4cb195a998 | |||
f7f09292d6 | |||
2c35e07c2d | |||
c949d2e893 | |||
83de8560ee | |||
00e5e6d719 | |||
1dd861b10f | |||
42aa2ff5ba | |||
74f62305b2 | |||
8f634f4140 | |||
33001d1992 | |||
f4b7333dc8 | |||
3dde851381 | |||
029f3843d3 | |||
0f6996b70d | |||
9160f36ea5 | |||
7f346dbf4c | |||
03888b9d81 | |||
966cebec34 | |||
44b7cfd696 | |||
a17ffdfe56 | |||
430b2746b8 | |||
1e566adcfc | |||
789781665d | |||
e926919582 | |||
8d5d01bbc9 | |||
58f7cfd099 | |||
b432866dc9 | |||
81e496673e | |||
2dab65f852 | |||
95dcb2fd6c | |||
d97b2e3c60 | |||
4fe7865ad0 | |||
d122bc3d89 | |||
7d17c2eb5e | |||
0e6e9abc12 | |||
f3982278e8 | |||
b1e591f84c | |||
122bcff356 | |||
087fe484f6 | |||
551fecd10d | |||
88bbe4abaa | |||
49f92e9090 | |||
4779d69de6 | |||
de7b000505 | |||
9eaa8908d2 | |||
fc72aa6abe | |||
8e1385417e | |||
95f89a093a | |||
e9b677a9e9 | |||
7555743ccc | |||
93612974e0 | |||
52a35827c7 | |||
c5a14bb8ff | |||
48bdcc71f4 | |||
78c93e5ae0 | |||
96af27fb4c | |||
12b8b4580c | |||
1616acd124 | |||
0cb4281fdb | |||
6f6ad23072 | |||
1ab09256d7 | |||
ee14811912 | |||
7939fb05ea | |||
53d30ee7ea | |||
058ce0ed2d | |||
9bb7f0c7dc | |||
9521b209d1 | |||
f51a79181a | |||
938fa6ee55 | |||
1d0d91d5e5 | |||
252155bdb9 | |||
be508cbd7f | |||
fcd1d59abd | |||
083c534948 | |||
bda3245725 | |||
1d44843970 | |||
d16946c6e8 | |||
2f6b4c5e9b | |||
4a967d19a9 | |||
3d58c3f70e | |||
c504c93a1d | |||
8b46ba8b6b | |||
f8ac9db15b | |||
7636963732 | |||
5d1e2b1df1 | |||
273226d666 | |||
2b8fb4fe00 | |||
2cb059146b | |||
fb7b0a8c11 | |||
d4aeadbb44 | |||
2a8f92b709 | |||
453e294883 | |||
e1c5ae3cd5 | |||
a8a0c78a32 | |||
879258039c | |||
4ac4f71a37 | |||
62e56d3581 | |||
2e1b6acc0e | |||
3eae657121 | |||
e74ce72f09 | |||
d577074da9 | |||
f7d5162582 | |||
0430167f1c | |||
1128fa137f | |||
81243c48f0 | |||
442df9e39c | |||
a58d9b0b3a | |||
2a3d5a9d42 | |||
a5d7d6dd46 | |||
18e3a5d40b | |||
553c951a60 | |||
781c4bd1d7 | |||
a2e335dcd7 | |||
c6fc6bd5a7 | |||
a7830ac1fd | |||
00713c9339 | |||
7d7dbd8b2c | |||
d4675d9138 | |||
6e88b3f8d6 | |||
720813339f | |||
5b4dd775d4 | |||
bfe398ca36 | |||
31e1f49cb6 | |||
26897b287c | |||
5a7707cb52 | |||
4b0b4ddce1 | |||
9fa2f43d06 | |||
2891867de9 | |||
55c7246830 | |||
17246db38b | |||
e60dac8957 | |||
d007b10fbf | |||
942030199d | |||
fb8ac4198b | |||
2ce5de58e6 | |||
2f18b9c856 | |||
bdc767bf23 | |||
3770a5eed1 | |||
0705fb9cd1 | |||
1a1a960836 | |||
5be818b5ee | |||
c7d3014849 | |||
164a089656 | |||
0b2d1327d2 | |||
5f6f18076c | |||
81de8ecd70 | |||
30ed63667b | |||
a56906ca6d | |||
0f0e1e2068 | |||
192ee59c75 | |||
803a348f41 |
40
.github/labeler.yml
vendored
Normal file
40
.github/labeler.yml
vendored
Normal file
@ -0,0 +1,40 @@
|
||||
# A bot for automatically labelling pull requests
|
||||
# See https://github.com/actions/labeler
|
||||
|
||||
dataframe:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- crates/nu_plugin_polars/**
|
||||
|
||||
std-library:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- crates/nu-std/**
|
||||
|
||||
ci:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- .github/workflows/**
|
||||
|
||||
|
||||
LSP:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- crates/nu-lsp/**
|
||||
|
||||
parser:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- crates/nu-parser/**
|
||||
|
||||
pr:plugins:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
# plugins API
|
||||
- crates/nu-plugin/**
|
||||
- crates/nu-plugin-core/**
|
||||
- crates/nu-plugin-engine/**
|
||||
- crates/nu-plugin-protocol/**
|
||||
- crates/nu-plugin-test-support/**
|
||||
# specific plugins (like polars)
|
||||
- crates/nu_plugin_*/**
|
42
.github/pull_request_template.md
vendored
42
.github/pull_request_template.md
vendored
@ -1,40 +1,16 @@
|
||||
<!--
|
||||
if this PR closes one or more issues, you can automatically link the PR with
|
||||
them by using one of the [*linking keywords*](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword), e.g.
|
||||
- this PR should close #xxxx
|
||||
- fixes #xxxx
|
||||
Thank you for improving Nushell!
|
||||
Please, read our contributing guide: https://github.com/nushell/nushell/blob/main/CONTRIBUTING.md
|
||||
|
||||
you can also mention related issues, PRs or discussions!
|
||||
Use the following space to include the motivation and any technical details behind this PR.
|
||||
-->
|
||||
|
||||
# Description
|
||||
## Release notes summary - What our users need to know
|
||||
<!--
|
||||
Thank you for improving Nushell. Please, check our [contributing guide](../CONTRIBUTING.md) and talk to the core team before making major changes.
|
||||
|
||||
Description of your pull request goes here. **Provide examples and/or screenshots** if your changes affect the user experience.
|
||||
This section will be included as part of our release notes. See the contributing guide for more details.
|
||||
If you're not confident about this, a core team member would be glad to help!
|
||||
-->
|
||||
|
||||
# User-Facing Changes
|
||||
<!-- List of all changes that impact the user experience here. This helps us keep track of breaking changes. -->
|
||||
|
||||
# Tests + Formatting
|
||||
<!--
|
||||
Don't forget to add tests that cover your changes.
|
||||
|
||||
Make sure you've run and fixed any issues with these commands:
|
||||
|
||||
- `cargo fmt --all -- --check` to check standard code formatting (`cargo fmt --all` applies these changes)
|
||||
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used` to check that you're using the standard code style
|
||||
- `cargo test --workspace` to check that all tests pass (on Windows make sure to [enable developer mode](https://learn.microsoft.com/en-us/windows/apps/get-started/developer-mode-features-and-debugging))
|
||||
- `cargo run -- -c "use toolkit.nu; toolkit test stdlib"` to run the tests for the standard library
|
||||
|
||||
> **Note**
|
||||
> from `nushell` you can also use the `toolkit` as follows
|
||||
> ```bash
|
||||
> use toolkit.nu # or use an `env_change` hook to activate it automatically
|
||||
> toolkit check pr
|
||||
> ```
|
||||
-->
|
||||
|
||||
# After Submitting
|
||||
<!-- If your PR had any user-facing changes, update [the documentation](https://github.com/nushell/nushell.github.io) after the PR is merged, if necessary. This will help us keep the docs up to date. -->
|
||||
## Tasks after submitting
|
||||
<!-- Remove any tasks which aren't relevant for your PR, or add your own -->
|
||||
- [ ] Update the [documentation](https://github.com/nushell/nushell.github.io)
|
||||
|
52
.github/workflows/beta-test.yml
vendored
Normal file
52
.github/workflows/beta-test.yml
vendored
Normal file
@ -0,0 +1,52 @@
|
||||
name: Test on Beta Toolchain
|
||||
# This workflow is made to run our tests on the beta toolchain to validate that
|
||||
# the beta toolchain works.
|
||||
# We do not intend to test here that we are working correctly but rather that
|
||||
# the beta toolchain works correctly.
|
||||
# The ci.yml handles our actual testing with our guarantees.
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# If this workflow fails, GitHub notifications will go to the last person
|
||||
# who edited this line.
|
||||
# See: https://docs.github.com/en/actions/monitoring-and-troubleshooting-workflows/monitoring-workflows/notifications-for-workflow-runs
|
||||
- cron: '0 0 * * *' # Runs daily at midnight UTC
|
||||
|
||||
env:
|
||||
NUSHELL_CARGO_PROFILE: ci
|
||||
NU_LOG_LEVEL: DEBUG
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.head_ref && github.ref || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build-and-test:
|
||||
# this job is more for testing the beta toolchain and not our tests, so if
|
||||
# this fails but the tests of the regular ci pass, then this is fine
|
||||
continue-on-error: true
|
||||
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
platform: [windows-latest, macos-latest, ubuntu-22.04]
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- run: rustup update beta
|
||||
|
||||
- name: Tests
|
||||
run: cargo +beta test --workspace --profile ci --exclude nu_plugin_*
|
||||
- name: Check for clean repo
|
||||
shell: bash
|
||||
run: |
|
||||
if [ -n "$(git status --porcelain)" ]; then
|
||||
echo "there are changes";
|
||||
git status --porcelain
|
||||
exit 1
|
||||
else
|
||||
echo "no changes in working directory";
|
||||
fi
|
25
.github/workflows/ci.yml
vendored
25
.github/workflows/ci.yml
vendored
@ -3,6 +3,7 @@ on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- 'patch-release-*'
|
||||
|
||||
name: continuous-integration
|
||||
|
||||
@ -21,14 +22,14 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
# Pinning to Ubuntu 20.04 because building on newer Ubuntu versions causes linux-gnu
|
||||
# Pinning to Ubuntu 22.04 because building on newer Ubuntu versions causes linux-gnu
|
||||
# builds to link against a too-new-for-many-Linux-installs glibc version. Consider
|
||||
# revisiting this when 20.04 is closer to EOL (April 2025)
|
||||
# revisiting this when 22.04 is closer to EOL (June 2027)
|
||||
#
|
||||
# Using macOS 13 runner because 14 is based on the M1 and has half as much RAM (7 GB,
|
||||
# instead of 14 GB) which is too little for us right now. Revisit when `dfr` commands are
|
||||
# removed and we're only building the `polars` plugin instead
|
||||
platform: [windows-latest, macos-13, ubuntu-20.04]
|
||||
platform: [windows-latest, macos-13, ubuntu-22.04]
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
|
||||
@ -36,7 +37,7 @@ jobs:
|
||||
- uses: actions/checkout@v4.1.7
|
||||
|
||||
- name: Setup Rust toolchain and cache
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.12.0
|
||||
|
||||
- name: cargo fmt
|
||||
run: cargo fmt --all -- --check
|
||||
@ -56,7 +57,7 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
platform: [windows-latest, macos-latest, ubuntu-20.04]
|
||||
platform: [windows-latest, macos-latest, ubuntu-22.04]
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
|
||||
@ -64,7 +65,7 @@ jobs:
|
||||
- uses: actions/checkout@v4.1.7
|
||||
|
||||
- name: Setup Rust toolchain and cache
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.12.0
|
||||
|
||||
- name: Tests
|
||||
run: cargo test --workspace --profile ci --exclude nu_plugin_*
|
||||
@ -83,7 +84,7 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
platform: [ubuntu-20.04, macos-latest, windows-latest]
|
||||
platform: [ubuntu-22.04, macos-latest, windows-latest]
|
||||
py:
|
||||
- py
|
||||
|
||||
@ -93,10 +94,10 @@ jobs:
|
||||
- uses: actions/checkout@v4.1.7
|
||||
|
||||
- name: Setup Rust toolchain and cache
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.12.0
|
||||
|
||||
- name: Install Nushell
|
||||
run: cargo install --path . --locked --no-default-features --force
|
||||
run: cargo install --path . --locked --force
|
||||
|
||||
- name: Standard library tests
|
||||
run: nu -c 'use crates/nu-std/testing.nu; testing run-tests --path crates/nu-std'
|
||||
@ -136,7 +137,7 @@ jobs:
|
||||
# instead of 14 GB) which is too little for us right now.
|
||||
#
|
||||
# Failure occurring with clippy for rust 1.77.2
|
||||
platform: [windows-latest, macos-13, ubuntu-20.04]
|
||||
platform: [windows-latest, macos-13, ubuntu-22.04]
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
|
||||
@ -144,7 +145,7 @@ jobs:
|
||||
- uses: actions/checkout@v4.1.7
|
||||
|
||||
- name: Setup Rust toolchain and cache
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.12.0
|
||||
|
||||
- name: Clippy
|
||||
run: cargo clippy --package nu_plugin_* -- $CLIPPY_OPTIONS
|
||||
@ -185,7 +186,7 @@ jobs:
|
||||
- uses: actions/checkout@v4.1.7
|
||||
|
||||
- name: Setup Rust toolchain and cache
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.12.0
|
||||
|
||||
- name: Add wasm32-unknown-unknown target
|
||||
run: rustup target add wasm32-unknown-unknown
|
||||
|
25
.github/workflows/friendly-config-reminder.yml
vendored
Normal file
25
.github/workflows/friendly-config-reminder.yml
vendored
Normal file
@ -0,0 +1,25 @@
|
||||
name: Comment on changes to the config
|
||||
on:
|
||||
pull_request_target:
|
||||
paths:
|
||||
- 'crates/nu-protocol/src/config/**'
|
||||
jobs:
|
||||
comment:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check if there is already a bot comment
|
||||
uses: peter-evans/find-comment@v3
|
||||
id: fc
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
comment-author: 'github-actions[bot]'
|
||||
body-includes: Hey, just a bot checking in!
|
||||
- name: Create comment if there is not
|
||||
if: steps.fc.outputs.comment-id == ''
|
||||
uses: peter-evans/create-or-update-comment@v4
|
||||
with:
|
||||
issue-number: ${{ github.event.pull_request.number }}
|
||||
body: |
|
||||
Hey, just a bot checking in! You edited files related to the configuration.
|
||||
If you changed any of the default values or added a new config option, don't forget to update the [`doc_config.nu`](https://github.com/nushell/nushell/blob/main/crates/nu-utils/src/default_files/doc_config.nu) which documents the options for our users including the defaults provided by the Rust implementation.
|
||||
If you didn't make a change here, you can just ignore me.
|
19
.github/workflows/labels.yml
vendored
Normal file
19
.github/workflows/labels.yml
vendored
Normal file
@ -0,0 +1,19 @@
|
||||
# Automatically labels PRs based on the configuration file
|
||||
# you are probably looking for 👉 `.github/labeler.yml`
|
||||
name: Label PRs
|
||||
|
||||
on:
|
||||
- pull_request_target
|
||||
|
||||
jobs:
|
||||
triage:
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
runs-on: ubuntu-latest
|
||||
if: github.repository_owner == 'nushell'
|
||||
steps:
|
||||
- uses: actions/labeler@v5
|
||||
with:
|
||||
repo-token: "${{ secrets.GITHUB_TOKEN }}"
|
||||
sync-labels: true
|
140
.github/workflows/nightly-build.yml
vendored
140
.github/workflows/nightly-build.yml
vendored
@ -4,6 +4,7 @@
|
||||
# 2. https://github.com/JasonEtco/create-an-issue
|
||||
# 3. https://docs.github.com/en/actions/learn-github-actions/variables
|
||||
# 4. https://github.com/actions/github-script
|
||||
# 5. https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#jobsjob_idneeds
|
||||
#
|
||||
name: Nightly Build
|
||||
|
||||
@ -14,6 +15,7 @@ on:
|
||||
# This schedule will run only from the default branch
|
||||
schedule:
|
||||
- cron: '15 0 * * *' # run at 00:15 AM UTC
|
||||
workflow_dispatch:
|
||||
|
||||
defaults:
|
||||
run:
|
||||
@ -25,6 +27,11 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
# This job is required by the release job, so we should make it run both from Nushell repo and nightly repo
|
||||
# if: github.repository == 'nushell/nightly'
|
||||
# Map a step output to a job output
|
||||
outputs:
|
||||
skip: ${{ steps.vars.outputs.skip }}
|
||||
build_date: ${{ steps.vars.outputs.build_date }}
|
||||
nightly_tag: ${{ steps.vars.outputs.nightly_tag }}
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v4
|
||||
@ -39,7 +46,7 @@ jobs:
|
||||
uses: hustcer/setup-nu@v3
|
||||
if: github.repository == 'nushell/nightly'
|
||||
with:
|
||||
version: 0.101.0
|
||||
version: 0.105.1
|
||||
|
||||
# Synchronize the main branch of nightly repo with the main branch of Nushell official repo
|
||||
- name: Prepare for Nightly Release
|
||||
@ -57,16 +64,53 @@ jobs:
|
||||
# All the changes will be overwritten by the upstream main branch
|
||||
git reset --hard src/main
|
||||
git push origin main -f
|
||||
let sha_short = (git rev-parse --short origin/main | str trim | str substring 0..7)
|
||||
let tag_name = $'nightly-($sha_short)'
|
||||
if (git ls-remote --tags origin $tag_name | is-empty) {
|
||||
git tag -a $tag_name -m $'Nightly build from ($sha_short)'
|
||||
|
||||
- name: Create Tag and Output Tag Name
|
||||
if: github.repository == 'nushell/nightly'
|
||||
id: vars
|
||||
shell: nu {0}
|
||||
run: |
|
||||
let date = date now | format date %m%d
|
||||
let version = open Cargo.toml | get package.version
|
||||
let sha_short = (git rev-parse --short origin/main | str trim | str substring 0..6)
|
||||
let latest_meta = http get https://api.github.com/repos/nushell/nightly/releases
|
||||
| sort-by -r created_at
|
||||
| where tag_name =~ nightly
|
||||
| get tag_name?.0? | default ''
|
||||
| parse '{version}-nightly.{build}+{hash}'
|
||||
if ($latest_meta.0?.hash? | default '') == $sha_short {
|
||||
print $'(ansi g)Latest nightly build is up-to-date, skip rebuilding.(ansi reset)'
|
||||
$'skip=true(char nl)' o>> $env.GITHUB_OUTPUT
|
||||
exit 0
|
||||
}
|
||||
let prev_ver = $latest_meta.0?.version? | default '0.0.0'
|
||||
let build = if ($latest_meta | is-empty) or ($version != $prev_ver) { 1 } else {
|
||||
($latest_meta | get build?.0? | default 0 | into int) + 1
|
||||
}
|
||||
let nightly_tag = $'($version)-nightly.($build)+($sha_short)'
|
||||
$'build_date=($date)(char nl)' o>> $env.GITHUB_OUTPUT
|
||||
$'nightly_tag=($nightly_tag)(char nl)' o>> $env.GITHUB_OUTPUT
|
||||
if (git ls-remote --tags origin $nightly_tag | is-empty) {
|
||||
ls **/Cargo.toml | each {|file|
|
||||
open --raw $file.name
|
||||
| str replace --all $'version = "($version)"' $'version = "($version)-nightly.($build)"'
|
||||
| save --force $file.name
|
||||
}
|
||||
# Disable the following two workflows for the automatic committed changes
|
||||
rm .github/workflows/ci.yml
|
||||
rm .github/workflows/audit.yml
|
||||
|
||||
git add .
|
||||
git commit -m $'Update version to ($version)-nightly.($build)'
|
||||
git tag -a $nightly_tag -m $'Nightly build from ($sha_short)'
|
||||
git push origin --tags
|
||||
git push origin main -f
|
||||
}
|
||||
|
||||
standard:
|
||||
release:
|
||||
name: Nu
|
||||
needs: prepare
|
||||
if: needs.prepare.outputs.skip != 'true'
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
@ -83,24 +127,16 @@ jobs:
|
||||
- armv7-unknown-linux-musleabihf
|
||||
- riscv64gc-unknown-linux-gnu
|
||||
- loongarch64-unknown-linux-gnu
|
||||
extra: ['bin']
|
||||
- loongarch64-unknown-linux-musl
|
||||
include:
|
||||
- target: aarch64-apple-darwin
|
||||
os: macos-latest
|
||||
- target: x86_64-apple-darwin
|
||||
os: macos-latest
|
||||
- target: x86_64-pc-windows-msvc
|
||||
extra: 'bin'
|
||||
os: windows-latest
|
||||
- target: x86_64-pc-windows-msvc
|
||||
extra: msi
|
||||
os: windows-latest
|
||||
- target: aarch64-pc-windows-msvc
|
||||
extra: 'bin'
|
||||
os: windows-latest
|
||||
- target: aarch64-pc-windows-msvc
|
||||
extra: msi
|
||||
os: windows-latest
|
||||
os: windows-11-arm
|
||||
- target: x86_64-unknown-linux-gnu
|
||||
os: ubuntu-22.04
|
||||
- target: x86_64-unknown-linux-musl
|
||||
@ -117,29 +153,42 @@ jobs:
|
||||
os: ubuntu-22.04
|
||||
- target: loongarch64-unknown-linux-gnu
|
||||
os: ubuntu-22.04
|
||||
- target: loongarch64-unknown-linux-musl
|
||||
os: ubuntu-22.04
|
||||
|
||||
runs-on: ${{matrix.os}}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: main
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Install Wix Toolset 6 for Windows
|
||||
shell: pwsh
|
||||
if: ${{ startsWith(matrix.os, 'windows') }}
|
||||
run: |
|
||||
dotnet tool install --global wix --version 6.0.0
|
||||
dotnet workload install wix
|
||||
$wixPath = "$env:USERPROFILE\.dotnet\tools"
|
||||
echo "$wixPath" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
|
||||
$env:PATH = "$wixPath;$env:PATH"
|
||||
wix --version
|
||||
|
||||
- name: Update Rust Toolchain Target
|
||||
run: |
|
||||
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
|
||||
|
||||
- name: Setup Rust toolchain and cache
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1
|
||||
# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
|
||||
with:
|
||||
cache: false
|
||||
rustflags: ''
|
||||
|
||||
- name: Setup Nushell
|
||||
uses: hustcer/setup-nu@v3
|
||||
with:
|
||||
version: 0.101.0
|
||||
version: 0.105.1
|
||||
|
||||
- name: Release Nu Binary
|
||||
id: nu
|
||||
@ -148,11 +197,10 @@ jobs:
|
||||
OS: ${{ matrix.os }}
|
||||
REF: ${{ github.ref }}
|
||||
TARGET: ${{ matrix.target }}
|
||||
_EXTRA_: ${{ matrix.extra }}
|
||||
|
||||
- name: Create an Issue for Release Failure
|
||||
if: ${{ failure() }}
|
||||
uses: JasonEtco/create-an-issue@v2.9.2
|
||||
uses: JasonEtco/create-an-issue@v2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
@ -160,13 +208,6 @@ jobs:
|
||||
search_existing: open
|
||||
filename: .github/AUTO_ISSUE_TEMPLATE/nightly-build-fail.md
|
||||
|
||||
- name: Set Outputs of Short SHA
|
||||
id: vars
|
||||
run: |
|
||||
echo "date=$(date -u +'%Y-%m-%d')" >> $GITHUB_OUTPUT
|
||||
sha_short=$(git rev-parse --short HEAD)
|
||||
echo "sha_short=${sha_short:0:7}" >> $GITHUB_OUTPUT
|
||||
|
||||
# REF: https://github.com/marketplace/actions/gh-release
|
||||
# Create a release only in nushell/nightly repo
|
||||
- name: Publish Archive
|
||||
@ -174,9 +215,37 @@ jobs:
|
||||
if: ${{ startsWith(github.repository, 'nushell/nightly') }}
|
||||
with:
|
||||
prerelease: true
|
||||
files: ${{ steps.nu.outputs.archive }}
|
||||
tag_name: nightly-${{ steps.vars.outputs.sha_short }}
|
||||
name: Nu-nightly-${{ steps.vars.outputs.date }}-${{ steps.vars.outputs.sha_short }}
|
||||
files: |
|
||||
${{ steps.nu.outputs.msi }}
|
||||
${{ steps.nu.outputs.archive }}
|
||||
tag_name: ${{ needs.prepare.outputs.nightly_tag }}
|
||||
name: ${{ needs.prepare.outputs.build_date }}-${{ needs.prepare.outputs.nightly_tag }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
sha256sum:
|
||||
needs: [prepare, release]
|
||||
name: Create Sha256sum
|
||||
runs-on: ubuntu-latest
|
||||
if: github.repository == 'nushell/nightly'
|
||||
steps:
|
||||
- name: Download Release Archives
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: >-
|
||||
gh release download ${{ needs.prepare.outputs.nightly_tag }}
|
||||
--repo ${{ github.repository }}
|
||||
--pattern '*'
|
||||
--dir release
|
||||
- name: Create Checksums
|
||||
run: cd release && shasum -a 256 * > ../SHA256SUMS
|
||||
- name: Publish Checksums
|
||||
uses: softprops/action-gh-release@v2.0.9
|
||||
with:
|
||||
draft: false
|
||||
prerelease: true
|
||||
files: SHA256SUMS
|
||||
tag_name: ${{ needs.prepare.outputs.nightly_tag }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
@ -184,12 +253,9 @@ jobs:
|
||||
name: Cleanup
|
||||
# Should only run in nushell/nightly repo
|
||||
if: github.repository == 'nushell/nightly'
|
||||
needs: [release, sha256sum]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
# Sleep for 30 minutes, waiting for the release to be published
|
||||
- name: Waiting for Release
|
||||
run: sleep 1800
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
ref: main
|
||||
@ -197,14 +263,14 @@ jobs:
|
||||
- name: Setup Nushell
|
||||
uses: hustcer/setup-nu@v3
|
||||
with:
|
||||
version: 0.101.0
|
||||
version: 0.105.1
|
||||
|
||||
# Keep the last a few releases
|
||||
- name: Delete Older Releases
|
||||
shell: nu {0}
|
||||
run: |
|
||||
let KEEP_COUNT = 10
|
||||
let deprecated = (http get https://api.github.com/repos/nushell/nightly/releases | sort-by -r created_at | select tag_name id | range $KEEP_COUNT..)
|
||||
let deprecated = (http get https://api.github.com/repos/nushell/nightly/releases | sort-by -r created_at | select tag_name id | slice $KEEP_COUNT..)
|
||||
for release in $deprecated {
|
||||
print $'Deleting tag ($release.tag_name)'
|
||||
git push origin --delete $release.tag_name
|
||||
|
44
.github/workflows/pre-release-checkup.yml
vendored
Normal file
44
.github/workflows/pre-release-checkup.yml
vendored
Normal file
@ -0,0 +1,44 @@
|
||||
name: Checks to perform pre-release (manual)
|
||||
on:
|
||||
- workflow_dispatch
|
||||
|
||||
|
||||
env:
|
||||
NUSHELL_CARGO_PROFILE: ci
|
||||
NU_LOG_LEVEL: DEBUG
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.head_ref && github.ref || github.run_id }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
build-and-test:
|
||||
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
platform: [windows-latest, macos-latest, ubuntu-22.04]
|
||||
|
||||
runs-on: ${{ matrix.platform }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: taiki-e/install-action@cargo-hack
|
||||
|
||||
- name: Feature power set
|
||||
run: |
|
||||
cargo hack --all --feature-powerset --at-least-one-of rustls-tls,native-tls --mutually-exclusive-features rustls-tls,native-tls --mutually-exclusive-features rustls-tls,static-link-openssl --skip default-no-clipboard,stable,mimalloc check
|
||||
# Don't build fully for now as it will run out of disk space
|
||||
# - name: Build all crates
|
||||
# run: cargo hack --all build --clean-per-run
|
||||
|
||||
- name: Check for clean repo
|
||||
shell: bash
|
||||
run: |
|
||||
if [ -n "$(git status --porcelain)" ]; then
|
||||
echo "there are changes";
|
||||
git status --porcelain
|
||||
exit 1
|
||||
else
|
||||
echo "no changes in working directory";
|
||||
fi
|
62
.github/workflows/release-msi.nu
vendored
Executable file
62
.github/workflows/release-msi.nu
vendored
Executable file
@ -0,0 +1,62 @@
|
||||
#!/usr/bin/env nu
|
||||
|
||||
# Created: 2025/05/21 19:05:20
|
||||
# Description:
|
||||
# A script to build Windows MSI packages for NuShell. Need wix 6.0 to be installed.
|
||||
# The script will download the specified NuShell release, extract it, and create an MSI package.
|
||||
# Can be run locally or in GitHub Actions.
|
||||
# To run this script locally:
|
||||
# load-env { TARGET: 'x86_64-pc-windows-msvc' REF: '0.103.0' GITHUB_REPOSITORY: 'nushell/nushell' }
|
||||
# nu .github/workflows/release-msi.nu
|
||||
|
||||
def build-msi [] {
|
||||
let target = $env.TARGET
|
||||
# We should read the version from the environment variable first
|
||||
# As we may build the MSI package for a specific version not the latest one
|
||||
let version = $env.MSI_VERSION? | default (open Cargo.toml | get package.version)
|
||||
let arch = if $nu.os-info.arch =~ 'x86_64' { 'x64' } else { 'arm64' }
|
||||
|
||||
print $'Building msi package for (ansi g)($target)(ansi reset) with version (ansi g)($version)(ansi reset) from tag (ansi g)($env.REF)(ansi reset)...'
|
||||
fetch-nu-pkg
|
||||
# Create extra Windows msi release package if dotnet and wix are available
|
||||
let installed = [dotnet wix] | all { (which $in | length) > 0 }
|
||||
if $installed and (wix --version | split row . | first | into int) >= 6 {
|
||||
|
||||
print $'(char nl)Start creating Windows msi package with the following contents...'
|
||||
cd wix; hr-line
|
||||
cp nu/README.txt .
|
||||
ls -f nu/* | print
|
||||
./nu/nu.exe -c $'NU_RELEASE_VERSION=($version) dotnet build -c Release -p:Platform=($arch)'
|
||||
glob **/*.msi | print
|
||||
# Workaround for https://github.com/softprops/action-gh-release/issues/280
|
||||
let wixRelease = (glob **/*.msi | where $it =~ bin | get 0 | str replace --all '\' '/')
|
||||
let msi = $'($wixRelease | path dirname)/nu-($version)-($target).msi'
|
||||
mv $wixRelease $msi
|
||||
print $'MSI archive: ---> ($msi)';
|
||||
# Run only in GitHub Actions
|
||||
if ($env.GITHUB_ACTIONS? | default false | into bool) {
|
||||
echo $"msi=($msi)(char nl)" o>> $env.GITHUB_OUTPUT
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
def fetch-nu-pkg [] {
|
||||
mkdir wix/nu
|
||||
# See: https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/store-information-in-variables#default-environment-variables
|
||||
gh release download $env.REF --repo $env.GITHUB_REPOSITORY --pattern $'*-($env.TARGET).zip' --dir wix/nu
|
||||
cd wix/nu
|
||||
let pkg = ls *.zip | get name.0
|
||||
unzip $pkg
|
||||
rm $pkg
|
||||
ls | print
|
||||
}
|
||||
|
||||
# Print a horizontal line marker
|
||||
def 'hr-line' [
|
||||
--blank-line(-b)
|
||||
] {
|
||||
print $'(ansi g)---------------------------------------------------------------------------->(ansi reset)'
|
||||
if $blank_line { char nl }
|
||||
}
|
||||
|
||||
alias main = build-msi
|
103
.github/workflows/release-msi.yml
vendored
Normal file
103
.github/workflows/release-msi.yml
vendored
Normal file
@ -0,0 +1,103 @@
|
||||
#
|
||||
# REF:
|
||||
# 1. https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstrategymatrixinclude
|
||||
#
|
||||
name: Build Windows MSI
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag:
|
||||
required: true
|
||||
description: 'Tag to Rebuild MSI'
|
||||
version:
|
||||
description: 'Version of Rebuild MSI'
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
packages: write
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
jobs:
|
||||
release:
|
||||
name: Nu
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
target:
|
||||
- x86_64-pc-windows-msvc
|
||||
- aarch64-pc-windows-msvc
|
||||
extra: ['bin']
|
||||
|
||||
include:
|
||||
- target: x86_64-pc-windows-msvc
|
||||
os: windows-latest
|
||||
- target: aarch64-pc-windows-msvc
|
||||
os: windows-11-arm
|
||||
|
||||
runs-on: ${{ matrix.os }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install Wix Toolset 6 for Windows
|
||||
shell: pwsh
|
||||
if: ${{ startsWith(matrix.os, 'windows') }}
|
||||
run: |
|
||||
dotnet tool install --global wix --version 6.0.0
|
||||
dotnet workload install wix
|
||||
$wixPath = "$env:USERPROFILE\.dotnet\tools"
|
||||
echo "$wixPath" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
|
||||
$env:PATH = "$wixPath;$env:PATH"
|
||||
wix --version
|
||||
|
||||
- name: Setup Nushell
|
||||
uses: hustcer/setup-nu@v3
|
||||
with:
|
||||
version: 0.105.1
|
||||
|
||||
- name: Release MSI Packages
|
||||
id: nu
|
||||
run: nu .github/workflows/release-msi.nu
|
||||
env:
|
||||
OS: ${{ matrix.os }}
|
||||
REF: ${{ inputs.tag }}
|
||||
TARGET: ${{ matrix.target }}
|
||||
MSI_VERSION: ${{ inputs.version }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# REF: https://github.com/marketplace/actions/gh-release
|
||||
- name: Publish Archive
|
||||
uses: softprops/action-gh-release@v2.0.5
|
||||
with:
|
||||
tag_name: ${{ inputs.tag }}
|
||||
files: ${{ steps.nu.outputs.msi }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
sha256sum:
|
||||
needs: release
|
||||
name: Create Sha256sum
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download Release Archives
|
||||
env:
|
||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: >-
|
||||
gh release download ${{ inputs.tag }}
|
||||
--repo ${{ github.repository }}
|
||||
--pattern '*'
|
||||
--dir release
|
||||
- name: Create Checksums
|
||||
run: cd release && rm -f SHA256SUMS && shasum -a 256 * > ../SHA256SUMS
|
||||
- name: Publish Checksums
|
||||
uses: softprops/action-gh-release@v2.0.5
|
||||
with:
|
||||
files: SHA256SUMS
|
||||
tag_name: ${{ inputs.tag }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
119
.github/workflows/release-pkg.nu
vendored
119
.github/workflows/release-pkg.nu
vendored
@ -8,10 +8,10 @@
|
||||
|
||||
# Instructions for manually creating an MSI for Winget Releases when they fail
|
||||
# Added 2022-11-29 when Windows packaging wouldn't work
|
||||
# Updated again on 2023-02-23 because msis are still failing validation
|
||||
# Updated again on 2023-02-23 because MSIs are still failing validation
|
||||
# To run this manual for windows here are the steps I take
|
||||
# checkout the release you want to publish
|
||||
# 1. git checkout 0.86.0
|
||||
# 1. git checkout 0.103.0
|
||||
# unset CARGO_TARGET_DIR if set (I have to do this in the parent shell to get it to work)
|
||||
# 2. $env:CARGO_TARGET_DIR = ""
|
||||
# 2. hide-env CARGO_TARGET_DIR
|
||||
@ -23,19 +23,13 @@
|
||||
# 7. $env.Path = ($env.Path | append 'c:\apps\7-zip')
|
||||
# make sure aria2c.exe is in your path https://github.com/aria2/aria2
|
||||
# 8. $env.Path = ($env.Path | append 'c:\path\to\aria2c')
|
||||
# make sure you have the wixtools installed https://wixtoolset.org/
|
||||
# 9. $env.Path = ($env.Path | append 'C:\Users\dschroeder\AppData\Local\tauri\WixTools')
|
||||
# You need to run the release-pkg twice. The first pass, with _EXTRA_ as 'bin', makes the output
|
||||
# folder and builds everything. The second pass, that generates the msi file, with _EXTRA_ as 'msi'
|
||||
# 10. $env._EXTRA_ = 'bin'
|
||||
# 11. source .github\workflows\release-pkg.nu
|
||||
# 12. cd ..
|
||||
# 13. $env._EXTRA_ = 'msi'
|
||||
# 14. source .github\workflows\release-pkg.nu
|
||||
# make sure you have the wix 6.0 installed: dotnet tool install --global wix --version 6.0.0
|
||||
# then build nu*.exe and the MSI installer by running:
|
||||
# 9. source .github\workflows\release-pkg.nu
|
||||
# After msi is generated, you have to update winget-pkgs repo, you'll need to patch the release
|
||||
# by deleting the existing msi and uploading this new msi. Then you'll need to update the hash
|
||||
# on the winget-pkgs PR. To generate the hash, run this command
|
||||
# 15. open target\wix\nu-0.74.0-x86_64-pc-windows-msvc.msi | hash sha256
|
||||
# 10. open wix\bin\x64\Release\nu-0.103.0-x86_64-pc-windows-msvc.msi | hash sha256
|
||||
# Then, just take the output and put it in the winget-pkgs PR for the hash on the msi
|
||||
|
||||
|
||||
@ -85,14 +79,14 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
|
||||
cargo-build-nu
|
||||
}
|
||||
'aarch64-unknown-linux-musl' => {
|
||||
aria2c https://musl.cc/aarch64-linux-musl-cross.tgz
|
||||
aria2c https://github.com/nushell/integrations/releases/download/build-tools/aarch64-linux-musl-cross.tgz
|
||||
tar -xf aarch64-linux-musl-cross.tgz -C $env.HOME
|
||||
$env.PATH = ($env.PATH | split row (char esep) | prepend $'($env.HOME)/aarch64-linux-musl-cross/bin')
|
||||
$env.CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER = 'aarch64-linux-musl-gcc'
|
||||
cargo-build-nu
|
||||
}
|
||||
'armv7-unknown-linux-musleabihf' => {
|
||||
aria2c https://musl.cc/armv7r-linux-musleabihf-cross.tgz
|
||||
aria2c https://github.com/nushell/integrations/releases/download/build-tools/armv7r-linux-musleabihf-cross.tgz
|
||||
tar -xf armv7r-linux-musleabihf-cross.tgz -C $env.HOME
|
||||
$env.PATH = ($env.PATH | split row (char esep) | prepend $'($env.HOME)/armv7r-linux-musleabihf-cross/bin')
|
||||
$env.CARGO_TARGET_ARMV7_UNKNOWN_LINUX_MUSLEABIHF_LINKER = 'armv7r-linux-musleabihf-gcc'
|
||||
@ -105,6 +99,14 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
|
||||
$env.CARGO_TARGET_LOONGARCH64_UNKNOWN_LINUX_GNU_LINKER = 'loongarch64-unknown-linux-gnu-gcc'
|
||||
cargo-build-nu
|
||||
}
|
||||
'loongarch64-unknown-linux-musl' => {
|
||||
print $"(ansi g)Downloading LoongArch64 musl cross-compilation toolchain...(ansi reset)"
|
||||
aria2c -q https://github.com/LoongsonLab/oscomp-toolchains-for-oskernel/releases/download/loongarch64-linux-musl-cross-gcc-13.2.0/loongarch64-linux-musl-cross.tgz
|
||||
tar -xf loongarch64-linux-musl-cross.tgz
|
||||
$env.PATH = ($env.PATH | split row (char esep) | prepend $'($env.PWD)/loongarch64-linux-musl-cross/bin')
|
||||
$env.CARGO_TARGET_LOONGARCH64_UNKNOWN_LINUX_MUSL_LINKER = "loongarch64-linux-musl-gcc"
|
||||
cargo-build-nu
|
||||
}
|
||||
_ => {
|
||||
# musl-tools to fix 'Failed to find tool. Is `musl-gcc` installed?'
|
||||
# Actually just for x86_64-unknown-linux-musl target
|
||||
@ -117,14 +119,14 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
|
||||
# ----------------------------------------------------------------------------
|
||||
# Build for Windows without static-link-openssl feature
|
||||
# ----------------------------------------------------------------------------
|
||||
if $os in ['windows-latest'] {
|
||||
if $os =~ 'windows' {
|
||||
cargo-build-nu
|
||||
}
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Prepare for the release archive
|
||||
# ----------------------------------------------------------------------------
|
||||
let suffix = if $os == 'windows-latest' { '.exe' }
|
||||
let suffix = if $os =~ 'windows' { '.exe' }
|
||||
# nu, nu_plugin_* were all included
|
||||
let executable = $'target/($target)/release/($bin)*($suffix)'
|
||||
print $'Current executable file: ($executable)'
|
||||
@ -148,10 +150,10 @@ For more information, refer to https://www.nushell.sh/book/plugins.html
|
||||
[LICENSE ...(glob $executable)] | each {|it| cp -rv $it $dist } | flatten
|
||||
|
||||
print $'(char nl)Check binary release version detail:'; hr-line
|
||||
let ver = if $os == 'windows-latest' {
|
||||
(do -i { .\output\nu.exe -c 'version' }) | str join
|
||||
let ver = if $os =~ 'windows' {
|
||||
(do -i { .\output\nu.exe -c 'version' }) | default '' | str join
|
||||
} else {
|
||||
(do -i { ./output/nu -c 'version' }) | str join
|
||||
(do -i { ./output/nu -c 'version' }) | default '' | str join
|
||||
}
|
||||
if ($ver | str trim | is-empty) {
|
||||
print $'(ansi r)Incompatible Nu binary: The binary cross compiled is not runnable on current arch...(ansi reset)'
|
||||
@ -175,53 +177,60 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
|
||||
tar -czf $archive $dest
|
||||
print $'archive: ---> ($archive)'; ls $archive
|
||||
# REF: https://github.blog/changelog/2022-10-11-github-actions-deprecating-save-state-and-set-output-commands/
|
||||
echo $"archive=($archive)" | save --append $env.GITHUB_OUTPUT
|
||||
echo $"archive=($archive)(char nl)" o>> $env.GITHUB_OUTPUT
|
||||
|
||||
} else if $os == 'windows-latest' {
|
||||
} else if $os =~ 'windows' {
|
||||
|
||||
let releaseStem = $'($bin)-($version)-($target)'
|
||||
let arch = if $nu.os-info.arch =~ 'x86_64' { 'x64' } else { 'arm64' }
|
||||
fetch-less $arch
|
||||
|
||||
print $'(char nl)Download less related stuffs...'; hr-line
|
||||
# todo: less-v661 is out but is released as a zip file. maybe we should switch to that and extract it?
|
||||
aria2c https://github.com/jftuga/less-Windows/releases/download/less-v608/less.exe -o less.exe
|
||||
# the below was renamed because it was failing to download for darren. it should work but it wasn't
|
||||
# todo: maybe we should get rid of this aria2c dependency and just use http get?
|
||||
#aria2c https://raw.githubusercontent.com/jftuga/less-Windows/master/LICENSE -o LICENSE-for-less.txt
|
||||
aria2c https://github.com/jftuga/less-Windows/blob/master/LICENSE -o LICENSE-for-less.txt
|
||||
|
||||
# Create Windows msi release package
|
||||
if (get-env _EXTRA_) == 'msi' {
|
||||
|
||||
let wixRelease = $'($src)/target/wix/($releaseStem).msi'
|
||||
print $'(char nl)Start creating Windows msi package with the following contents...'
|
||||
cd $src; hr-line
|
||||
# Wix need the binaries be stored in target/release/
|
||||
cp -r ($'($dist)/*' | into glob) target/release/
|
||||
ls target/release/* | print
|
||||
cargo install cargo-wix --version 0.3.8
|
||||
cargo wix --no-build --nocapture --package nu --output $wixRelease
|
||||
print $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls | print
|
||||
let archive = $'($dist)/($releaseStem).zip'
|
||||
7z a $archive ...(glob *)
|
||||
let pkg = (ls -f $archive | get name)
|
||||
if not ($pkg | is-empty) {
|
||||
# Workaround for https://github.com/softprops/action-gh-release/issues/280
|
||||
let archive = ($wixRelease | str replace --all '\' '/')
|
||||
print $'archive: ---> ($archive)';
|
||||
echo $"archive=($archive)" | save --append $env.GITHUB_OUTPUT
|
||||
let archive = ($pkg | get 0 | str replace --all '\' '/')
|
||||
print $'archive: ---> ($archive)'
|
||||
echo $"archive=($archive)(char nl)" o>> $env.GITHUB_OUTPUT
|
||||
}
|
||||
|
||||
} else {
|
||||
# Create extra Windows msi release package if dotnet and wix are available
|
||||
let installed = [dotnet wix] | all { (which $in | length) > 0 }
|
||||
if $installed and (wix --version | split row . | first | into int) >= 6 {
|
||||
|
||||
print $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls | print
|
||||
let archive = $'($dist)/($releaseStem).zip'
|
||||
7z a $archive ...(glob *)
|
||||
let pkg = (ls -f $archive | get name)
|
||||
if not ($pkg | is-empty) {
|
||||
# Workaround for https://github.com/softprops/action-gh-release/issues/280
|
||||
let archive = ($pkg | get 0 | str replace --all '\' '/')
|
||||
print $'archive: ---> ($archive)'
|
||||
echo $"archive=($archive)" | save --append $env.GITHUB_OUTPUT
|
||||
}
|
||||
print $'(char nl)Start creating Windows msi package with the following contents...'
|
||||
cd $src; cd wix; hr-line; mkdir nu
|
||||
# Wix need the binaries be stored in nu folder
|
||||
cp -r ($'($dist)/*' | into glob) nu/
|
||||
cp $'($dist)/README.txt' .
|
||||
ls -f nu/* | print
|
||||
./nu/nu.exe -c $'NU_RELEASE_VERSION=($version) dotnet build -c Release -p:Platform=($arch)'
|
||||
glob **/*.msi | print
|
||||
# Workaround for https://github.com/softprops/action-gh-release/issues/280
|
||||
let wixRelease = (glob **/*.msi | where $it =~ bin | get 0 | str replace --all '\' '/')
|
||||
let msi = $'($wixRelease | path dirname)/nu-($version)-($target).msi'
|
||||
mv $wixRelease $msi
|
||||
print $'MSI archive: ---> ($msi)';
|
||||
echo $"msi=($msi)(char nl)" o>> $env.GITHUB_OUTPUT
|
||||
}
|
||||
}
|
||||
|
||||
def fetch-less [
|
||||
arch: string = 'x64' # The architecture to fetch
|
||||
] {
|
||||
let less_zip = $'less-($arch).zip'
|
||||
print $'Fetching less archive: (ansi g)($less_zip)(ansi reset)'
|
||||
let url = $'https://github.com/jftuga/less-Windows/releases/download/less-v668/($less_zip)'
|
||||
http get https://github.com/jftuga/less-Windows/blob/master/LICENSE | save -rf LICENSE-for-less.txt
|
||||
http get $url | save -rf $less_zip
|
||||
unzip $less_zip
|
||||
rm $less_zip lesskey.exe
|
||||
}
|
||||
|
||||
def 'cargo-build-nu' [] {
|
||||
if $os == 'windows-latest' {
|
||||
if $os =~ 'windows' {
|
||||
cargo build --release --all --target $target
|
||||
} else {
|
||||
cargo build --release --all --target $target --features=static-link-openssl
|
||||
|
34
.github/workflows/release.yml
vendored
34
.github/workflows/release.yml
vendored
@ -35,24 +35,16 @@ jobs:
|
||||
- armv7-unknown-linux-musleabihf
|
||||
- riscv64gc-unknown-linux-gnu
|
||||
- loongarch64-unknown-linux-gnu
|
||||
extra: ['bin']
|
||||
- loongarch64-unknown-linux-musl
|
||||
include:
|
||||
- target: aarch64-apple-darwin
|
||||
os: macos-latest
|
||||
- target: x86_64-apple-darwin
|
||||
os: macos-latest
|
||||
- target: x86_64-pc-windows-msvc
|
||||
extra: 'bin'
|
||||
os: windows-latest
|
||||
- target: x86_64-pc-windows-msvc
|
||||
extra: msi
|
||||
os: windows-latest
|
||||
- target: aarch64-pc-windows-msvc
|
||||
extra: 'bin'
|
||||
os: windows-latest
|
||||
- target: aarch64-pc-windows-msvc
|
||||
extra: msi
|
||||
os: windows-latest
|
||||
os: windows-11-arm
|
||||
- target: x86_64-unknown-linux-gnu
|
||||
os: ubuntu-22.04
|
||||
- target: x86_64-unknown-linux-musl
|
||||
@ -69,18 +61,31 @@ jobs:
|
||||
os: ubuntu-22.04
|
||||
- target: loongarch64-unknown-linux-gnu
|
||||
os: ubuntu-22.04
|
||||
- target: loongarch64-unknown-linux-musl
|
||||
os: ubuntu-22.04
|
||||
|
||||
runs-on: ${{matrix.os}}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Install Wix Toolset 6 for Windows
|
||||
shell: pwsh
|
||||
if: ${{ startsWith(matrix.os, 'windows') }}
|
||||
run: |
|
||||
dotnet tool install --global wix --version 6.0.0
|
||||
dotnet workload install wix
|
||||
$wixPath = "$env:USERPROFILE\.dotnet\tools"
|
||||
echo "$wixPath" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
|
||||
$env:PATH = "$wixPath;$env:PATH"
|
||||
wix --version
|
||||
|
||||
- name: Update Rust Toolchain Target
|
||||
run: |
|
||||
echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml
|
||||
|
||||
- name: Setup Rust toolchain
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.10.1
|
||||
uses: actions-rust-lang/setup-rust-toolchain@v1.12.0
|
||||
# WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
|
||||
with:
|
||||
cache: false
|
||||
@ -89,7 +94,7 @@ jobs:
|
||||
- name: Setup Nushell
|
||||
uses: hustcer/setup-nu@v3
|
||||
with:
|
||||
version: 0.101.0
|
||||
version: 0.105.1
|
||||
|
||||
- name: Release Nu Binary
|
||||
id: nu
|
||||
@ -98,7 +103,6 @@ jobs:
|
||||
OS: ${{ matrix.os }}
|
||||
REF: ${{ github.ref }}
|
||||
TARGET: ${{ matrix.target }}
|
||||
_EXTRA_: ${{ matrix.extra }}
|
||||
|
||||
# WARN: Don't upgrade this action due to the release per asset issue.
|
||||
# See: https://github.com/softprops/action-gh-release/issues/445
|
||||
@ -107,7 +111,9 @@ jobs:
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/') }}
|
||||
with:
|
||||
draft: true
|
||||
files: ${{ steps.nu.outputs.archive }}
|
||||
files: |
|
||||
${{ steps.nu.outputs.msi }}
|
||||
${{ steps.nu.outputs.archive }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
|
2
.github/workflows/typos.yml
vendored
2
.github/workflows/typos.yml
vendored
@ -10,4 +10,4 @@ jobs:
|
||||
uses: actions/checkout@v4.1.7
|
||||
|
||||
- name: Check spelling
|
||||
uses: crate-ci/typos@v1.29.4
|
||||
uses: crate-ci/typos@v1.35.4
|
||||
|
7
.github/workflows/winget-submission.yml
vendored
7
.github/workflows/winget-submission.yml
vendored
@ -10,6 +10,11 @@ on:
|
||||
required: true
|
||||
type: string
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
packages: write
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
|
||||
winget:
|
||||
@ -26,4 +31,4 @@ jobs:
|
||||
version: ${{ inputs.tag_name || github.event.release.tag_name }}
|
||||
release-tag: ${{ inputs.tag_name || github.event.release.tag_name }}
|
||||
token: ${{ secrets.NUSHELL_PAT }}
|
||||
fork-user: fdncred
|
||||
fork-user: nushell
|
||||
|
6
.gitignore
vendored
6
.gitignore
vendored
@ -32,11 +32,17 @@ unstable_cargo_features.txt
|
||||
# Helix configuration folder
|
||||
.helix/*
|
||||
.helix
|
||||
wix/bin/
|
||||
wix/obj/
|
||||
wix/nu/
|
||||
|
||||
# Coverage tools
|
||||
lcov.info
|
||||
tarpaulin-report.html
|
||||
|
||||
# benchmarking
|
||||
/tango
|
||||
|
||||
# Visual Studio
|
||||
.vs/*
|
||||
*.rsproj
|
||||
|
@ -3,6 +3,7 @@
|
||||
Welcome to Nushell and thank you for considering contributing!
|
||||
|
||||
## Table of contents
|
||||
- [Tips for submitting PRs](#tips-for-submitting-prs)
|
||||
- [Proposing design changes](#proposing-design-changes)
|
||||
- [Developing](#developing)
|
||||
- [Setup](#setup)
|
||||
@ -20,6 +21,51 @@ More resources can be found in the nascent [developer documentation](devdocs/REA
|
||||
- [Platform support policy](devdocs/PLATFORM_SUPPORT.md)
|
||||
- [Our Rust style](devdocs/rust_style.md)
|
||||
|
||||
## Tips for submitting PRs
|
||||
|
||||
Thank you for improving Nushell! We are always glad to see contributions, and we are absolutely willing to talk through the design or implementation of your PR. Come talk with us in [Discord](https://discordapp.com/invite/NtAbbGn), or create a GitHub discussion or draft PR and we can help you work out the details from there.
|
||||
|
||||
**Please talk to the core team before making major changes!** See the [proposing design changes](#proposing-design-changes) for more details.
|
||||
|
||||

### Release notes section

In our PR template, we have a "Release notes summary" section which will be included in our release notes for our blog.

This section should include all information about your change which is relevant to a user of Nushell. You should try to keep it **brief and simple to understand**, and focus on the ways your change directly impacts the user experience. We highly encourage adding examples and, when relevant, screenshots in this section.

Please make sure to consider both the *intended changes*, such as additions or deliberate breaking changes **and** possible *side effects* that might change how users interact with a command or feature. It's important to think carefully about the ways that your PR might affect any aspect of the user experience, and to document these changes even if they seem minor or aren't directly related to the main purpose of the PR.

This section might not be relevant for all PRs. If your PR is a work in progress, feel free to write "WIP"/"TODO"/etc in this section. You can also write "N/A" if this is a technical change which doesn't impact the user experience.

If you're not sure what to put here, or need some help, **a core team member would be glad to help you out**. We may also make some tweaks to your release notes section. Please don't take it personally; we just want to make sure our release notes are polished and easy to understand. Once the release notes section is ready, we'll add the (TODO label name) label to indicate that the release notes section is ready to be included in the actual release notes.

### Tests and formatting checks

Our CI system automatically checks formatting and runs our tests. If you're running into an issue, or just want to make sure everything is ready to go before creating your PR, you can run the checks yourself:

```nushell
use toolkit.nu # or use an `env_change` hook to activate it automatically
toolkit check pr
```

You can also run these checks individually with the subcommands of `toolkit`, or run the underlying commands yourself (a combined run is sketched after this list):

- `cargo fmt --all -- --check` to check standard code formatting (`cargo fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used` to check that you're using the standard code style
- `cargo test --workspace` to check that all tests pass (on Windows make sure to enable [developer mode](https://learn.microsoft.com/en-us/windows/apps/get-started/developer-mode-features-and-debugging))
- `cargo run -- -c "use toolkit.nu; toolkit test stdlib"` to run the tests for the standard library
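
For reference, a combined local run of those same checks could look like the sketch below. It simply chains the commands from the list above and assumes you are at the repository root with a working Rust toolchain; the `toolkit` subcommands mentioned earlier are an equivalent alternative.

```nushell
# Run the standard pre-PR checks back to back (same commands as listed above).
cargo fmt --all -- --check
cargo clippy --workspace -- -D warnings -D clippy::unwrap_used
cargo test --workspace
cargo run -- -c "use toolkit.nu; toolkit test stdlib"
```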

If the checks are passing on your local system, but CI just won't pass, feel free to ask for help from the core team.

### Linking and mentioning issues

If your PR closes one or more issues, you can automatically link the PR with them by using one of the [linking keywords](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword):

- This PR should close #xxxx
- Fixes #xxxx

You can also mention related issues, PRs or discussions!

## Proposing design changes

First of all, before diving into the code, if you want to create a new feature, change something significantly, and especially if the change is user-facing, it is a good practice to first get an approval from the core team before starting to work on it.
@ -31,7 +77,7 @@ The review process can be summarized as follows:
1. You want to make some change to Nushell that is more involved than simple bug-fixing.
2. Go to [Discord](https://discordapp.com/invite/NtAbbGn) or a [GitHub issue](https://github.com/nushell/nushell/issues/new/choose) and chat with some core team members and/or other contributors about it.
3. After getting a green light from the core team, implement the feature, open a pull request (PR) and write a concise but comprehensive description of the change.
4. If your PR includes any use-facing features (such as adding a flag to a command), clearly list them in the PR description.
4. If your PR includes any user-facing features (such as adding a flag to a command), clearly list them in the PR description.
5. Then, core team members and other regular contributors will review the PR and suggest changes.
6. When we all agree, the PR will be merged.
7. If your PR includes any user-facing features, make sure the changes are also reflected in [the documentation](https://github.com/nushell/nushell.github.io) after the PR is merged.

1757 Cargo.lock generated
File diff suppressed because it is too large

193 Cargo.toml
@ -4,14 +4,14 @@ build = "scripts/build.rs"
|
||||
default-run = "nu"
|
||||
description = "A new type of shell"
|
||||
documentation = "https://www.nushell.sh/book/"
|
||||
edition = "2021"
|
||||
edition = "2024"
|
||||
exclude = ["images"]
|
||||
homepage = "https://www.nushell.sh"
|
||||
license = "MIT"
|
||||
name = "nu"
|
||||
repository = "https://github.com/nushell/nushell"
|
||||
rust-version = "1.82.0"
|
||||
version = "0.102.0"
|
||||
rust-version = "1.87.0"
|
||||
version = "0.106.2"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
@ -24,36 +24,37 @@ pkg-fmt = "zip"
|
||||
|
||||
[workspace]
|
||||
members = [
|
||||
"crates/nu_plugin_custom_values",
|
||||
"crates/nu_plugin_example",
|
||||
"crates/nu_plugin_formats",
|
||||
"crates/nu_plugin_gstat",
|
||||
"crates/nu_plugin_inc",
|
||||
"crates/nu_plugin_polars",
|
||||
"crates/nu_plugin_query",
|
||||
"crates/nu_plugin_stress_internals",
|
||||
"crates/nu-cli",
|
||||
"crates/nu-engine",
|
||||
"crates/nu-parser",
|
||||
"crates/nu-system",
|
||||
"crates/nu-cmd-base",
|
||||
"crates/nu-cmd-extra",
|
||||
"crates/nu-cmd-lang",
|
||||
"crates/nu-cmd-plugin",
|
||||
"crates/nu-command",
|
||||
"crates/nu-color-config",
|
||||
"crates/nu-command",
|
||||
"crates/nu-derive-value",
|
||||
"crates/nu-engine",
|
||||
"crates/nu-experimental",
|
||||
"crates/nu-explore",
|
||||
"crates/nu-json",
|
||||
"crates/nu-lsp",
|
||||
"crates/nu-pretty-hex",
|
||||
"crates/nu-protocol",
|
||||
"crates/nu-derive-value",
|
||||
"crates/nu-plugin",
|
||||
"crates/nu-parser",
|
||||
"crates/nu-plugin-core",
|
||||
"crates/nu-plugin-engine",
|
||||
"crates/nu-plugin-protocol",
|
||||
"crates/nu-plugin-test-support",
|
||||
"crates/nu_plugin_inc",
|
||||
"crates/nu_plugin_gstat",
|
||||
"crates/nu_plugin_example",
|
||||
"crates/nu_plugin_query",
|
||||
"crates/nu_plugin_custom_values",
|
||||
"crates/nu_plugin_formats",
|
||||
"crates/nu_plugin_polars",
|
||||
"crates/nu_plugin_stress_internals",
|
||||
"crates/nu-plugin",
|
||||
"crates/nu-pretty-hex",
|
||||
"crates/nu-protocol",
|
||||
"crates/nu-std",
|
||||
"crates/nu-system",
|
||||
"crates/nu-table",
|
||||
"crates/nu-term-grid",
|
||||
"crates/nu-test-support",
|
||||
@ -63,15 +64,15 @@ members = [
|
||||
|
||||
[workspace.dependencies]
|
||||
alphanumeric-sort = "1.5"
|
||||
ansi-str = "0.8"
|
||||
ansi-str = "0.9"
|
||||
anyhow = "1.0.82"
|
||||
base64 = "0.22.1"
|
||||
bracoxide = "0.1.4"
|
||||
bracoxide = "0.1.6"
|
||||
brotli = "7.0"
|
||||
byteorder = "1.5"
|
||||
bytes = "1"
|
||||
bytesize = "1.3"
|
||||
calamine = "0.26.1"
|
||||
bytesize = "1.3.3"
|
||||
calamine = "0.28"
|
||||
chardetng = "0.1.17"
|
||||
chrono = { default-features = false, version = "0.4.34" }
|
||||
chrono-humanize = "0.2.3"
|
||||
@ -82,35 +83,37 @@ csv = "1.3"
|
||||
ctrlc = "3.4"
|
||||
devicons = "0.6.12"
|
||||
dialoguer = { default-features = false, version = "0.11" }
|
||||
fuzzy-matcher = { version = "^0.3.7" }
|
||||
digest = { default-features = false, version = "0.10" }
|
||||
dirs = "5.0"
|
||||
dirs-sys = "0.4"
|
||||
dtparse = "2.0"
|
||||
encoding_rs = "0.8"
|
||||
fancy-regex = "0.14"
|
||||
fancy-regex = "0.16"
|
||||
filesize = "0.2"
|
||||
filetime = "0.2"
|
||||
heck = "0.5.0"
|
||||
human-date-parser = "0.2.0"
|
||||
indexmap = "2.7"
|
||||
http = "1.3.1"
|
||||
human-date-parser = "0.3.0"
|
||||
indexmap = "2.10"
|
||||
indicatif = "0.17"
|
||||
interprocess = "2.2.0"
|
||||
is_executable = "1.0"
|
||||
itertools = "0.13"
|
||||
itertools = "0.14"
|
||||
libc = "0.2"
|
||||
libproc = "0.14"
|
||||
log = "0.4"
|
||||
lru = "0.12"
|
||||
lscolors = { version = "0.17", default-features = false }
|
||||
lscolors = { version = "0.20", default-features = false }
|
||||
lsp-server = "0.7.8"
|
||||
lsp-types = { version = "0.97.0", features = ["proposed"] }
|
||||
lsp-textdocument = "0.4.1"
|
||||
lsp-textdocument = "0.4.2"
|
||||
mach2 = "0.4"
|
||||
md5 = { version = "0.10", package = "md-5" }
|
||||
miette = "7.3"
|
||||
miette = "7.6"
|
||||
mime = "0.3.17"
|
||||
mime_guess = "2.0"
|
||||
mockito = { version = "1.6", default-features = false }
|
||||
mockito = { version = "1.7", default-features = false }
|
||||
multipart-rs = "0.1.13"
|
||||
native-tls = "0.2"
|
||||
nix = { version = "0.29", default-features = false }
|
||||
@ -127,90 +130,102 @@ pathdiff = "0.2"
|
||||
percent-encoding = "2"
|
||||
pretty_assertions = "1.4"
|
||||
print-positions = "0.6"
|
||||
proc-macro-error = { version = "1.0", default-features = false }
|
||||
proc-macro-error2 = "2.0"
|
||||
proc-macro2 = "1.0"
|
||||
procfs = "0.17.0"
|
||||
pwd = "1.3"
|
||||
quick-xml = "0.37.0"
|
||||
quick-xml = "0.37.5"
|
||||
quickcheck = "1.0"
|
||||
quickcheck_macros = "1.0"
|
||||
quickcheck_macros = "1.1"
|
||||
quote = "1.0"
|
||||
rand = "0.8"
|
||||
rand = "0.9"
|
||||
getrandom = "0.2" # pick same version that rand requires
|
||||
rand_chacha = "0.3.1"
|
||||
ratatui = "0.26"
|
||||
rayon = "1.10"
|
||||
reedline = "0.38.0"
|
||||
rand_chacha = "0.9"
|
||||
ratatui = "0.29"
|
||||
rayon = "1.11"
|
||||
reedline = "0.41.0"
|
||||
rmp = "0.8"
|
||||
rmp-serde = "1.3"
|
||||
roxmltree = "0.20"
|
||||
rstest = { version = "0.23", default-features = false }
|
||||
rstest_reuse = "0.7"
|
||||
rusqlite = "0.31"
|
||||
rust-embed = "8.5.0"
|
||||
rust-embed = "8.7.0"
|
||||
# We have to fix rustls and ureq versions
|
||||
# because we use unversioned api to allow users set up their own
|
||||
# crypto providers (grep for "unversioned")
|
||||
rustls = { version = "=0.23.28", default-features = false, features = ["std", "tls12"] }
|
||||
rustls-native-certs = "0.8"
|
||||
scopeguard = { version = "1.2.0" }
|
||||
serde = { version = "1.0" }
|
||||
serde_json = "1.0"
|
||||
serde_json = "1.0.97"
|
||||
serde_urlencoded = "0.7.1"
|
||||
serde_yaml = "0.9.33"
|
||||
sha2 = "0.10"
|
||||
strip-ansi-escapes = "0.2.0"
|
||||
strip-ansi-escapes = "0.2.1"
|
||||
strum = "0.26"
|
||||
strum_macros = "0.26"
|
||||
syn = "2.0"
|
||||
sysinfo = "0.33"
|
||||
tabled = { version = "0.17.0", default-features = false }
|
||||
tempfile = "3.15"
|
||||
titlecase = "3.0"
|
||||
sysinfo = "0.36"
|
||||
tabled = { version = "0.20", default-features = false }
|
||||
tempfile = "3.20"
|
||||
thiserror = "2.0.12"
|
||||
titlecase = "3.6"
|
||||
toml = "0.8"
|
||||
trash = "5.2"
|
||||
update-informer = { version = "1.2.0", default-features = false, features = ["github", "native-tls", "ureq"] }
|
||||
update-informer = { version = "1.3.0", default-features = false, features = ["github", "ureq"] }
|
||||
umask = "2.1"
|
||||
unicode-segmentation = "1.12"
|
||||
unicode-width = "0.2"
|
||||
ureq = { version = "2.12", default-features = false }
|
||||
ureq = { version = "=3.0.12", default-features = false, features = ["socks-proxy"] }
|
||||
url = "2.2"
|
||||
uu_cp = "0.0.29"
|
||||
uu_mkdir = "0.0.29"
|
||||
uu_mktemp = "0.0.29"
|
||||
uu_mv = "0.0.29"
|
||||
uu_touch = "0.0.29"
|
||||
uu_whoami = "0.0.29"
|
||||
uu_uname = "0.0.29"
|
||||
uucore = "0.0.29"
|
||||
uuid = "1.12.0"
|
||||
uu_cp = "0.0.30"
|
||||
uu_mkdir = "0.0.30"
|
||||
uu_mktemp = "0.0.30"
|
||||
uu_mv = "0.0.30"
|
||||
uu_touch = "0.0.30"
|
||||
uu_whoami = "0.0.30"
|
||||
uu_uname = "0.0.30"
|
||||
uucore = "0.0.30"
|
||||
uuid = "1.16.0"
|
||||
v_htmlescape = "0.15.0"
|
||||
wax = "0.6"
|
||||
web-time = "1.1.0"
|
||||
which = "7.0.0"
|
||||
which = "8.0.0"
|
||||
windows = "0.56"
|
||||
windows-sys = "0.48"
|
||||
winreg = "0.52"
|
||||
memchr = "2.7.4"
|
||||
webpki-roots = "1.0"
|
||||
|
||||
[workspace.lints.clippy]
|
||||
# Warning: workspace lints affect library code as well as tests, so don't enable lints that would be too noisy in tests like that.
|
||||
# todo = "warn"
|
||||
unchecked_duration_subtraction = "warn"
|
||||
used_underscore_binding = "warn"
|
||||
result_large_err = "allow"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
nu-cli = { path = "./crates/nu-cli", version = "0.102.0" }
|
||||
nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.102.0" }
|
||||
nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.102.0" }
|
||||
nu-cmd-plugin = { path = "./crates/nu-cmd-plugin", version = "0.102.0", optional = true }
|
||||
nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.102.0" }
|
||||
nu-command = { path = "./crates/nu-command", version = "0.102.0" }
|
||||
nu-engine = { path = "./crates/nu-engine", version = "0.102.0" }
|
||||
nu-explore = { path = "./crates/nu-explore", version = "0.102.0" }
|
||||
nu-lsp = { path = "./crates/nu-lsp/", version = "0.102.0" }
|
||||
nu-parser = { path = "./crates/nu-parser", version = "0.102.0" }
|
||||
nu-path = { path = "./crates/nu-path", version = "0.102.0" }
|
||||
nu-plugin-engine = { path = "./crates/nu-plugin-engine", optional = true, version = "0.102.0" }
|
||||
nu-protocol = { path = "./crates/nu-protocol", version = "0.102.0" }
|
||||
nu-std = { path = "./crates/nu-std", version = "0.102.0" }
|
||||
nu-system = { path = "./crates/nu-system", version = "0.102.0" }
|
||||
nu-utils = { path = "./crates/nu-utils", version = "0.102.0" }
|
||||
nu-cli = { path = "./crates/nu-cli", version = "0.106.2" }
|
||||
nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.106.2" }
|
||||
nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.106.2" }
|
||||
nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.106.2" }
|
||||
nu-cmd-plugin = { path = "./crates/nu-cmd-plugin", version = "0.106.2", optional = true }
|
||||
nu-command = { path = "./crates/nu-command", version = "0.106.2", default-features = false, features = ["os"] }
|
||||
nu-engine = { path = "./crates/nu-engine", version = "0.106.2" }
|
||||
nu-experimental = { path = "./crates/nu-experimental", version = "0.106.2" }
|
||||
nu-explore = { path = "./crates/nu-explore", version = "0.106.2" }
|
||||
nu-lsp = { path = "./crates/nu-lsp/", version = "0.106.2" }
|
||||
nu-parser = { path = "./crates/nu-parser", version = "0.106.2" }
|
||||
nu-path = { path = "./crates/nu-path", version = "0.106.2" }
|
||||
nu-plugin-engine = { path = "./crates/nu-plugin-engine", optional = true, version = "0.106.2" }
|
||||
nu-protocol = { path = "./crates/nu-protocol", version = "0.106.2" }
|
||||
nu-std = { path = "./crates/nu-std", version = "0.106.2" }
|
||||
nu-system = { path = "./crates/nu-system", version = "0.106.2" }
|
||||
nu-utils = { path = "./crates/nu-utils", version = "0.106.2" }
|
||||
reedline = { workspace = true, features = ["bashisms", "sqlite"] }
|
||||
|
||||
crossterm = { workspace = true }
|
||||
@ -218,7 +233,6 @@ ctrlc = { workspace = true }
|
||||
dirs = { workspace = true }
|
||||
log = { workspace = true }
|
||||
miette = { workspace = true, features = ["fancy-no-backtrace", "fancy"] }
|
||||
mimalloc = { version = "0.1.42", default-features = false, optional = true }
|
||||
multipart-rs = { workspace = true }
|
||||
serde_json = { workspace = true }
|
||||
simplelog = "0.12"
|
||||
@ -240,9 +254,9 @@ nix = { workspace = true, default-features = false, features = [
|
||||
] }
|
||||
|
||||
[dev-dependencies]
|
||||
nu-test-support = { path = "./crates/nu-test-support", version = "0.102.0" }
|
||||
nu-plugin-protocol = { path = "./crates/nu-plugin-protocol", version = "0.102.0" }
|
||||
nu-plugin-core = { path = "./crates/nu-plugin-core", version = "0.102.0" }
|
||||
nu-test-support = { path = "./crates/nu-test-support", version = "0.106.2" }
|
||||
nu-plugin-protocol = { path = "./crates/nu-plugin-protocol", version = "0.106.2" }
|
||||
nu-plugin-core = { path = "./crates/nu-plugin-core", version = "0.106.2" }
|
||||
assert_cmd = "2.0"
|
||||
dirs = { workspace = true }
|
||||
tango-bench = "0.6"
|
||||
@ -253,10 +267,14 @@ serial_test = "3.2"
|
||||
tempfile = { workspace = true }
|
||||
|
||||
[features]
|
||||
# Enable all features while still avoiding mutually exclusive features.
|
||||
# Use this if `--all-features` fails.
|
||||
full = ["plugin", "rustls-tls", "system-clipboard", "trash-support", "sqlite"]
|
||||
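
As a usage note (an illustrative sketch, not taken from this diff): the `full` feature is intended to be passed to Cargo in place of `--all-features`, for example when building or testing locally from the repository root.

```nushell
# Enable every compatible feature via the `full` feature flag,
# instead of `--all-features` (which can select mutually exclusive features).
cargo build --features full
cargo test --features full
```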
|
||||
plugin = [
|
||||
# crates
|
||||
"nu-cmd-plugin",
|
||||
"nu-plugin-engine",
|
||||
"dep:nu-cmd-plugin",
|
||||
"dep:nu-plugin-engine",
|
||||
|
||||
# features
|
||||
"nu-cli/plugin",
|
||||
@ -268,33 +286,34 @@ plugin = [
|
||||
"nu-protocol/plugin",
|
||||
]
|
||||
|
||||
native-tls = ["nu-command/native-tls"]
|
||||
rustls-tls = ["nu-command/rustls-tls"]
|
||||
|
||||
default = [
|
||||
"plugin",
|
||||
"trash-support",
|
||||
"sqlite",
|
||||
"mimalloc",
|
||||
"rustls-tls"
|
||||
]
|
||||
stable = ["default"]
|
||||
# NOTE: individual features are also passed to `nu-cmd-lang` that uses them to generate the feature matrix in the `version` command
|
||||
|
||||
# Enable to statically link OpenSSL (perl is required, to build OpenSSL https://docs.rs/openssl/latest/openssl/);
|
||||
# otherwise the system version will be used. Not enabled by default because it takes a while to build
|
||||
static-link-openssl = ["dep:openssl", "nu-cmd-lang/static-link-openssl"]
|
||||
static-link-openssl = ["dep:openssl"]
|
||||
|
||||
mimalloc = ["nu-cmd-lang/mimalloc", "dep:mimalloc"]
|
||||
# Optional system clipboard support in `reedline`, this behavior has problematic compatibility with some systems.
|
||||
# Missing X server/ Wayland can cause issues
|
||||
system-clipboard = [
|
||||
"reedline/system_clipboard",
|
||||
"nu-cli/system-clipboard",
|
||||
"nu-cmd-lang/system-clipboard",
|
||||
]
|
||||
|
||||
# Stable (Default)
|
||||
trash-support = ["nu-command/trash-support", "nu-cmd-lang/trash-support"]
|
||||
trash-support = ["nu-command/trash-support"]
|
||||
|
||||
# SQLite commands for nushell
|
||||
sqlite = ["nu-command/sqlite", "nu-cmd-lang/sqlite"]
|
||||
sqlite = ["nu-command/sqlite", "nu-std/sqlite"]
|
||||
|
||||
[profile.release]
|
||||
opt-level = "s" # Optimize for size
|
||||
@ -324,7 +343,7 @@ bench = false
|
||||
# To use a development version of a dependency please use a global override here
|
||||
# changing versions in each sub-crate of the workspace is tedious
|
||||
[patch.crates-io]
|
||||
# reedline = { git = "https://github.com/nushell/reedline", branch = "main" }
|
||||
reedline = { git = "https://github.com/nushell/reedline", branch = "main" }
|
||||
# nu-ansi-term = {git = "https://github.com/nushell/nu-ansi-term.git", branch = "main"}
|
||||
|
||||
# Run all benchmarks with `cargo bench`
|
||||
|
2 LICENSE
@ -1,6 +1,6 @@
MIT License

Copyright (c) 2019 - 2023 The Nushell Project Developers
Copyright (c) 2019 - 2025 The Nushell Project Developers

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

@ -4,7 +4,6 @@
[](https://github.com/nushell/nushell/actions/workflows/nightly-build.yml)
[](https://discord.gg/NtAbbGn)
[](https://changelog.com/podcast/363)
[](https://twitter.com/nu_shell)
[](https://github.com/nushell/nushell/graphs/commit-activity)
[](https://github.com/nushell/nushell/graphs/contributors)

@ -35,7 +34,7 @@ This project has reached a minimum-viable-product level of quality. Many people

The [Nushell book](https://www.nushell.sh/book/) is the primary source of Nushell documentation. You can find [a full list of Nu commands in the book](https://www.nushell.sh/commands/), and we have many examples of using Nu in our [cookbook](https://www.nushell.sh/cookbook/).

We're also active on [Discord](https://discord.gg/NtAbbGn) and [Twitter](https://twitter.com/nu_shell); come and chat with us!
We're also active on [Discord](https://discord.gg/NtAbbGn); come and chat with us!

## Installation

@ -223,6 +222,7 @@ Please submit an issue or PR to be added to this list.
- [Dorothy](http://github.com/bevry/dorothy)
- [Direnv](https://github.com/direnv/direnv/blob/master/docs/hook.md#nushell)
- [x-cmd](https://x-cmd.com/mod/nu)
- [vfox](https://github.com/version-fox/vfox)

## Contributing

@ -1,6 +1,6 @@
# Security Policy

As a shell and programming language Nushell provides you with great powers and the potential to do dangerous things to your computer and data. Whenever there is a risk that a malicious actor can abuse a bug or a violation of documented behavior/assumptions in Nushell to harm you this is a *security* risk.
As a shell and programming language Nushell provides you with great powers and the potential to do dangerous things to your computer and data. Whenever there is a risk that a malicious actor can abuse a bug or a violation of documented behavior/assumptions in Nushell to harm you this is a *security* risk.
We want to fix those issues without exposing our users to unnecessary risk. Thus we want to explain our security policy.
Additional issues may be part of *safety* where the behavior of Nushell as designed and implemented can cause unintended harm or a bug causes damage without the involvement of a third party.

@ -11,7 +11,7 @@ Only if you provide a strong reasoning and the necessary resources, will we cons

## Reporting a Vulnerability

If you suspect that a bug or behavior of Nushell can affect security or may be potentially exploitable, please report the issue to us in private.
If you suspect that a bug or behavior of Nushell can affect security or may be potentially exploitable, please report the issue to us in private.
Either reach out to the core team on [our Discord server](https://discord.gg/NtAbbGn) to arrange a private channel or use the [GitHub vulnerability reporting form](https://github.com/nushell/nushell/security/advisories/new).
Please try to answer the following questions:
- How can we reach you for further questions?
@ -1,21 +1,19 @@
|
||||
use nu_cli::{eval_source, evaluate_commands};
|
||||
use nu_plugin_core::{Encoder, EncodingType};
|
||||
use nu_plugin_protocol::{PluginCallResponse, PluginOutput};
|
||||
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, Stack},
|
||||
PipelineData, Signals, Span, Spanned, Value,
|
||||
engine::{EngineState, Stack},
|
||||
};
|
||||
use nu_std::load_standard_library;
|
||||
use nu_utils::{get_default_config, get_default_env};
|
||||
use std::{
|
||||
fmt::Write,
|
||||
hint::black_box,
|
||||
rc::Rc,
|
||||
sync::{atomic::AtomicBool, Arc},
|
||||
sync::{Arc, atomic::AtomicBool},
|
||||
};
|
||||
|
||||
use std::hint::black_box;
|
||||
|
||||
use tango_bench::{benchmark_fn, tango_benchmarks, tango_main, IntoBenchmarks};
|
||||
use tango_bench::{IntoBenchmarks, benchmark_fn, tango_benchmarks, tango_main};
|
||||
|
||||
fn load_bench_commands() -> EngineState {
|
||||
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
|
||||
@ -70,14 +68,14 @@ fn encoding_test_data(row_cnt: usize, col_cnt: usize) -> Value {
|
||||
}
|
||||
|
||||
fn bench_command(
|
||||
name: &str,
|
||||
command: &str,
|
||||
name: impl Into<String>,
|
||||
command: impl Into<String> + Clone,
|
||||
stack: Stack,
|
||||
engine: EngineState,
|
||||
) -> impl IntoBenchmarks {
|
||||
let commands = Spanned {
|
||||
span: Span::unknown(),
|
||||
item: command.to_string(),
|
||||
item: command.into(),
|
||||
};
|
||||
[benchmark_fn(name, move |b| {
|
||||
let commands = commands.clone();
|
||||
@ -141,19 +139,16 @@ fn bench_load_standard_lib() -> impl IntoBenchmarks {
|
||||
})]
|
||||
}
|
||||
|
||||
fn create_flat_record_string(n: i32) -> String {
|
||||
let mut s = String::from("let record = {");
|
||||
fn create_flat_record_string(n: usize) -> String {
|
||||
let mut s = String::from("let record = { ");
|
||||
for i in 0..n {
|
||||
s.push_str(&format!("col_{}: {}", i, i));
|
||||
if i < n - 1 {
|
||||
s.push_str(", ");
|
||||
}
|
||||
write!(s, "col_{i}: {i}, ").unwrap();
|
||||
}
|
||||
s.push('}');
|
||||
s
|
||||
}
|
||||
|
||||
fn create_nested_record_string(depth: i32) -> String {
|
||||
fn create_nested_record_string(depth: usize) -> String {
|
||||
let mut s = String::from("let record = {");
|
||||
for _ in 0..depth {
|
||||
s.push_str("col: {");
|
||||
@ -166,7 +161,7 @@ fn create_nested_record_string(depth: i32) -> String {
|
||||
s
|
||||
}
|
||||
|
||||
fn create_example_table_nrows(n: i32) -> String {
|
||||
fn create_example_table_nrows(n: usize) -> String {
|
||||
let mut s = String::from("let table = [[foo bar baz]; ");
|
||||
for i in 0..n {
|
||||
s.push_str(&format!("[0, 1, {i}]"));
|
||||
@ -178,120 +173,153 @@ fn create_example_table_nrows(n: i32) -> String {
|
||||
s
|
||||
}
|
||||
|
||||
fn bench_record_create(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_record_create(n: usize) -> impl IntoBenchmarks {
|
||||
bench_command(
|
||||
&format!("record_create_{n}"),
|
||||
&create_flat_record_string(n),
|
||||
format!("record_create_{n}"),
|
||||
create_flat_record_string(n),
|
||||
Stack::new(),
|
||||
setup_engine(),
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_record_flat_access(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_record_flat_access(n: usize) -> impl IntoBenchmarks {
|
||||
let setup_command = create_flat_record_string(n);
|
||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
||||
bench_command(
|
||||
&format!("record_flat_access_{n}"),
|
||||
format!("record_flat_access_{n}"),
|
||||
"$record.col_0 | ignore",
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_record_nested_access(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_record_nested_access(n: usize) -> impl IntoBenchmarks {
|
||||
let setup_command = create_nested_record_string(n);
|
||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
||||
let nested_access = ".col".repeat(n as usize);
|
||||
let nested_access = ".col".repeat(n);
|
||||
bench_command(
|
||||
&format!("record_nested_access_{n}"),
|
||||
&format!("$record{} | ignore", nested_access),
|
||||
format!("record_nested_access_{n}"),
|
||||
format!("$record{nested_access} | ignore"),
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_table_create(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_record_insert(n: usize, m: usize) -> impl IntoBenchmarks {
|
||||
let setup_command = create_flat_record_string(n);
|
||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
||||
let mut insert = String::from("$record");
|
||||
for i in n..(n + m) {
|
||||
write!(insert, " | insert col_{i} {i}").unwrap();
|
||||
}
|
||||
insert.push_str(" | ignore");
|
||||
bench_command(format!("record_insert_{n}_{m}"), insert, stack, engine)
|
||||
}
|
||||
|
||||
fn bench_table_create(n: usize) -> impl IntoBenchmarks {
|
||||
bench_command(
|
||||
&format!("table_create_{n}"),
|
||||
&create_example_table_nrows(n),
|
||||
format!("table_create_{n}"),
|
||||
create_example_table_nrows(n),
|
||||
Stack::new(),
|
||||
setup_engine(),
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_table_get(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_table_get(n: usize) -> impl IntoBenchmarks {
|
||||
let setup_command = create_example_table_nrows(n);
|
||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
||||
bench_command(
|
||||
&format!("table_get_{n}"),
|
||||
format!("table_get_{n}"),
|
||||
"$table | get bar | math sum | ignore",
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_table_select(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_table_select(n: usize) -> impl IntoBenchmarks {
|
||||
let setup_command = create_example_table_nrows(n);
|
||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
||||
bench_command(
|
||||
&format!("table_select_{n}"),
|
||||
format!("table_select_{n}"),
|
||||
"$table | select foo baz | ignore",
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_eval_interleave(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_table_insert_row(n: usize, m: usize) -> impl IntoBenchmarks {
|
||||
let setup_command = create_example_table_nrows(n);
|
||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
||||
let mut insert = String::from("$table");
|
||||
for i in n..(n + m) {
|
||||
write!(insert, " | insert {i} {{ foo: 0, bar: 1, baz: {i} }}").unwrap();
|
||||
}
|
||||
insert.push_str(" | ignore");
|
||||
bench_command(format!("table_insert_row_{n}_{m}"), insert, stack, engine)
|
||||
}
|
||||
|
||||
fn bench_table_insert_col(n: usize, m: usize) -> impl IntoBenchmarks {
|
||||
let setup_command = create_example_table_nrows(n);
|
||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
||||
let mut insert = String::from("$table");
|
||||
for i in 0..m {
|
||||
write!(insert, " | insert col_{i} {i}").unwrap();
|
||||
}
|
||||
insert.push_str(" | ignore");
|
||||
bench_command(format!("table_insert_col_{n}_{m}"), insert, stack, engine)
|
||||
}
|
||||
|
||||
fn bench_eval_interleave(n: usize) -> impl IntoBenchmarks {
|
||||
let engine = setup_engine();
|
||||
let stack = Stack::new();
|
||||
bench_command(
|
||||
&format!("eval_interleave_{n}"),
|
||||
&format!("seq 1 {n} | wrap a | interleave {{ seq 1 {n} | wrap b }} | ignore"),
|
||||
format!("eval_interleave_{n}"),
|
||||
format!("seq 1 {n} | wrap a | interleave {{ seq 1 {n} | wrap b }} | ignore"),
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_eval_interleave_with_interrupt(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_eval_interleave_with_interrupt(n: usize) -> impl IntoBenchmarks {
|
||||
let mut engine = setup_engine();
|
||||
engine.set_signals(Signals::new(Arc::new(AtomicBool::new(false))));
|
||||
let stack = Stack::new();
|
||||
bench_command(
|
||||
&format!("eval_interleave_with_interrupt_{n}"),
|
||||
&format!("seq 1 {n} | wrap a | interleave {{ seq 1 {n} | wrap b }} | ignore"),
|
||||
format!("eval_interleave_with_interrupt_{n}"),
|
||||
format!("seq 1 {n} | wrap a | interleave {{ seq 1 {n} | wrap b }} | ignore"),
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_eval_for(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_eval_for(n: usize) -> impl IntoBenchmarks {
|
||||
let engine = setup_engine();
|
||||
let stack = Stack::new();
|
||||
bench_command(
|
||||
&format!("eval_for_{n}"),
|
||||
&format!("(for $x in (1..{n}) {{ 1 }}) | ignore"),
|
||||
format!("eval_for_{n}"),
|
||||
format!("(for $x in (1..{n}) {{ 1 }}) | ignore"),
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_eval_each(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_eval_each(n: usize) -> impl IntoBenchmarks {
|
||||
let engine = setup_engine();
|
||||
let stack = Stack::new();
|
||||
bench_command(
|
||||
&format!("eval_each_{n}"),
|
||||
&format!("(1..{n}) | each {{|_| 1 }} | ignore"),
|
||||
format!("eval_each_{n}"),
|
||||
format!("(1..{n}) | each {{|_| 1 }} | ignore"),
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_eval_par_each(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_eval_par_each(n: usize) -> impl IntoBenchmarks {
|
||||
let engine = setup_engine();
|
||||
let stack = Stack::new();
|
||||
bench_command(
|
||||
&format!("eval_par_each_{n}"),
|
||||
&format!("(1..{}) | par-each -t 2 {{|_| 1 }} | ignore", n),
|
||||
format!("eval_par_each_{n}"),
|
||||
format!("(1..{n}) | par-each -t 2 {{|_| 1 }} | ignore"),
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
@ -329,7 +357,7 @@ fn encode_json(row_cnt: usize, col_cnt: usize) -> impl IntoBenchmarks {
|
||||
let encoder = Rc::new(EncodingType::try_from_bytes(b"json").unwrap());
|
||||
|
||||
[benchmark_fn(
|
||||
format!("encode_json_{}_{}", row_cnt, col_cnt),
|
||||
format!("encode_json_{row_cnt}_{col_cnt}"),
|
||||
move |b| {
|
||||
let encoder = encoder.clone();
|
||||
let test_data = test_data.clone();
|
||||
@ -349,7 +377,7 @@ fn encode_msgpack(row_cnt: usize, col_cnt: usize) -> impl IntoBenchmarks {
|
||||
let encoder = Rc::new(EncodingType::try_from_bytes(b"msgpack").unwrap());
|
||||
|
||||
[benchmark_fn(
|
||||
format!("encode_msgpack_{}_{}", row_cnt, col_cnt),
|
||||
format!("encode_msgpack_{row_cnt}_{col_cnt}"),
|
||||
move |b| {
|
||||
let encoder = encoder.clone();
|
||||
let test_data = test_data.clone();
|
||||
@ -371,7 +399,7 @@ fn decode_json(row_cnt: usize, col_cnt: usize) -> impl IntoBenchmarks {
|
||||
encoder.encode(&test_data, &mut res).unwrap();
|
||||
|
||||
[benchmark_fn(
|
||||
format!("decode_json_{}_{}", row_cnt, col_cnt),
|
||||
format!("decode_json_{row_cnt}_{col_cnt}"),
|
||||
move |b| {
|
||||
let res = res.clone();
|
||||
b.iter(move || {
|
||||
@ -394,7 +422,7 @@ fn decode_msgpack(row_cnt: usize, col_cnt: usize) -> impl IntoBenchmarks {
|
||||
encoder.encode(&test_data, &mut res).unwrap();
|
||||
|
||||
[benchmark_fn(
|
||||
format!("decode_msgpack_{}_{}", row_cnt, col_cnt),
|
||||
format!("decode_msgpack_{row_cnt}_{col_cnt}"),
|
||||
move |b| {
|
||||
let res = res.clone();
|
||||
b.iter(move || {
|
||||
@ -427,6 +455,14 @@ tango_benchmarks!(
|
||||
bench_record_nested_access(32),
|
||||
bench_record_nested_access(64),
|
||||
bench_record_nested_access(128),
|
||||
bench_record_insert(1, 1),
|
||||
bench_record_insert(10, 1),
|
||||
bench_record_insert(100, 1),
|
||||
bench_record_insert(1000, 1),
|
||||
bench_record_insert(1, 10),
|
||||
bench_record_insert(10, 10),
|
||||
bench_record_insert(100, 10),
|
||||
bench_record_insert(1000, 10),
|
||||
// Table
|
||||
bench_table_create(1),
|
||||
bench_table_create(10),
|
||||
@ -440,6 +476,22 @@ tango_benchmarks!(
|
||||
bench_table_select(10),
|
||||
bench_table_select(100),
|
||||
bench_table_select(1_000),
|
||||
bench_table_insert_row(1, 1),
|
||||
bench_table_insert_row(10, 1),
|
||||
bench_table_insert_row(100, 1),
|
||||
bench_table_insert_row(1000, 1),
|
||||
bench_table_insert_row(1, 10),
|
||||
bench_table_insert_row(10, 10),
|
||||
bench_table_insert_row(100, 10),
|
||||
bench_table_insert_row(1000, 10),
|
||||
bench_table_insert_col(1, 1),
|
||||
bench_table_insert_col(10, 1),
|
||||
bench_table_insert_col(100, 1),
|
||||
bench_table_insert_col(1000, 1),
|
||||
bench_table_insert_col(1, 10),
|
||||
bench_table_insert_col(10, 10),
|
||||
bench_table_insert_col(100, 10),
|
||||
bench_table_insert_col(1000, 10),
|
||||
// Eval
|
||||
// Interleave
|
||||
bench_eval_interleave(100),
|
||||
|
@ -2,31 +2,32 @@
|
||||
authors = ["The Nushell Project Developers"]
|
||||
description = "CLI-related functionality for Nushell"
|
||||
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cli"
|
||||
edition = "2021"
|
||||
edition = "2024"
|
||||
license = "MIT"
|
||||
name = "nu-cli"
|
||||
version = "0.102.0"
|
||||
version = "0.106.2"
|
||||
|
||||
[lib]
|
||||
bench = false
|
||||
|
||||
[dev-dependencies]
|
||||
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.102.0" }
|
||||
nu-command = { path = "../nu-command", version = "0.102.0" }
|
||||
nu-test-support = { path = "../nu-test-support", version = "0.102.0" }
|
||||
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.106.2" }
|
||||
nu-command = { path = "../nu-command", version = "0.106.2" }
|
||||
nu-std = { path = "../nu-std", version = "0.106.2" }
|
||||
nu-test-support = { path = "../nu-test-support", version = "0.106.2" }
|
||||
rstest = { workspace = true, default-features = false }
|
||||
tempfile = { workspace = true }
|
||||
|
||||
[dependencies]
|
||||
nu-cmd-base = { path = "../nu-cmd-base", version = "0.102.0" }
|
||||
nu-engine = { path = "../nu-engine", version = "0.102.0", features = ["os"] }
|
||||
nu-glob = { path = "../nu-glob", version = "0.102.0" }
|
||||
nu-path = { path = "../nu-path", version = "0.102.0" }
|
||||
nu-parser = { path = "../nu-parser", version = "0.102.0" }
|
||||
nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.102.0", optional = true }
|
||||
nu-protocol = { path = "../nu-protocol", version = "0.102.0", features = ["os"] }
|
||||
nu-utils = { path = "../nu-utils", version = "0.102.0" }
|
||||
nu-color-config = { path = "../nu-color-config", version = "0.102.0" }
|
||||
nu-cmd-base = { path = "../nu-cmd-base", version = "0.106.2" }
|
||||
nu-engine = { path = "../nu-engine", version = "0.106.2", features = ["os"] }
|
||||
nu-glob = { path = "../nu-glob", version = "0.106.2" }
|
||||
nu-path = { path = "../nu-path", version = "0.106.2" }
|
||||
nu-parser = { path = "../nu-parser", version = "0.106.2" }
|
||||
nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.106.2", optional = true }
|
||||
nu-protocol = { path = "../nu-protocol", version = "0.106.2", features = ["os"] }
|
||||
nu-utils = { path = "../nu-utils", version = "0.106.2" }
|
||||
nu-color-config = { path = "../nu-color-config", version = "0.106.2" }
|
||||
nu-ansi-term = { workspace = true }
|
||||
reedline = { workspace = true, features = ["bashisms", "sqlite"] }
|
||||
|
||||
@ -40,6 +41,7 @@ miette = { workspace = true, features = ["fancy-no-backtrace"] }
|
||||
nucleo-matcher = { workspace = true }
|
||||
percent-encoding = { workspace = true }
|
||||
sysinfo = { workspace = true }
|
||||
strum = { workspace = true }
|
||||
unicode-segmentation = { workspace = true }
|
||||
uuid = { workspace = true, features = ["v4"] }
|
||||
which = { workspace = true }
|
||||
@ -49,4 +51,4 @@ plugin = ["nu-plugin-engine"]
|
||||
system-clipboard = ["reedline/system_clipboard"]
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
workspace = true
|
||||
|
@ -1,9 +1,9 @@
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SubCommand;
|
||||
pub struct CommandlineEdit;
|
||||
|
||||
impl Command for SubCommand {
|
||||
impl Command for CommandlineEdit {
|
||||
fn name(&self) -> &str {
|
||||
"commandline edit"
|
||||
}
|
||||
@ -29,7 +29,7 @@ impl Command for SubCommand {
|
||||
.required(
|
||||
"str",
|
||||
SyntaxShape::String,
|
||||
"the string to perform the operation with",
|
||||
"The string to perform the operation with.",
|
||||
)
|
||||
.category(Category::Core)
|
||||
}
|
||||
|
@ -2,9 +2,9 @@ use nu_engine::command_prelude::*;
|
||||
use unicode_segmentation::UnicodeSegmentation;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SubCommand;
|
||||
pub struct CommandlineGetCursor;
|
||||
|
||||
impl Command for SubCommand {
|
||||
impl Command for CommandlineGetCursor {
|
||||
fn name(&self) -> &str {
|
||||
"commandline get-cursor"
|
||||
}
|
||||
|
@ -4,6 +4,6 @@ mod get_cursor;
|
||||
mod set_cursor;
|
||||
|
||||
pub use commandline_::Commandline;
|
||||
pub use edit::SubCommand as CommandlineEdit;
|
||||
pub use get_cursor::SubCommand as CommandlineGetCursor;
|
||||
pub use set_cursor::SubCommand as CommandlineSetCursor;
|
||||
pub use edit::CommandlineEdit;
|
||||
pub use get_cursor::CommandlineGetCursor;
|
||||
pub use set_cursor::CommandlineSetCursor;
|
||||
|
@ -3,9 +3,9 @@ use nu_engine::command_prelude::*;
|
||||
use unicode_segmentation::UnicodeSegmentation;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SubCommand;
|
||||
pub struct CommandlineSetCursor;
|
||||
|
||||
impl Command for SubCommand {
|
||||
impl Command for CommandlineSetCursor {
|
||||
fn name(&self) -> &str {
|
||||
"commandline set-cursor"
|
||||
}
|
||||
@ -18,7 +18,7 @@ impl Command for SubCommand {
|
||||
"set the current cursor position to the end of the buffer",
|
||||
Some('e'),
|
||||
)
|
||||
.optional("pos", SyntaxShape::Int, "Cursor position to be set")
|
||||
.optional("pos", SyntaxShape::Int, "Cursor position to be set.")
|
||||
.category(Category::Core)
|
||||
}
|
||||
|
||||
|
@ -1,5 +1,8 @@
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::{shell_error::io::IoError, HistoryFileFormat};
|
||||
use nu_protocol::{
|
||||
HistoryFileFormat,
|
||||
shell_error::{self, io::IoError},
|
||||
};
|
||||
use reedline::{
|
||||
FileBackedHistory, History as ReedlineHistory, HistoryItem, SearchDirection, SearchQuery,
|
||||
SqliteBackedHistory,
|
||||
@ -94,7 +97,7 @@ impl Command for History {
|
||||
})
|
||||
})
|
||||
.ok_or(IoError::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
shell_error::io::ErrorKind::FileNotFound,
|
||||
head,
|
||||
history_path,
|
||||
))?
|
||||
@ -105,13 +108,12 @@ impl Command for History {
|
||||
.ok()
|
||||
})
|
||||
.map(move |entries| {
|
||||
entries
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(move |(idx, entry)| create_history_record(idx, entry, long, head))
|
||||
entries.into_iter().enumerate().map(move |(idx, entry)| {
|
||||
create_sqlite_history_record(idx, entry, long, head)
|
||||
})
|
||||
})
|
||||
.ok_or(IoError::new(
|
||||
std::io::ErrorKind::NotFound,
|
||||
shell_error::io::ErrorKind::FileNotFound,
|
||||
head,
|
||||
history_path,
|
||||
))?
|
||||
@ -140,7 +142,7 @@ impl Command for History {
|
||||
}
|
||||
}
|
||||
|
||||
fn create_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span) -> Value {
|
||||
fn create_sqlite_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span) -> Value {
|
||||
//1. Format all the values
|
||||
//2. Create a record of either short or long columns and values
|
||||
|
||||
@ -151,11 +153,8 @@ fn create_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span)
|
||||
.unwrap_or_default(),
|
||||
head,
|
||||
);
|
||||
let start_timestamp_value = Value::string(
|
||||
entry
|
||||
.start_timestamp
|
||||
.map(|time| time.to_string())
|
||||
.unwrap_or_default(),
|
||||
let start_timestamp_value = Value::date(
|
||||
entry.start_timestamp.unwrap_or_default().fixed_offset(),
|
||||
head,
|
||||
);
|
||||
let command_value = Value::string(entry.command_line, head);
|
||||
|
@ -2,8 +2,8 @@ use std::path::{Path, PathBuf};
|
||||
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::{
|
||||
shell_error::{self, io::IoError},
|
||||
HistoryFileFormat,
|
||||
shell_error::{self, io::IoError},
|
||||
};
|
||||
|
||||
use reedline::{
|
||||
@ -21,12 +21,12 @@ impl Command for HistoryImport {
|
||||
}
|
||||
|
||||
fn description(&self) -> &str {
|
||||
"Import command line history"
|
||||
"Import command line history."
|
||||
}
|
||||
|
||||
fn extra_description(&self) -> &str {
|
||||
r#"Can import history from input, either successive command lines or more detailed records. If providing records, available fields are:
|
||||
command_line, id, start_timestamp, hostname, cwd, duration, exit_status.
|
||||
command, start_timestamp, hostname, cwd, duration, exit_status.
|
||||
|
||||
If no input is provided, will import all history items from existing history in the other format: if current history is stored in sqlite, it will store it in plain text and vice versa.
|
||||
|
||||
@ -48,8 +48,7 @@ Note that history item IDs are ignored when importing from file."#
|
||||
vec![
|
||||
Example {
|
||||
example: "history import",
|
||||
description:
|
||||
"Append all items from history in the other format to the current history",
|
||||
description: "Append all items from history in the other format to the current history",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
@ -58,7 +57,7 @@ Note that history item IDs are ignored when importing from file."#
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
example: "[[ command_line cwd ]; [ foo /home ]] | history import",
|
||||
example: "[[ command cwd ]; [ foo /home ]] | history import",
|
||||
description: "Append `foo` ran from `/home` to the current history",
|
||||
result: None,
|
||||
},
|
||||
@ -198,7 +197,7 @@ fn item_from_record(mut rec: Record, span: Span) -> Result<HistoryItem, ShellErr
|
||||
return Err(ShellError::TypeMismatch {
|
||||
err_message: format!("missing column: {}", fields::COMMAND_LINE),
|
||||
span,
|
||||
})
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
@ -283,22 +282,22 @@ fn backup(path: &Path, span: Span) -> Result<Option<PathBuf>, ShellError> {
|
||||
PathBuf::from(path),
|
||||
"history path exists but is not a file",
|
||||
)
|
||||
.into())
|
||||
.into());
|
||||
}
|
||||
Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(None),
|
||||
Err(e) => {
|
||||
return Err(IoError::new_internal(
|
||||
e.kind(),
|
||||
e,
|
||||
"Could not get metadata",
|
||||
nu_protocol::location!(),
|
||||
)
|
||||
.into())
|
||||
.into());
|
||||
}
|
||||
}
|
||||
let bak_path = find_backup_path(path, span)?;
|
||||
std::fs::copy(path, &bak_path).map_err(|err| {
|
||||
IoError::new_internal(
|
||||
err.kind(),
|
||||
err.not_found_as(NotFound::File),
|
||||
"Could not copy backup",
|
||||
nu_protocol::location!(),
|
||||
)
|
||||
|
@ -1,9 +1,9 @@
|
||||
use crossterm::{
|
||||
event::Event, event::KeyCode, event::KeyEvent, execute, terminal, QueueableCommand,
|
||||
QueueableCommand, event::Event, event::KeyCode, event::KeyEvent, execute, terminal,
|
||||
};
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::shell_error::io::IoError;
|
||||
use std::io::{stdout, Write};
|
||||
use std::io::{Write, stdout};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct KeybindingsListen;
|
||||
@ -42,7 +42,7 @@ impl Command for KeybindingsListen {
|
||||
Err(e) => {
|
||||
terminal::disable_raw_mode().map_err(|err| {
|
||||
IoError::new_internal(
|
||||
err.kind(),
|
||||
err,
|
||||
"Could not disable raw mode",
|
||||
nu_protocol::location!(),
|
||||
)
|
||||
@ -71,18 +71,10 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
|
||||
let config = engine_state.get_config();
|
||||
|
||||
stdout().flush().map_err(|err| {
|
||||
IoError::new_internal(
|
||||
err.kind(),
|
||||
"Could not flush stdout",
|
||||
nu_protocol::location!(),
|
||||
)
|
||||
IoError::new_internal(err, "Could not flush stdout", nu_protocol::location!())
|
||||
})?;
|
||||
terminal::enable_raw_mode().map_err(|err| {
|
||||
IoError::new_internal(
|
||||
err.kind(),
|
||||
"Could not enable raw mode",
|
||||
nu_protocol::location!(),
|
||||
)
|
||||
IoError::new_internal(err, "Could not enable raw mode", nu_protocol::location!())
|
||||
})?;
|
||||
|
||||
if config.use_kitty_protocol {
|
||||
@ -114,7 +106,7 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
|
||||
|
||||
loop {
|
||||
let event = crossterm::event::read().map_err(|err| {
|
||||
IoError::new_internal(err.kind(), "Could not read event", nu_protocol::location!())
|
||||
IoError::new_internal(err, "Could not read event", nu_protocol::location!())
|
||||
})?;
|
||||
if event == Event::Key(KeyCode::Esc.into()) {
|
||||
break;
|
||||
@ -136,7 +128,7 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
|
||||
};
|
||||
stdout.queue(crossterm::style::Print(o)).map_err(|err| {
|
||||
IoError::new_internal(
|
||||
err.kind(),
|
||||
err,
|
||||
"Could not print output record",
|
||||
nu_protocol::location!(),
|
||||
)
|
||||
@ -144,14 +136,10 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
|
||||
stdout
|
||||
.queue(crossterm::style::Print("\r\n"))
|
||||
.map_err(|err| {
|
||||
IoError::new_internal(
|
||||
err.kind(),
|
||||
"Could not print linebreak",
|
||||
nu_protocol::location!(),
|
||||
)
|
||||
IoError::new_internal(err, "Could not print linebreak", nu_protocol::location!())
|
||||
})?;
|
||||
stdout.flush().map_err(|err| {
|
||||
IoError::new_internal(err.kind(), "Could not flush", nu_protocol::location!())
|
||||
IoError::new_internal(err, "Could not flush", nu_protocol::location!())
|
||||
})?;
|
||||
}
|
||||
|
||||
@ -163,11 +151,7 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
|
||||
}
|
||||
|
||||
terminal::disable_raw_mode().map_err(|err| {
|
||||
IoError::new_internal(
|
||||
err.kind(),
|
||||
"Could not disable raw mode",
|
||||
nu_protocol::location!(),
|
||||
)
|
||||
IoError::new_internal(err, "Could not disable raw mode", nu_protocol::location!())
|
||||
})?;
|
||||
|
||||
Ok(Value::nothing(Span::unknown()))
|
||||
|
85
crates/nu-cli/src/completions/attribute_completions.rs
Normal file
85
crates/nu-cli/src/completions/attribute_completions.rs
Normal file
@ -0,0 +1,85 @@
|
||||
use super::{SemanticSuggestion, completion_options::NuMatcher};
|
||||
use crate::{
|
||||
SuggestionKind,
|
||||
completions::{Completer, CompletionOptions},
|
||||
};
|
||||
use nu_protocol::{
|
||||
Span,
|
||||
engine::{Stack, StateWorkingSet},
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
|
||||
pub struct AttributeCompletion;
|
||||
pub struct AttributableCompletion;
|
||||
|
||||
impl Completer for AttributeCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
_stack: &Stack,
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let mut matcher = NuMatcher::new(prefix, options);
|
||||
|
||||
let attr_commands =
|
||||
working_set.find_commands_by_predicate(|s| s.starts_with(b"attr "), true);
|
||||
|
||||
for (decl_id, name, desc, ty) in attr_commands {
|
||||
let name = name.strip_prefix(b"attr ").unwrap_or(&name);
|
||||
matcher.add_semantic_suggestion(SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: String::from_utf8_lossy(name).into_owned(),
|
||||
description: desc,
|
||||
span: reedline::Span {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
},
|
||||
append_whitespace: false,
|
||||
..Default::default()
|
||||
},
|
||||
kind: Some(SuggestionKind::Command(ty, Some(decl_id))),
|
||||
});
|
||||
}
|
||||
|
||||
matcher.results()
|
||||
}
|
||||
}
|
||||
|
||||
impl Completer for AttributableCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
_stack: &Stack,
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let mut matcher = NuMatcher::new(prefix, options);
|
||||
|
||||
for s in ["def", "extern", "export def", "export extern"] {
|
||||
let decl_id = working_set
|
||||
.find_decl(s.as_bytes())
|
||||
.expect("internal error, builtin declaration not found");
|
||||
let cmd = working_set.get_decl(decl_id);
|
||||
matcher.add_semantic_suggestion(SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: cmd.name().into(),
|
||||
description: Some(cmd.description().into()),
|
||||
span: reedline::Span {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
},
|
||||
append_whitespace: false,
|
||||
..Default::default()
|
||||
},
|
||||
kind: Some(SuggestionKind::Command(cmd.command_type(), None)),
|
||||
});
|
||||
}
|
||||
|
||||
matcher.results()
|
||||
}
|
||||
}
|
@ -1,7 +1,7 @@
|
||||
use crate::completions::CompletionOptions;
|
||||
use nu_protocol::{
|
||||
DeclId, Span,
|
||||
engine::{Stack, StateWorkingSet},
|
||||
Span,
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
|
||||
@ -12,10 +12,9 @@ pub trait Completer {
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
stack: &Stack,
|
||||
prefix: &[u8],
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
pos: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion>;
|
||||
}
|
||||
@ -29,9 +28,15 @@ pub struct SemanticSuggestion {
|
||||
// TODO: think about name: maybe suggestion context?
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub enum SuggestionKind {
|
||||
Command(nu_protocol::engine::CommandType),
|
||||
Type(nu_protocol::Type),
|
||||
Command(nu_protocol::engine::CommandType, Option<DeclId>),
|
||||
Value(nu_protocol::Type),
|
||||
CellPath,
|
||||
Directory,
|
||||
File,
|
||||
Flag,
|
||||
Module,
|
||||
Operator,
|
||||
Variable,
|
||||
}
|
||||
|
||||
impl From<Suggestion> for SemanticSuggestion {
|
||||
|
153
crates/nu-cli/src/completions/cell_path_completions.rs
Normal file
153
crates/nu-cli/src/completions/cell_path_completions.rs
Normal file
@ -0,0 +1,153 @@
|
||||
use std::borrow::Cow;
|
||||
|
||||
use crate::completions::{Completer, CompletionOptions, SemanticSuggestion, SuggestionKind};
|
||||
use nu_engine::{column::get_columns, eval_variable};
|
||||
use nu_protocol::{
|
||||
ShellError, Span, Value,
|
||||
ast::{Expr, Expression, FullCellPath, PathMember},
|
||||
engine::{Stack, StateWorkingSet},
|
||||
eval_const::eval_constant,
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
|
||||
use super::completion_options::NuMatcher;
|
||||
|
||||
pub struct CellPathCompletion<'a> {
|
||||
pub full_cell_path: &'a FullCellPath,
|
||||
pub position: usize,
|
||||
}
|
||||
|
||||
fn prefix_from_path_member(member: &PathMember, pos: usize) -> (String, Span) {
|
||||
let (prefix_str, start) = match member {
|
||||
PathMember::String { val, span, .. } => (val, span.start),
|
||||
PathMember::Int { val, span, .. } => (&val.to_string(), span.start),
|
||||
};
|
||||
let prefix_str = prefix_str.get(..pos + 1 - start).unwrap_or(prefix_str);
|
||||
// strip wrapping quotes
|
||||
let quotations = ['"', '\'', '`'];
|
||||
let prefix_str = prefix_str.strip_prefix(quotations).unwrap_or(prefix_str);
|
||||
(prefix_str.to_string(), Span::new(start, pos + 1))
|
||||
}
|
||||
|
||||
impl Completer for CellPathCompletion<'_> {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
stack: &Stack,
|
||||
_prefix: impl AsRef<str>,
|
||||
_span: Span,
|
||||
offset: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let mut prefix_str = String::new();
|
||||
// position at dots, e.g. `$env.config.<TAB>`
|
||||
let mut span = Span::new(self.position + 1, self.position + 1);
|
||||
let mut path_member_num_before_pos = 0;
|
||||
for member in self.full_cell_path.tail.iter() {
|
||||
if member.span().end <= self.position {
|
||||
path_member_num_before_pos += 1;
|
||||
} else if member.span().contains(self.position) {
|
||||
(prefix_str, span) = prefix_from_path_member(member, self.position);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let current_span = reedline::Span {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
};
|
||||
|
||||
let mut matcher = NuMatcher::new(prefix_str, options);
|
||||
let path_members = self
|
||||
.full_cell_path
|
||||
.tail
|
||||
.get(0..path_member_num_before_pos)
|
||||
.unwrap_or_default();
|
||||
let value = eval_cell_path(
|
||||
working_set,
|
||||
stack,
|
||||
&self.full_cell_path.head,
|
||||
path_members,
|
||||
span,
|
||||
)
|
||||
.unwrap_or_default();
|
||||
|
||||
for suggestion in get_suggestions_by_value(&value, current_span) {
|
||||
matcher.add_semantic_suggestion(suggestion);
|
||||
}
|
||||
matcher.results()
|
||||
}
|
||||
}
|
||||
|
||||
/// Follow cell path to get the value
|
||||
/// NOTE: This is a relatively lightweight implementation,
|
||||
/// so it may fail to get the exact value when the expression is complicated.
|
||||
/// One failing example would be `[$foo].0`
|
||||
pub(crate) fn eval_cell_path(
|
||||
working_set: &StateWorkingSet,
|
||||
stack: &Stack,
|
||||
head: &Expression,
|
||||
path_members: &[PathMember],
|
||||
span: Span,
|
||||
) -> Result<Value, ShellError> {
|
||||
// evaluate the head expression to get its value
|
||||
let head_value = if let Expr::Var(var_id) = head.expr {
|
||||
working_set
|
||||
.get_variable(var_id)
|
||||
.const_val
|
||||
.to_owned()
|
||||
.map_or_else(
|
||||
|| eval_variable(working_set.permanent_state, stack, var_id, span),
|
||||
Ok,
|
||||
)
|
||||
} else {
|
||||
eval_constant(working_set, head)
|
||||
}?;
|
||||
head_value
|
||||
.follow_cell_path(path_members)
|
||||
.map(Cow::into_owned)
|
||||
}
|
||||
|
||||
fn get_suggestions_by_value(
|
||||
value: &Value,
|
||||
current_span: reedline::Span,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let to_suggestion = |s: String, v: Option<&Value>| {
|
||||
// Check if the string needs quoting
|
||||
let value = if s.is_empty()
|
||||
|| s.chars()
|
||||
.any(|c: char| !(c.is_ascii_alphabetic() || ['_', '-'].contains(&c)))
|
||||
{
|
||||
format!("{s:?}")
|
||||
} else {
|
||||
s
|
||||
};
|
||||
|
||||
SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value,
|
||||
span: current_span,
|
||||
description: v.map(|v| v.get_type().to_string()),
|
||||
..Suggestion::default()
|
||||
},
|
||||
kind: Some(SuggestionKind::CellPath),
|
||||
}
|
||||
};
|
||||
match value {
|
||||
Value::Record { val, .. } => val
|
||||
.columns()
|
||||
.map(|s| to_suggestion(s.to_string(), val.get(s)))
|
||||
.collect(),
|
||||
Value::List { vals, .. } => get_columns(vals.as_slice())
|
||||
.into_iter()
|
||||
.map(|s| {
|
||||
let sub_val = vals
|
||||
.first()
|
||||
.and_then(|v| v.as_record().ok())
|
||||
.and_then(|rv| rv.get(&s));
|
||||
to_suggestion(s, sub_val)
|
||||
})
|
||||
.collect(),
|
||||
_ => vec![],
|
||||
}
|
||||
}
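For reference, the quoting rule inside `to_suggestion` above, restated as a standalone sketch (hypothetical helper, not part of the diff): a member name is suggested bare only when every character is an ASCII letter, `_`, or `-`; empty names and anything containing digits, spaces, or punctuation are emitted in quoted Debug form so the completed cell path still parses.

fn suggested_form(name: &str) -> String {
    let needs_quotes = name.is_empty()
        || name
            .chars()
            .any(|c| !(c.is_ascii_alphabetic() || ['_', '-'].contains(&c)));
    if needs_quotes {
        // Debug formatting adds the surrounding quotes and escapes as needed
        format!("{name:?}")
    } else {
        name.to_string()
    }
}

fn main() {
    assert_eq!(suggested_form("show_banner"), "show_banner");
    assert_eq!(suggested_form("2col"), "\"2col\"");
    assert_eq!(suggested_form("a b"), "\"a b\"");
}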
|
@ -1,37 +1,25 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use crate::{
|
||||
completions::{Completer, CompletionOptions},
|
||||
SuggestionKind,
|
||||
completions::{Completer, CompletionOptions},
|
||||
};
|
||||
use nu_parser::FlatShape;
|
||||
use nu_protocol::{
|
||||
engine::{CachedFile, Stack, StateWorkingSet},
|
||||
Span,
|
||||
engine::{CommandType, Stack, StateWorkingSet},
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
|
||||
use super::{completion_options::NuMatcher, SemanticSuggestion};
|
||||
use super::{SemanticSuggestion, completion_options::NuMatcher};
|
||||
|
||||
pub struct CommandCompletion {
|
||||
flattened: Vec<(Span, FlatShape)>,
|
||||
flat_shape: FlatShape,
|
||||
force_completion_after_space: bool,
|
||||
/// Whether to include internal commands
|
||||
pub internals: bool,
|
||||
/// Whether to include external commands
|
||||
pub externals: bool,
|
||||
}
|
||||
|
||||
impl CommandCompletion {
|
||||
pub fn new(
|
||||
flattened: Vec<(Span, FlatShape)>,
|
||||
flat_shape: FlatShape,
|
||||
force_completion_after_space: bool,
|
||||
) -> Self {
|
||||
Self {
|
||||
flattened,
|
||||
flat_shape,
|
||||
force_completion_after_space,
|
||||
}
|
||||
}
|
||||
|
||||
fn external_command_completion(
|
||||
&self,
|
||||
working_set: &StateWorkingSet,
|
||||
@ -64,13 +52,16 @@ impl CommandCompletion {
|
||||
continue;
|
||||
};
|
||||
let value = if matched_internal(&name) {
|
||||
format!("^{}", name)
|
||||
format!("^{name}")
|
||||
} else {
|
||||
name.clone()
|
||||
};
|
||||
if suggs.contains_key(&value) {
|
||||
continue;
|
||||
}
|
||||
// TODO: for performance, check name matching before the relatively heavy
// IO of `is_executable`; should also avoid duplicated `match_aux` calls
// for matched items in the future
|
||||
if matcher.matches(&name) && is_executable::is_executable(item.path()) {
|
||||
// If there's an internal command with the same name, adds ^cmd to the
|
||||
// matcher so that both the internal and external command are included
|
||||
@ -84,8 +75,10 @@ impl CommandCompletion {
|
||||
append_whitespace: true,
|
||||
..Default::default()
|
||||
},
|
||||
// TODO: is there a way to create a test?
|
||||
kind: None,
|
||||
kind: Some(SuggestionKind::Command(
|
||||
CommandType::External,
|
||||
None,
|
||||
)),
|
||||
},
|
||||
);
|
||||
}
|
||||
@ -97,46 +90,50 @@ impl CommandCompletion {
|
||||
|
||||
suggs
|
||||
}
|
||||
}
|
||||
|
||||
fn complete_commands(
|
||||
&self,
|
||||
impl Completer for CommandCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
_stack: &Stack,
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
find_externals: bool,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let partial = working_set.get_span_contents(span);
|
||||
let mut matcher = NuMatcher::new(String::from_utf8_lossy(partial), options.clone());
|
||||
let mut matcher = NuMatcher::new(prefix, options);
|
||||
|
||||
let sugg_span = reedline::Span::new(span.start - offset, span.end - offset);
|
||||
|
||||
let mut internal_suggs = HashMap::new();
|
||||
let filtered_commands = working_set.find_commands_by_predicate(
|
||||
|name| {
|
||||
let name = String::from_utf8_lossy(name);
|
||||
matcher.add(&name, name.to_string())
|
||||
},
|
||||
true,
|
||||
);
|
||||
for (name, description, typ) in filtered_commands {
|
||||
let name = String::from_utf8_lossy(&name);
|
||||
internal_suggs.insert(
|
||||
name.to_string(),
|
||||
SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: name.to_string(),
|
||||
description,
|
||||
span: sugg_span,
|
||||
append_whitespace: true,
|
||||
..Suggestion::default()
|
||||
},
|
||||
kind: Some(SuggestionKind::Command(typ)),
|
||||
if self.internals {
|
||||
let filtered_commands = working_set.find_commands_by_predicate(
|
||||
|name| {
|
||||
let name = String::from_utf8_lossy(name);
|
||||
matcher.add(&name, name.to_string())
|
||||
},
|
||||
true,
|
||||
);
|
||||
for (decl_id, name, description, typ) in filtered_commands {
|
||||
let name = String::from_utf8_lossy(&name);
|
||||
internal_suggs.insert(
|
||||
name.to_string(),
|
||||
SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: name.to_string(),
|
||||
description,
|
||||
span: sugg_span,
|
||||
append_whitespace: true,
|
||||
..Suggestion::default()
|
||||
},
|
||||
kind: Some(SuggestionKind::Command(typ, Some(decl_id))),
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let mut external_suggs = if find_externals {
|
||||
let mut external_suggs = if self.externals {
|
||||
self.external_command_completion(
|
||||
working_set,
|
||||
sugg_span,
|
||||
@ -159,179 +156,3 @@ impl CommandCompletion {
|
||||
res
|
||||
}
|
||||
}
|
||||
|
||||
impl Completer for CommandCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
_stack: &Stack,
|
||||
_prefix: &[u8],
|
||||
span: Span,
|
||||
offset: usize,
|
||||
pos: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let last = self
|
||||
.flattened
|
||||
.iter()
|
||||
.rev()
|
||||
.skip_while(|x| x.0.end > pos)
|
||||
.take_while(|x| {
|
||||
matches!(
|
||||
x.1,
|
||||
FlatShape::InternalCall(_)
|
||||
| FlatShape::External
|
||||
| FlatShape::ExternalArg
|
||||
| FlatShape::Literal
|
||||
| FlatShape::String
|
||||
)
|
||||
})
|
||||
.last();
|
||||
|
||||
// The last item here would be the earliest shape that could possibly be part of this subcommand
|
||||
let subcommands = if let Some(last) = last {
|
||||
self.complete_commands(
|
||||
working_set,
|
||||
Span::new(last.0.start, pos),
|
||||
offset,
|
||||
false,
|
||||
options,
|
||||
)
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
|
||||
if !subcommands.is_empty() {
|
||||
return subcommands;
|
||||
}
|
||||
|
||||
let config = working_set.get_config();
|
||||
if matches!(self.flat_shape, nu_parser::FlatShape::External)
|
||||
|| matches!(self.flat_shape, nu_parser::FlatShape::InternalCall(_))
|
||||
|| ((span.end - span.start) == 0)
|
||||
|| is_passthrough_command(working_set.delta.get_file_contents())
|
||||
{
|
||||
// we're in a gap or at a command
|
||||
if working_set.get_span_contents(span).is_empty() && !self.force_completion_after_space
|
||||
{
|
||||
return vec![];
|
||||
}
|
||||
self.complete_commands(
|
||||
working_set,
|
||||
span,
|
||||
offset,
|
||||
config.completions.external.enable,
|
||||
options,
|
||||
)
|
||||
} else {
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn find_non_whitespace_index(contents: &[u8], start: usize) -> usize {
|
||||
match contents.get(start..) {
|
||||
Some(contents) => {
|
||||
contents
|
||||
.iter()
|
||||
.take_while(|x| x.is_ascii_whitespace())
|
||||
.count()
|
||||
+ start
|
||||
}
|
||||
None => start,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_passthrough_command(working_set_file_contents: &[CachedFile]) -> bool {
|
||||
for cached_file in working_set_file_contents {
|
||||
let contents = &cached_file.content;
|
||||
let last_pipe_pos_rev = contents.iter().rev().position(|x| x == &b'|');
|
||||
let last_pipe_pos = last_pipe_pos_rev.map(|x| contents.len() - x).unwrap_or(0);
|
||||
|
||||
let cur_pos = find_non_whitespace_index(contents, last_pipe_pos);
|
||||
|
||||
let result = match contents.get(cur_pos..) {
|
||||
Some(contents) => contents.starts_with(b"sudo ") || contents.starts_with(b"doas "),
|
||||
None => false,
|
||||
};
|
||||
if result {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod command_completions_tests {
|
||||
use super::*;
|
||||
use nu_protocol::engine::EngineState;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[test]
|
||||
fn test_find_non_whitespace_index() {
|
||||
let commands = [
|
||||
(" hello", 4),
|
||||
("sudo ", 0),
|
||||
(" sudo ", 2),
|
||||
(" sudo ", 2),
|
||||
(" hello ", 1),
|
||||
(" hello ", 3),
|
||||
(" hello | sudo ", 4),
|
||||
(" sudo|sudo", 5),
|
||||
("sudo | sudo ", 0),
|
||||
(" hello sud", 1),
|
||||
];
|
||||
for (idx, ele) in commands.iter().enumerate() {
|
||||
let index = find_non_whitespace_index(ele.0.as_bytes(), 0);
|
||||
assert_eq!(index, ele.1, "Failed on index {}", idx);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_last_command_passthrough() {
|
||||
let commands = [
|
||||
(" hello", false),
|
||||
(" sudo ", true),
|
||||
("sudo ", true),
|
||||
(" hello", false),
|
||||
(" sudo", false),
|
||||
(" sudo ", true),
|
||||
(" sudo ", true),
|
||||
(" sudo ", true),
|
||||
(" hello ", false),
|
||||
(" hello | sudo ", true),
|
||||
(" sudo|sudo", false),
|
||||
("sudo | sudo ", true),
|
||||
(" hello sud", false),
|
||||
(" sudo | sud ", false),
|
||||
(" sudo|sudo ", true),
|
||||
(" sudo | sudo ls | sudo ", true),
|
||||
];
|
||||
for (idx, ele) in commands.iter().enumerate() {
|
||||
let input = ele.0.as_bytes();
|
||||
|
||||
let mut engine_state = EngineState::new();
|
||||
engine_state.add_file("test.nu".into(), Arc::new([]));
|
||||
|
||||
let delta = {
|
||||
let mut working_set = StateWorkingSet::new(&engine_state);
|
||||
let _ = working_set.add_file("child.nu".into(), input);
|
||||
working_set.render()
|
||||
};
|
||||
|
||||
let result = engine_state.merge_delta(delta);
|
||||
assert!(
|
||||
result.is_ok(),
|
||||
"Merge delta has failed: {}",
|
||||
result.err().unwrap()
|
||||
);
|
||||
|
||||
let is_passthrough_command = is_passthrough_command(engine_state.get_file_contents());
|
||||
assert_eq!(
|
||||
is_passthrough_command, ele.1,
|
||||
"index for '{}': {}",
|
||||
ele.0, idx
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
File diff suppressed because it is too large
@ -1,16 +1,16 @@
|
||||
use super::{completion_options::NuMatcher, MatchAlgorithm};
|
||||
use super::{MatchAlgorithm, completion_options::NuMatcher};
|
||||
use crate::completions::CompletionOptions;
|
||||
use nu_ansi_term::Style;
|
||||
use nu_engine::env_to_string;
|
||||
use nu_path::dots::expand_ndots;
|
||||
use nu_path::{expand_to_real_path, home_dir};
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
Span,
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
};
|
||||
use nu_utils::get_ls_colors;
|
||||
use nu_utils::IgnoreCaseExt;
|
||||
use std::path::{is_separator, Component, Path, PathBuf, MAIN_SEPARATOR as SEP};
|
||||
use nu_utils::get_ls_colors;
|
||||
use std::path::{Component, MAIN_SEPARATOR as SEP, Path, PathBuf, is_separator};
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct PathBuiltFromString {
|
||||
@ -22,21 +22,27 @@ pub struct PathBuiltFromString {
|
||||
/// Recursively goes through paths that match a given `partial`.
|
||||
/// built: State struct for a valid matching path built so far.
|
||||
///
|
||||
/// `want_directory`: Whether we want only directories as completion matches.
|
||||
/// Some commands like `cd` can only be run on directories whereas others
|
||||
/// like `ls` can be run on regular files as well.
|
||||
///
|
||||
/// `isdir`: whether the current partial path has a trailing slash.
|
||||
/// Parsing a path string into a pathbuf loses that bit of information.
|
||||
///
|
||||
/// want_directory: Whether we want only directories as completion matches.
|
||||
/// Some commands like `cd` can only be run on directories whereas others
|
||||
/// like `ls` can be run on regular files as well.
|
||||
/// `enable_exact_match`: Whether match algorithm is Prefix and all previous components
|
||||
/// of the path matched a directory exactly.
|
||||
fn complete_rec(
|
||||
partial: &[&str],
|
||||
built_paths: &[PathBuiltFromString],
|
||||
options: &CompletionOptions,
|
||||
want_directory: bool,
|
||||
isdir: bool,
|
||||
enable_exact_match: bool,
|
||||
) -> Vec<PathBuiltFromString> {
|
||||
let has_more = !partial.is_empty() && (partial.len() > 1 || isdir);
|
||||
|
||||
if let Some((&base, rest)) = partial.split_first() {
|
||||
if base.chars().all(|c| c == '.') && (isdir || !rest.is_empty()) {
|
||||
if base.chars().all(|c| c == '.') && has_more {
|
||||
let built_paths: Vec<_> = built_paths
|
||||
.iter()
|
||||
.map(|built| {
|
||||
@ -46,13 +52,23 @@ fn complete_rec(
|
||||
built
|
||||
})
|
||||
.collect();
|
||||
return complete_rec(rest, &built_paths, options, want_directory, isdir);
|
||||
return complete_rec(
|
||||
rest,
|
||||
&built_paths,
|
||||
options,
|
||||
want_directory,
|
||||
isdir,
|
||||
enable_exact_match,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let prefix = partial.first().unwrap_or(&"");
|
||||
let mut matcher = NuMatcher::new(prefix, options.clone());
|
||||
let mut matcher = NuMatcher::new(prefix, options);
|
||||
|
||||
let mut exact_match = None;
|
||||
// Only relevant for case insensitive matching
|
||||
let mut multiple_exact_matches = false;
|
||||
for built in built_paths {
|
||||
let mut path = built.cwd.clone();
|
||||
for part in &built.parts {
|
||||
@ -65,55 +81,63 @@ fn complete_rec(
|
||||
|
||||
for entry in result.filter_map(|e| e.ok()) {
|
||||
let entry_name = entry.file_name().to_string_lossy().into_owned();
|
||||
let entry_isdir = entry.path().is_dir() && !entry.path().is_symlink();
|
||||
let entry_isdir = entry.path().is_dir();
|
||||
let mut built = built.clone();
|
||||
built.parts.push(entry_name.clone());
|
||||
built.isdir = entry_isdir;
|
||||
// Symlinks to directories shouldn't have a trailing slash (#13275)
|
||||
built.isdir = entry_isdir && !entry.path().is_symlink();
|
||||
|
||||
if !want_directory || entry_isdir {
|
||||
matcher.add(entry_name.clone(), (entry_name, built));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut completions = vec![];
|
||||
for (entry_name, built) in matcher.results() {
|
||||
match partial.split_first() {
|
||||
Some((base, rest)) => {
|
||||
// We use `isdir` to confirm that the current component has
|
||||
// at least one next component or a slash.
|
||||
// Serves as confirmation to ignore longer completions for
|
||||
// components in between.
|
||||
if !rest.is_empty() || isdir {
|
||||
completions.extend(complete_rec(
|
||||
rest,
|
||||
&[built],
|
||||
options,
|
||||
want_directory,
|
||||
isdir,
|
||||
));
|
||||
} else {
|
||||
completions.push(built);
|
||||
}
|
||||
|
||||
// For https://github.com/nushell/nushell/issues/13204
|
||||
if isdir && options.match_algorithm == MatchAlgorithm::Prefix {
|
||||
let exact_match = if options.case_sensitive {
|
||||
entry_name.eq(base)
|
||||
if enable_exact_match && !multiple_exact_matches && has_more {
|
||||
let matches = if options.case_sensitive {
|
||||
entry_name.eq(prefix)
|
||||
} else {
|
||||
entry_name.to_folded_case().eq(&base.to_folded_case())
|
||||
entry_name.eq_ignore_case(prefix)
|
||||
};
|
||||
if exact_match {
|
||||
break;
|
||||
if matches {
|
||||
if exact_match.is_none() {
|
||||
exact_match = Some(built.clone());
|
||||
} else {
|
||||
multiple_exact_matches = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
None => {
|
||||
completions.push(built);
|
||||
|
||||
matcher.add(entry_name, built);
|
||||
}
|
||||
}
|
||||
}
|
||||
completions
|
||||
|
||||
// Don't show longer completions if we have a single exact match (#13204, #14794)
|
||||
if !multiple_exact_matches {
|
||||
if let Some(built) = exact_match {
|
||||
return complete_rec(
|
||||
&partial[1..],
|
||||
&[built],
|
||||
options,
|
||||
want_directory,
|
||||
isdir,
|
||||
true,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if has_more {
|
||||
let mut completions = vec![];
|
||||
for built in matcher.results() {
|
||||
completions.extend(complete_rec(
|
||||
&partial[1..],
|
||||
&[built],
|
||||
options,
|
||||
want_directory,
|
||||
isdir,
|
||||
false,
|
||||
));
|
||||
}
|
||||
completions
|
||||
} else {
|
||||
matcher.results()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@ -139,7 +163,7 @@ impl OriginalCwd {
|
||||
}
|
||||
}
|
||||
|
||||
fn surround_remove(partial: &str) -> String {
|
||||
pub fn surround_remove(partial: &str) -> String {
|
||||
for c in ['`', '"', '\''] {
|
||||
if partial.starts_with(c) {
|
||||
let ret = partial.strip_prefix(c).unwrap_or(partial);
|
||||
@ -157,6 +181,7 @@ pub struct FileSuggestion {
|
||||
pub span: nu_protocol::Span,
|
||||
pub path: String,
|
||||
pub style: Option<Style>,
|
||||
pub is_dir: bool,
|
||||
}
|
||||
|
||||
/// # Parameters
|
||||
@ -197,10 +222,9 @@ pub fn complete_item(
|
||||
let ls_colors = (engine_state.config.completions.use_ls_colors
|
||||
&& engine_state.config.use_ansi_coloring.get(engine_state))
|
||||
.then(|| {
|
||||
let ls_colors_env_str = match stack.get_env_var(engine_state, "LS_COLORS") {
|
||||
Some(v) => env_to_string("LS_COLORS", v, engine_state, stack).ok(),
|
||||
None => None,
|
||||
};
|
||||
let ls_colors_env_str = stack
|
||||
.get_env_var(engine_state, "LS_COLORS")
|
||||
.and_then(|v| env_to_string("LS_COLORS", v, engine_state, stack).ok());
|
||||
get_ls_colors(ls_colors_env_str)
|
||||
});
|
||||
|
||||
@ -254,57 +278,57 @@ pub fn complete_item(
|
||||
options,
|
||||
want_directory,
|
||||
isdir,
|
||||
options.match_algorithm == MatchAlgorithm::Prefix,
|
||||
)
|
||||
.into_iter()
|
||||
.map(|mut p| {
|
||||
if should_collapse_dots {
|
||||
p = collapse_ndots(p);
|
||||
}
|
||||
let is_dir = p.isdir;
|
||||
let path = original_cwd.apply(p, path_separator);
|
||||
let real_path = expand_to_real_path(&path);
|
||||
let metadata = std::fs::symlink_metadata(&real_path).ok();
|
||||
let style = ls_colors.as_ref().map(|lsc| {
|
||||
lsc.style_for_path_with_metadata(
|
||||
&path,
|
||||
std::fs::symlink_metadata(expand_to_real_path(&path))
|
||||
.ok()
|
||||
.as_ref(),
|
||||
)
|
||||
.map(lscolors::Style::to_nu_ansi_term_style)
|
||||
.unwrap_or_default()
|
||||
lsc.style_for_path_with_metadata(&real_path, metadata.as_ref())
|
||||
.map(lscolors::Style::to_nu_ansi_term_style)
|
||||
.unwrap_or_default()
|
||||
});
|
||||
FileSuggestion {
|
||||
span,
|
||||
path: escape_path(path, want_directory),
|
||||
path: escape_path(path),
|
||||
style,
|
||||
is_dir,
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
// Fix files or folders with quotes or hashes
|
||||
pub fn escape_path(path: String, dir: bool) -> String {
|
||||
pub fn escape_path(path: String) -> String {
|
||||
// make glob pattern have the highest priority.
|
||||
if nu_glob::is_glob(path.as_str()) {
|
||||
if nu_glob::is_glob(path.as_str()) || path.contains('`') {
|
||||
// expand home `~` for https://github.com/nushell/nushell/issues/13905
|
||||
let pathbuf = nu_path::expand_tilde(path);
|
||||
let path = pathbuf.to_string_lossy();
|
||||
return if path.contains('\'') {
|
||||
// decide to use double quote, also need to escape `"` in path
|
||||
// or else users can't do anything with completed path either.
|
||||
format!("\"{}\"", path.replace('"', r#"\""#))
|
||||
if path.contains('\'') {
|
||||
// decide to use double quotes
|
||||
// Path as Debug will do the escaping for `"`, `\`
|
||||
format!("{path:?}")
|
||||
} else {
|
||||
format!("'{path}'")
|
||||
};
|
||||
}
|
||||
|
||||
let filename_contaminated = !dir && path.contains(['\'', '"', ' ', '#', '(', ')']);
|
||||
let dirname_contaminated = dir && path.contains(['\'', '"', ' ', '#']);
|
||||
let maybe_flag = path.starts_with('-');
|
||||
let maybe_variable = path.starts_with('$');
|
||||
let maybe_number = path.parse::<f64>().is_ok();
|
||||
if filename_contaminated || dirname_contaminated || maybe_flag || maybe_variable || maybe_number
|
||||
{
|
||||
format!("`{path}`")
|
||||
}
|
||||
} else {
|
||||
path
|
||||
let contaminated =
|
||||
path.contains(['\'', '"', ' ', '#', '(', ')', '{', '}', '[', ']', '|', ';']);
|
||||
let maybe_flag = path.starts_with('-');
|
||||
let maybe_variable = path.starts_with('$');
|
||||
let maybe_number = path.parse::<f64>().is_ok();
|
||||
if contaminated || maybe_flag || maybe_variable || maybe_number {
|
||||
format!("`{path}`")
|
||||
} else {
|
||||
path
|
||||
}
|
||||
}
|
||||
}
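A dependency-free sketch of the quoting decision in the new `escape_path` above; `nu_glob::is_glob` and the `~` expansion are stubbed with a naive check, so treat this as an illustration of the rule rather than the real implementation.

fn escape_path_sketch(path: String) -> String {
    // stand-in for nu_glob::is_glob; the real code also expands `~` first
    let looks_like_glob = path.contains(['*', '?', '[']);
    if looks_like_glob || path.contains('`') {
        return if path.contains('\'') {
            // use double quotes; the Debug formatter escapes `"` and `\`
            format!("{path:?}")
        } else {
            format!("'{path}'")
        };
    }
    let contaminated =
        path.contains(['\'', '"', ' ', '#', '(', ')', '{', '}', '[', ']', '|', ';']);
    let maybe_flag = path.starts_with('-');
    let maybe_variable = path.starts_with('$');
    let maybe_number = path.parse::<f64>().is_ok();
    if contaminated || maybe_flag || maybe_variable || maybe_number {
        format!("`{path}`")
    } else {
        path
    }
}

fn main() {
    assert_eq!(escape_path_sketch("src/main.rs".into()), "src/main.rs");
    assert_eq!(escape_path_sketch("my file.txt".into()), "`my file.txt`");
    assert_eq!(escape_path_sketch("foo*.txt".into()), "'foo*.txt'");
}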
|
||||
|
||||
@ -315,12 +339,12 @@ pub struct AdjustView {
|
||||
}
|
||||
|
||||
pub fn adjust_if_intermediate(
|
||||
prefix: &[u8],
|
||||
prefix: &str,
|
||||
working_set: &StateWorkingSet,
|
||||
mut span: nu_protocol::Span,
|
||||
) -> AdjustView {
|
||||
let span_contents = String::from_utf8_lossy(working_set.get_span_contents(span)).to_string();
|
||||
let mut prefix = String::from_utf8_lossy(prefix).to_string();
|
||||
let mut prefix = prefix.to_string();
|
||||
|
||||
// A difference of 1 because of the cursor's unicode code point in between.
|
||||
// Using .chars().count() because unicode and Windows.
|
||||
|
@ -2,8 +2,8 @@ use nu_parser::trim_quotes_str;
|
||||
use nu_protocol::{CompletionAlgorithm, CompletionSort};
|
||||
use nu_utils::IgnoreCaseExt;
|
||||
use nucleo_matcher::{
|
||||
pattern::{Atom, AtomKind, CaseMatching, Normalization},
|
||||
Config, Matcher, Utf32Str,
|
||||
pattern::{Atom, AtomKind, CaseMatching, Normalization},
|
||||
};
|
||||
use std::{borrow::Cow, fmt::Display};
|
||||
|
||||
@ -18,6 +18,12 @@ pub enum MatchAlgorithm {
|
||||
/// "git switch" is matched by "git sw"
|
||||
Prefix,
|
||||
|
||||
/// Only show suggestions which have a substring matching with the given input
|
||||
///
|
||||
/// Example:
|
||||
/// "git checkout" is matched by "checkout"
|
||||
Substring,
|
||||
|
||||
/// Only show suggestions which contain the input chars at any place
|
||||
///
|
||||
/// Example:
|
||||
@ -25,8 +31,8 @@ pub enum MatchAlgorithm {
|
||||
Fuzzy,
|
||||
}
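To make the three variants concrete, here is a naive, dependency-free approximation of what each algorithm accepts, consistent with the doc examples above and the #[case] table later in this file; the real fuzzy matching goes through nucleo_matcher with scoring, not this in-order character scan.

fn naive_match(algorithm: &str, haystack: &str, needle: &str) -> bool {
    match algorithm {
        "prefix" => haystack.starts_with(needle),
        "substring" => haystack.contains(needle),
        // fuzzy: needle characters must appear in order, but not necessarily adjacent
        "fuzzy" => {
            let mut rest = haystack.chars();
            needle.chars().all(|n| rest.by_ref().any(|h| h == n))
        }
        _ => false,
    }
}

fn main() {
    assert!(naive_match("prefix", "example text", "examp"));
    assert!(!naive_match("prefix", "example text", "text"));
    assert!(naive_match("substring", "example text", "text"));
    assert!(naive_match("fuzzy", "example text", "ext"));
}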
|
||||
|
||||
pub struct NuMatcher<T> {
|
||||
options: CompletionOptions,
|
||||
pub struct NuMatcher<'a, T> {
|
||||
options: &'a CompletionOptions,
|
||||
needle: String,
|
||||
state: State<T>,
|
||||
}
|
||||
@ -36,6 +42,10 @@ enum State<T> {
|
||||
/// Holds (haystack, item)
|
||||
items: Vec<(String, T)>,
|
||||
},
|
||||
Substring {
|
||||
/// Holds (haystack, item)
|
||||
items: Vec<(String, T)>,
|
||||
},
|
||||
Fuzzy {
|
||||
matcher: Matcher,
|
||||
atom: Atom,
|
||||
@ -45,11 +55,11 @@ enum State<T> {
|
||||
}
|
||||
|
||||
/// Filters and sorts suggestions
|
||||
impl<T> NuMatcher<T> {
|
||||
impl<T> NuMatcher<'_, T> {
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `needle` - The text to search for
|
||||
pub fn new(needle: impl AsRef<str>, options: CompletionOptions) -> NuMatcher<T> {
|
||||
pub fn new(needle: impl AsRef<str>, options: &CompletionOptions) -> NuMatcher<T> {
|
||||
let needle = trim_quotes_str(needle.as_ref());
|
||||
match options.match_algorithm {
|
||||
MatchAlgorithm::Prefix => {
|
||||
@ -64,6 +74,18 @@ impl<T> NuMatcher<T> {
|
||||
state: State::Prefix { items: Vec::new() },
|
||||
}
|
||||
}
|
||||
MatchAlgorithm::Substring => {
|
||||
let lowercase_needle = if options.case_sensitive {
|
||||
needle.to_owned()
|
||||
} else {
|
||||
needle.to_folded_case()
|
||||
};
|
||||
NuMatcher {
|
||||
options,
|
||||
needle: lowercase_needle,
|
||||
state: State::Substring { items: Vec::new() },
|
||||
}
|
||||
}
|
||||
MatchAlgorithm::Fuzzy => {
|
||||
let atom = Atom::new(
|
||||
needle,
|
||||
@ -80,7 +102,11 @@ impl<T> NuMatcher<T> {
|
||||
options,
|
||||
needle: needle.to_owned(),
|
||||
state: State::Fuzzy {
|
||||
matcher: Matcher::new(Config::DEFAULT),
|
||||
matcher: Matcher::new({
|
||||
let mut cfg = Config::DEFAULT;
|
||||
cfg.prefer_prefix = true;
|
||||
cfg
|
||||
}),
|
||||
atom,
|
||||
items: Vec::new(),
|
||||
},
|
||||
@ -102,11 +128,21 @@ impl<T> NuMatcher<T> {
|
||||
} else {
|
||||
Cow::Owned(haystack.to_folded_case())
|
||||
};
|
||||
let matches = if self.options.positional {
|
||||
haystack_folded.starts_with(self.needle.as_str())
|
||||
let matches = haystack_folded.starts_with(self.needle.as_str());
|
||||
if matches {
|
||||
if let Some(item) = item {
|
||||
items.push((haystack.to_string(), item));
|
||||
}
|
||||
}
|
||||
matches
|
||||
}
|
||||
State::Substring { items } => {
|
||||
let haystack_folded = if self.options.case_sensitive {
|
||||
Cow::Borrowed(haystack)
|
||||
} else {
|
||||
haystack_folded.contains(self.needle.as_str())
|
||||
Cow::Owned(haystack.to_folded_case())
|
||||
};
|
||||
let matches = haystack_folded.contains(self.needle.as_str());
|
||||
if matches {
|
||||
if let Some(item) = item {
|
||||
items.push((haystack.to_string(), item));
|
||||
@ -148,7 +184,7 @@ impl<T> NuMatcher<T> {
|
||||
/// Get all the items that matched (sorted)
|
||||
pub fn results(self) -> Vec<T> {
|
||||
match self.state {
|
||||
State::Prefix { mut items, .. } => {
|
||||
State::Prefix { mut items, .. } | State::Substring { mut items, .. } => {
|
||||
items.sort_by(|(haystack1, _), (haystack2, _)| {
|
||||
let cmp_sensitive = haystack1.cmp(haystack2);
|
||||
if self.options.case_sensitive {
|
||||
@ -184,7 +220,7 @@ impl<T> NuMatcher<T> {
|
||||
}
|
||||
}
|
||||
|
||||
impl NuMatcher<SemanticSuggestion> {
|
||||
impl NuMatcher<'_, SemanticSuggestion> {
|
||||
pub fn add_semantic_suggestion(&mut self, sugg: SemanticSuggestion) -> bool {
|
||||
let value = sugg.suggestion.value.to_string();
|
||||
self.add(value, sugg)
|
||||
@ -195,6 +231,7 @@ impl From<CompletionAlgorithm> for MatchAlgorithm {
|
||||
fn from(value: CompletionAlgorithm) -> Self {
|
||||
match value {
|
||||
CompletionAlgorithm::Prefix => MatchAlgorithm::Prefix,
|
||||
CompletionAlgorithm::Substring => MatchAlgorithm::Substring,
|
||||
CompletionAlgorithm::Fuzzy => MatchAlgorithm::Fuzzy,
|
||||
}
|
||||
}
|
||||
@ -206,6 +243,7 @@ impl TryFrom<String> for MatchAlgorithm {
|
||||
fn try_from(value: String) -> Result<Self, Self::Error> {
|
||||
match value.as_str() {
|
||||
"prefix" => Ok(Self::Prefix),
|
||||
"substring" => Ok(Self::Substring),
|
||||
"fuzzy" => Ok(Self::Fuzzy),
|
||||
_ => Err(InvalidMatchAlgorithm::Unknown),
|
||||
}
|
||||
@ -230,7 +268,6 @@ impl std::error::Error for InvalidMatchAlgorithm {}
|
||||
#[derive(Clone)]
|
||||
pub struct CompletionOptions {
|
||||
pub case_sensitive: bool,
|
||||
pub positional: bool,
|
||||
pub match_algorithm: MatchAlgorithm,
|
||||
pub sort: CompletionSort,
|
||||
}
|
||||
@ -239,7 +276,6 @@ impl Default for CompletionOptions {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
case_sensitive: true,
|
||||
positional: true,
|
||||
match_algorithm: MatchAlgorithm::Prefix,
|
||||
sort: Default::default(),
|
||||
}
|
||||
@ -256,6 +292,9 @@ mod test {
|
||||
#[case(MatchAlgorithm::Prefix, "example text", "", true)]
|
||||
#[case(MatchAlgorithm::Prefix, "example text", "examp", true)]
|
||||
#[case(MatchAlgorithm::Prefix, "example text", "text", false)]
|
||||
#[case(MatchAlgorithm::Substring, "example text", "", true)]
|
||||
#[case(MatchAlgorithm::Substring, "example text", "text", true)]
|
||||
#[case(MatchAlgorithm::Substring, "example text", "mplxt", false)]
|
||||
#[case(MatchAlgorithm::Fuzzy, "example text", "", true)]
|
||||
#[case(MatchAlgorithm::Fuzzy, "example text", "examp", true)]
|
||||
#[case(MatchAlgorithm::Fuzzy, "example text", "ext", true)]
|
||||
@ -271,7 +310,7 @@ mod test {
|
||||
match_algorithm,
|
||||
..Default::default()
|
||||
};
|
||||
let mut matcher = NuMatcher::new(needle, options);
|
||||
let mut matcher = NuMatcher::new(needle, &options);
|
||||
matcher.add(haystack, haystack);
|
||||
if should_match {
|
||||
assert_eq!(vec![haystack], matcher.results());
|
||||
@ -286,7 +325,7 @@ mod test {
|
||||
match_algorithm: MatchAlgorithm::Fuzzy,
|
||||
..Default::default()
|
||||
};
|
||||
let mut matcher = NuMatcher::new("fob", options);
|
||||
let mut matcher = NuMatcher::new("fob", &options);
|
||||
for item in ["foo/bar", "fob", "foo bar"] {
|
||||
matcher.add(item, item);
|
||||
}
|
||||
@ -300,7 +339,7 @@ mod test {
|
||||
match_algorithm: MatchAlgorithm::Fuzzy,
|
||||
..Default::default()
|
||||
};
|
||||
let mut matcher = NuMatcher::new("'love spaces' ", options);
|
||||
let mut matcher = NuMatcher::new("'love spaces' ", &options);
|
||||
for item in [
|
||||
"'i love spaces'",
|
||||
"'i love spaces' so much",
|
||||
|
@ -1,30 +1,31 @@
|
||||
use crate::completions::{
|
||||
completer::map_value_completions, Completer, CompletionOptions, SemanticSuggestion,
|
||||
Completer, CompletionOptions, MatchAlgorithm, SemanticSuggestion,
|
||||
completer::map_value_completions,
|
||||
};
|
||||
use nu_engine::eval_call;
|
||||
use nu_protocol::{
|
||||
DeclId, PipelineData, Span, Type, Value,
|
||||
ast::{Argument, Call, Expr, Expression},
|
||||
debugger::WithoutDebug,
|
||||
engine::{Stack, StateWorkingSet},
|
||||
DeclId, PipelineData, Span, Type, Value,
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
};
|
||||
use std::collections::HashMap;
|
||||
|
||||
use super::completion_options::NuMatcher;
|
||||
|
||||
pub struct CustomCompletion<T: Completer> {
|
||||
stack: Stack,
|
||||
decl_id: DeclId,
|
||||
line: String,
|
||||
line_pos: usize,
|
||||
fallback: T,
|
||||
}
|
||||
|
||||
impl<T: Completer> CustomCompletion<T> {
|
||||
pub fn new(stack: Stack, decl_id: DeclId, line: String, fallback: T) -> Self {
|
||||
pub fn new(decl_id: DeclId, line: String, line_pos: usize, fallback: T) -> Self {
|
||||
Self {
|
||||
stack,
|
||||
decl_id,
|
||||
line,
|
||||
line_pos,
|
||||
fallback,
|
||||
}
|
||||
}
|
||||
@ -35,38 +36,44 @@ impl<T: Completer> Completer for CustomCompletion<T> {
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
stack: &Stack,
|
||||
prefix: &[u8],
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
pos: usize,
|
||||
orig_options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
// Line position
|
||||
let line_pos = pos - offset;
|
||||
|
||||
// Call custom declaration
|
||||
let result = eval_call::<WithoutDebug>(
|
||||
working_set.permanent_state,
|
||||
&mut self.stack,
|
||||
&Call {
|
||||
decl_id: self.decl_id,
|
||||
head: span,
|
||||
arguments: vec![
|
||||
Argument::Positional(Expression::new_unknown(
|
||||
Expr::String(self.line.clone()),
|
||||
Span::unknown(),
|
||||
Type::String,
|
||||
)),
|
||||
Argument::Positional(Expression::new_unknown(
|
||||
Expr::Int(line_pos as i64),
|
||||
Span::unknown(),
|
||||
Type::Int,
|
||||
)),
|
||||
],
|
||||
parser_info: HashMap::new(),
|
||||
},
|
||||
PipelineData::empty(),
|
||||
);
|
||||
let mut stack_mut = stack.clone();
|
||||
let mut eval = |engine_state: &EngineState| {
|
||||
eval_call::<WithoutDebug>(
|
||||
engine_state,
|
||||
&mut stack_mut,
|
||||
&Call {
|
||||
decl_id: self.decl_id,
|
||||
head: span,
|
||||
arguments: vec![
|
||||
Argument::Positional(Expression::new_unknown(
|
||||
Expr::String(self.line.clone()),
|
||||
Span::unknown(),
|
||||
Type::String,
|
||||
)),
|
||||
Argument::Positional(Expression::new_unknown(
|
||||
Expr::Int(self.line_pos as i64),
|
||||
Span::unknown(),
|
||||
Type::Int,
|
||||
)),
|
||||
],
|
||||
parser_info: HashMap::new(),
|
||||
},
|
||||
PipelineData::empty(),
|
||||
)
|
||||
};
|
||||
let result = if self.decl_id.get() < working_set.permanent_state.num_decls() {
|
||||
eval(working_set.permanent_state)
|
||||
} else {
|
||||
let mut engine_state = working_set.permanent_state.clone();
|
||||
let _ = engine_state.merge_delta(working_set.delta.clone());
|
||||
eval(&engine_state)
|
||||
};
|
||||
|
||||
let mut completion_options = orig_options.clone();
|
||||
let mut should_sort = true;
|
||||
@ -96,10 +103,12 @@ impl<T: Completer> Completer for CustomCompletion<T> {
|
||||
{
|
||||
completion_options.case_sensitive = case_sensitive;
|
||||
}
|
||||
if let Some(positional) =
|
||||
options.get("positional").and_then(|val| val.as_bool().ok())
|
||||
{
|
||||
completion_options.positional = positional;
|
||||
let positional =
|
||||
options.get("positional").and_then(|val| val.as_bool().ok());
|
||||
if positional.is_some() {
|
||||
log::warn!(
|
||||
"Use of the positional option is deprecated. Use the substring match algorithm instead."
|
||||
);
|
||||
}
|
||||
if let Some(algorithm) = options
|
||||
.get("completion_algorithm")
|
||||
@ -107,6 +116,11 @@ impl<T: Completer> Completer for CustomCompletion<T> {
|
||||
.and_then(|option| option.try_into().ok())
|
||||
{
|
||||
completion_options.match_algorithm = algorithm;
|
||||
if let Some(false) = positional {
|
||||
if completion_options.match_algorithm == MatchAlgorithm::Prefix {
|
||||
completion_options.match_algorithm = MatchAlgorithm::Substring
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -120,14 +134,13 @@ impl<T: Completer> Completer for CustomCompletion<T> {
|
||||
prefix,
|
||||
span,
|
||||
offset,
|
||||
pos,
|
||||
orig_options,
|
||||
);
|
||||
}
|
||||
_ => {
|
||||
log::error!(
|
||||
"Custom completer returned invalid value of type {}",
|
||||
value.get_type().to_string()
|
||||
value.get_type()
|
||||
);
|
||||
return vec![];
|
||||
}
|
||||
@ -138,7 +151,7 @@ impl<T: Completer> Completer for CustomCompletion<T> {
|
||||
}
|
||||
};
|
||||
|
||||
let mut matcher = NuMatcher::new(String::from_utf8_lossy(prefix), completion_options);
|
||||
let mut matcher = NuMatcher::new(prefix, &completion_options);
|
||||
|
||||
if should_sort {
|
||||
for sugg in suggestions {
|
||||
|
@ -1,37 +1,30 @@
|
||||
use crate::completions::{
|
||||
completion_common::{adjust_if_intermediate, complete_item, AdjustView},
|
||||
Completer, CompletionOptions,
|
||||
completion_common::{AdjustView, adjust_if_intermediate, complete_item},
|
||||
};
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
Span,
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
use std::path::Path;
|
||||
|
||||
use super::{completion_common::FileSuggestion, SemanticSuggestion};
|
||||
use super::{SemanticSuggestion, SuggestionKind, completion_common::FileSuggestion};
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct DirectoryCompletion {}
|
||||
|
||||
impl DirectoryCompletion {
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
}
|
||||
pub struct DirectoryCompletion;
|
||||
|
||||
impl Completer for DirectoryCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
stack: &Stack,
|
||||
prefix: &[u8],
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
_pos: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let AdjustView { prefix, span, .. } = adjust_if_intermediate(prefix, working_set, span);
|
||||
let AdjustView { prefix, span, .. } =
|
||||
adjust_if_intermediate(prefix.as_ref(), working_set, span);
|
||||
|
||||
// Filter only the folders
|
||||
#[allow(deprecated)]
|
||||
@ -54,8 +47,7 @@ impl Completer for DirectoryCompletion {
|
||||
},
|
||||
..Suggestion::default()
|
||||
},
|
||||
// TODO????
|
||||
kind: None,
|
||||
kind: Some(SuggestionKind::Directory),
|
||||
})
|
||||
.collect();
|
||||
|
||||
|
@ -1,21 +1,23 @@
|
||||
use crate::completions::{file_path_completion, Completer, CompletionOptions};
|
||||
use crate::completions::{
|
||||
Completer, CompletionOptions, SemanticSuggestion, SuggestionKind,
|
||||
completion_common::{FileSuggestion, surround_remove},
|
||||
completion_options::NuMatcher,
|
||||
file_path_completion,
|
||||
};
|
||||
use nu_path::expand_tilde;
|
||||
use nu_protocol::{
|
||||
engine::{Stack, StateWorkingSet},
|
||||
Span,
|
||||
engine::{Stack, StateWorkingSet, VirtualPath},
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
use std::path::{is_separator, PathBuf, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR};
|
||||
use std::{
|
||||
collections::HashSet,
|
||||
path::{MAIN_SEPARATOR_STR, PathBuf, is_separator},
|
||||
};
|
||||
|
||||
use super::{SemanticSuggestion, SuggestionKind};
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct DotNuCompletion {}
|
||||
|
||||
impl DotNuCompletion {
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
pub struct DotNuCompletion {
|
||||
/// e.g. use std/a<tab>
|
||||
pub std_virtual_path: bool,
|
||||
}
|
||||
|
||||
impl Completer for DotNuCompletion {
|
||||
@ -23,62 +25,90 @@ impl Completer for DotNuCompletion {
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
stack: &Stack,
|
||||
prefix: &[u8],
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
_pos: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let prefix_str = String::from_utf8_lossy(prefix);
|
||||
let prefix_str = prefix.as_ref();
|
||||
let start_with_backquote = prefix_str.starts_with('`');
|
||||
let end_with_backquote = prefix_str.ends_with('`');
|
||||
let prefix_str = prefix_str.replace('`', "");
|
||||
// e.g. `./`, `..\`, `/`
|
||||
let not_lib_dirs = prefix_str
|
||||
.chars()
|
||||
.find(|c| *c != '.')
|
||||
.is_some_and(is_separator);
|
||||
let mut search_dirs: Vec<PathBuf> = vec![];
|
||||
|
||||
// If prefix_str is only a word we want to search in the current dir
|
||||
let (base, partial) = prefix_str
|
||||
.rsplit_once(is_separator)
|
||||
.unwrap_or((".", &prefix_str));
|
||||
let (base, partial) = if let Some((parent, remain)) = prefix_str.rsplit_once(is_separator) {
|
||||
// If prefix_str is only a word we want to search in the current dir.
|
||||
// "/xx" should be split to "/" and "xx".
|
||||
if parent.is_empty() {
|
||||
(MAIN_SEPARATOR_STR, remain)
|
||||
} else {
|
||||
(parent, remain)
|
||||
}
|
||||
} else {
|
||||
(".", prefix_str.as_str())
|
||||
};
|
||||
let base_dir = base.replace(is_separator, MAIN_SEPARATOR_STR);
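The (base, partial) split above decides which directory to search and which component is still being typed; a small standalone restatement of the same rule (hypothetical helper, not in the diff):

use std::path::{is_separator, MAIN_SEPARATOR_STR};

// base directory to search in, plus the final component being completed
fn split_prefix(prefix: &str) -> (&str, &str) {
    match prefix.rsplit_once(is_separator) {
        // "/xx" splits into the root and "xx"
        Some(("", remain)) => (MAIN_SEPARATOR_STR, remain),
        Some((parent, remain)) => (parent, remain),
        // a bare word searches the current directory
        None => (".", prefix),
    }
}

fn main() {
    assert_eq!(split_prefix("utils"), (".", "utils"));
    assert_eq!(split_prefix("std/ass"), ("std", "ass"));
    assert_eq!(split_prefix("/etc/co"), ("/etc", "co"));
}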
|
||||
|
||||
// Fetch the lib dirs
|
||||
let lib_dirs: Vec<PathBuf> = working_set
|
||||
// NOTE: 2 ways to setup `NU_LIB_DIRS`
|
||||
// 1. `const NU_LIB_DIRS = [paths]`, equal to `nu -I paths`
|
||||
// 2. `$env.NU_LIB_DIRS = [paths]`
|
||||
let const_lib_dirs = working_set
|
||||
.find_variable(b"$NU_LIB_DIRS")
|
||||
.and_then(|vid| working_set.get_variable(vid).const_val.as_ref())
|
||||
.or(working_set.get_env_var("NU_LIB_DIRS"))
|
||||
.map(|lib_dirs| {
|
||||
.and_then(|vid| working_set.get_variable(vid).const_val.as_ref());
|
||||
let env_lib_dirs = working_set.get_env_var("NU_LIB_DIRS");
|
||||
let lib_dirs: HashSet<PathBuf> = [const_lib_dirs, env_lib_dirs]
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.flat_map(|lib_dirs| {
|
||||
lib_dirs
|
||||
.as_list()
|
||||
.into_iter()
|
||||
.flat_map(|it| it.iter().filter_map(|x| x.to_path().ok()))
|
||||
.map(expand_tilde)
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
.collect();
|
||||
|
||||
// Check if the base_dir is a folder
|
||||
// rsplit_once removes the separator
|
||||
let cwd = working_set.permanent_state.cwd(None);
|
||||
if base_dir != "." {
|
||||
// Search in base_dir as well as lib_dirs
|
||||
let expanded_base_dir = expand_tilde(&base_dir);
|
||||
let is_base_dir_relative = expanded_base_dir.is_relative();
|
||||
// Search in base_dir as well as lib_dirs.
|
||||
// After expanded, base_dir can be a relative path or absolute path.
|
||||
// If relative, we join "current working dir" with it to get subdirectory and add to search_dirs.
|
||||
// If absolute, we add it to search_dirs.
|
||||
if let Ok(mut cwd) = cwd {
|
||||
cwd.push(&base_dir);
|
||||
search_dirs.push(cwd.into_std_path_buf());
|
||||
if is_base_dir_relative {
|
||||
cwd.push(&base_dir);
|
||||
search_dirs.push(cwd.into_std_path_buf());
|
||||
} else {
|
||||
search_dirs.push(expanded_base_dir);
|
||||
}
|
||||
}
|
||||
if !not_lib_dirs {
|
||||
search_dirs.extend(lib_dirs.into_iter().map(|mut dir| {
|
||||
dir.push(&base_dir);
|
||||
dir
|
||||
}));
|
||||
}
|
||||
search_dirs.extend(lib_dirs.into_iter().map(|mut dir| {
|
||||
dir.push(&base_dir);
|
||||
dir
|
||||
}));
|
||||
} else {
|
||||
if let Ok(cwd) = cwd {
|
||||
search_dirs.push(cwd.into_std_path_buf());
|
||||
}
|
||||
search_dirs.extend(lib_dirs);
|
||||
if !not_lib_dirs {
|
||||
search_dirs.extend(lib_dirs);
|
||||
}
|
||||
}
|
||||
|
||||
// Fetch the files filtering the ones that ends with .nu
|
||||
// and transform them into suggestions
|
||||
let completions = file_path_completion(
|
||||
let mut completions = file_path_completion(
|
||||
span,
|
||||
partial,
|
||||
&search_dirs
|
||||
@ -89,22 +119,67 @@ impl Completer for DotNuCompletion {
|
||||
working_set.permanent_state,
|
||||
stack,
|
||||
);
|
||||
|
||||
if self.std_virtual_path {
|
||||
let mut matcher = NuMatcher::new(partial, options);
|
||||
let base_dir = surround_remove(&base_dir);
|
||||
if base_dir == "." {
|
||||
let surround_prefix = partial
|
||||
.chars()
|
||||
.take_while(|c| "`'\"".contains(*c))
|
||||
.collect::<String>();
|
||||
for path in ["std", "std-rfc"] {
|
||||
let path = format!("{surround_prefix}{path}");
|
||||
matcher.add(
|
||||
path.clone(),
|
||||
FileSuggestion {
|
||||
span,
|
||||
path,
|
||||
style: None,
|
||||
is_dir: true,
|
||||
},
|
||||
);
|
||||
}
|
||||
} else if let Some(VirtualPath::Dir(sub_paths)) =
|
||||
working_set.find_virtual_path(&base_dir)
|
||||
{
|
||||
for sub_vp_id in sub_paths {
|
||||
let (path, sub_vp) = working_set.get_virtual_path(*sub_vp_id);
|
||||
let path = path
|
||||
.strip_prefix(&format!("{base_dir}/"))
|
||||
.unwrap_or(path)
|
||||
.to_string();
|
||||
matcher.add(
|
||||
path.clone(),
|
||||
FileSuggestion {
|
||||
path,
|
||||
span,
|
||||
style: None,
|
||||
is_dir: matches!(sub_vp, VirtualPath::Dir(_)),
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
completions.extend(matcher.results());
|
||||
}
|
||||
|
||||
completions
|
||||
.into_iter()
|
||||
// Different base dir, so we list the .nu files or folders
|
||||
.filter(|it| {
|
||||
// for paths with spaces in them
|
||||
let path = it.path.trim_end_matches('`');
|
||||
path.ends_with(".nu") || path.ends_with(SEP)
|
||||
path.ends_with(".nu") || it.is_dir
|
||||
})
|
||||
.map(|x| {
|
||||
let append_whitespace =
|
||||
x.path.ends_with(".nu") && (!start_with_backquote || end_with_backquote);
|
||||
let append_whitespace = !x.is_dir && (!start_with_backquote || end_with_backquote);
|
||||
// Re-calculate the span to replace
|
||||
let mut span_offset = 0;
|
||||
let mut value = x.path.to_string();
|
||||
// Complete only the last path component
|
||||
if base_dir != "." {
|
||||
if base_dir == MAIN_SEPARATOR_STR {
|
||||
span_offset = base_dir.len()
|
||||
} else if base_dir != "." {
|
||||
span_offset = base_dir.len() + 1
|
||||
}
|
||||
// Retain only one '`'
|
||||
|
crates/nu-cli/src/completions/exportable_completions.rs (new file, 112 lines)
@ -0,0 +1,112 @@
|
||||
use crate::completions::{
|
||||
Completer, CompletionOptions, SemanticSuggestion, SuggestionKind,
|
||||
completion_common::surround_remove, completion_options::NuMatcher,
|
||||
};
|
||||
use nu_protocol::{
|
||||
ModuleId, Span,
|
||||
engine::{Stack, StateWorkingSet},
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
|
||||
pub struct ExportableCompletion<'a> {
|
||||
pub module_id: ModuleId,
|
||||
pub temp_working_set: Option<StateWorkingSet<'a>>,
|
||||
}
|
||||
|
||||
/// If name contains space, wrap it in quotes
|
||||
fn wrapped_name(name: String) -> String {
|
||||
if !name.contains(' ') {
|
||||
return name;
|
||||
}
|
||||
if name.contains('\'') {
|
||||
format!("\"{}\"", name.replace('"', r#"\""#))
|
||||
} else {
|
||||
format!("'{name}'")
|
||||
}
|
||||
}
|
||||
|
||||
impl Completer for ExportableCompletion<'_> {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
_stack: &Stack,
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let mut matcher = NuMatcher::<()>::new(surround_remove(prefix.as_ref()), options);
|
||||
let mut results = Vec::new();
|
||||
let span = reedline::Span {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
};
|
||||
// TODO: use matcher.add_lazy to lazily evaluate an item if it matches the prefix
|
||||
let mut add_suggestion = |value: String,
|
||||
description: Option<String>,
|
||||
extra: Option<Vec<String>>,
|
||||
kind: SuggestionKind| {
|
||||
results.push(SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value,
|
||||
span,
|
||||
description,
|
||||
extra,
|
||||
..Suggestion::default()
|
||||
},
|
||||
kind: Some(kind),
|
||||
});
|
||||
};
|
||||
|
||||
let working_set = self.temp_working_set.as_ref().unwrap_or(working_set);
|
||||
let module = working_set.get_module(self.module_id);
|
||||
|
||||
for (name, decl_id) in &module.decls {
|
||||
let name = String::from_utf8_lossy(name).to_string();
|
||||
if matcher.matches(&name) {
|
||||
let cmd = working_set.get_decl(*decl_id);
|
||||
add_suggestion(
|
||||
wrapped_name(name),
|
||||
Some(cmd.description().to_string()),
|
||||
None,
|
||||
// `None` here avoids arguments being expanded by snippet edit style for lsp
|
||||
SuggestionKind::Command(cmd.command_type(), None),
|
||||
);
|
||||
}
|
||||
}
|
||||
for (name, module_id) in &module.submodules {
|
||||
let name = String::from_utf8_lossy(name).to_string();
|
||||
if matcher.matches(&name) {
|
||||
let comments = working_set.get_module_comments(*module_id).map(|spans| {
|
||||
spans
|
||||
.iter()
|
||||
.map(|sp| {
|
||||
String::from_utf8_lossy(working_set.get_span_contents(*sp)).into()
|
||||
})
|
||||
.collect::<Vec<String>>()
|
||||
});
|
||||
add_suggestion(
|
||||
wrapped_name(name),
|
||||
Some("Submodule".into()),
|
||||
comments,
|
||||
SuggestionKind::Module,
|
||||
);
|
||||
}
|
||||
}
|
||||
for (name, var_id) in &module.constants {
|
||||
let name = String::from_utf8_lossy(name).to_string();
|
||||
if matcher.matches(&name) {
|
||||
let var = working_set.get_variable(*var_id);
|
||||
add_suggestion(
|
||||
wrapped_name(name),
|
||||
var.const_val
|
||||
.as_ref()
|
||||
.and_then(|v| v.clone().coerce_into_string().ok()),
|
||||
None,
|
||||
SuggestionKind::Variable,
|
||||
);
|
||||
}
|
||||
}
|
||||
results
|
||||
}
|
||||
}
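A quick self-contained check of `wrapped_name` above (the body is copied verbatim so the snippet runs on its own): names without spaces pass through, spaced names get single quotes, and a name that already contains a single quote falls back to escaped double quotes.

fn wrapped_name(name: String) -> String {
    if !name.contains(' ') {
        return name;
    }
    if name.contains('\'') {
        format!("\"{}\"", name.replace('"', r#"\""#))
    } else {
        format!("'{name}'")
    }
}

fn main() {
    assert_eq!(wrapped_name("ls".into()), "ls");
    assert_eq!(wrapped_name("my cmd".into()), "'my cmd'");
    assert_eq!(wrapped_name("it's here".into()), "\"it's here\"");
}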
|
@ -1,41 +1,33 @@
|
||||
use crate::completions::{
|
||||
completion_common::{adjust_if_intermediate, complete_item, AdjustView},
|
||||
Completer, CompletionOptions,
|
||||
completion_common::{AdjustView, adjust_if_intermediate, complete_item},
|
||||
};
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
Span,
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
use std::path::Path;
|
||||
|
||||
use super::{completion_common::FileSuggestion, SemanticSuggestion};
|
||||
use super::{SemanticSuggestion, SuggestionKind, completion_common::FileSuggestion};
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct FileCompletion {}
|
||||
|
||||
impl FileCompletion {
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
}
|
||||
pub struct FileCompletion;
|
||||
|
||||
impl Completer for FileCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
stack: &Stack,
|
||||
prefix: &[u8],
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
_pos: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let AdjustView {
|
||||
prefix,
|
||||
span,
|
||||
readjusted,
|
||||
} = adjust_if_intermediate(prefix, working_set, span);
|
||||
} = adjust_if_intermediate(prefix.as_ref(), working_set, span);
|
||||
|
||||
#[allow(deprecated)]
|
||||
let items: Vec<_> = complete_item(
|
||||
@ -58,8 +50,11 @@ impl Completer for FileCompletion {
|
||||
},
|
||||
..Suggestion::default()
|
||||
},
|
||||
// TODO????
|
||||
kind: None,
|
||||
kind: Some(if x.is_dir {
|
||||
SuggestionKind::Directory
|
||||
} else {
|
||||
SuggestionKind::File
|
||||
}),
|
||||
})
|
||||
.collect();
|
||||
|
||||
|
@ -1,22 +1,15 @@
|
||||
use crate::completions::{completion_options::NuMatcher, Completer, CompletionOptions};
|
||||
use crate::completions::{
|
||||
Completer, CompletionOptions, SemanticSuggestion, SuggestionKind, completion_options::NuMatcher,
|
||||
};
|
||||
use nu_protocol::{
|
||||
ast::{Expr, Expression},
|
||||
DeclId, Span,
|
||||
engine::{Stack, StateWorkingSet},
|
||||
Span,
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
|
||||
use super::SemanticSuggestion;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct FlagCompletion {
|
||||
expression: Expression,
|
||||
}
|
||||
|
||||
impl FlagCompletion {
|
||||
pub fn new(expression: Expression) -> Self {
|
||||
Self { expression }
|
||||
}
|
||||
pub decl_id: DeclId,
|
||||
}
|
||||
|
||||
impl Completer for FlagCompletion {
|
||||
@ -24,69 +17,42 @@ impl Completer for FlagCompletion {
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
_stack: &Stack,
|
||||
prefix: &[u8],
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
_pos: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
// Check if it's a flag
|
||||
if let Expr::Call(call) = &self.expression.expr {
|
||||
let decl = working_set.get_decl(call.decl_id);
|
||||
let sig = decl.signature();
|
||||
|
||||
let mut matcher = NuMatcher::new(String::from_utf8_lossy(prefix), options.clone());
|
||||
|
||||
for named in &sig.named {
|
||||
let flag_desc = &named.desc;
|
||||
if let Some(short) = named.short {
|
||||
let mut named = vec![0; short.len_utf8()];
|
||||
short.encode_utf8(&mut named);
|
||||
named.insert(0, b'-');
|
||||
|
||||
matcher.add_semantic_suggestion(SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: String::from_utf8_lossy(&named).to_string(),
|
||||
description: Some(flag_desc.to_string()),
|
||||
span: reedline::Span {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
},
|
||||
append_whitespace: true,
|
||||
..Suggestion::default()
|
||||
},
|
||||
// TODO????
|
||||
kind: None,
|
||||
});
|
||||
}
|
||||
|
||||
if named.long.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let mut named = named.long.as_bytes().to_vec();
|
||||
named.insert(0, b'-');
|
||||
named.insert(0, b'-');
|
||||
|
||||
matcher.add_semantic_suggestion(SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: String::from_utf8_lossy(&named).to_string(),
|
||||
description: Some(flag_desc.to_string()),
|
||||
span: reedline::Span {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
},
|
||||
append_whitespace: true,
|
||||
..Suggestion::default()
|
||||
let mut matcher = NuMatcher::new(prefix, options);
|
||||
let mut add_suggestion = |value: String, description: String| {
|
||||
matcher.add_semantic_suggestion(SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value,
|
||||
description: Some(description),
|
||||
span: reedline::Span {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
},
|
||||
// TODO????
|
||||
kind: None,
|
||||
});
|
||||
append_whitespace: true,
|
||||
..Suggestion::default()
|
||||
},
|
||||
kind: Some(SuggestionKind::Flag),
|
||||
});
|
||||
};
|
||||
|
||||
let decl = working_set.get_decl(self.decl_id);
|
||||
let sig = decl.signature();
|
||||
for named in &sig.named {
|
||||
if let Some(short) = named.short {
|
||||
let mut name = String::from("-");
|
||||
name.push(short);
|
||||
add_suggestion(name, named.desc.clone());
|
||||
}
|
||||
|
||||
return matcher.results();
|
||||
if named.long.is_empty() {
|
||||
continue;
|
||||
}
|
||||
add_suggestion(format!("--{}", named.long), named.desc.clone());
|
||||
}
|
||||
|
||||
vec![]
|
||||
matcher.results()
|
||||
}
|
||||
}
|
||||
|
@ -1,4 +1,6 @@
|
||||
mod attribute_completions;
|
||||
mod base;
|
||||
mod cell_path_completions;
|
||||
mod command_completions;
|
||||
mod completer;
|
||||
mod completion_common;
|
||||
@ -6,19 +8,23 @@ mod completion_options;
|
||||
mod custom_completions;
|
||||
mod directory_completions;
|
||||
mod dotnu_completions;
|
||||
mod exportable_completions;
|
||||
mod file_completions;
|
||||
mod flag_completions;
|
||||
mod operator_completions;
|
||||
mod variable_completions;
|
||||
|
||||
pub use attribute_completions::{AttributableCompletion, AttributeCompletion};
|
||||
pub use base::{Completer, SemanticSuggestion, SuggestionKind};
|
||||
pub use cell_path_completions::CellPathCompletion;
|
||||
pub use command_completions::CommandCompletion;
|
||||
pub use completer::NuCompleter;
|
||||
pub use completion_options::{CompletionOptions, MatchAlgorithm};
|
||||
pub use custom_completions::CustomCompletion;
|
||||
pub use directory_completions::DirectoryCompletion;
|
||||
pub use dotnu_completions::DotNuCompletion;
|
||||
pub use file_completions::{file_path_completion, FileCompletion};
|
||||
pub use exportable_completions::ExportableCompletion;
|
||||
pub use file_completions::{FileCompletion, file_path_completion};
|
||||
pub use flag_completions::FlagCompletion;
|
||||
pub use operator_completions::OperatorCompletion;
|
||||
pub use variable_completions::VariableCompletion;
|
||||
|
@ -1,170 +1,277 @@
|
||||
use crate::completions::{
|
||||
completion_options::NuMatcher, Completer, CompletionOptions, SemanticSuggestion, SuggestionKind,
|
||||
Completer, CompletionOptions, SemanticSuggestion, SuggestionKind, completion_options::NuMatcher,
|
||||
};
|
||||
use nu_protocol::{
|
||||
ast::{Expr, Expression},
|
||||
ENV_VARIABLE_ID, Span, Type, Value,
|
||||
ast::{self, Comparison, Expr, Expression},
|
||||
engine::{Stack, StateWorkingSet},
|
||||
Span, Type,
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
use strum::{EnumMessage, IntoEnumIterator};
|
||||
|
||||
use super::cell_path_completions::eval_cell_path;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct OperatorCompletion {
|
||||
previous_expr: Expression,
|
||||
pub struct OperatorCompletion<'a> {
|
||||
pub left_hand_side: &'a Expression,
|
||||
}
|
||||
|
||||
impl OperatorCompletion {
|
||||
pub fn new(previous_expr: Expression) -> Self {
|
||||
OperatorCompletion { previous_expr }
|
||||
struct OperatorItem {
|
||||
pub symbols: String,
|
||||
pub description: String,
|
||||
}
|
||||
|
||||
fn operator_to_item<T: EnumMessage + AsRef<str>>(op: T) -> OperatorItem {
|
||||
OperatorItem {
|
||||
symbols: op.as_ref().into(),
|
||||
description: op.get_message().unwrap_or_default().into(),
|
||||
}
|
||||
}
|
||||
|
||||
impl Completer for OperatorCompletion {
|
||||
fn common_comparison_ops() -> Vec<OperatorItem> {
|
||||
vec![
|
||||
operator_to_item(Comparison::In),
|
||||
operator_to_item(Comparison::NotIn),
|
||||
operator_to_item(Comparison::Equal),
|
||||
operator_to_item(Comparison::NotEqual),
|
||||
]
|
||||
}
|
||||
|
||||
fn all_ops_for_immutable() -> Vec<OperatorItem> {
|
||||
ast::Comparison::iter()
|
||||
.map(operator_to_item)
|
||||
.chain(ast::Math::iter().map(operator_to_item))
|
||||
.chain(ast::Boolean::iter().map(operator_to_item))
|
||||
.chain(ast::Bits::iter().map(operator_to_item))
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn collection_comparison_ops() -> Vec<OperatorItem> {
|
||||
let mut ops = common_comparison_ops();
|
||||
ops.push(operator_to_item(Comparison::Has));
|
||||
ops.push(operator_to_item(Comparison::NotHas));
|
||||
ops
|
||||
}
|
||||
|
||||
fn number_comparison_ops() -> Vec<OperatorItem> {
|
||||
Comparison::iter()
|
||||
.filter(|op| {
|
||||
!matches!(
|
||||
op,
|
||||
Comparison::RegexMatch
|
||||
| Comparison::NotRegexMatch
|
||||
| Comparison::StartsWith
|
||||
| Comparison::EndsWith
|
||||
| Comparison::Has
|
||||
| Comparison::NotHas
|
||||
)
|
||||
})
|
||||
.map(operator_to_item)
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn math_ops() -> Vec<OperatorItem> {
|
||||
ast::Math::iter()
|
||||
.filter(|op| !matches!(op, ast::Math::Concatenate | ast::Math::Pow))
|
||||
.map(operator_to_item)
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn bit_ops() -> Vec<OperatorItem> {
|
||||
ast::Bits::iter().map(operator_to_item).collect()
|
||||
}
|
||||
|
||||
fn all_assignment_ops() -> Vec<OperatorItem> {
|
||||
ast::Assignment::iter().map(operator_to_item).collect()
|
||||
}
|
||||
|
||||
fn numeric_assignment_ops() -> Vec<OperatorItem> {
|
||||
ast::Assignment::iter()
|
||||
.filter(|op| !matches!(op, ast::Assignment::ConcatenateAssign))
|
||||
.map(operator_to_item)
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn concat_assignment_ops() -> Vec<OperatorItem> {
|
||||
vec![
|
||||
operator_to_item(ast::Assignment::Assign),
|
||||
operator_to_item(ast::Assignment::ConcatenateAssign),
|
||||
]
|
||||
}
|
||||
|
||||
fn valid_int_ops() -> Vec<OperatorItem> {
let mut ops = valid_float_ops();
ops.extend(bit_ops());
ops
}

fn valid_float_ops() -> Vec<OperatorItem> {
let mut ops = valid_value_with_unit_ops();
ops.push(operator_to_item(ast::Math::Pow));
ops
}

fn valid_string_ops() -> Vec<OperatorItem> {
let mut ops: Vec<OperatorItem> = Comparison::iter().map(operator_to_item).collect();
ops.push(operator_to_item(ast::Math::Concatenate));
ops.push(OperatorItem {
symbols: "like".into(),
description: Comparison::RegexMatch
.get_message()
.unwrap_or_default()
.into(),
});
ops.push(OperatorItem {
symbols: "not-like".into(),
description: Comparison::NotRegexMatch
.get_message()
.unwrap_or_default()
.into(),
});
ops
}

fn valid_list_ops() -> Vec<OperatorItem> {
let mut ops = collection_comparison_ops();
ops.push(operator_to_item(ast::Math::Concatenate));
ops
}

fn valid_binary_ops() -> Vec<OperatorItem> {
let mut ops = number_comparison_ops();
ops.extend(bit_ops());
ops.push(operator_to_item(ast::Math::Concatenate));
ops
}

fn valid_bool_ops() -> Vec<OperatorItem> {
let mut ops: Vec<OperatorItem> = ast::Boolean::iter().map(operator_to_item).collect();
ops.extend(common_comparison_ops());
ops
}

fn valid_value_with_unit_ops() -> Vec<OperatorItem> {
let mut ops = number_comparison_ops();
ops.extend(math_ops());
ops
}

fn ops_by_value(value: &Value, mutable: bool) -> Vec<OperatorItem> {
let mut ops = match value {
Value::Int { .. } => valid_int_ops(),
Value::Float { .. } => valid_float_ops(),
Value::String { .. } => valid_string_ops(),
Value::Binary { .. } => valid_binary_ops(),
Value::Bool { .. } => valid_bool_ops(),
Value::Date { .. } => number_comparison_ops(),
Value::Filesize { .. } | Value::Duration { .. } => valid_value_with_unit_ops(),
Value::Range { .. } | Value::Record { .. } => collection_comparison_ops(),
Value::List { .. } => valid_list_ops(),
_ => all_ops_for_immutable(),
};
if mutable {
ops.extend(match value {
Value::Int { .. }
| Value::Float { .. }
| Value::Filesize { .. }
| Value::Duration { .. } => numeric_assignment_ops(),
Value::String { .. } | Value::Binary { .. } | Value::List { .. } => {
concat_assignment_ops()
}
Value::Bool { .. }
| Value::Date { .. }
| Value::Range { .. }
| Value::Record { .. } => vec![operator_to_item(ast::Assignment::Assign)],
_ => all_assignment_ops(),
})
}
ops
}

fn is_expression_mutable(expr: &Expr, working_set: &StateWorkingSet) -> bool {
let Expr::FullCellPath(path) = expr else {
return false;
};
let Expr::Var(id) = path.head.expr else {
return false;
};
if id == ENV_VARIABLE_ID {
return true;
}
let var = working_set.get_variable(id);
var.mutable
}

impl Completer for OperatorCompletion<'_> {
fn fetch(
&mut self,
working_set: &StateWorkingSet,
_stack: &Stack,
_prefix: &[u8],
stack: &Stack,
prefix: impl AsRef<str>,
span: Span,
offset: usize,
_pos: usize,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
//Check if int, float, or string
let partial = std::str::from_utf8(working_set.get_span_contents(span)).unwrap_or("");
let op = match &self.previous_expr.expr {
Expr::BinaryOp(x, _, _) => &x.expr,
_ => {
return vec![];
}
};
let possible_operations = match op {
Expr::Int(_) => vec![
("+", "Add (Plus)"),
("-", "Subtract (Minus)"),
("*", "Multiply"),
("/", "Divide"),
("==", "Equal to"),
("!=", "Not equal to"),
("//", "Floor division"),
("<", "Less than"),
(">", "Greater than"),
("<=", "Less than or equal to"),
(">=", "Greater than or equal to"),
("mod", "Floor division remainder (Modulo)"),
("**", "Power of"),
("bit-or", "Bitwise OR"),
("bit-xor", "Bitwise exclusive OR"),
("bit-and", "Bitwise AND"),
("bit-shl", "Bitwise shift left"),
("bit-shr", "Bitwise shift right"),
("in", "Is a member of (doesn't use regex)"),
("not-in", "Is not a member of (doesn't use regex)"),
],
Expr::String(_) => vec![
("=~", "Contains regex match"),
("like", "Contains regex match"),
("!~", "Does not contain regex match"),
("not-like", "Does not contain regex match"),
(
"++",
"Concatenates two lists, two strings, or two binary values",
),
("in", "Is a member of (doesn't use regex)"),
("not-in", "Is not a member of (doesn't use regex)"),
("starts-with", "Starts with"),
("ends-with", "Ends with"),
],
Expr::Float(_) => vec![
("+", "Add (Plus)"),
("-", "Subtract (Minus)"),
("*", "Multiply"),
("/", "Divide"),
("==", "Equal to"),
("!=", "Not equal to"),
("//", "Floor division"),
("<", "Less than"),
(">", "Greater than"),
("<=", "Less than or equal to"),
(">=", "Greater than or equal to"),
("mod", "Floor division remainder (Modulo)"),
("**", "Power of"),
("in", "Is a member of (doesn't use regex)"),
("not-in", "Is not a member of (doesn't use regex)"),
],
Expr::Bool(_) => vec![
(
"and",
"Both values are true (short-circuits when first value is false)",
),
(
"or",
"Either value is true (short-circuits when first value is true)",
),
("xor", "One value is true and the other is false"),
("not", "Negates a value or expression"),
("in", "Is a member of (doesn't use regex)"),
("not-in", "Is not a member of (doesn't use regex)"),
],
Expr::FullCellPath(path) => match path.head.expr {
Expr::List(_) => vec![
(
"++",
"Concatenates two lists, two strings, or two binary values",
),
("has", "Contains a value of (doesn't use regex)"),
("not-has", "Does not contain a value of (doesn't use regex)"),
],
Expr::Var(id) => get_variable_completions(id, working_set),
_ => vec![],
let mut needs_assignment_ops = true;
// Complete according expression type
// TODO: type inference on self.left_hand_side to get more accurate completions
let mut possible_operations: Vec<OperatorItem> = match &self.left_hand_side.ty {
Type::Int | Type::Number => valid_int_ops(),
Type::Float => valid_float_ops(),
Type::String => valid_string_ops(),
Type::Binary => valid_binary_ops(),
Type::Bool => valid_bool_ops(),
Type::Date => number_comparison_ops(),
Type::Filesize | Type::Duration => valid_value_with_unit_ops(),
Type::Record(_) | Type::Range => collection_comparison_ops(),
Type::List(_) | Type::Table(_) => valid_list_ops(),
// Unknown type, resort to evaluated values
Type::Any => match &self.left_hand_side.expr {
Expr::FullCellPath(path) => {
// for `$ <tab>`
if matches!(path.head.expr, Expr::Garbage) {
return vec![];
}
let value =
eval_cell_path(working_set, stack, &path.head, &path.tail, path.head.span)
.unwrap_or_default();
let mutable = is_expression_mutable(&self.left_hand_side.expr, working_set);
// to avoid duplication
needs_assignment_ops = false;
ops_by_value(&value, mutable)
}
_ => all_ops_for_immutable(),
},
_ => vec![],
_ => common_comparison_ops(),
};
// If the left hand side is a variable, add assignment operators if mutable
if needs_assignment_ops && is_expression_mutable(&self.left_hand_side.expr, working_set) {
possible_operations.extend(match &self.left_hand_side.ty {
Type::Int | Type::Float | Type::Number => numeric_assignment_ops(),
Type::Filesize | Type::Duration => numeric_assignment_ops(),
Type::String | Type::Binary | Type::List(_) => concat_assignment_ops(),
Type::Any => all_assignment_ops(),
_ => vec![operator_to_item(ast::Assignment::Assign)],
});
}

let mut matcher = NuMatcher::new(partial, options.clone());
for (symbol, desc) in possible_operations.into_iter() {
let mut matcher = NuMatcher::new(prefix, options);
for OperatorItem {
symbols,
description,
} in possible_operations
{
matcher.add_semantic_suggestion(SemanticSuggestion {
suggestion: Suggestion {
value: symbol.to_string(),
description: Some(desc.to_string()),
value: symbols.to_owned(),
description: Some(description.to_owned()),
span: reedline::Span::new(span.start - offset, span.end - offset),
append_whitespace: true,
..Suggestion::default()
},
kind: Some(SuggestionKind::Command(
nu_protocol::engine::CommandType::Builtin,
)),
kind: Some(SuggestionKind::Operator),
});
}
matcher.results()
}
}

pub fn get_variable_completions<'a>(
id: nu_protocol::Id<nu_protocol::marker::Var>,
working_set: &StateWorkingSet,
) -> Vec<(&'a str, &'a str)> {
let var = working_set.get_variable(id);
if !var.mutable {
return vec![];
}

match var.ty {
Type::List(_) | Type::String | Type::Binary => vec![
(
"++=",
"Concatenates two lists, two strings, or two binary values",
),
("=", "Assigns a value to a variable."),
],

Type::Int | Type::Float => vec![
("=", "Assigns a value to a variable."),
("+=", "Adds a value to a variable."),
("-=", "Subtracts a value from a variable."),
("*=", "Multiplies a variable by a value"),
("/=", "Divides a variable by a value."),
],
_ => vec![],
}
}

@@ -1,157 +1,67 @@
use crate::completions::{Completer, CompletionOptions, SemanticSuggestion, SuggestionKind};
use nu_engine::{column::get_columns, eval_variable};
use nu_protocol::{
Span, VarId,
engine::{Stack, StateWorkingSet},
Span, Value,
};
use reedline::Suggestion;
use std::str;

use super::completion_options::NuMatcher;

#[derive(Clone)]
pub struct VariableCompletion {
var_context: (Vec<u8>, Vec<Vec<u8>>), // tuple with $var and the sublevels (.b.c.d)
}

impl VariableCompletion {
pub fn new(var_context: (Vec<u8>, Vec<Vec<u8>>)) -> Self {
Self { var_context }
}
}
pub struct VariableCompletion;

impl Completer for VariableCompletion {
fn fetch(
&mut self,
working_set: &StateWorkingSet,
stack: &Stack,
prefix: &[u8],
_stack: &Stack,
prefix: impl AsRef<str>,
span: Span,
offset: usize,
_pos: usize,
options: &CompletionOptions,
) -> Vec<SemanticSuggestion> {
let builtins = ["$nu", "$in", "$env"];
let var_str = std::str::from_utf8(&self.var_context.0).unwrap_or("");
let var_id = working_set.find_variable(&self.var_context.0);
let mut matcher = NuMatcher::new(prefix, options);
let current_span = reedline::Span {
start: span.start - offset,
end: span.end - offset,
};
let sublevels_count = self.var_context.1.len();
let prefix_str = String::from_utf8_lossy(prefix);
let mut matcher = NuMatcher::new(prefix_str, options.clone());

// Completions for the given variable
if !var_str.is_empty() {
// Completion for $env.<tab>
if var_str == "$env" {
let env_vars = stack.get_env_vars(working_set.permanent_state);

// Return nested values
if sublevels_count > 0 {
// Extract the target var ($env.<target-var>)
let target_var = self.var_context.1[0].clone();
let target_var_str =
str::from_utf8(&target_var).unwrap_or_default().to_string();

// Everything after the target var is the nested level ($env.<target-var>.<nested_levels>...)
let nested_levels: Vec<Vec<u8>> =
self.var_context.1.clone().into_iter().skip(1).collect();

if let Some(val) = env_vars.get(&target_var_str) {
for suggestion in nested_suggestions(val, &nested_levels, current_span) {
matcher.add_semantic_suggestion(suggestion);
}

return matcher.results();
}
} else {
// No nesting provided, return all env vars
for env_var in env_vars {
matcher.add_semantic_suggestion(SemanticSuggestion {
suggestion: Suggestion {
value: env_var.0,
span: current_span,
..Suggestion::default()
},
kind: Some(SuggestionKind::Type(env_var.1.get_type())),
});
}

return matcher.results();
}
}

// Completions for $nu.<tab>
if var_str == "$nu" {
// Eval nu var
if let Ok(nuval) = eval_variable(
working_set.permanent_state,
stack,
nu_protocol::NU_VARIABLE_ID,
nu_protocol::Span::new(current_span.start, current_span.end),
) {
for suggestion in nested_suggestions(&nuval, &self.var_context.1, current_span)
{
matcher.add_semantic_suggestion(suggestion);
}

return matcher.results();
}
}

// Completion other variable types
if let Some(var_id) = var_id {
// Extract the variable value from the stack
let var = stack.get_var(var_id, Span::new(span.start, span.end));

// If the value exists and it's of type Record
if let Ok(value) = var {
for suggestion in nested_suggestions(&value, &self.var_context.1, current_span)
{
matcher.add_semantic_suggestion(suggestion);
}

return matcher.results();
}
}
}

// Variable completion (e.g: $en<tab> to complete $env)
let builtins = ["$nu", "$in", "$env"];
for builtin in builtins {
matcher.add_semantic_suggestion(SemanticSuggestion {
suggestion: Suggestion {
value: builtin.to_string(),
span: current_span,
description: Some("reserved".into()),
..Suggestion::default()
},
// TODO is there a way to get the VarId to get the type???
kind: None,
kind: Some(SuggestionKind::Variable),
});
}

let mut add_candidate = |name, var_id: &VarId| {
matcher.add_semantic_suggestion(SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(name).to_string(),
span: current_span,
description: Some(working_set.get_variable(*var_id).ty.to_string()),
..Suggestion::default()
},
kind: Some(SuggestionKind::Variable),
})
};

// TODO: The following can be refactored (see find_commands_by_predicate() used in
// command_completions).
let mut removed_overlays = vec![];
// Working set scope vars
for scope_frame in working_set.delta.scope.iter().rev() {
for overlay_frame in scope_frame.active_overlays(&mut removed_overlays).rev() {
for v in &overlay_frame.vars {
matcher.add_semantic_suggestion(SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(v.0).to_string(),
span: current_span,
..Suggestion::default()
},
kind: Some(SuggestionKind::Type(
working_set.get_variable(*v.1).ty.clone(),
)),
});
for (name, var_id) in &overlay_frame.vars {
add_candidate(name, var_id);
}
}
}

// Permanent state vars
// for scope in &self.engine_state.scope {
for overlay_frame in working_set
@@ -159,98 +69,11 @@ impl Completer for VariableCompletion {
.active_overlays(&removed_overlays)
.rev()
{
for v in &overlay_frame.vars {
matcher.add_semantic_suggestion(SemanticSuggestion {
suggestion: Suggestion {
value: String::from_utf8_lossy(v.0).to_string(),
span: current_span,
..Suggestion::default()
},
kind: Some(SuggestionKind::Type(
working_set.get_variable(*v.1).ty.clone(),
)),
});
for (name, var_id) in &overlay_frame.vars {
add_candidate(name, var_id);
}
}

matcher.results()
}
}

// Find recursively the values for sublevels
// if no sublevels are set it returns the current value
fn nested_suggestions(
val: &Value,
sublevels: &[Vec<u8>],
current_span: reedline::Span,
) -> Vec<SemanticSuggestion> {
let mut output: Vec<SemanticSuggestion> = vec![];
let value = recursive_value(val, sublevels).unwrap_or_else(Value::nothing);

let kind = SuggestionKind::Type(value.get_type());
match value {
Value::Record { val, .. } => {
// Add all the columns as completion
for col in val.columns() {
output.push(SemanticSuggestion {
suggestion: Suggestion {
value: col.clone(),
span: current_span,
..Suggestion::default()
},
kind: Some(kind.clone()),
});
}

output
}
Value::List { vals, .. } => {
for column_name in get_columns(vals.as_slice()) {
output.push(SemanticSuggestion {
suggestion: Suggestion {
value: column_name,
span: current_span,
..Suggestion::default()
},
kind: Some(kind.clone()),
});
}

output
}
_ => output,
}
}

// Extracts the recursive value (e.g: $var.a.b.c)
fn recursive_value(val: &Value, sublevels: &[Vec<u8>]) -> Result<Value, Span> {
// Go to next sublevel
if let Some((sublevel, next_sublevels)) = sublevels.split_first() {
let span = val.span();
match val {
Value::Record { val, .. } => {
if let Some((_, value)) = val.iter().find(|(key, _)| key.as_bytes() == sublevel) {
// If matches try to fetch recursively the next
recursive_value(value, next_sublevels)
} else {
// Current sublevel value not found
Err(span)
}
}
Value::List { vals, .. } => {
for col in get_columns(vals.as_slice()) {
if col.as_bytes() == *sublevel {
let val = val.get_data_by_key(&col).ok_or(span)?;
return recursive_value(&val, next_sublevels);
}
}

// Current sublevel value not found
Err(span)
}
_ => Ok(val.clone()),
}
} else {
Ok(val.clone())
}
}

@@ -2,10 +2,11 @@ use crate::util::eval_source
#[cfg(feature = "plugin")]
use nu_path::canonicalize_with;
#[cfg(feature = "plugin")]
use nu_protocol::{engine::StateWorkingSet, ParseError, PluginRegistryFile, Spanned};
use nu_protocol::{ParseError, PluginRegistryFile, Spanned, engine::StateWorkingSet};
use nu_protocol::{
PipelineData,
engine::{EngineState, Stack},
report_shell_error, PipelineData,
report_shell_error,
};
#[cfg(feature = "plugin")]
use nu_utils::perf;
@@ -18,7 +19,7 @@ const OLD_PLUGIN_FILE: &str = "plugin.nu";

#[cfg(feature = "plugin")]
pub fn read_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Spanned<String>>) {
use nu_protocol::{shell_error::io::IoError, ShellError};
use nu_protocol::{ShellError, shell_error::io::IoError};
use std::path::Path;

let span = plugin_file.as_ref().map(|s| s.span);
@@ -79,7 +80,7 @@ pub fn read_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Span
report_shell_error(
engine_state,
&ShellError::Io(IoError::new_internal_with_path(
err.kind(),
err,
"Could not open plugin registry file",
nu_protocol::location!(),
plugin_path,
@@ -230,8 +231,8 @@ pub fn eval_config_contents(
#[cfg(feature = "plugin")]
pub fn migrate_old_plugin_file(engine_state: &EngineState) -> bool {
use nu_protocol::{
shell_error::io::IoError, PluginExample, PluginIdentity, PluginRegistryItem,
PluginRegistryItemData, PluginSignature, ShellError,
PluginExample, PluginIdentity, PluginRegistryItem, PluginRegistryItemData, PluginSignature,
ShellError, shell_error::io::IoError,
};
use std::collections::BTreeMap;

@@ -277,7 +278,7 @@ pub fn migrate_old_plugin_file(engine_state: &EngineState) -> bool {
&mut stack,
&old_contents,
&old_plugin_file_path.to_string_lossy(),
PipelineData::Empty,
PipelineData::empty(),
false,
) != 0
{
@@ -322,7 +323,7 @@ pub fn migrate_old_plugin_file(engine_state: &EngineState) -> bool {
if let Err(err) = std::fs::File::create(&new_plugin_file_path)
.map_err(|err| {
IoError::new_internal_with_path(
err.kind(),
err,
"Could not create new plugin file",
nu_protocol::location!(),
new_plugin_file_path.clone(),

@@ -2,10 +2,11 @@ use log::info;
use nu_engine::eval_block;
use nu_parser::parse;
use nu_protocol::{
cli_error::report_compile_error,
PipelineData, ShellError, Spanned, Value,
debugger::WithoutDebug,
engine::{EngineState, Stack, StateWorkingSet},
report_parse_error, report_parse_warning, PipelineData, ShellError, Spanned, Value,
report_error::report_compile_error,
report_parse_error, report_parse_warning,
};
use std::sync::Arc;

@@ -4,12 +4,12 @@ use nu_engine::eval_block;
use nu_parser::parse;
use nu_path::canonicalize_with;
use nu_protocol::{
cli_error::report_compile_error,
PipelineData, ShellError, Span, Value,
debugger::WithoutDebug,
engine::{EngineState, Stack, StateWorkingSet},
report_error::report_compile_error,
report_parse_error, report_parse_warning,
shell_error::io::IoError,
PipelineData, ShellError, Span, Value,
shell_error::io::*,
};
use std::{path::PathBuf, sync::Arc};

@@ -27,11 +27,11 @@ pub fn evaluate_file(
let cwd = engine_state.cwd_as_string(Some(stack))?;

let file_path = canonicalize_with(&path, cwd).map_err(|err| {
IoError::new_with_additional_context(
err.kind(),
Span::unknown(),
PathBuf::from(&path),
IoError::new_internal_with_path(
err.not_found_as(NotFound::File),
"Could not access file",
nu_protocol::location!(),
PathBuf::from(&path),
)
})?;

@@ -46,21 +46,21 @@ pub fn evaluate_file(
})?;

let file = std::fs::read(&file_path).map_err(|err| {
IoError::new_with_additional_context(
err.kind(),
Span::unknown(),
file_path.clone(),
IoError::new_internal_with_path(
err.not_found_as(NotFound::File),
"Could not read file",
nu_protocol::location!(),
file_path.clone(),
)
})?;
engine_state.file = Some(file_path.clone());

let parent = file_path.parent().ok_or_else(|| {
IoError::new_with_additional_context(
std::io::ErrorKind::NotFound,
Span::unknown(),
file_path.clone(),
IoError::new_internal_with_path(
ErrorKind::DirectoryNotFound,
"The file path does not have a parent",
nu_protocol::location!(),
file_path.clone(),
)
})?;

@@ -18,7 +18,7 @@ mod validation;
pub use commands::add_cli_context;
pub use completions::{FileCompletion, NuCompleter, SemanticSuggestion, SuggestionKind};
pub use config_files::eval_config_contents;
pub use eval_cmds::{evaluate_commands, EvaluateCommandsOpts};
pub use eval_cmds::{EvaluateCommandsOpts, evaluate_commands};
pub use eval_file::evaluate_file;
pub use menus::NuHelpCompleter;
pub use nu_highlight::NuHighlight;

@@ -1,5 +1,5 @@
use nu_engine::documentation::{get_flags_section, HelpStyle};
use nu_protocol::{engine::EngineState, levenshtein_distance, Config};
use nu_engine::documentation::{FormatterValue, HelpStyle, get_flags_section};
use nu_protocol::{Config, engine::EngineState, levenshtein_distance};
use nu_utils::IgnoreCaseExt;
use reedline::{Completer, Suggestion};
use std::{fmt::Write, sync::Arc};
@@ -66,8 +66,11 @@ impl NuHelpCompleter {
let _ = write!(long_desc, "Usage:\r\n > {}\r\n", sig.call_signature());

if !sig.named.is_empty() {
long_desc.push_str(&get_flags_section(&sig, &help_style, |v| {
v.to_parsable_string(", ", &self.config)
long_desc.push_str(&get_flags_section(&sig, &help_style, |v| match v {
FormatterValue::DefaultValue(value) => {
value.to_parsable_string(", ", &self.config)
}
FormatterValue::CodeString(text) => text.to_string(),
}))
}

@@ -1,10 +1,10 @@
use nu_engine::eval_block;
use nu_protocol::{
BlockId, IntoPipelineData, Span, Value,
debugger::WithoutDebug,
engine::{EngineState, Stack},
BlockId, IntoPipelineData, Span, Value,
};
use reedline::{menu_functions::parse_selection_char, Completer, Suggestion};
use reedline::{Completer, Suggestion, menu_functions::parse_selection_char};
use std::sync::Arc;

const SELECTION_CHAR: char = '!';

@ -3,6 +3,8 @@ use std::sync::Arc;
|
||||
use nu_engine::command_prelude::*;
|
||||
use reedline::{Highlighter, StyledText};
|
||||
|
||||
use crate::syntax_highlight::highlight_syntax;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct NuHighlight;
|
||||
|
||||
@ -14,6 +16,11 @@ impl Command for NuHighlight {
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("nu-highlight")
|
||||
.category(Category::Strings)
|
||||
.switch(
|
||||
"reject-garbage",
|
||||
"Return an error if invalid syntax (garbage) was encountered",
|
||||
Some('r'),
|
||||
)
|
||||
.input_output_types(vec![(Type::String, Type::String)])
|
||||
}
|
||||
|
||||
@ -32,19 +39,33 @@ impl Command for NuHighlight {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let reject_garbage = call.has_flag(engine_state, stack, "reject-garbage")?;
|
||||
let head = call.head;
|
||||
|
||||
let signals = engine_state.signals();
|
||||
|
||||
let highlighter = crate::NuHighlighter {
|
||||
engine_state: Arc::new(engine_state.clone()),
|
||||
stack: Arc::new(stack.clone()),
|
||||
};
|
||||
let engine_state = Arc::new(engine_state.clone());
|
||||
let stack = Arc::new(stack.clone());
|
||||
|
||||
input.map(
|
||||
move |x| match x.coerce_into_string() {
|
||||
Ok(line) => {
|
||||
let highlights = highlighter.highlight(&line, line.len());
|
||||
let result = highlight_syntax(&engine_state, &stack, &line, line.len());
|
||||
|
||||
let highlights = match (reject_garbage, result.found_garbage) {
|
||||
(false, _) => result.text,
|
||||
(true, None) => result.text,
|
||||
(true, Some(span)) => {
|
||||
let error = ShellError::OutsideSpannedLabeledError {
|
||||
src: line,
|
||||
error: "encountered invalid syntax while highlighting".into(),
|
||||
msg: "invalid syntax".into(),
|
||||
span,
|
||||
};
|
||||
return Value::error(error, head);
|
||||
}
|
||||
};
|
||||
|
||||
Value::string(highlights.render_simple(), head)
|
||||
}
|
||||
Err(err) => Value::error(err, head),
|
||||
|
@ -2,8 +2,9 @@ use crate::NushellPrompt;
|
||||
use log::{trace, warn};
|
||||
use nu_engine::ClosureEvalOnce;
|
||||
use nu_protocol::{
|
||||
Config, PipelineData, Value,
|
||||
engine::{EngineState, Stack},
|
||||
report_shell_error, Config, PipelineData, Value,
|
||||
report_shell_error,
|
||||
};
|
||||
use reedline::Prompt;
|
||||
|
||||
@ -60,7 +61,7 @@ fn get_prompt_string(
|
||||
.and_then(|v| match v {
|
||||
Value::Closure { val, .. } => {
|
||||
let result = ClosureEvalOnce::new(engine_state, stack, val.as_ref().clone())
|
||||
.run_with_input(PipelineData::Empty);
|
||||
.run_with_input(PipelineData::empty());
|
||||
|
||||
trace!(
|
||||
"get_prompt_string (block) {}:{}:{}",
|
||||
@ -75,7 +76,7 @@ fn get_prompt_string(
|
||||
})
|
||||
.ok()
|
||||
}
|
||||
Value::String { .. } => Some(PipelineData::Value(v.clone(), None)),
|
||||
Value::String { .. } => Some(PipelineData::value(v.clone(), None)),
|
||||
_ => None,
|
||||
})
|
||||
.and_then(|pipeline_data| {
|
||||
|
@ -1,19 +1,20 @@
|
||||
use crate::{menus::NuMenuCompleter, NuHelpCompleter};
|
||||
use crate::{NuHelpCompleter, menus::NuMenuCompleter};
|
||||
use crossterm::event::{KeyCode, KeyModifiers};
|
||||
use nu_ansi_term::Style;
|
||||
use nu_color_config::{color_record_to_nustyle, lookup_ansi_color_style};
|
||||
use nu_engine::eval_block;
|
||||
use nu_parser::parse;
|
||||
use nu_protocol::{
|
||||
Config, EditBindings, FromValue, ParsedKeybinding, ParsedMenu, PipelineData, Record,
|
||||
ShellError, Span, Type, Value,
|
||||
debugger::WithoutDebug,
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
extract_value, Config, EditBindings, FromValue, ParsedKeybinding, ParsedMenu, PipelineData,
|
||||
Record, ShellError, Span, Type, Value,
|
||||
extract_value,
|
||||
};
|
||||
use reedline::{
|
||||
default_emacs_keybindings, default_vi_insert_keybindings, default_vi_normal_keybindings,
|
||||
ColumnarMenu, DescriptionMenu, DescriptionMode, EditCommand, IdeMenu, Keybindings, ListMenu,
|
||||
MenuBuilder, Reedline, ReedlineEvent, ReedlineMenu,
|
||||
MenuBuilder, Reedline, ReedlineEvent, ReedlineMenu, default_emacs_keybindings,
|
||||
default_vi_insert_keybindings, default_vi_normal_keybindings,
|
||||
};
|
||||
use std::sync::Arc;
|
||||
|
||||
@ -158,7 +159,7 @@ pub(crate) fn add_menus(
|
||||
engine_state.merge_delta(delta)?;
|
||||
|
||||
let mut temp_stack = Stack::new().collect_value();
|
||||
let input = PipelineData::Empty;
|
||||
let input = PipelineData::empty();
|
||||
menu_eval_results.push(eval_block::<WithoutDebug>(
|
||||
&engine_state,
|
||||
&mut temp_stack,
|
||||
@ -740,9 +741,15 @@ fn add_keybinding(
|
||||
let span = mode.span();
|
||||
match &mode {
|
||||
Value::String { val, .. } => match val.as_str() {
|
||||
"emacs" => add_parsed_keybinding(emacs_keybindings, keybinding, config),
|
||||
"vi_insert" => add_parsed_keybinding(insert_keybindings, keybinding, config),
|
||||
"vi_normal" => add_parsed_keybinding(normal_keybindings, keybinding, config),
|
||||
str if str.eq_ignore_ascii_case("emacs") => {
|
||||
add_parsed_keybinding(emacs_keybindings, keybinding, config)
|
||||
}
|
||||
str if str.eq_ignore_ascii_case("vi_insert") => {
|
||||
add_parsed_keybinding(insert_keybindings, keybinding, config)
|
||||
}
|
||||
str if str.eq_ignore_ascii_case("vi_normal") => {
|
||||
add_parsed_keybinding(normal_keybindings, keybinding, config)
|
||||
}
|
||||
str => Err(ShellError::InvalidValue {
|
||||
valid: "'emacs', 'vi_insert', or 'vi_normal'".into(),
|
||||
actual: format!("'{str}'"),
|
||||
@ -992,41 +999,58 @@ fn event_from_record(
|
||||
) -> Result<ReedlineEvent, ShellError> {
|
||||
let event = match name {
|
||||
"none" => ReedlineEvent::None,
|
||||
"clearscreen" => ReedlineEvent::ClearScreen,
|
||||
"clearscrollback" => ReedlineEvent::ClearScrollback,
|
||||
"historyhintcomplete" => ReedlineEvent::HistoryHintComplete,
|
||||
"historyhintwordcomplete" => ReedlineEvent::HistoryHintWordComplete,
|
||||
"ctrld" => ReedlineEvent::CtrlD,
|
||||
"ctrlc" => ReedlineEvent::CtrlC,
|
||||
"clearscreen" => ReedlineEvent::ClearScreen,
|
||||
"clearscrollback" => ReedlineEvent::ClearScrollback,
|
||||
"enter" => ReedlineEvent::Enter,
|
||||
"submit" => ReedlineEvent::Submit,
|
||||
"submitornewline" => ReedlineEvent::SubmitOrNewline,
|
||||
"esc" | "escape" => ReedlineEvent::Esc,
|
||||
// Non-sensical for user configuration:
|
||||
//
|
||||
// `ReedlineEvent::Mouse` - itself a no-op
|
||||
// `ReedlineEvent::Resize` - requires size info specifically from the ANSI resize
|
||||
// event
|
||||
//
|
||||
// Handled above in `parse_event`:
|
||||
//
|
||||
// `ReedlineEvent::Edit`
|
||||
"repaint" => ReedlineEvent::Repaint,
|
||||
"previoushistory" => ReedlineEvent::PreviousHistory,
|
||||
"up" => ReedlineEvent::Up,
|
||||
"down" => ReedlineEvent::Down,
|
||||
"right" => ReedlineEvent::Right,
|
||||
"left" => ReedlineEvent::Left,
|
||||
"searchhistory" => ReedlineEvent::SearchHistory,
|
||||
"nexthistory" => ReedlineEvent::NextHistory,
|
||||
"previoushistory" => ReedlineEvent::PreviousHistory,
|
||||
"repaint" => ReedlineEvent::Repaint,
|
||||
"menudown" => ReedlineEvent::MenuDown,
|
||||
"menuup" => ReedlineEvent::MenuUp,
|
||||
"menuleft" => ReedlineEvent::MenuLeft,
|
||||
"menuright" => ReedlineEvent::MenuRight,
|
||||
"menunext" => ReedlineEvent::MenuNext,
|
||||
"menuprevious" => ReedlineEvent::MenuPrevious,
|
||||
"menupagenext" => ReedlineEvent::MenuPageNext,
|
||||
"menupageprevious" => ReedlineEvent::MenuPagePrevious,
|
||||
"openeditor" => ReedlineEvent::OpenEditor,
|
||||
"searchhistory" => ReedlineEvent::SearchHistory,
|
||||
// Handled above in `parse_event`:
|
||||
//
|
||||
// `ReedlineEvent::Multiple`
|
||||
// `ReedlineEvent::UntilFound`
|
||||
"menu" => {
|
||||
let menu = extract_value("name", record, span)?;
|
||||
ReedlineEvent::Menu(menu.to_expanded_string("", config))
|
||||
}
|
||||
"menunext" => ReedlineEvent::MenuNext,
|
||||
"menuprevious" => ReedlineEvent::MenuPrevious,
|
||||
"menuup" => ReedlineEvent::MenuUp,
|
||||
"menudown" => ReedlineEvent::MenuDown,
|
||||
"menuleft" => ReedlineEvent::MenuLeft,
|
||||
"menuright" => ReedlineEvent::MenuRight,
|
||||
"menupagenext" => ReedlineEvent::MenuPageNext,
|
||||
"menupageprevious" => ReedlineEvent::MenuPagePrevious,
|
||||
"executehostcommand" => {
|
||||
let cmd = extract_value("cmd", record, span)?;
|
||||
ReedlineEvent::ExecuteHostCommand(cmd.to_expanded_string("", config))
|
||||
}
|
||||
"openeditor" => ReedlineEvent::OpenEditor,
|
||||
"vichangemode" => {
|
||||
let mode = extract_value("mode", record, span)?;
|
||||
ReedlineEvent::ViChangeMode(mode.as_str()?.to_owned())
|
||||
}
|
||||
str => {
|
||||
return Err(ShellError::InvalidValue {
|
||||
valid: "a reedline event".into(),
|
||||
@ -1056,7 +1080,6 @@ fn edit_from_record(
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false),
|
||||
},
|
||||
|
||||
"movetoend" => EditCommand::MoveToEnd {
|
||||
select: extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
@ -1092,16 +1115,6 @@ fn edit_from_record(
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false),
|
||||
},
|
||||
"movewordrightend" => EditCommand::MoveWordRightEnd {
|
||||
select: extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false),
|
||||
},
|
||||
"movebigwordrightend" => EditCommand::MoveBigWordRightEnd {
|
||||
select: extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false),
|
||||
},
|
||||
"movewordrightstart" => EditCommand::MoveWordRightStart {
|
||||
select: extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
@ -1112,6 +1125,16 @@ fn edit_from_record(
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false),
|
||||
},
|
||||
"movewordrightend" => EditCommand::MoveWordRightEnd {
|
||||
select: extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false),
|
||||
},
|
||||
"movebigwordrightend" => EditCommand::MoveBigWordRightEnd {
|
||||
select: extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false),
|
||||
},
|
||||
"movetoposition" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let select = extract_value("select", record, span)
|
||||
@ -1133,6 +1156,13 @@ fn edit_from_record(
|
||||
EditCommand::InsertString(value.to_expanded_string("", config))
|
||||
}
|
||||
"insertnewline" => EditCommand::InsertNewline,
|
||||
"replacechar" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value)?;
|
||||
EditCommand::ReplaceChar(char)
|
||||
}
|
||||
// `EditCommand::ReplaceChars` - Internal hack not sanely implementable as a
|
||||
// standalone binding
|
||||
"backspace" => EditCommand::Backspace,
|
||||
"delete" => EditCommand::Delete,
|
||||
"cutchar" => EditCommand::CutChar,
|
||||
@ -1140,11 +1170,13 @@ fn edit_from_record(
|
||||
"deleteword" => EditCommand::DeleteWord,
|
||||
"clear" => EditCommand::Clear,
|
||||
"cleartolineend" => EditCommand::ClearToLineEnd,
|
||||
"complete" => EditCommand::Complete,
|
||||
"cutcurrentline" => EditCommand::CutCurrentLine,
|
||||
"cutfromstart" => EditCommand::CutFromStart,
|
||||
"cutfromlinestart" => EditCommand::CutFromLineStart,
|
||||
"cuttoend" => EditCommand::CutToEnd,
|
||||
"cuttolineend" => EditCommand::CutToLineEnd,
|
||||
"killline" => EditCommand::KillLine,
|
||||
"cutwordleft" => EditCommand::CutWordLeft,
|
||||
"cutbigwordleft" => EditCommand::CutBigWordLeft,
|
||||
"cutwordright" => EditCommand::CutWordRight,
|
||||
@ -1156,6 +1188,7 @@ fn edit_from_record(
|
||||
"uppercaseword" => EditCommand::UppercaseWord,
|
||||
"lowercaseword" => EditCommand::LowercaseWord,
|
||||
"capitalizechar" => EditCommand::CapitalizeChar,
|
||||
"switchcasechar" => EditCommand::SwitchcaseChar,
|
||||
"swapwords" => EditCommand::SwapWords,
|
||||
"swapgraphemes" => EditCommand::SwapGraphemes,
|
||||
"undo" => EditCommand::Undo,
|
||||
@ -1212,17 +1245,64 @@ fn edit_from_record(
|
||||
.unwrap_or(false);
|
||||
EditCommand::MoveLeftBefore { c: char, select }
|
||||
}
|
||||
"complete" => EditCommand::Complete,
|
||||
"selectall" => EditCommand::SelectAll,
|
||||
"cutselection" => EditCommand::CutSelection,
|
||||
"copyselection" => EditCommand::CopySelection,
|
||||
"paste" => EditCommand::Paste,
|
||||
"copyfromstart" => EditCommand::CopyFromStart,
|
||||
"copyfromlinestart" => EditCommand::CopyFromLineStart,
|
||||
"copytoend" => EditCommand::CopyToEnd,
|
||||
"copytolineend" => EditCommand::CopyToLineEnd,
|
||||
"copycurrentline" => EditCommand::CopyCurrentLine,
|
||||
"copywordleft" => EditCommand::CopyWordLeft,
|
||||
"copybigwordleft" => EditCommand::CopyBigWordLeft,
|
||||
"copywordright" => EditCommand::CopyWordRight,
|
||||
"copybigwordright" => EditCommand::CopyBigWordRight,
|
||||
"copywordrighttonext" => EditCommand::CopyWordRightToNext,
|
||||
"copybigwordrighttonext" => EditCommand::CopyBigWordRightToNext,
|
||||
"copyleft" => EditCommand::CopyLeft,
|
||||
"copyright" => EditCommand::CopyRight,
|
||||
"copyrightuntil" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value)?;
|
||||
EditCommand::CopyRightUntil(char)
|
||||
}
|
||||
"copyrightbefore" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value)?;
|
||||
EditCommand::CopyRightBefore(char)
|
||||
}
|
||||
"copyleftuntil" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value)?;
|
||||
EditCommand::CopyLeftUntil(char)
|
||||
}
|
||||
"copyleftbefore" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value)?;
|
||||
EditCommand::CopyLeftBefore(char)
|
||||
}
|
||||
"swapcursorandanchor" => EditCommand::SwapCursorAndAnchor,
|
||||
#[cfg(feature = "system-clipboard")]
|
||||
"cutselectionsystem" => EditCommand::CutSelectionSystem,
|
||||
"copyselection" => EditCommand::CopySelection,
|
||||
#[cfg(feature = "system-clipboard")]
|
||||
"copyselectionsystem" => EditCommand::CopySelectionSystem,
|
||||
"paste" => EditCommand::Paste,
|
||||
#[cfg(feature = "system-clipboard")]
|
||||
"pastesystem" => EditCommand::PasteSystem,
|
||||
"selectall" => EditCommand::SelectAll,
|
||||
"cutinside" => {
|
||||
let value = extract_value("left", record, span)?;
|
||||
let left = extract_char(value)?;
|
||||
let value = extract_value("right", record, span)?;
|
||||
let right = extract_char(value)?;
|
||||
EditCommand::CutInside { left, right }
|
||||
}
|
||||
"yankinside" => {
|
||||
let value = extract_value("left", record, span)?;
|
||||
let left = extract_char(value)?;
|
||||
let value = extract_value("right", record, span)?;
|
||||
let right = extract_char(value)?;
|
||||
EditCommand::YankInside { left, right }
|
||||
}
|
||||
str => {
|
||||
return Err(ShellError::InvalidValue {
|
||||
valid: "a reedline EditCommand".into(),
|
||||
|
@ -6,12 +6,12 @@ use crate::prompt_update::{
|
||||
VSCODE_PRE_EXECUTION_MARKER,
|
||||
};
|
||||
use crate::{
|
||||
NuHighlighter, NuValidator, NushellPrompt,
|
||||
completions::NuCompleter,
|
||||
nu_highlight::NoOpHighlighter,
|
||||
prompt_update,
|
||||
reedline_config::{add_menus, create_keybindings, KeybindingsMode},
|
||||
reedline_config::{KeybindingsMode, add_menus, create_keybindings},
|
||||
util::eval_source,
|
||||
NuHighlighter, NuValidator, NushellPrompt,
|
||||
};
|
||||
use crossterm::cursor::SetCursorStyle;
|
||||
use log::{error, trace, warn};
|
||||
@ -20,27 +20,30 @@ use nu_cmd_base::util::get_editor;
|
||||
use nu_color_config::StyleComputer;
|
||||
#[allow(deprecated)]
|
||||
use nu_engine::env_to_strings;
|
||||
use nu_engine::exit::cleanup_exit;
|
||||
use nu_parser::{lex, parse, trim_quotes_str};
|
||||
use nu_protocol::shell_error::io::IoError;
|
||||
use nu_protocol::{BannerKind, shell_error};
|
||||
use nu_protocol::{
|
||||
HistoryConfig, HistoryFileFormat, PipelineData, ShellError, Span, Spanned, Value,
|
||||
config::NuCursorShape,
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
report_shell_error, HistoryConfig, HistoryFileFormat, PipelineData, ShellError, Span, Spanned,
|
||||
Value,
|
||||
report_shell_error,
|
||||
};
|
||||
use nu_utils::{
|
||||
filesystem::{have_permission, PermissionResult},
|
||||
filesystem::{PermissionResult, have_permission},
|
||||
perf,
|
||||
};
|
||||
use reedline::{
|
||||
CursorConfig, CwdAwareHinter, DefaultCompleter, EditCommand, Emacs, FileBackedHistory,
|
||||
HistorySessionId, Reedline, SqliteBackedHistory, Vi,
|
||||
};
|
||||
use std::sync::atomic::Ordering;
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
env::temp_dir,
|
||||
io::{self, IsTerminal, Write},
|
||||
panic::{catch_unwind, AssertUnwindSafe},
|
||||
panic::{AssertUnwindSafe, catch_unwind},
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
time::{Duration, Instant},
|
||||
@ -142,8 +145,8 @@ pub fn evaluate_repl(
|
||||
|
||||
if load_std_lib.is_none() {
|
||||
match engine_state.get_config().show_banner {
|
||||
Value::Bool { val: false, .. } => {}
|
||||
Value::String { ref val, .. } if val == "short" => {
|
||||
BannerKind::None => {}
|
||||
BannerKind::Short => {
|
||||
eval_source(
|
||||
engine_state,
|
||||
&mut unique_stack,
|
||||
@ -153,7 +156,7 @@ pub fn evaluate_repl(
|
||||
false,
|
||||
);
|
||||
}
|
||||
_ => {
|
||||
BannerKind::Full => {
|
||||
eval_source(
|
||||
engine_state,
|
||||
&mut unique_stack,
|
||||
@ -236,7 +239,7 @@ fn escape_special_vscode_bytes(input: &str) -> Result<String, ShellError> {
|
||||
|
||||
match byte {
|
||||
// Escape bytes below 0x20
|
||||
b if b < 0x20 => format!("\\x{:02X}", byte).into_bytes(),
|
||||
b if b < 0x20 => format!("\\x{byte:02X}").into_bytes(),
|
||||
// Escape semicolon as \x3B
|
||||
b';' => "\\x3B".to_string().into_bytes(),
|
||||
// Escape backslash as \\
|
||||
@ -322,7 +325,19 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
||||
perf!("reset signals", start_time, use_color);
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
// Right before we start our prompt and take input from the user, fire the "pre_prompt" hook
|
||||
// Check all the environment variables they ask for
|
||||
// fire the "env_change" hook
|
||||
if let Err(error) = hook::eval_env_change_hook(
|
||||
&engine_state.get_config().hooks.env_change.clone(),
|
||||
engine_state,
|
||||
&mut stack,
|
||||
) {
|
||||
report_shell_error(engine_state, &error)
|
||||
}
|
||||
perf!("env-change hook", start_time, use_color);
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
// Next, right before we start our prompt and take input from the user, fire the "pre_prompt" hook
|
||||
if let Err(err) = hook::eval_hooks(
|
||||
engine_state,
|
||||
&mut stack,
|
||||
@ -334,18 +349,6 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
||||
}
|
||||
perf!("pre-prompt hook", start_time, use_color);
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
// Next, check all the environment variables they ask for
|
||||
// fire the "env_change" hook
|
||||
if let Err(error) = hook::eval_env_change_hook(
|
||||
&engine_state.get_config().hooks.env_change.clone(),
|
||||
engine_state,
|
||||
&mut stack,
|
||||
) {
|
||||
report_shell_error(engine_state, &error)
|
||||
}
|
||||
perf!("env-change hook", start_time, use_color);
|
||||
|
||||
let engine_reference = Arc::new(engine_state.clone());
|
||||
let config = stack.get_config(engine_state);
|
||||
|
||||
@ -692,7 +695,11 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
||||
);
|
||||
|
||||
println!();
|
||||
return (false, stack, line_editor);
|
||||
|
||||
cleanup_exit((), engine_state, 0);
|
||||
|
||||
// if cleanup_exit didn't exit, we should keep running
|
||||
return (true, stack, line_editor);
|
||||
}
|
||||
Err(err) => {
|
||||
let message = err.to_string();
|
||||
@ -848,7 +855,7 @@ fn do_auto_cd(
|
||||
report_shell_error(
|
||||
engine_state,
|
||||
&ShellError::Io(IoError::new_with_additional_context(
|
||||
std::io::ErrorKind::NotFound,
|
||||
shell_error::io::ErrorKind::DirectoryNotFound,
|
||||
span,
|
||||
PathBuf::from(&path),
|
||||
"Cannot change directory",
|
||||
@ -858,11 +865,11 @@ fn do_auto_cd(
|
||||
path.to_string_lossy().to_string()
|
||||
};
|
||||
|
||||
if let PermissionResult::PermissionDenied(_) = have_permission(path.clone()) {
|
||||
if let PermissionResult::PermissionDenied = have_permission(path.clone()) {
|
||||
report_shell_error(
|
||||
engine_state,
|
||||
&ShellError::Io(IoError::new_with_additional_context(
|
||||
std::io::ErrorKind::PermissionDenied,
|
||||
shell_error::io::ErrorKind::from_std(std::io::ErrorKind::PermissionDenied),
|
||||
span,
|
||||
PathBuf::from(path),
|
||||
"Cannot change directory",
|
||||
@ -930,6 +937,9 @@ fn do_run_cmd(
|
||||
trace!("eval source: {}", s);
|
||||
|
||||
let mut cmds = s.split_whitespace();
|
||||
|
||||
let had_warning_before = engine_state.exit_warning_given.load(Ordering::SeqCst);
|
||||
|
||||
if let Some("exit") = cmds.next() {
|
||||
let mut working_set = StateWorkingSet::new(engine_state);
|
||||
let _ = parse(&mut working_set, None, s.as_bytes(), false);
|
||||
@ -938,13 +948,11 @@ fn do_run_cmd(
|
||||
match cmds.next() {
|
||||
Some(s) => {
|
||||
if let Ok(n) = s.parse::<i32>() {
|
||||
drop(line_editor);
|
||||
std::process::exit(n);
|
||||
return cleanup_exit(line_editor, engine_state, n);
|
||||
}
|
||||
}
|
||||
None => {
|
||||
drop(line_editor);
|
||||
std::process::exit(0);
|
||||
return cleanup_exit(line_editor, engine_state, 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -963,6 +971,14 @@ fn do_run_cmd(
|
||||
false,
|
||||
);
|
||||
|
||||
// if there was a warning before, and we got to this point, it means
|
||||
// the possible call to cleanup_exit did not occur.
|
||||
if had_warning_before && engine_state.is_interactive {
|
||||
engine_state
|
||||
.exit_warning_given
|
||||
.store(false, Ordering::SeqCst);
|
||||
}
|
||||
|
||||
line_editor
|
||||
}
|
||||
|
||||
@ -1081,8 +1097,7 @@ fn run_shell_integration_osc633(
|
||||
// If we're in vscode, run their specific ansi escape sequence.
|
||||
// This is helpful for ctrl+g to change directories in the terminal.
|
||||
run_ansi_sequence(&format!(
|
||||
"{}{}{}",
|
||||
VSCODE_CWD_PROPERTY_MARKER_PREFIX, path, VSCODE_CWD_PROPERTY_MARKER_SUFFIX
|
||||
"{VSCODE_CWD_PROPERTY_MARKER_PREFIX}{path}{VSCODE_CWD_PROPERTY_MARKER_SUFFIX}"
|
||||
));
|
||||
|
||||
perf!(
|
||||
@ -1098,10 +1113,7 @@ fn run_shell_integration_osc633(
|
||||
|
||||
//OSC 633 ; E ; <commandline> [; <nonce] ST - Explicitly set the command line with an optional nonce.
|
||||
run_ansi_sequence(&format!(
|
||||
"{}{}{}",
|
||||
VSCODE_COMMANDLINE_MARKER_PREFIX,
|
||||
replaced_cmd_text,
|
||||
VSCODE_COMMANDLINE_MARKER_SUFFIX
|
||||
"{VSCODE_COMMANDLINE_MARKER_PREFIX}{replaced_cmd_text}{VSCODE_COMMANDLINE_MARKER_SUFFIX}"
|
||||
));
|
||||
}
|
||||
}
|
||||
@ -1436,7 +1448,7 @@ fn are_session_ids_in_sync() {
|
||||
|
||||
#[cfg(test)]
|
||||
mod test_auto_cd {
|
||||
use super::{do_auto_cd, escape_special_vscode_bytes, parse_operation, ReplOperation};
|
||||
use super::{ReplOperation, do_auto_cd, escape_special_vscode_bytes, parse_operation};
|
||||
use nu_path::AbsolutePath;
|
||||
use nu_protocol::engine::{EngineState, Stack};
|
||||
use tempfile::tempdir;
|
||||
@ -1477,7 +1489,7 @@ mod test_auto_cd {
|
||||
// Parse the input. It must be an auto-cd operation.
|
||||
let op = parse_operation(input.to_string(), &engine_state, &stack).unwrap();
|
||||
let ReplOperation::AutoCd { cwd, target, span } = op else {
|
||||
panic!("'{}' was not parsed into an auto-cd operation", input)
|
||||
panic!("'{input}' was not parsed into an auto-cd operation")
|
||||
};
|
||||
|
||||
// Perform the auto-cd operation.
|
||||
|
@ -2,11 +2,11 @@ use log::trace;
|
||||
use nu_ansi_term::Style;
|
||||
use nu_color_config::{get_matching_brackets_style, get_shape_color};
|
||||
use nu_engine::env;
|
||||
use nu_parser::{flatten_block, parse, FlatShape};
|
||||
use nu_parser::{FlatShape, flatten_block, parse};
|
||||
use nu_protocol::{
|
||||
Span,
|
||||
ast::{Block, Expr, Expression, PipelineRedirection, RecordItem},
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
Span,
|
||||
};
|
||||
use reedline::{Highlighter, StyledText};
|
||||
use std::sync::Arc;
|
||||
@ -17,147 +17,173 @@ pub struct NuHighlighter {
|
||||
}
|
||||
|
||||
impl Highlighter for NuHighlighter {
|
||||
fn highlight(&self, line: &str, _cursor: usize) -> StyledText {
|
||||
trace!("highlighting: {}", line);
|
||||
fn highlight(&self, line: &str, cursor: usize) -> StyledText {
|
||||
let result = highlight_syntax(&self.engine_state, &self.stack, line, cursor);
|
||||
result.text
|
||||
}
|
||||
}
|
||||
|
||||
let config = self.stack.get_config(&self.engine_state);
|
||||
let highlight_resolved_externals = config.highlight_resolved_externals;
|
||||
let mut working_set = StateWorkingSet::new(&self.engine_state);
|
||||
let block = parse(&mut working_set, None, line.as_bytes(), false);
|
||||
let (shapes, global_span_offset) = {
|
||||
let mut shapes = flatten_block(&working_set, &block);
|
||||
// Highlighting externals has a config point because of concerns that using which to resolve
|
||||
// externals may slow down things too much.
|
||||
if highlight_resolved_externals {
|
||||
for (span, shape) in shapes.iter_mut() {
|
||||
if *shape == FlatShape::External {
|
||||
let str_contents =
|
||||
working_set.get_span_contents(Span::new(span.start, span.end));
|
||||
/// Result of a syntax highlight operation
|
||||
#[derive(Default)]
|
||||
pub(crate) struct HighlightResult {
|
||||
/// The highlighted text
|
||||
pub(crate) text: StyledText,
|
||||
/// The span of any garbage that was highlighted
|
||||
pub(crate) found_garbage: Option<Span>,
|
||||
}
|
||||
|
||||
let str_word = String::from_utf8_lossy(str_contents).to_string();
|
||||
let paths = env::path_str(&self.engine_state, &self.stack, *span).ok();
|
||||
#[allow(deprecated)]
|
||||
let res = if let Ok(cwd) =
|
||||
env::current_dir_str(&self.engine_state, &self.stack)
|
||||
{
|
||||
which::which_in(str_word, paths.as_ref(), cwd).ok()
|
||||
} else {
|
||||
which::which_in_global(str_word, paths.as_ref())
|
||||
.ok()
|
||||
.and_then(|mut i| i.next())
|
||||
};
|
||||
if res.is_some() {
|
||||
*shape = FlatShape::ExternalResolved;
|
||||
}
|
||||
pub(crate) fn highlight_syntax(
|
||||
engine_state: &EngineState,
|
||||
stack: &Stack,
|
||||
line: &str,
|
||||
cursor: usize,
|
||||
) -> HighlightResult {
|
||||
trace!("highlighting: {}", line);
|
||||
|
||||
let config = stack.get_config(engine_state);
|
||||
let highlight_resolved_externals = config.highlight_resolved_externals;
|
||||
let mut working_set = StateWorkingSet::new(engine_state);
|
||||
    let block = parse(&mut working_set, None, line.as_bytes(), false);
    let (shapes, global_span_offset) = {
        let mut shapes = flatten_block(&working_set, &block);
        // Highlighting externals has a config point because of concerns that using which to resolve
        // externals may slow down things too much.
        if highlight_resolved_externals {
            for (span, shape) in shapes.iter_mut() {
                if *shape == FlatShape::External {
                    let str_contents =
                        working_set.get_span_contents(Span::new(span.start, span.end));

                    let str_word = String::from_utf8_lossy(str_contents).to_string();
                    let paths = env::path_str(engine_state, stack, *span).ok();
                    let res = if let Ok(cwd) = engine_state.cwd(Some(stack)) {
                        which::which_in(str_word, paths.as_ref(), cwd).ok()
                    } else {
                        which::which_in_global(str_word, paths.as_ref())
                            .ok()
                            .and_then(|mut i| i.next())
                    };
                    if res.is_some() {
                        *shape = FlatShape::ExternalResolved;
                    }
                }
            }
        }
        (shapes, engine_state.next_span_start())
    };

    let mut result = HighlightResult::default();
    let mut last_seen_span = global_span_offset;

    let global_cursor_offset = cursor + global_span_offset;
    let matching_brackets_pos = find_matching_brackets(
        line,
        &working_set,
        &block,
        global_span_offset,
        global_cursor_offset,
    );

    for shape in &shapes {
        if shape.0.end <= last_seen_span
            || last_seen_span < global_span_offset
            || shape.0.start < global_span_offset
        {
            // We've already output something for this span
            // so just skip this one
            continue;
        }
        if shape.0.start > last_seen_span {
            let gap = line
                [(last_seen_span - global_span_offset)..(shape.0.start - global_span_offset)]
                .to_string();
            result.text.push((Style::new(), gap));
        }
        let next_token = line
            [(shape.0.start - global_span_offset)..(shape.0.end - global_span_offset)]
            .to_string();

        let mut add_colored_token = |shape: &FlatShape, text: String| {
            result
                .text
                .push((get_shape_color(shape.as_str(), &config), text));
        };

        match shape.1 {
            FlatShape::Garbage => {
                result.found_garbage.get_or_insert_with(|| {
                    Span::new(
                        shape.0.start - global_span_offset,
                        shape.0.end - global_span_offset,
                    )
                });
                add_colored_token(&shape.1, next_token)
            }
            FlatShape::Nothing => add_colored_token(&shape.1, next_token),
            FlatShape::Binary => add_colored_token(&shape.1, next_token),
            FlatShape::Bool => add_colored_token(&shape.1, next_token),
            FlatShape::Int => add_colored_token(&shape.1, next_token),
            FlatShape::Float => add_colored_token(&shape.1, next_token),
            FlatShape::Range => add_colored_token(&shape.1, next_token),
            FlatShape::InternalCall(_) => add_colored_token(&shape.1, next_token),
            FlatShape::External => add_colored_token(&shape.1, next_token),
            FlatShape::ExternalArg => add_colored_token(&shape.1, next_token),
            FlatShape::ExternalResolved => add_colored_token(&shape.1, next_token),
            FlatShape::Keyword => add_colored_token(&shape.1, next_token),
            FlatShape::Literal => add_colored_token(&shape.1, next_token),
            FlatShape::Operator => add_colored_token(&shape.1, next_token),
            FlatShape::Signature => add_colored_token(&shape.1, next_token),
            FlatShape::String => add_colored_token(&shape.1, next_token),
            FlatShape::RawString => add_colored_token(&shape.1, next_token),
            FlatShape::StringInterpolation => add_colored_token(&shape.1, next_token),
            FlatShape::DateTime => add_colored_token(&shape.1, next_token),
            FlatShape::List
            | FlatShape::Table
            | FlatShape::Record
            | FlatShape::Block
            | FlatShape::Closure => {
                let span = shape.0;
                let shape = &shape.1;
                let spans = split_span_by_highlight_positions(
                    line,
                    span,
                    &matching_brackets_pos,
                    global_span_offset,
                );
                for (part, highlight) in spans {
                    let start = part.start - span.start;
                    let end = part.end - span.start;
                    let text = next_token[start..end].to_string();
                    let mut style = get_shape_color(shape.as_str(), &config);
                    if highlight {
                        style = get_matching_brackets_style(style, &config);
                    }
                    result.text.push((style, text));
                }
            }
            FlatShape::Filepath => add_colored_token(&shape.1, next_token),
            FlatShape::Directory => add_colored_token(&shape.1, next_token),
            FlatShape::GlobInterpolation => add_colored_token(&shape.1, next_token),
            FlatShape::GlobPattern => add_colored_token(&shape.1, next_token),
            FlatShape::Variable(_) | FlatShape::VarDecl(_) => {
                add_colored_token(&shape.1, next_token)
            }
            FlatShape::Flag => add_colored_token(&shape.1, next_token),
            FlatShape::Pipe => add_colored_token(&shape.1, next_token),
            FlatShape::Redirection => add_colored_token(&shape.1, next_token),
            FlatShape::Custom(..) => add_colored_token(&shape.1, next_token),
            FlatShape::MatchPattern => add_colored_token(&shape.1, next_token),
        }
        last_seen_span = shape.0.end;
    }

    let remainder = line[(last_seen_span - global_span_offset)..].to_string();
    if !remainder.is_empty() {
        result.text.push((Style::new(), remainder));
    }

    result
}

fn split_span_by_highlight_positions(
@ -309,6 +335,7 @@ fn find_matching_block_end_in_expr(
        .unwrap_or(expression.span.start);

    return match &expression.expr {
        // TODO: Can't these be handled with an `_ => None` branch? Refactor
        Expr::Bool(_) => None,
        Expr::Int(_) => None,
        Expr::Float(_) => None,
@ -335,6 +362,28 @@ fn find_matching_block_end_in_expr(
        Expr::Nothing => None,
        Expr::Garbage => None,

        Expr::AttributeBlock(ab) => ab
            .attributes
            .iter()
            .find_map(|attr| {
                find_matching_block_end_in_expr(
                    line,
                    working_set,
                    &attr.expr,
                    global_span_offset,
                    global_cursor_offset,
                )
            })
            .or_else(|| {
                find_matching_block_end_in_expr(
                    line,
                    working_set,
                    &ab.item,
                    global_span_offset,
                    global_cursor_offset,
                )
            }),

        Expr::Table(table) => {
            if expr_last == global_cursor_offset {
                // cursor is at table end
@ -2,13 +2,13 @@

use nu_cmd_base::hook::eval_hook;
use nu_engine::{eval_block, eval_block_with_early_return};
use nu_parser::{lex, parse, unescape_unquote_string, Token, TokenContents};
use nu_parser::{Token, TokenContents, lex, parse, unescape_unquote_string};
use nu_protocol::{
    cli_error::report_compile_error,
    PipelineData, ShellError, Span, Value,
    debugger::WithoutDebug,
    engine::{EngineState, Stack, StateWorkingSet},
    report_parse_error, report_parse_warning, report_shell_error, PipelineData, ShellError, Span,
    Value,
    report_error::report_compile_error,
    report_parse_error, report_parse_warning, report_shell_error,
};
#[cfg(windows)]
use nu_utils::enable_vt_processing;
@ -1,7 +1,7 @@
use nu_parser::parse;
use nu_protocol::{
    engine::{EngineState, StateWorkingSet},
    ParseError,
    engine::{EngineState, StateWorkingSet},
};
use reedline::{ValidationResult, Validator};
use std::sync::Arc;
@ -1,5 +1,5 @@
use nu_protocol::HistoryFileFormat;
use nu_test_support::{nu, Outcome};
use nu_test_support::{Outcome, nu};
use reedline::{
    FileBackedHistory, History, HistoryItem, HistoryItemId, ReedlineError, SearchQuery,
    SqliteBackedHistory,
@ -5,3 +5,9 @@ fn nu_highlight_not_expr() {
    let actual = nu!("'not false' | nu-highlight | ansi strip");
    assert_eq!(actual.out, "not false");
}

#[test]
fn nu_highlight_where_row_condition() {
    let actual = nu!("'ls | where a b 12345(' | nu-highlight | ansi strip");
    assert_eq!(actual.out, "ls | where a b 12345(");
}

File diff suppressed because it is too large
@ -2,9 +2,9 @@ use nu_engine::eval_block;
|
||||
use nu_parser::parse;
|
||||
use nu_path::{AbsolutePathBuf, PathBuf};
|
||||
use nu_protocol::{
|
||||
PipelineData, ShellError, Span, Value,
|
||||
debugger::WithoutDebug,
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
PipelineData, ShellError, Span, Value,
|
||||
};
|
||||
use nu_test_support::fs;
|
||||
use reedline::Suggestion;
|
||||
@ -14,11 +14,8 @@ fn create_default_context() -> EngineState {
|
||||
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
|
||||
}
|
||||
|
||||
// creates a new engine with the current path into the completions fixtures folder
|
||||
pub fn new_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
// Target folder inside assets
|
||||
let dir = fs::fixtures().join("completions");
|
||||
let dir_str = dir
|
||||
pub fn new_engine_helper(pwd: AbsolutePathBuf) -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
let pwd_str = pwd
|
||||
.clone()
|
||||
.into_os_string()
|
||||
.into_string()
|
||||
@ -36,13 +33,13 @@ pub fn new_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
// Add pwd as env var
|
||||
stack.add_env_var(
|
||||
"PWD".to_string(),
|
||||
Value::string(dir_str.clone(), nu_protocol::Span::new(0, dir_str.len())),
|
||||
Value::string(pwd_str.clone(), nu_protocol::Span::new(0, pwd_str.len())),
|
||||
);
|
||||
stack.add_env_var(
|
||||
"TEST".to_string(),
|
||||
Value::string(
|
||||
"NUSHELL".to_string(),
|
||||
nu_protocol::Span::new(0, dir_str.len()),
|
||||
nu_protocol::Span::new(0, pwd_str.len()),
|
||||
),
|
||||
);
|
||||
#[cfg(windows)]
|
||||
@ -50,7 +47,7 @@ pub fn new_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
"Path".to_string(),
|
||||
Value::string(
|
||||
"c:\\some\\path;c:\\some\\other\\path".to_string(),
|
||||
nu_protocol::Span::new(0, dir_str.len()),
|
||||
nu_protocol::Span::new(0, pwd_str.len()),
|
||||
),
|
||||
);
|
||||
#[cfg(not(windows))]
|
||||
@ -58,7 +55,7 @@ pub fn new_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
"PATH".to_string(),
|
||||
Value::string(
|
||||
"/some/path:/some/other/path".to_string(),
|
||||
nu_protocol::Span::new(0, dir_str.len()),
|
||||
nu_protocol::Span::new(0, pwd_str.len()),
|
||||
),
|
||||
);
|
||||
|
||||
@ -66,46 +63,63 @@ pub fn new_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
let merge_result = engine_state.merge_env(&mut stack);
|
||||
assert!(merge_result.is_ok());
|
||||
|
||||
(dir, dir_str, engine_state, stack)
|
||||
(pwd, pwd_str, engine_state, stack)
|
||||
}
|
||||
|
||||
// creates a new engine with the current path into the completions fixtures folder
|
||||
/// creates a new engine with the current path in the completions fixtures folder
|
||||
pub fn new_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
new_engine_helper(fs::fixtures().join("completions"))
|
||||
}
|
||||
|
||||
/// Adds pseudo PATH env for external completion tests
|
||||
pub fn new_external_engine() -> EngineState {
|
||||
let mut engine = create_default_context();
|
||||
let dir = fs::fixtures().join("external_completions").join("path");
|
||||
let dir_str = dir.to_string_lossy().to_string();
|
||||
let internal_span = nu_protocol::Span::new(0, dir_str.len());
|
||||
engine.add_env_var(
|
||||
"PATH".to_string(),
|
||||
Value::List {
|
||||
vals: vec![Value::String {
|
||||
val: dir_str,
|
||||
internal_span,
|
||||
}],
|
||||
internal_span,
|
||||
},
|
||||
);
|
||||
engine
|
||||
}
|
||||
|
||||
/// creates a new engine with the current path in the dotnu_completions fixtures folder
|
||||
pub fn new_dotnu_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
// Target folder inside assets
|
||||
let dir = fs::fixtures().join("dotnu_completions");
|
||||
let dir_str = dir
|
||||
.clone()
|
||||
.into_os_string()
|
||||
.into_string()
|
||||
.unwrap_or_default();
|
||||
let (dir, dir_str, mut engine_state, mut stack) = new_engine_helper(dir);
|
||||
let dir_span = nu_protocol::Span::new(0, dir_str.len());
|
||||
|
||||
// Create a new engine with default context
|
||||
let mut engine_state = create_default_context();
|
||||
|
||||
// Add $nu
|
||||
engine_state.generate_nu_constant();
|
||||
|
||||
// New stack
|
||||
let mut stack = Stack::new();
|
||||
|
||||
// Add pwd as env var
|
||||
stack.add_env_var("PWD".to_string(), Value::string(dir_str.clone(), dir_span));
|
||||
stack.add_env_var(
|
||||
"TEST".to_string(),
|
||||
Value::string("NUSHELL".to_string(), dir_span),
|
||||
// const $NU_LIB_DIRS
|
||||
let mut working_set = StateWorkingSet::new(&engine_state);
|
||||
let var_id = working_set.add_variable(
|
||||
b"$NU_LIB_DIRS".into(),
|
||||
Span::unknown(),
|
||||
nu_protocol::Type::List(Box::new(nu_protocol::Type::String)),
|
||||
false,
|
||||
);
|
||||
working_set.set_variable_const_val(
|
||||
var_id,
|
||||
Value::test_list(vec![
|
||||
Value::string(file(dir.join("lib-dir1")), dir_span),
|
||||
Value::string(file(dir.join("lib-dir3")), dir_span),
|
||||
]),
|
||||
);
|
||||
let _ = engine_state.merge_delta(working_set.render());
|
||||
|
||||
stack.add_env_var(
|
||||
"NU_LIB_DIRS".to_string(),
|
||||
Value::list(
|
||||
vec![
|
||||
Value::string(file(dir.join("lib-dir1")), dir_span),
|
||||
Value::string(file(dir.join("lib-dir2")), dir_span),
|
||||
Value::string(file(dir.join("lib-dir3")), dir_span),
|
||||
],
|
||||
dir_span,
|
||||
),
|
||||
"NU_LIB_DIRS".into(),
|
||||
Value::test_list(vec![
|
||||
Value::string(file(dir.join("lib-dir2")), dir_span),
|
||||
Value::string(file(dir.join("lib-dir3")), dir_span),
|
||||
]),
|
||||
);
|
||||
|
||||
// Merge environment into the permanent state
|
||||
@ -116,77 +130,15 @@ pub fn new_dotnu_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
}
|
||||
|
||||
pub fn new_quote_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
// Target folder inside assets
|
||||
let dir = fs::fixtures().join("quoted_completions");
|
||||
let dir_str = dir
|
||||
.clone()
|
||||
.into_os_string()
|
||||
.into_string()
|
||||
.unwrap_or_default();
|
||||
|
||||
// Create a new engine with default context
|
||||
let mut engine_state = create_default_context();
|
||||
|
||||
// New stack
|
||||
let mut stack = Stack::new();
|
||||
|
||||
// Add pwd as env var
|
||||
stack.add_env_var(
|
||||
"PWD".to_string(),
|
||||
Value::string(dir_str.clone(), nu_protocol::Span::new(0, dir_str.len())),
|
||||
);
|
||||
stack.add_env_var(
|
||||
"TEST".to_string(),
|
||||
Value::string(
|
||||
"NUSHELL".to_string(),
|
||||
nu_protocol::Span::new(0, dir_str.len()),
|
||||
),
|
||||
);
|
||||
|
||||
// Merge environment into the permanent state
|
||||
let merge_result = engine_state.merge_env(&mut stack);
|
||||
assert!(merge_result.is_ok());
|
||||
|
||||
(dir, dir_str, engine_state, stack)
|
||||
new_engine_helper(fs::fixtures().join("quoted_completions"))
|
||||
}
|
||||
|
||||
pub fn new_partial_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
// Target folder inside assets
|
||||
let dir = fs::fixtures().join("partial_completions");
|
||||
let dir_str = dir
|
||||
.clone()
|
||||
.into_os_string()
|
||||
.into_string()
|
||||
.unwrap_or_default();
|
||||
|
||||
// Create a new engine with default context
|
||||
let mut engine_state = create_default_context();
|
||||
|
||||
// New stack
|
||||
let mut stack = Stack::new();
|
||||
|
||||
// Add pwd as env var
|
||||
stack.add_env_var(
|
||||
"PWD".to_string(),
|
||||
Value::string(dir_str.clone(), nu_protocol::Span::new(0, dir_str.len())),
|
||||
);
|
||||
stack.add_env_var(
|
||||
"TEST".to_string(),
|
||||
Value::string(
|
||||
"NUSHELL".to_string(),
|
||||
nu_protocol::Span::new(0, dir_str.len()),
|
||||
),
|
||||
);
|
||||
|
||||
// Merge environment into the permanent state
|
||||
let merge_result = engine_state.merge_env(&mut stack);
|
||||
assert!(merge_result.is_ok());
|
||||
|
||||
(dir, dir_str, engine_state, stack)
|
||||
new_engine_helper(fs::fixtures().join("partial_completions"))
|
||||
}
|
||||
|
||||
// match a list of suggestions with the expected values
|
||||
pub fn match_suggestions(expected: &Vec<String>, suggestions: &Vec<Suggestion>) {
|
||||
/// match a list of suggestions with the expected values
|
||||
pub fn match_suggestions(expected: &Vec<&str>, suggestions: &Vec<Suggestion>) {
|
||||
let expected_len = expected.len();
|
||||
let suggestions_len = suggestions.len();
|
||||
if expected_len != suggestions_len {
|
||||
@ -197,28 +149,34 @@ pub fn match_suggestions(expected: &Vec<String>, suggestions: &Vec<Suggestion>)
|
||||
)
|
||||
}
|
||||
|
||||
let suggestoins_str = suggestions
|
||||
let suggestions_str = suggestions
|
||||
.iter()
|
||||
.map(|it| it.value.clone())
|
||||
.map(|it| it.value.as_str())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
assert_eq!(expected, &suggestoins_str);
|
||||
assert_eq!(expected, &suggestions_str);
|
||||
}
|
||||
|
||||
// append the separator to the converted path
|
||||
/// match a list of suggestions with the expected values
|
||||
pub fn match_suggestions_by_string(expected: &[String], suggestions: &Vec<Suggestion>) {
|
||||
let expected = expected.iter().map(|it| it.as_str()).collect::<Vec<_>>();
|
||||
match_suggestions(&expected, suggestions);
|
||||
}
|
||||
|
||||
/// append the separator to the converted path
|
||||
pub fn folder(path: impl Into<PathBuf>) -> String {
|
||||
let mut converted_path = file(path);
|
||||
converted_path.push(MAIN_SEPARATOR);
|
||||
converted_path
|
||||
}
|
||||
|
||||
// convert a given path to string
|
||||
/// convert a given path to string
|
||||
pub fn file(path: impl Into<PathBuf>) -> String {
|
||||
path.into().into_os_string().into_string().unwrap()
|
||||
}
|
||||
|
||||
// merge_input executes the given input into the engine
|
||||
// and merges the state
|
||||
/// merge_input executes the given input into the engine
|
||||
/// and merges the state
|
||||
pub fn merge_input(
|
||||
input: &[u8],
|
||||
engine_state: &mut EngineState,
|
||||
@ -236,13 +194,15 @@ pub fn merge_input(
|
||||
|
||||
engine_state.merge_delta(delta)?;
|
||||
|
||||
assert!(eval_block::<WithoutDebug>(
|
||||
engine_state,
|
||||
stack,
|
||||
&block,
|
||||
PipelineData::Value(Value::nothing(Span::unknown()), None),
|
||||
)
|
||||
.is_ok());
|
||||
assert!(
|
||||
eval_block::<WithoutDebug>(
|
||||
engine_state,
|
||||
stack,
|
||||
&block,
|
||||
PipelineData::value(Value::nothing(Span::unknown()), None),
|
||||
)
|
||||
.is_ok()
|
||||
);
|
||||
|
||||
// Merge environment into the permanent state
|
||||
engine_state.merge_env(stack)
|
||||
|
@ -1,3 +1,5 @@
pub mod completions_helpers;

pub use completions_helpers::{file, folder, match_suggestions, merge_input, new_engine};
pub use completions_helpers::{
    file, folder, match_suggestions, match_suggestions_by_string, merge_input, new_engine,
};
@ -1,11 +1,11 @@
[package]
authors = ["The Nushell Project Developers"]
description = "The foundation tools to build Nushell commands."
edition = "2021"
edition = "2024"
license = "MIT"
name = "nu-cmd-base"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-base"
version = "0.102.0"
version = "0.106.2"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@ -13,12 +13,12 @@ version = "0.102.0"
workspace = true

[dependencies]
nu-engine = { path = "../nu-engine", version = "0.102.0", default-features = false }
nu-parser = { path = "../nu-parser", version = "0.102.0" }
nu-path = { path = "../nu-path", version = "0.102.0" }
nu-protocol = { path = "../nu-protocol", version = "0.102.0", default-features = false }
nu-engine = { path = "../nu-engine", version = "0.106.2", default-features = false }
nu-parser = { path = "../nu-parser", version = "0.106.2" }
nu-path = { path = "../nu-path", version = "0.106.2" }
nu-protocol = { path = "../nu-protocol", version = "0.106.2", default-features = false }

indexmap = { workspace = true }
miette = { workspace = true }

[dev-dependencies]
@ -1,4 +1,4 @@
use indexmap::{indexset, IndexSet};
use indexmap::{IndexSet, indexset};
use nu_protocol::Value;

pub fn merge_descriptors(values: &[Value]) -> Vec<String> {
@ -1,11 +1,11 @@
use miette::Result;
use nu_engine::{eval_block, eval_block_with_early_return};
use nu_engine::{eval_block, eval_block_with_early_return, redirect_env};
use nu_parser::parse;
use nu_protocol::{
    cli_error::{report_parse_error, report_shell_error},
    PipelineData, PositionalArg, ShellError, Span, Type, Value, VarId,
    debugger::WithoutDebug,
    engine::{Closure, EngineState, Stack, StateWorkingSet},
    PipelineData, PositionalArg, ShellError, Span, Type, Value, VarId,
    report_error::{report_parse_error, report_shell_error},
};
use std::{collections::HashMap, sync::Arc};

@ -325,19 +325,7 @@ fn run_hook(
    }

    // If all went fine, preserve the environment of the called block
    let caller_env_vars = stack.get_env_var_names(engine_state);
    redirect_env(engine_state, stack, &callee_stack);

    // remove env vars that are present in the caller but not in the callee
    // (the callee hid them)
    for var in caller_env_vars.iter() {
        if !callee_stack.has_env_var(engine_state, var) {
            stack.remove_env_var(engine_state, var);
        }
    }

    // add new env vars from callee to caller
    for (var, value) in callee_stack.get_stack_env_vars() {
        stack.add_env_var(var, value);
    }
    Ok(pipeline_data)
}

@ -1,4 +1,4 @@
use nu_protocol::{ast::CellPath, PipelineData, ShellError, Signals, Span, Value};
use nu_protocol::{PipelineData, ShellError, Signals, Span, Value, ast::CellPath};
use std::sync::Arc;

pub trait CmdArgument {
@ -3,3 +3,6 @@ pub mod formats;
pub mod hook;
pub mod input_handler;
pub mod util;
mod wrap_call;

pub use wrap_call::*;
@ -1,6 +1,6 @@
use nu_protocol::{
    engine::{EngineState, Stack},
    Range, ShellError, Span, Value,
    engine::{EngineState, Stack},
};
use std::ops::Bound;

crates/nu-cmd-base/src/wrap_call.rs (new file, 101 lines)
@ -0,0 +1,101 @@
|
||||
use nu_engine::CallExt;
|
||||
use nu_protocol::{
|
||||
DeclId, FromValue, ShellError, Span,
|
||||
engine::{Call, EngineState, Stack, StateWorkingSet},
|
||||
};
|
||||
|
||||
/// A helper utility to aid in implementing commands which have the same behavior for `run` and `run_const`.
|
||||
///
|
||||
/// Only supports functions in [`Call`] and [`CallExt`] which have a `const` suffix.
|
||||
///
|
||||
/// To use, the actual command logic should be moved to a function. Then, `eval` and `eval_const` can be implemented like this:
|
||||
/// ```rust
|
||||
/// # use nu_engine::command_prelude::*;
|
||||
/// # use nu_cmd_base::WrapCall;
|
||||
/// # fn do_command_logic(call: WrapCall) -> Result<PipelineData, ShellError> { Ok(PipelineData::empty()) }
|
||||
///
|
||||
/// # struct Command {}
|
||||
/// # impl Command {
|
||||
/// fn run(&self, engine_state: &EngineState, stack: &mut Stack, call: &Call) -> Result<PipelineData, ShellError> {
|
||||
/// let call = WrapCall::Eval(engine_state, stack, call);
|
||||
/// do_command_logic(call)
|
||||
/// }
|
||||
///
|
||||
/// fn run_const(&self, working_set: &StateWorkingSet, call: &Call) -> Result<PipelineData, ShellError> {
|
||||
/// let call = WrapCall::ConstEval(working_set, call);
|
||||
/// do_command_logic(call)
|
||||
/// }
|
||||
/// # }
|
||||
/// ```
|
||||
///
|
||||
/// Then, the typical [`Call`] and [`CallExt`] operations can be called using destructuring:
|
||||
///
|
||||
/// ```rust
|
||||
/// # use nu_engine::command_prelude::*;
|
||||
/// # use nu_cmd_base::WrapCall;
|
||||
/// # let call = WrapCall::Eval(&EngineState::new(), &mut Stack::new(), &Call::new(Span::unknown()));
|
||||
/// # fn do_command_logic(call: WrapCall) -> Result<(), ShellError> {
|
||||
/// let (call, required): (_, String) = call.req(0)?;
|
||||
/// let (call, flag): (_, Option<i64>) = call.get_flag("number")?;
|
||||
/// # Ok(())
|
||||
/// # }
|
||||
/// ```
|
||||
///
|
||||
/// A new `WrapCall` instance has to be returned after each function to ensure
|
||||
/// that there is only ever one copy of mutable [`Stack`] reference.
|
||||
pub enum WrapCall<'a> {
|
||||
Eval(&'a EngineState, &'a mut Stack, &'a Call<'a>),
|
||||
ConstEval(&'a StateWorkingSet<'a>, &'a Call<'a>),
|
||||
}
|
||||
|
||||
/// Macro to choose between the non-const and const versions of each [`Call`]/[`CallExt`] function
|
||||
macro_rules! proxy {
|
||||
($self:ident , $eval:ident , $const:ident , $( $args:expr ),*) => {
|
||||
match $self {
|
||||
WrapCall::Eval(engine_state, stack, call) => {
|
||||
Call::$eval(call, engine_state, stack, $( $args ),*)
|
||||
.map(|val| (WrapCall::Eval(engine_state, stack, call), val))
|
||||
},
|
||||
WrapCall::ConstEval(working_set, call) => {
|
||||
Call::$const(call, working_set, $( $args ),*)
|
||||
.map(|val| (WrapCall::ConstEval(working_set, call), val))
|
||||
},
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
impl WrapCall<'_> {
|
||||
pub fn head(&self) -> Span {
|
||||
match self {
|
||||
WrapCall::Eval(_, _, call) => call.head,
|
||||
WrapCall::ConstEval(_, call) => call.head,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn decl_id(&self) -> DeclId {
|
||||
match self {
|
||||
WrapCall::Eval(_, _, call) => call.decl_id,
|
||||
WrapCall::ConstEval(_, call) => call.decl_id,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn has_flag<T: FromValue>(self, flag_name: &str) -> Result<(Self, bool), ShellError> {
|
||||
proxy!(self, has_flag, has_flag_const, flag_name)
|
||||
}
|
||||
|
||||
pub fn get_flag<T: FromValue>(self, name: &str) -> Result<(Self, Option<T>), ShellError> {
|
||||
proxy!(self, get_flag, get_flag_const, name)
|
||||
}
|
||||
|
||||
pub fn req<T: FromValue>(self, pos: usize) -> Result<(Self, T), ShellError> {
|
||||
proxy!(self, req, req_const, pos)
|
||||
}
|
||||
|
||||
pub fn rest<T: FromValue>(self, pos: usize) -> Result<(Self, Vec<T>), ShellError> {
|
||||
proxy!(self, rest, rest_const, pos)
|
||||
}
|
||||
|
||||
pub fn opt<T: FromValue>(self, pos: usize) -> Result<(Self, Option<T>), ShellError> {
|
||||
proxy!(self, opt, opt_const, pos)
|
||||
}
|
||||
}
|
@ -1,11 +1,11 @@
|
||||
[package]
|
||||
authors = ["The Nushell Project Developers"]
|
||||
description = "Nushell's extra commands that are not part of the 1.0 api standard."
|
||||
edition = "2021"
|
||||
edition = "2024"
|
||||
license = "MIT"
|
||||
name = "nu-cmd-extra"
|
||||
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-extra"
|
||||
version = "0.102.0"
|
||||
version = "0.106.2"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
@ -16,13 +16,13 @@ bench = false
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
nu-cmd-base = { path = "../nu-cmd-base", version = "0.102.0" }
|
||||
nu-engine = { path = "../nu-engine", version = "0.102.0", default-features = false }
|
||||
nu-json = { version = "0.102.0", path = "../nu-json" }
|
||||
nu-parser = { path = "../nu-parser", version = "0.102.0" }
|
||||
nu-pretty-hex = { version = "0.102.0", path = "../nu-pretty-hex" }
|
||||
nu-protocol = { path = "../nu-protocol", version = "0.102.0", default-features = false }
|
||||
nu-utils = { path = "../nu-utils", version = "0.102.0", default-features = false }
|
||||
nu-cmd-base = { path = "../nu-cmd-base", version = "0.106.2" }
|
||||
nu-engine = { path = "../nu-engine", version = "0.106.2", default-features = false }
|
||||
nu-json = { version = "0.106.2", path = "../nu-json" }
|
||||
nu-parser = { path = "../nu-parser", version = "0.106.2" }
|
||||
nu-pretty-hex = { version = "0.106.2", path = "../nu-pretty-hex" }
|
||||
nu-protocol = { path = "../nu-protocol", version = "0.106.2", default-features = false }
|
||||
nu-utils = { path = "../nu-utils", version = "0.106.2", default-features = false }
|
||||
|
||||
# Potential dependencies for extras
|
||||
heck = { workspace = true }
|
||||
@ -37,6 +37,6 @@ itertools = { workspace = true }
|
||||
mime = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.102.0" }
|
||||
nu-command = { path = "../nu-command", version = "0.102.0" }
|
||||
nu-test-support = { path = "../nu-test-support", version = "0.102.0" }
|
||||
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.106.2" }
|
||||
nu-command = { path = "../nu-command", version = "0.106.2" }
|
||||
nu-test-support = { path = "../nu-test-support", version = "0.106.2" }
|
||||
|
@ -3,7 +3,12 @@ use nu_protocol::engine::Command;
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn test_examples(cmd: impl Command + 'static) {
|
||||
test_examples::test_examples(cmd);
|
||||
test_examples::test_examples(cmd, &[]);
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn test_examples_with_commands(cmd: impl Command + 'static, commands: &[&dyn Command]) {
|
||||
test_examples::test_examples(cmd, commands);
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@ -16,15 +21,15 @@ mod test_examples {
|
||||
};
|
||||
|
||||
use nu_protocol::{
|
||||
engine::{Command, EngineState, StateWorkingSet},
|
||||
Type,
|
||||
engine::{Command, EngineState, StateWorkingSet},
|
||||
};
|
||||
use std::collections::HashSet;
|
||||
|
||||
pub fn test_examples(cmd: impl Command + 'static) {
|
||||
pub fn test_examples(cmd: impl Command + 'static, commands: &[&dyn Command]) {
|
||||
let examples = cmd.examples();
|
||||
let signature = cmd.signature();
|
||||
let mut engine_state = make_engine_state(cmd.clone_box());
|
||||
let mut engine_state = make_engine_state(cmd.clone_box(), commands);
|
||||
|
||||
let cwd = std::env::current_dir().expect("Could not get current working directory.");
|
||||
|
||||
@ -38,7 +43,7 @@ mod test_examples {
|
||||
check_example_input_and_output_types_match_command_signature(
|
||||
&example,
|
||||
&cwd,
|
||||
&mut make_engine_state(cmd.clone_box()),
|
||||
&mut make_engine_state(cmd.clone_box(), commands),
|
||||
&signature.input_output_types,
|
||||
signature.operates_on_cell_paths(),
|
||||
),
|
||||
@ -57,7 +62,7 @@ mod test_examples {
|
||||
);
|
||||
}
|
||||
|
||||
fn make_engine_state(cmd: Box<dyn Command>) -> Box<EngineState> {
|
||||
fn make_engine_state(cmd: Box<dyn Command>, commands: &[&dyn Command]) -> Box<EngineState> {
|
||||
let mut engine_state = Box::new(EngineState::new());
|
||||
|
||||
let delta = {
|
||||
@ -69,6 +74,10 @@ mod test_examples {
|
||||
working_set.add_decl(Box::new(nu_cmd_lang::If));
|
||||
working_set.add_decl(Box::new(nu_command::MathRound));
|
||||
|
||||
for command in commands {
|
||||
working_set.add_decl(command.clone_box());
|
||||
}
|
||||
|
||||
// Adding the command that is being tested to the working set
|
||||
working_set.add_decl(cmd);
|
||||
working_set.render()
|
||||
|
@ -26,7 +26,7 @@ impl Command for BitsAnd {
|
||||
.required(
|
||||
"target",
|
||||
SyntaxShape::OneOf(vec![SyntaxShape::Binary, SyntaxShape::Int]),
|
||||
"right-hand side of the operation",
|
||||
"Right-hand side of the operation.",
|
||||
)
|
||||
.named(
|
||||
"endian",
|
||||
@ -65,7 +65,7 @@ impl Command for BitsAnd {
|
||||
return Err(ShellError::TypeMismatch {
|
||||
err_message: "Endian must be one of native, little, big".to_string(),
|
||||
span: endian.span,
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@ -113,8 +113,7 @@ impl Command for BitsAnd {
|
||||
])),
|
||||
},
|
||||
Example {
|
||||
description:
|
||||
"Apply bitwise and to binary data of varying lengths with specified endianness",
|
||||
description: "Apply bitwise and to binary data of varying lengths with specified endianness",
|
||||
example: "0x[c0 ff ee] | bits and 0x[ff] --endian big",
|
||||
result: Some(Value::test_binary(vec![0x00, 0x00, 0xee])),
|
||||
},
|
||||
|
@ -1,120 +0,0 @@
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
use nu_protocol::{report_parse_warning, ParseWarning};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct BitsInto;
|
||||
|
||||
impl Command for BitsInto {
|
||||
fn name(&self) -> &str {
|
||||
"into bits"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("into bits")
|
||||
.input_output_types(vec![
|
||||
(Type::Binary, Type::String),
|
||||
(Type::Int, Type::String),
|
||||
(Type::Filesize, Type::String),
|
||||
(Type::Duration, Type::String),
|
||||
(Type::String, Type::String),
|
||||
(Type::Bool, Type::String),
|
||||
(Type::table(), Type::table()),
|
||||
(Type::record(), Type::record()),
|
||||
])
|
||||
.allow_variants_without_examples(true) // TODO: supply exhaustive examples
|
||||
.rest(
|
||||
"rest",
|
||||
SyntaxShape::CellPath,
|
||||
"for a data structure input, convert data at the given cell paths",
|
||||
)
|
||||
.category(Category::Deprecated)
|
||||
}
|
||||
|
||||
fn description(&self) -> &str {
|
||||
"Convert value to a binary string."
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec![]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
report_parse_warning(
|
||||
&StateWorkingSet::new(engine_state),
|
||||
&ParseWarning::DeprecatedWarning {
|
||||
old_command: "into bits".into(),
|
||||
new_suggestion: "use `format bits`".into(),
|
||||
span: head,
|
||||
url: "`help format bits`".into(),
|
||||
},
|
||||
);
|
||||
crate::extra::strings::format::format_bits(engine_state, stack, call, input)
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![
|
||||
Example {
|
||||
description: "convert a binary value into a string, padded to 8 places with 0s",
|
||||
example: "0x[1] | into bits",
|
||||
result: Some(Value::string("00000001",
|
||||
Span::test_data(),
|
||||
)),
|
||||
},
|
||||
Example {
|
||||
description: "convert an int into a string, padded to 8 places with 0s",
|
||||
example: "1 | into bits",
|
||||
result: Some(Value::string("00000001",
|
||||
Span::test_data(),
|
||||
)),
|
||||
},
|
||||
Example {
|
||||
description: "convert a filesize value into a string, padded to 8 places with 0s",
|
||||
example: "1b | into bits",
|
||||
result: Some(Value::string("00000001",
|
||||
Span::test_data(),
|
||||
)),
|
||||
},
|
||||
Example {
|
||||
description: "convert a duration value into a string, padded to 8 places with 0s",
|
||||
example: "1ns | into bits",
|
||||
result: Some(Value::string("00000001",
|
||||
Span::test_data(),
|
||||
)),
|
||||
},
|
||||
Example {
|
||||
description: "convert a boolean value into a string, padded to 8 places with 0s",
|
||||
example: "true | into bits",
|
||||
result: Some(Value::string("00000001",
|
||||
Span::test_data(),
|
||||
)),
|
||||
},
|
||||
Example {
|
||||
description: "convert a string into a raw binary string, padded with 0s to 8 places",
|
||||
example: "'nushell.sh' | into bits",
|
||||
result: Some(Value::string("01101110 01110101 01110011 01101000 01100101 01101100 01101100 00101110 01110011 01101000",
|
||||
Span::test_data(),
|
||||
)),
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_examples() {
|
||||
use crate::test_examples;
|
||||
|
||||
test_examples(BitsInto {})
|
||||
}
|
||||
}
|
@ -1,6 +1,5 @@
|
||||
mod and;
|
||||
mod bits_;
|
||||
mod into;
|
||||
mod not;
|
||||
mod or;
|
||||
mod rotate_left;
|
||||
@ -11,7 +10,6 @@ mod xor;
|
||||
|
||||
pub use and::BitsAnd;
|
||||
pub use bits_::Bits;
|
||||
pub use into::BitsInto;
|
||||
pub use not::BitsNot;
|
||||
pub use or::BitsOr;
|
||||
pub use rotate_left::BitsRol;
|
||||
@ -137,7 +135,7 @@ where
|
||||
(min, max) => (rhs, lhs, max, min),
|
||||
};
|
||||
|
||||
let pad = iter::repeat(0).take(max_len - min_len);
|
||||
let pad = iter::repeat_n(0, max_len - min_len);
|
||||
|
||||
let mut a;
|
||||
let mut b;
|
||||
@ -161,9 +159,10 @@ where
|
||||
}
|
||||
(Value::Binary { .. }, Value::Int { .. }) | (Value::Int { .. }, Value::Binary { .. }) => {
|
||||
Value::error(
|
||||
ShellError::PipelineMismatch {
|
||||
ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "input, and argument, to be both int or both binary"
|
||||
.to_string(),
|
||||
wrong_type: "int and binary".to_string(),
|
||||
dst_span: rhs.span(),
|
||||
src_span: span,
|
||||
},
|
||||
|
@ -1,5 +1,5 @@
|
||||
use super::{get_number_bytes, NumberBytes};
|
||||
use nu_cmd_base::input_handler::{operate, CmdArgument};
|
||||
use super::{NumberBytes, get_number_bytes};
|
||||
use nu_cmd_base::input_handler::{CmdArgument, operate};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
@ -100,8 +100,7 @@ impl Command for BitsNot {
|
||||
)),
|
||||
},
|
||||
Example {
|
||||
description:
|
||||
"Apply logical negation to a list of numbers, treat input as 2 bytes number",
|
||||
description: "Apply logical negation to a list of numbers, treat input as 2 bytes number",
|
||||
example: "[4 3 2] | bits not --number-bytes 2",
|
||||
result: Some(Value::list(
|
||||
vec![
|
||||
@ -113,8 +112,7 @@ impl Command for BitsNot {
|
||||
)),
|
||||
},
|
||||
Example {
|
||||
description:
|
||||
"Apply logical negation to a list of numbers, treat input as signed number",
|
||||
description: "Apply logical negation to a list of numbers, treat input as signed number",
|
||||
example: "[4 3 2] | bits not --signed",
|
||||
result: Some(Value::list(
|
||||
vec![
|
||||
|
@ -27,7 +27,7 @@ impl Command for BitsOr {
|
||||
.required(
|
||||
"target",
|
||||
SyntaxShape::OneOf(vec![SyntaxShape::Binary, SyntaxShape::Int]),
|
||||
"right-hand side of the operation",
|
||||
"Right-hand side of the operation.",
|
||||
)
|
||||
.named(
|
||||
"endian",
|
||||
@ -66,7 +66,7 @@ impl Command for BitsOr {
|
||||
return Err(ShellError::TypeMismatch {
|
||||
err_message: "Endian must be one of native, little, big".to_string(),
|
||||
span: endian.span,
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@ -106,8 +106,7 @@ impl Command for BitsOr {
|
||||
result: Some(Value::test_binary(vec![0xca, 0xfe])),
|
||||
},
|
||||
Example {
|
||||
description:
|
||||
"Apply bitwise or to binary data of varying lengths with specified endianness",
|
||||
description: "Apply bitwise or to binary data of varying lengths with specified endianness",
|
||||
example: "0x[c0 ff ee] | bits or 0x[ff] --endian big",
|
||||
result: Some(Value::test_binary(vec![0xc0, 0xff, 0xff])),
|
||||
},
|
||||
|
@ -1,5 +1,5 @@
|
||||
use super::{get_input_num_type, get_number_bytes, InputNumType, NumberBytes};
|
||||
use nu_cmd_base::input_handler::{operate, CmdArgument};
|
||||
use super::{InputNumType, NumberBytes, get_input_num_type, get_number_bytes};
|
||||
use nu_cmd_base::input_handler::{CmdArgument, operate};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
struct Arguments {
|
||||
@ -37,7 +37,7 @@ impl Command for BitsRol {
|
||||
),
|
||||
])
|
||||
.allow_variants_without_examples(true)
|
||||
.required("bits", SyntaxShape::Int, "number of bits to rotate left")
|
||||
.required("bits", SyntaxShape::Int, "Number of bits to rotate left.")
|
||||
.switch(
|
||||
"signed",
|
||||
"always treat input number as a signed number",
|
||||
@ -222,7 +222,8 @@ fn rotate_bytes_and_bits_left(data: &[u8], byte_shift: usize, bit_shift: usize)
|
||||
debug_assert!(byte_shift < data.len());
|
||||
debug_assert!(
|
||||
(1..8).contains(&bit_shift),
|
||||
"Bit shifts of 0 can't be handled by this impl and everything else should be part of the byteshift");
|
||||
"Bit shifts of 0 can't be handled by this impl and everything else should be part of the byteshift"
|
||||
);
|
||||
let mut bytes = Vec::with_capacity(data.len());
|
||||
let mut next_index = byte_shift;
|
||||
for _ in 0..data.len() {
|
||||
|
@ -1,5 +1,5 @@
|
||||
use super::{get_input_num_type, get_number_bytes, InputNumType, NumberBytes};
|
||||
use nu_cmd_base::input_handler::{operate, CmdArgument};
|
||||
use super::{InputNumType, NumberBytes, get_input_num_type, get_number_bytes};
|
||||
use nu_cmd_base::input_handler::{CmdArgument, operate};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
struct Arguments {
|
||||
@ -37,7 +37,7 @@ impl Command for BitsRor {
|
||||
),
|
||||
])
|
||||
.allow_variants_without_examples(true)
|
||||
.required("bits", SyntaxShape::Int, "number of bits to rotate right")
|
||||
.required("bits", SyntaxShape::Int, "Number of bits to rotate right.")
|
||||
.switch(
|
||||
"signed",
|
||||
"always treat input number as a signed number",
|
||||
|
@ -1,6 +1,6 @@
|
||||
use super::{get_input_num_type, get_number_bytes, InputNumType, NumberBytes};
|
||||
use super::{InputNumType, NumberBytes, get_input_num_type, get_number_bytes};
|
||||
use itertools::Itertools;
|
||||
use nu_cmd_base::input_handler::{operate, CmdArgument};
|
||||
use nu_cmd_base::input_handler::{CmdArgument, operate};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
use std::iter;
|
||||
@ -40,7 +40,7 @@ impl Command for BitsShl {
|
||||
),
|
||||
])
|
||||
.allow_variants_without_examples(true)
|
||||
.required("bits", SyntaxShape::Int, "number of bits to shift left")
|
||||
.required("bits", SyntaxShape::Int, "Number of bits to shift left.")
|
||||
.switch(
|
||||
"signed",
|
||||
"always treat input number as a signed number",
|
||||
@ -237,7 +237,8 @@ fn shift_bytes_left(data: &[u8], byte_shift: usize) -> Vec<u8> {
|
||||
|
||||
fn shift_bytes_and_bits_left(data: &[u8], byte_shift: usize, bit_shift: usize) -> Vec<u8> {
|
||||
use itertools::Position::*;
|
||||
debug_assert!((1..8).contains(&bit_shift),
|
||||
debug_assert!(
|
||||
(1..8).contains(&bit_shift),
|
||||
"Bit shifts of 0 can't be handled by this impl and everything else should be part of the byteshift"
|
||||
);
|
||||
data.iter()
|
||||
@ -249,7 +250,7 @@ fn shift_bytes_and_bits_left(data: &[u8], byte_shift: usize, bit_shift: usize) -
|
||||
Last | Only => lhs << bit_shift,
|
||||
_ => (lhs << bit_shift) | (rhs >> (8 - bit_shift)),
|
||||
})
|
||||
.chain(iter::repeat(0).take(byte_shift))
|
||||
.chain(iter::repeat_n(0, byte_shift))
|
||||
.collect::<Vec<u8>>()
|
||||
}
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
use super::{get_input_num_type, get_number_bytes, InputNumType, NumberBytes};
|
||||
use nu_cmd_base::input_handler::{operate, CmdArgument};
|
||||
use super::{InputNumType, NumberBytes, get_input_num_type, get_number_bytes};
|
||||
use nu_cmd_base::input_handler::{CmdArgument, operate};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
struct Arguments {
|
||||
@ -37,7 +37,7 @@ impl Command for BitsShr {
|
||||
),
|
||||
])
|
||||
.allow_variants_without_examples(true)
|
||||
.required("bits", SyntaxShape::Int, "number of bits to shift right")
|
||||
.required("bits", SyntaxShape::Int, "Number of bits to shift right.")
|
||||
.switch(
|
||||
"signed",
|
||||
"always treat input number as a signed number",
|
||||
|
@ -27,7 +27,7 @@ impl Command for BitsXor {
|
||||
.required(
|
||||
"target",
|
||||
SyntaxShape::OneOf(vec![SyntaxShape::Binary, SyntaxShape::Int]),
|
||||
"right-hand side of the operation",
|
||||
"Right-hand side of the operation.",
|
||||
)
|
||||
.named(
|
||||
"endian",
|
||||
@ -66,7 +66,7 @@ impl Command for BitsXor {
|
||||
return Err(ShellError::TypeMismatch {
|
||||
err_message: "Endian must be one of native, little, big".to_string(),
|
||||
span: endian.span,
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@ -106,8 +106,7 @@ impl Command for BitsXor {
|
||||
result: Some(Value::test_binary(vec![0x70, 0x40])),
|
||||
},
|
||||
Example {
|
||||
description:
|
||||
"Apply bitwise xor to binary data of varying lengths with specified endianness",
|
||||
description: "Apply bitwise xor to binary data of varying lengths with specified endianness",
|
||||
example: "0x[ca fe] | bits xor 0x[aa] --endian big",
|
||||
result: Some(Value::test_binary(vec![0xca, 0x54])),
|
||||
},
|
||||
|
@ -1,74 +0,0 @@
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::{report_parse_warning, ParseWarning};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Fmt;
|
||||
|
||||
impl Command for Fmt {
|
||||
fn name(&self) -> &str {
|
||||
"fmt"
|
||||
}
|
||||
|
||||
fn description(&self) -> &str {
|
||||
"Format a number."
|
||||
}
|
||||
|
||||
fn signature(&self) -> nu_protocol::Signature {
|
||||
Signature::build("fmt")
|
||||
.input_output_types(vec![(Type::Number, Type::record())])
|
||||
.category(Category::Deprecated)
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec![]
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![Example {
|
||||
description: "Get a record containing multiple formats for the number 42",
|
||||
example: "42 | fmt",
|
||||
result: Some(Value::test_record(record! {
|
||||
"binary" => Value::test_string("0b101010"),
|
||||
"debug" => Value::test_string("42"),
|
||||
"display" => Value::test_string("42"),
|
||||
"lowerexp" => Value::test_string("4.2e1"),
|
||||
"lowerhex" => Value::test_string("0x2a"),
|
||||
"octal" => Value::test_string("0o52"),
|
||||
"upperexp" => Value::test_string("4.2E1"),
|
||||
"upperhex" => Value::test_string("0x2A"),
|
||||
})),
|
||||
}]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
report_parse_warning(
|
||||
&StateWorkingSet::new(engine_state),
|
||||
&ParseWarning::DeprecatedWarning {
|
||||
old_command: "fmt".into(),
|
||||
new_suggestion: "use `format number`".into(),
|
||||
span: head,
|
||||
url: "`help format number`".into(),
|
||||
},
|
||||
);
|
||||
crate::extra::strings::format::format_number(engine_state, stack, call, input)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_examples() {
|
||||
use crate::test_examples;
|
||||
|
||||
test_examples(Fmt {})
|
||||
}
|
||||
}
|
@ -1,3 +0,0 @@
mod fmt;

pub(crate) use fmt::Fmt;
@ -1,4 +1,4 @@
|
||||
use nu_engine::{command_prelude::*, ClosureEval, ClosureEvalOnce};
|
||||
use nu_engine::{ClosureEval, ClosureEvalOnce, command_prelude::*};
|
||||
use nu_protocol::engine::Closure;
|
||||
|
||||
#[derive(Clone)]
|
||||
@ -26,7 +26,7 @@ impl Command for EachWhile {
|
||||
.required(
|
||||
"closure",
|
||||
SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
|
||||
"the closure to run",
|
||||
"The closure to run.",
|
||||
)
|
||||
.category(Category::Filters)
|
||||
}
|
||||
@ -72,7 +72,7 @@ impl Command for EachWhile {
|
||||
|
||||
let metadata = input.metadata();
|
||||
match input {
|
||||
PipelineData::Empty => Ok(PipelineData::Empty),
|
||||
PipelineData::Empty => Ok(PipelineData::empty()),
|
||||
PipelineData::Value(Value::Range { .. }, ..)
|
||||
| PipelineData::Value(Value::List { .. }, ..)
|
||||
| PipelineData::ListStream(..) => {
|
||||
@ -109,7 +109,7 @@ impl Command for EachWhile {
|
||||
.fuse()
|
||||
.into_pipeline_data(head, engine_state.signals().clone()))
|
||||
} else {
|
||||
Ok(PipelineData::Empty)
|
||||
Ok(PipelineData::empty())
|
||||
}
|
||||
}
|
||||
// This match allows non-iterables to be accepted,
|
||||
|
@ -1,4 +1,4 @@
|
||||
use super::{vertical_rotate_value, VerticalDirection};
|
||||
use super::{VerticalDirection, vertical_rotate_value};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
|
@ -1,4 +1,4 @@
|
||||
use super::{horizontal_rotate_value, HorizontalDirection};
|
||||
use super::{HorizontalDirection, horizontal_rotate_value};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
|
@ -1,4 +1,4 @@
|
||||
use super::{horizontal_rotate_value, HorizontalDirection};
|
||||
use super::{HorizontalDirection, horizontal_rotate_value};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
|
@ -1,4 +1,4 @@
|
||||
use super::{vertical_rotate_value, VerticalDirection};
|
||||
use super::{VerticalDirection, vertical_rotate_value};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
|
@ -20,7 +20,7 @@ impl Command for Rotate {
|
||||
.rest(
|
||||
"rest",
|
||||
SyntaxShape::String,
|
||||
"the names to give columns once rotated",
|
||||
"The names to give columns once rotated.",
|
||||
)
|
||||
.category(Category::Filters)
|
||||
.allow_variants_without_examples(true)
|
||||
@ -36,104 +36,93 @@ impl Command for Rotate {
|
||||
description: "Rotate a record clockwise, producing a table (like `transpose` but with column order reversed)",
|
||||
example: "{a:1, b:2} | rotate",
|
||||
result: Some(Value::test_list(vec![
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_int(1),
|
||||
"column1" => Value::test_string("a"),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_int(2),
|
||||
"column1" => Value::test_string("b"),
|
||||
}),
|
||||
],
|
||||
)),
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_int(1),
|
||||
"column1" => Value::test_string("a"),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_int(2),
|
||||
"column1" => Value::test_string("b"),
|
||||
}),
|
||||
])),
|
||||
},
|
||||
Example {
|
||||
description: "Rotate 2x3 table clockwise",
|
||||
example: "[[a b]; [1 2] [3 4] [5 6]] | rotate",
|
||||
result: Some(Value::test_list(
|
||||
vec![
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_int(5),
|
||||
"column1" => Value::test_int(3),
|
||||
"column2" => Value::test_int(1),
|
||||
"column3" => Value::test_string("a"),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_int(6),
|
||||
"column1" => Value::test_int(4),
|
||||
"column2" => Value::test_int(2),
|
||||
"column3" => Value::test_string("b"),
|
||||
}),
|
||||
],
|
||||
)),
|
||||
result: Some(Value::test_list(vec![
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_int(5),
|
||||
"column1" => Value::test_int(3),
|
||||
"column2" => Value::test_int(1),
|
||||
"column3" => Value::test_string("a"),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_int(6),
|
||||
"column1" => Value::test_int(4),
|
||||
"column2" => Value::test_int(2),
|
||||
"column3" => Value::test_string("b"),
|
||||
}),
|
||||
])),
|
||||
},
|
||||
Example {
|
||||
description: "Rotate table clockwise and change columns names",
|
||||
example: "[[a b]; [1 2]] | rotate col_a col_b",
|
||||
result: Some(Value::test_list(
|
||||
vec![
|
||||
Value::test_record(record! {
|
||||
"col_a" => Value::test_int(1),
|
||||
"col_b" => Value::test_string("a"),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"col_a" => Value::test_int(2),
|
||||
"col_b" => Value::test_string("b"),
|
||||
}),
|
||||
],
|
||||
)),
|
||||
result: Some(Value::test_list(vec![
|
||||
Value::test_record(record! {
|
||||
"col_a" => Value::test_int(1),
|
||||
"col_b" => Value::test_string("a"),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"col_a" => Value::test_int(2),
|
||||
"col_b" => Value::test_string("b"),
|
||||
}),
|
||||
])),
|
||||
},
|
||||
Example {
|
||||
description: "Rotate table counter clockwise",
|
||||
example: "[[a b]; [1 2]] | rotate --ccw",
|
||||
result: Some(Value::test_list(
|
||||
vec![
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_string("b"),
|
||||
"column1" => Value::test_int(2),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_string("a"),
|
||||
"column1" => Value::test_int(1),
|
||||
}),
|
||||
],
|
||||
)),
|
||||
result: Some(Value::test_list(vec![
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_string("b"),
|
||||
"column1" => Value::test_int(2),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_string("a"),
|
||||
"column1" => Value::test_int(1),
|
||||
}),
|
||||
])),
|
||||
},
|
||||
Example {
|
||||
description: "Rotate table counter-clockwise",
|
||||
example: "[[a b]; [1 2] [3 4] [5 6]] | rotate --ccw",
|
||||
result: Some(Value::test_list(
|
||||
vec![
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_string("b"),
|
||||
"column1" => Value::test_int(2),
|
||||
"column2" => Value::test_int(4),
|
||||
"column3" => Value::test_int(6),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_string("a"),
|
||||
"column1" => Value::test_int(1),
|
||||
"column2" => Value::test_int(3),
|
||||
"column3" => Value::test_int(5),
|
||||
}),
|
||||
],
|
||||
)),
|
||||
result: Some(Value::test_list(vec![
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_string("b"),
|
||||
"column1" => Value::test_int(2),
|
||||
"column2" => Value::test_int(4),
|
||||
"column3" => Value::test_int(6),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_string("a"),
|
||||
"column1" => Value::test_int(1),
|
||||
"column2" => Value::test_int(3),
|
||||
"column3" => Value::test_int(5),
|
||||
}),
|
||||
])),
|
||||
},
|
||||
Example {
|
||||
description: "Rotate table counter-clockwise and change columns names",
|
||||
example: "[[a b]; [1 2]] | rotate --ccw col_a col_b",
|
||||
result: Some(Value::test_list(
|
||||
vec![
|
||||
Value::test_record(record! {
|
||||
"col_a" => Value::test_string("b"),
|
||||
"col_b" => Value::test_int(2),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"col_a" => Value::test_string("a"),
|
||||
"col_b" => Value::test_int(1),
|
||||
}),
|
||||
],
|
||||
)),
|
||||
result: Some(Value::test_list(vec![
|
||||
Value::test_record(record! {
|
||||
"col_a" => Value::test_string("b"),
|
||||
"col_b" => Value::test_int(2),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"col_a" => Value::test_string("a"),
|
||||
"col_b" => Value::test_int(1),
|
||||
}),
|
||||
])),
|
||||
},
|
||||
]
|
||||
}
|
||||
|
@@ -1,5 +1,5 @@
use nu_engine::{command_prelude::*, ClosureEval};
use nu_protocol::{engine::Closure, PipelineIterator};
use nu_engine::{ClosureEval, command_prelude::*};
use nu_protocol::{PipelineIterator, engine::Closure};
use std::collections::HashSet;

#[derive(Clone)]
@@ -12,11 +12,14 @@ impl Command for UpdateCells {

fn signature(&self) -> Signature {
Signature::build("update cells")
.input_output_types(vec![(Type::table(), Type::table())])
.input_output_types(vec![
(Type::table(), Type::table()),
(Type::record(), Type::record()),
])
.required(
"closure",
SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
"the closure to run an update for each cell",
"The closure to run an update for each cell.",
)
.named(
"columns",
@@ -77,6 +80,15 @@ impl Command for UpdateCells {
"2021-11-18" => Value::test_string(""),
})])),
},
Example {
example: r#"{a: 1, b: 2, c: 3} | update cells { $in + 10 }"#,
description: "Update each value in a record.",
result: Some(Value::test_record(record! {
"a" => Value::test_int(11),
"b" => Value::test_int(12),
"c" => Value::test_int(13),
})),
},
]
}

@@ -85,7 +97,7 @@ impl Command for UpdateCells {
engine_state: &EngineState,
stack: &mut Stack,
call: &Call,
input: PipelineData,
mut input: PipelineData,
) -> Result<PipelineData, ShellError> {
let head = call.head;
let closure: Closure = call.req(engine_state, stack, 0)?;
@@ -102,14 +114,51 @@ impl Command for UpdateCells {

let metadata = input.metadata();

Ok(UpdateCellIterator {
iter: input.into_iter(),
closure: ClosureEval::new(engine_state, stack, closure),
columns,
span: head,
match input {
PipelineData::Value(
Value::Record {
ref mut val,
internal_span,
},
..,
) => {
let val = val.to_mut();
update_record(
val,
&mut ClosureEval::new(engine_state, stack, closure),
internal_span,
columns.as_ref(),
);
Ok(input)
}
_ => Ok(UpdateCellIterator {
iter: input.into_iter(),
closure: ClosureEval::new(engine_state, stack, closure),
columns,
span: head,
}
.into_pipeline_data(head, engine_state.signals().clone())
.set_metadata(metadata)),
}
}
}

fn update_record(
record: &mut Record,
closure: &mut ClosureEval,
span: Span,
cols: Option<&HashSet<String>>,
) {
if let Some(columns) = cols {
for (col, val) in record.iter_mut() {
if columns.contains(col) {
*val = eval_value(closure, span, std::mem::take(val));
}
}
} else {
for (_, val) in record.iter_mut() {
*val = eval_value(closure, span, std::mem::take(val))
}
}
.into_pipeline_data(head, engine_state.signals().clone())
.set_metadata(metadata))
}
}

@@ -128,18 +177,7 @@ impl Iterator for UpdateCellIterator {

let value = if let Value::Record { val, .. } = &mut value {
let val = val.to_mut();
if let Some(columns) = &self.columns {
for (col, val) in val.iter_mut() {
if columns.contains(col) {
*val = eval_value(&mut self.closure, self.span, std::mem::take(val));
}
}
} else {
for (_, val) in val.iter_mut() {
*val = eval_value(&mut self.closure, self.span, std::mem::take(val))
}
}

update_record(val, &mut self.closure, self.span, self.columns.as_ref());
value
} else {
eval_value(&mut self.closure, self.span, value)
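The `update cells` change above special-cases record input: a single record is rewritten in place through the new `update_record` helper, while table input still streams through `UpdateCellIterator`. The in-place pattern it relies on, taking each value out with `std::mem::take`, running the closure, and writing the result back, can be sketched independently of nushell's types roughly like this (the function name, map type, and column filter here are stand-ins, not the real API):

use std::collections::{BTreeMap, HashSet};

/// Apply `f` to every value of `record`, or only to values whose key appears in
/// `cols` when a column filter is given. Each value is moved out with `mem::take`
/// so the closure can consume it and hand back a replacement.
fn update_record<V: Default>(
    record: &mut BTreeMap<String, V>,
    cols: Option<&HashSet<String>>,
    mut f: impl FnMut(V) -> V,
) {
    for (key, val) in record.iter_mut() {
        if cols.map_or(true, |c| c.contains(key)) {
            *val = f(std::mem::take(val));
        }
    }
}

fn main() {
    // Rough analogue of `{a: 1, b: 2, c: 3} | update cells { $in + 10 }`.
    let mut rec = BTreeMap::from([
        ("a".to_string(), 1_i64),
        ("b".to_string(), 2),
        ("c".to_string(), 3),
    ]);
    update_record(&mut rec, None, |v| v + 10);
    assert_eq!(rec.values().copied().collect::<Vec<_>>(), vec![11, 12, 13]);
}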
@@ -55,7 +55,7 @@ fn from_url(input: PipelineData, head: Span) -> Result<PipelineData, ShellError>
.map(|(k, v)| (k, Value::string(v, head)))
.collect();

Ok(PipelineData::Value(Value::record(record, head), metadata))
Ok(PipelineData::value(Value::record(record, head), metadata))
}
_ => Err(ShellError::UnsupportedInput {
msg: "String not compatible with URL encoding".to_string(),
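For context, `from url` turns a URL-encoded query string into a record of string values, which is what the `.map(|(k, v)| (k, Value::string(v, head)))` line above feeds into. A rough stand-alone sketch of that parsing step (it deliberately skips percent-decoding, which the real command does handle):

use std::collections::BTreeMap;

/// Split `k=v&k2=v2` pairs into a string map. Keys without a `=` get an empty value.
fn from_url_query(s: &str) -> BTreeMap<String, String> {
    s.split('&')
        .filter(|pair| !pair.is_empty())
        .map(|pair| match pair.split_once('=') {
            Some((k, v)) => (k.to_string(), v.to_string()),
            None => (pair.to_string(), String::new()),
        })
        .collect()
}

fn main() {
    let record = from_url_query("bread=baguette&cheese=comt%C3%A9");
    assert_eq!(record["bread"], "baguette");
    // "comt%C3%A9" stays percent-encoded in this sketch; the real command decodes it.
    assert_eq!(record["cheese"], "comt%C3%A9");
}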
@@ -101,7 +101,7 @@ impl Command for ToHtml {
.named(
"theme",
SyntaxShape::String,
"the name of the theme to use (github, blulocolight, ...)",
"the name of the theme to use (github, blulocolight, ...); case-insensitive",
Some('t'),
)
.switch(
@@ -109,18 +109,26 @@ impl Command for ToHtml {
"produce a color table of all available themes",
Some('l'),
)
.switch("raw", "do not escape html tags", Some('r'))
.category(Category::Formats)
}

fn examples(&self) -> Vec<Example> {
vec![
Example {
description: "Outputs an HTML string representing the contents of this table",
description: "Outputs an HTML string representing the contents of this table",
example: "[[foo bar]; [1 2]] | to html",
result: Some(Value::test_string(
r#"<html><style>body { background-color:white;color:black; }</style><body><table><thead><tr><th>foo</th><th>bar</th></tr></thead><tbody><tr><td>1</td><td>2</td></tr></tbody></table></body></html>"#,
)),
},
Example {
description: "Outputs an HTML string using a record of xml data",
example: r#"{tag: a attributes: { style: "color: red" } content: ["hello!"] } | to xml | to html --raw"#,
result: Some(Value::test_string(
r#"<html><style>body { background-color:white;color:black; }</style><body><a style="color: red">hello!</a></body></html>"#,
)),
},
Example {
description: "Optionally, only output the html for the content itself",
example: "[[foo bar]; [1 2]] | to html --partial",
@@ -163,9 +171,16 @@ fn get_theme_from_asset_file(
) -> Result<HashMap<&'static str, String>, ShellError> {
let theme_name = match theme {
Some(s) => &s.item,
None => "default", // There is no theme named "default" so this will be HtmlTheme::default(), which is "nu_default".
None => {
return Ok(convert_html_theme_to_hash_map(
is_dark,
&HtmlTheme::default(),
));
}
};

let theme_span = theme.map(|s| s.span).unwrap_or(Span::unknown());

// 228 themes come from
// https://github.com/mbadolato/iTerm2-Color-Schemes/tree/master/windowsterminal
// we should find a hit on any name in there
@@ -175,8 +190,17 @@ fn get_theme_from_asset_file(
let th = asset
.themes
.into_iter()
.find(|n| n.name.eq_ignore_case(theme_name)) // case insensitive search
.unwrap_or_default();
.find(|n| n.name.eq_ignore_case(theme_name)); // case insensitive search

let th = match th {
Some(t) => t,
None => {
return Err(ShellError::TypeMismatch {
err_message: format!("Unknown HTML theme '{theme_name}'"),
span: theme_span,
});
}
};

Ok(convert_html_theme_to_hash_map(is_dark, &th))
}
@@ -238,6 +262,7 @@ fn to_html(
let dark = call.has_flag(engine_state, stack, "dark")?;
let partial = call.has_flag(engine_state, stack, "partial")?;
let list = call.has_flag(engine_state, stack, "list")?;
let raw = call.has_flag(engine_state, stack, "raw")?;
let theme: Option<Spanned<String>> = call.get_flag(engine_state, stack, "theme")?;
let config = &stack.get_config(engine_state);

@@ -257,18 +282,20 @@ fn to_html(
None => head,
};

let color_hm = get_theme_from_asset_file(dark, theme.as_ref());
let color_hm = match color_hm {
let color_hm = match get_theme_from_asset_file(dark, theme.as_ref()) {
Ok(c) => c,
_ => {
return Err(ShellError::GenericError {
error: "Error finding theme name".into(),
msg: "Error finding theme name".into(),
span: Some(theme_span),
help: None,
inner: vec![],
})
}
Err(e) => match e {
ShellError::TypeMismatch {
err_message,
span: _,
} => {
return Err(ShellError::TypeMismatch {
err_message,
span: theme_span,
});
}
_ => return Err(e),
},
};

// change the color of the page
@@ -301,15 +328,15 @@ fn to_html(
let inner_value = match vec_of_values.len() {
0 => String::default(),
1 => match headers {
Some(headers) => html_table(vec_of_values, headers, config),
Some(headers) => html_table(vec_of_values, headers, raw, config),
None => {
let value = &vec_of_values[0];
html_value(value.clone(), config)
html_value(value.clone(), raw, config)
}
},
_ => match headers {
Some(headers) => html_table(vec_of_values, headers, config),
None => html_list(vec_of_values, config),
Some(headers) => html_table(vec_of_values, headers, raw, config),
None => html_list(vec_of_values, raw, config),
},
};

@@ -377,19 +404,19 @@ fn theme_demo(span: Span) -> PipelineData {
})
}

fn html_list(list: Vec<Value>, config: &Config) -> String {
fn html_list(list: Vec<Value>, raw: bool, config: &Config) -> String {
let mut output_string = String::new();
output_string.push_str("<ol>");
for value in list {
output_string.push_str("<li>");
output_string.push_str(&html_value(value, config));
output_string.push_str(&html_value(value, raw, config));
output_string.push_str("</li>");
}
output_string.push_str("</ol>");
output_string
}

fn html_table(table: Vec<Value>, headers: Vec<String>, config: &Config) -> String {
fn html_table(table: Vec<Value>, headers: Vec<String>, raw: bool, config: &Config) -> String {
let mut output_string = String::new();

output_string.push_str("<table>");
@@ -412,7 +439,7 @@ fn html_table(table: Vec<Value>, headers: Vec<String>, config: &Config) -> Strin
.cloned()
.unwrap_or_else(|| Value::nothing(span));
output_string.push_str("<td>");
output_string.push_str(&html_value(data, config));
output_string.push_str(&html_value(data, raw, config));
output_string.push_str("</td>");
}
output_string.push_str("</tr>");
@@ -423,7 +450,7 @@ fn html_table(table: Vec<Value>, headers: Vec<String>, config: &Config) -> Strin
output_string
}

fn html_value(value: Value, config: &Config) -> String {
fn html_value(value: Value, raw: bool, config: &Config) -> String {
let mut output_string = String::new();
match value {
Value::Binary { val, .. } => {
@@ -432,11 +459,22 @@ fn html_value(value: Value, config: &Config) -> String {
output_string.push_str(&output);
output_string.push_str("</pre>");
}
other => output_string.push_str(
&v_htmlescape::escape(&other.to_abbreviated_string(config))
.to_string()
.replace('\n', "<br>"),
),
other => {
if raw {
output_string.push_str(
&other
.to_abbreviated_string(config)
.to_string()
.replace('\n', "<br>"),
)
} else {
output_string.push_str(
&v_htmlescape::escape(&other.to_abbreviated_string(config))
.to_string()
.replace('\n', "<br>"),
)
}
}
}
output_string
}
@@ -699,8 +737,92 @@ mod tests {

#[test]
fn test_examples() {
use crate::test_examples;
use crate::test_examples_with_commands;
use nu_command::ToXml;

test_examples(ToHtml {})
test_examples_with_commands(ToHtml {}, &[&ToXml])
}

#[test]
fn get_theme_from_asset_file_returns_default() {
let result = super::get_theme_from_asset_file(false, None);

assert!(result.is_ok(), "Expected Ok result for None theme");

let theme_map = result.unwrap();

assert_eq!(
theme_map.get("background").map(String::as_str),
Some("white"),
"Expected default background color to be white"
);

assert_eq!(
theme_map.get("foreground").map(String::as_str),
Some("black"),
"Expected default foreground color to be black"
);

assert!(
theme_map.contains_key("red"),
"Expected default theme to have a 'red' color"
);

assert!(
theme_map.contains_key("bold_green"),
"Expected default theme to have a 'bold_green' color"
);
}

#[test]
fn returns_a_valid_theme() {
let theme_name = "Dracula".to_string().into_spanned(Span::new(0, 7));
let result = super::get_theme_from_asset_file(false, Some(&theme_name));

assert!(result.is_ok(), "Expected Ok result for valid theme");
let theme_map = result.unwrap();
let required_keys = [
"background",
"foreground",
"red",
"green",
"blue",
"bold_red",
"bold_green",
"bold_blue",
];

for key in required_keys {
assert!(
theme_map.contains_key(key),
"Expected theme to contain key '{key}'"
);
}
}

#[test]
fn fails_with_unknown_theme_name() {
let result = super::get_theme_from_asset_file(
false,
Some(&"doesnt-exist".to_string().into_spanned(Span::new(0, 13))),
);

assert!(result.is_err(), "Expected error for invalid theme name");

if let Err(err) = result {
assert!(
matches!(err, ShellError::TypeMismatch { .. }),
"Expected TypeMismatch error, got: {err:?}"
);

if let ShellError::TypeMismatch { err_message, span } = err {
assert!(
err_message.contains("doesnt-exist"),
"Error message should mention theme name, got: {err_message}"
);
assert_eq!(span.start, 0);
assert_eq!(span.end, 13);
}
}
}
}
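The `--raw` switch threaded through `html_list`, `html_table`, and `html_value` above only decides whether a cell's text is HTML-escaped before being emitted. A stand-alone sketch of that branch, with a hand-rolled escaper standing in for the `v_htmlescape` crate the real code uses:

/// Minimal escaping for the characters that matter inside element content.
fn escape_html(s: &str) -> String {
    s.replace('&', "&amp;")
        .replace('<', "&lt;")
        .replace('>', "&gt;")
}

/// Render one cell: escape unless `raw` was requested, then keep newlines visible.
fn html_value(text: &str, raw: bool) -> String {
    let body = if raw { text.to_string() } else { escape_html(text) };
    body.replace('\n', "<br>")
}

fn main() {
    let cell = "<a style=\"color: red\">hello!</a>";
    assert_eq!(html_value(cell, true), cell); // --raw keeps the markup
    assert!(html_value(cell, false).starts_with("&lt;a")); // default escapes it
}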
@@ -1,9 +1,9 @@
use nu_engine::command_prelude::*;

#[derive(Clone)]
pub struct SubCommand;
pub struct MathArcCos;

impl Command for SubCommand {
impl Command for MathArcCos {
fn name(&self) -> &str {
"math arccos"
}
@@ -114,6 +114,6 @@ mod test {
fn test_examples() {
use crate::test_examples;

test_examples(SubCommand {})
test_examples(MathArcCos {})
}
}
@@ -1,9 +1,9 @@
use nu_engine::command_prelude::*;

#[derive(Clone)]
pub struct SubCommand;
pub struct MathArcCosH;

impl Command for SubCommand {
impl Command for MathArcCosH {
fn name(&self) -> &str {
"math arccosh"
}
@@ -100,6 +100,6 @@ mod test {
fn test_examples() {
use crate::test_examples;

test_examples(SubCommand {})
test_examples(MathArcCosH {})
}
}
@@ -1,9 +1,9 @@
use nu_engine::command_prelude::*;

#[derive(Clone)]
pub struct SubCommand;
pub struct MathArcSin;

impl Command for SubCommand {
impl Command for MathArcSin {
fn name(&self) -> &str {
"math arcsin"
}
@@ -115,6 +115,6 @@ mod test {
fn test_examples() {
use crate::test_examples;

test_examples(SubCommand {})
test_examples(MathArcSin {})
}
}
@@ -1,9 +1,9 @@
use nu_engine::command_prelude::*;

#[derive(Clone)]
pub struct SubCommand;
pub struct MathArcSinH;

impl Command for SubCommand {
impl Command for MathArcSinH {
fn name(&self) -> &str {
"math arcsinh"
}
@@ -88,6 +88,6 @@ mod test {
fn test_examples() {
use crate::test_examples;

test_examples(SubCommand {})
test_examples(MathArcSinH {})
}
}
@@ -1,9 +1,9 @@
use nu_engine::command_prelude::*;

#[derive(Clone)]
pub struct SubCommand;
pub struct MathArcTan;

impl Command for SubCommand {
impl Command for MathArcTan {
fn name(&self) -> &str {
"math arctan"
}
@@ -102,6 +102,6 @@ mod test {
fn test_examples() {
use crate::test_examples;

test_examples(SubCommand {})
test_examples(MathArcTan {})
}
}
@@ -1,9 +1,9 @@
use nu_engine::command_prelude::*;

#[derive(Clone)]
pub struct SubCommand;
pub struct MathArcTanH;

impl Command for SubCommand {
impl Command for MathArcTanH {
fn name(&self) -> &str {
"math arctanh"
}
@@ -101,6 +101,6 @@ mod test {
fn test_examples() {
use crate::test_examples;

test_examples(SubCommand {})
test_examples(MathArcTanH {})
}
}
Some files were not shown because too many files have changed in this diff.