Mirror of https://github.com/nushell/nushell.git (synced 2025-07-01 15:11:52 +02:00)

Compare commits (1004 commits)
SHA1 | Author | Date | |
---|---|---|---|
d2abb8603a | |||
894e0f7658 | |||
a5087c4966 | |||
ac4ab452d4 | |||
5378727049 | |||
0786ddddbd | |||
7617084ca3 | |||
28941f1a06 | |||
43ceb3edec | |||
bffd8e4dd2 | |||
66023f6243 | |||
10fc32e3ef | |||
3148acd3a4 | |||
e6ce8a89be | |||
74f8081290 | |||
2ae1de2470 | |||
028a327ce8 | |||
318862aad6 | |||
9f4510f2e1 | |||
98c7ab96b6 | |||
13114e972b | |||
4a1b3e26ef | |||
1e3248dfe8 | |||
2aa4cd5cc5 | |||
fb908df17d | |||
cdf09abcc0 | |||
fe2c498a81 | |||
fe7122280d | |||
2e0fb7c1a6 | |||
f33b60c001 | |||
7e48607820 | |||
68a821c84a | |||
c5e59efa4d | |||
ec5b9b9f37 | |||
e88a51e930 | |||
35f8d8548a | |||
7a123d3eb1 | |||
3ed45c7ba8 | |||
8b160f9850 | |||
696b2cda4a | |||
435348aa61 | |||
0a5f41abc2 | |||
2b97bc54c5 | |||
ad49c17eba | |||
f1e88d95c1 | |||
6eac9bfd0f | |||
5d94b16d71 | |||
3bd46fe27a | |||
7b1c7debcb | |||
e77a0a48aa | |||
aa37572ddc | |||
a0cecf7658 | |||
23170ff368 | |||
f9ffd9ae29 | |||
d5fa7b8a55 | |||
f94df58486 | |||
6f5bd62a97 | |||
bb3cc9e153 | |||
202dfdaee2 | |||
0674d4960b | |||
85c2035016 | |||
02be83efbf | |||
b964347895 | |||
56ed1eb807 | |||
570175f95d | |||
839010b89d | |||
7694d09d14 | |||
4accc67843 | |||
c070e2d6f7 | |||
d302d63030 | |||
a2e117f8b0 | |||
b1974fae39 | |||
7248de1167 | |||
a9582e1c62 | |||
bb6335830a | |||
c8f3799c20 | |||
bd3a61a2f7 | |||
077643cadf | |||
fa2d9a8a58 | |||
58f98a4260 | |||
066790552a | |||
dcb1a1996c | |||
6b4d06d8a7 | |||
f615038938 | |||
71b74a284a | |||
2b431f994f | |||
7e096e61d7 | |||
9d7a1097f2 | |||
a98b3124c5 | |||
572698bf3e | |||
7162289d77 | |||
14bf25da14 | |||
a455e2e5eb | |||
840b4b854b | |||
ec4941c8ac | |||
dd86f14a5a | |||
63103580d2 | |||
d25df9c00b | |||
778a00efa1 | |||
fea822792f | |||
f6033ac5af | |||
583ef8674e | |||
94bec72079 | |||
28ed21864d | |||
e16ce1df36 | |||
87abfee268 | |||
ba0f069c31 | |||
154856066f | |||
f91713b714 | |||
f1bf485b2a | |||
955de76116 | |||
bf5bd3ff10 | |||
6ac3351fd1 | |||
b6dafa6e67 | |||
152a541696 | |||
ff8c3aa356 | |||
99ed8e42a3 | |||
6a7a23e3fa | |||
9448225690 | |||
28b99bfaf7 | |||
8403fff345 | |||
aa08e81370 | |||
831d1da256 | |||
3481c7e242 | |||
78e29af423 | |||
3ef5e90b64 | |||
6aa30132aa | |||
5d2ef0faf1 | |||
b2e191f836 | |||
d9230a76f3 | |||
f8d325dbfe | |||
88a890c11f | |||
5a28371b18 | |||
a33b5fe6ce | |||
2080719162 | |||
9db0d6bd34 | |||
d7ebe5fdc3 | |||
4d0b6d8821 | |||
b9acb61d9d | |||
2dcd1c5dbe | |||
2aeb77bd3e | |||
17f8ad7210 | |||
4dbdb1fe54 | |||
79359598db | |||
75180d07de | |||
cdc4fb1011 | |||
7e1b922ea7 | |||
0b1e368cea | |||
3b9a0ac7c6 | |||
a1f989caf9 | |||
c01f2ee0e9 | |||
693cb5c142 | |||
d104efdf68 | |||
bd9d865912 | |||
c62cbcd5f8 | |||
ba4723cc9f | |||
488002f4bc | |||
5115260366 | |||
2d557bce5d | |||
17caa31325 | |||
1ba2269aa9 | |||
36030cab8a | |||
345c00aa1e | |||
cc202e2199 | |||
a5a79a7d95 | |||
656f707a0b | |||
48271d8c3e | |||
60bb984e6e | |||
eeb3b38fba | |||
79d9a0542f | |||
5bfec20244 | |||
57d96c09fa | |||
b693db16cc | |||
3ffafa7196 | |||
53ae03bd63 | |||
ba766de5d1 | |||
8c52b7a23a | |||
2fc9506bc7 | |||
4804e6a151 | |||
786ba3bf91 | |||
e66139e6bb | |||
30904bd095 | |||
99329f14a3 | |||
3c583c9a20 | |||
9a6a3a731e | |||
b2043135ed | |||
545697c0b2 | |||
556852ded4 | |||
ca794f6adb | |||
39b43d1e4b | |||
026335fff0 | |||
7c9edbd9ee | |||
341fa7b196 | |||
bd0032898f | |||
ad11e25fc5 | |||
942c66a9f3 | |||
e10d84b72f | |||
cf36f052c4 | |||
266c9ae9e2 | |||
a3702e1eb7 | |||
92354a817c | |||
79a9751a58 | |||
47979651f3 | |||
667502e8da | |||
fabc0a3f45 | |||
ad125abf6a | |||
8e38596bc9 | |||
440a0e960a | |||
85d47c299e | |||
628a47e6dc | |||
f38657e6f3 | |||
504eff73f0 | |||
fbc1408913 | |||
544c46e0e4 | |||
6c8adac0d9 | |||
687b0e16f7 | |||
881c3495c1 | |||
406b606398 | |||
168e7f7b16 | |||
53cd4df806 | |||
65a163357d | |||
1bdec1cbb6 | |||
7e39179b7f | |||
2bb0c1c618 | |||
9c7e37f728 | |||
227d1d9508 | |||
9998fa50a3 | |||
9d76bf97a3 | |||
9cd494cdfa | |||
5d9e2455f7 | |||
b70cce47e2 | |||
4acf21cdcf | |||
9ef1203ef9 | |||
7c80067900 | |||
49a1e22ba3 | |||
ebd89d8b21 | |||
9547c106d3 | |||
88b22a9248 | |||
4af24363c2 | |||
11132f7353 | |||
cd56b97587 | |||
e6b4e59c0f | |||
18fdc5a229 | |||
3699188586 | |||
88acc11501 | |||
d3919a311f | |||
b52e31fac2 | |||
b44926d597 | |||
3fd92b6437 | |||
d80abb20a4 | |||
9068093081 | |||
9d247387ea | |||
3ed44992e6 | |||
283589dc2f | |||
088e6dffbe | |||
ecdb023e2f | |||
63aba5feb7 | |||
08449e174c | |||
971f9ae0f0 | |||
33535c514e | |||
b74d508c0b | |||
14daa93a19 | |||
761946c522 | |||
0c888486c9 | |||
e16c1b7c88 | |||
7926e4ab6d | |||
d1449c4ee7 | |||
81abb17b38 | |||
cbb9f8efe4 | |||
a0dafcb0f8 | |||
78697bb8cf | |||
64319ad90f | |||
88fdf45a24 | |||
b96c4910b4 | |||
fbf3f7cf1c | |||
aaa21c1619 | |||
1be4eaeae3 | |||
2ec1364925 | |||
69bf43ef56 | |||
4d20f63612 | |||
d25fb3ad56 | |||
d78e3c3b0d | |||
de41c9eaf7 | |||
5754f307eb | |||
63e30899f7 | |||
379260b110 | |||
d2629293e8 | |||
f5c39e29b4 | |||
6af9fe5e10 | |||
fa957a1a07 | |||
1d5e7b441b | |||
c12b211075 | |||
6c730def4b | |||
fc1ffe487a | |||
7d301c76d3 | |||
f032e28657 | |||
bb30051006 | |||
67b1dcae44 | |||
d00a040da9 | |||
86f12ffe61 | |||
df5dcdab64 | |||
e09d482fb7 | |||
b072d75300 | |||
d371a78a0b | |||
90863439d1 | |||
a94b3212a3 | |||
33ad2a36bd | |||
b907bf355f | |||
b5d43f1e20 | |||
2fd4a36c0d | |||
46eebc644c | |||
b14bdd865f | |||
96ee57ae5c | |||
d4cd171d6b | |||
30ccabde5b | |||
604aadc938 | |||
2b181bf69c | |||
cdb7b9d9b2 | |||
f438ffc6e4 | |||
54f8e3442b | |||
26c489a0f5 | |||
58f255c0c7 | |||
30f4cc1fef | |||
8a52085ae2 | |||
f152858d83 | |||
042f1dff01 | |||
439fe973c4 | |||
85fbacb197 | |||
73921f4bd7 | |||
be53ecbbaa | |||
374df9d69f | |||
9c84c01aef | |||
e9508b578a | |||
0bdc362e13 | |||
a86f34d0ad | |||
9fcc49e556 | |||
1433f4a520 | |||
e605d8007c | |||
6e638ab381 | |||
5afd74f0b9 | |||
191cd2c970 | |||
ce71ea0b5c | |||
1edd3e7c3b | |||
bdb09a9a00 | |||
a5dd93d154 | |||
74ba00a222 | |||
15a15c123e | |||
20b697f722 | |||
6fa05c12ff | |||
bb3d88ac29 | |||
a1a3ef4608 | |||
d15859dd86 | |||
5c81bcef1a | |||
8cba59040f | |||
735864c384 | |||
63cb01e83b | |||
ef38805ef9 | |||
82e6873702 | |||
df15fc24fe | |||
7ca62b7b35 | |||
8f4c182a0c | |||
acd2fe8c51 | |||
5c57d6a74d | |||
2ac63910f6 | |||
55689ddb50 | |||
3aab69110e | |||
b9e65e35b8 | |||
356e05177c | |||
e48b94965b | |||
2f731fa1ae | |||
e6be167797 | |||
bfe7133e7c | |||
5f8d8b4a4b | |||
b67b6f7fc5 | |||
560c2e63ac | |||
7f758d3e51 | |||
cc04b9a916 | |||
3005fe10e5 | |||
34b3a49cae | |||
8144926dc7 | |||
5f92fd20e9 | |||
6481bf272c | |||
15406a4247 | |||
9e667cc879 | |||
3e55addbdd | |||
e5ff72120a | |||
ffde939df3 | |||
43108de547 | |||
fa113172da | |||
7ead89844a | |||
74724dee80 | |||
db4b26c1ac | |||
8e7405bf49 | |||
77aee7e543 | |||
60041879f3 | |||
a6e455efc3 | |||
8e538c650e | |||
234ba197d7 | |||
5a34671343 | |||
9b139330f8 | |||
6564ed710d | |||
4954a762b7 | |||
8eece32a8d | |||
4b9f479e5c | |||
429c4332b1 | |||
01a00641f9 | |||
55bb501c71 | |||
c55b5c0a55 | |||
ca275f59da | |||
e752d8a964 | |||
fe1174bf16 | |||
1de57eb2b6 | |||
acd7c98c39 | |||
0e4729b203 | |||
6a0c88d516 | |||
dacf80f34a | |||
9ce61dc677 | |||
b912d4c1ea | |||
b150f9f5d8 | |||
9c435fee75 | |||
ce071f2498 | |||
057de06613 | |||
8695b57584 | |||
c9652bce00 | |||
bf86cd50a5 | |||
b4a1f0f003 | |||
8d8304cf91 | |||
92c1051143 | |||
9ebb61fc2d | |||
2254805a6d | |||
6cbd42974b | |||
8584aa79a2 | |||
a3bf2bff49 | |||
5e8754bd85 | |||
a45bd0301a | |||
45c17d9664 | |||
39cdf56214 | |||
9e9fe83bfd | |||
e735d0c561 | |||
39e51f1953 | |||
43a3983d36 | |||
a8b4e81408 | |||
6c13c67528 | |||
2a484a3e7e | |||
a78cd6e231 | |||
a528c043fe | |||
47f9fd3644 | |||
fe9f732c5f | |||
a5d02a0737 | |||
7a945848de | |||
a92949b5c3 | |||
d5ae979094 | |||
388e84e7ef | |||
a5af77dd72 | |||
a2dd948e71 | |||
fe60fb8679 | |||
250071939b | |||
0ea973b78b | |||
a2a346e39c | |||
6dc7ff2335 | |||
1acc2bfd96 | |||
10d65b611f | |||
edb61fc1d5 | |||
155de9f6fc | |||
b82e279f9d | |||
6bbe5b6255 | |||
1019acb7a3 | |||
83b1ec83c9 | |||
d9a00a876b | |||
e4625acf24 | |||
7fb48b9a2f | |||
5fcbefb7b4 | |||
345cdef113 | |||
a7c1b363eb | |||
d45e9671d4 | |||
517dc6d39e | |||
e590d3587c | |||
bdaa32666a | |||
9804cd82f8 | |||
59b85e549c | |||
dae4a9b091 | |||
fb10e1dfc5 | |||
4ca47258a0 | |||
b37662c7e1 | |||
3076378373 | |||
4c4c1f6147 | |||
35c8485442 | |||
3268ecd116 | |||
44493dac51 | |||
6f9b9914cf | |||
ffb9ab9eef | |||
5fe0ca418d | |||
ecc820a8c1 | |||
6047b04208 | |||
8d8b011702 | |||
07c9f681c7 | |||
c422c6cc3d | |||
e251f3a0b4 | |||
77ca73f414 | |||
48c75831fc | |||
4b8a259916 | |||
5733a13409 | |||
f9049c4c6c | |||
66b5931438 | |||
503052b669 | |||
7d6a32c5f8 | |||
a1b7261121 | |||
5a4b6f0f7f | |||
78b227d1ef | |||
07598c9620 | |||
7413ef2824 | |||
0cc735a7b2 | |||
d00038eb4b | |||
47af701380 | |||
24b4ac692e | |||
fb72da0e82 | |||
d339902dc6 | |||
393f424f1c | |||
c8f54476c9 | |||
d42c2b2dbc | |||
ed64a44b82 | |||
1855dfb656 | |||
91c01bf6b3 | |||
29256b161c | |||
32f098d91d | |||
06996d8c7f | |||
2306ef3063 | |||
f82a1d8e4e | |||
f0e0ab35fc | |||
3b20d6890c | |||
6eb00f6c60 | |||
4ecec59224 | |||
bf3bb66c3e | |||
6b3236715b | |||
cbedc8403f | |||
1d68c48a92 | |||
fff4de5c44 | |||
45d33e70db | |||
9b35d59023 | |||
71611dec4f | |||
a122e55129 | |||
6cedc05384 | |||
8efbb48cb0 | |||
0bfa769b7d | |||
5afc49250f | |||
efb81a1277 | |||
4a8124ad1e | |||
8ddebcb932 | |||
b2d7427d2d | |||
b4400c4896 | |||
5a8d4c628f | |||
ebdb7ac2d8 | |||
a33a7960bb | |||
3603610026 | |||
017151dff1 | |||
e892aad3f6 | |||
ad90b6e5f3 | |||
4da7bbbb59 | |||
b9808c8598 | |||
36036d9308 | |||
99c0a2575f | |||
487789b45b | |||
b3d6896977 | |||
8ee52b6ee1 | |||
46dba8853a | |||
eb4d19fb9c | |||
c3678764b4 | |||
9bb2c8faf5 | |||
74dcac3b0d | |||
6d51e34d0a | |||
de76c7a57d | |||
0e23400510 | |||
d0a83fec69 | |||
57510f2fd2 | |||
58b96fdede | |||
9e3d6c3bfd | |||
4a955d7e76 | |||
637283ffad | |||
5afbfb5c2c | |||
d128c0e02b | |||
60e6ea5abd | |||
415607706f | |||
f2b977b9c5 | |||
35e8420780 | |||
1c5846e1fb | |||
5ec6edb9c5 | |||
5d8bedfbe4 | |||
0477493734 | |||
e6b196c141 | |||
49960beb35 | |||
1b677f167e | |||
d881481758 | |||
a3ea0c304a | |||
4fda6d7eaa | |||
771e24913d | |||
aded2c1937 | |||
e54b867e8e | |||
c12b4b4af7 | |||
87ddba0193 | |||
a29b61bd4f | |||
7c6ea81dd4 | |||
d06ebb1686 | |||
d93953a56f | |||
74283c3ebc | |||
8a030f3bfc | |||
5518ffd248 | |||
bcdb9bf5b4 | |||
3509bde1a9 | |||
add20873d0 | |||
427db0d101 | |||
7bac0b417f | |||
56efbd7de9 | |||
91282d4404 | |||
398976e43e | |||
f723bc6989 | |||
22142bd4ae | |||
caf1432dc7 | |||
65c90d5b45 | |||
50ca77437d | |||
71b4949843 | |||
54a18991ab | |||
1fcb98289a | |||
01e5ba01f6 | |||
1134c2f16c | |||
d18cf19a3f | |||
2ec2028637 | |||
b84a01cb1d | |||
ca4d8008d4 | |||
a256f6d0d1 | |||
0aa6954f33 | |||
68d98fcf24 | |||
87086262f3 | |||
3fab427383 | |||
61fa826159 | |||
0b9fc4ff3a | |||
1817d5e01e | |||
0ca0f8ec17 | |||
6be5631477 | |||
678e942bd8 | |||
83ddf0ebe2 | |||
eaea00366b | |||
0788fe5e72 | |||
b2257a5ca3 | |||
3bf5999ef4 | |||
8a85299575 | |||
3db0aed9f7 | |||
09276db2a5 | |||
0e496f900d | |||
1ed645c6c2 | |||
bc6948dc89 | |||
e9c17daecd | |||
53beba7acc | |||
ed0fce9aa6 | |||
995603b08c | |||
a49e5b30ff | |||
97e7d550c8 | |||
bc54930bc6 | |||
0d6e43097d | |||
393717dbb4 | |||
da8cb14f8b | |||
9f01cf333c | |||
d391e912ff | |||
8b185a4008 | |||
90b65018b6 | |||
7ec5f2f2eb | |||
332f1192a6 | |||
944cad35bf | |||
86ae27b0c1 | |||
d9a888528a | |||
05f1b41275 | |||
5b03bca138 | |||
d409171ba8 | |||
0567407f85 | |||
6872d2ac2a | |||
ad4450f9e8 | |||
d8478ca690 | |||
aab31833a2 | |||
c0648a83be | |||
744a28b31d | |||
b4b68afa17 | |||
dd22647fcd | |||
f66136bc86 | |||
8cf9bc9993 | |||
d0aa69bfcb | |||
4a1d12462f | |||
8d5fbc6fcb | |||
85bfdba1e2 | |||
546c753d1e | |||
2c3aade057 | |||
be52f7fb07 | |||
ec5396a352 | |||
403bf1a734 | |||
05ff7a9925 | |||
5c2a767987 | |||
35798ce0cc | |||
47d6a66fbf | |||
616f065324 | |||
1e39a1a7a3 | |||
66ad83c15c | |||
c48e9cdf5b | |||
6a274b860a | |||
2f8a52d256 | |||
0f4a073eaf | |||
626410b2aa | |||
a193b85123 | |||
0f40c44ed2 | |||
cd6f86052d | |||
b9858ea8f8 | |||
cb1eefd24a | |||
a1840e9d20 | |||
b01cbf82c3 | |||
e89c796b41 | |||
758351c732 | |||
77d33766f1 | |||
b0be6c3013 | |||
93e5d8edc9 | |||
9c6bfc0be9 | |||
77e73cef66 | |||
7d963776a0 | |||
ecc153cbef | |||
d1309a36b2 | |||
1d3f6105f5 | |||
e36a2947b9 | |||
64f50a179e | |||
f9cf1d943c | |||
10a42de64f | |||
c66bd5e809 | |||
3f224db990 | |||
491a9c019c | |||
77e9f8d7df | |||
14bf0b000e | |||
0ca49091c0 | |||
bb8949f2b2 | |||
ef7fbf4bf9 | |||
eb2e2e6370 | |||
a8eef9af33 | |||
400a9d3b1e | |||
7095d8994e | |||
2d41613039 | |||
1552eb921a | |||
0ac3f7a1c8 | |||
bddb63ccb5 | |||
7625aed200 | |||
19beafa865 | |||
8543b0789d | |||
bdaa01165e | |||
b2a557d4ed | |||
0903a891e4 | |||
1b2916988e | |||
d74a260883 | |||
31d9c0889c | |||
222c0f11c3 | |||
106ca65c58 | |||
4c97b3dd28 | |||
e672689a76 | |||
2579a827fc | |||
8c487edf62 | |||
0b97f52a8b | |||
21b84a6d65 | |||
d3be5ec750 | |||
4b16406050 | |||
b0ce602e4b | |||
aa876ce24f | |||
71fdf717a8 | |||
4de0347fdc | |||
f34ac9be62 | |||
12652f897a | |||
24ee381fea | |||
494a07f6f3 | |||
61455b457d | |||
57ce6a7c66 | |||
0bd4d27e8d | |||
1701303279 | |||
fd09609b44 | |||
c7583ecdb7 | |||
4eec4a27c7 | |||
86faf753bd | |||
79d0735864 | |||
808e523adc | |||
a52386e837 | |||
c26d91fb61 | |||
0a5f8f05da | |||
a13946e3ef | |||
2e01bf9cba | |||
7e82f8d9b5 | |||
2bef85a913 | |||
e435196956 | |||
af1ab39851 | |||
baddc86d9d | |||
de6bab59bf | |||
0ff1cb1ea6 | |||
3e9bb4028a | |||
878e08cfa4 | |||
4e78f3649b | |||
ccd72fa64a | |||
03e688ea7b | |||
7e949595bd | |||
d31a51e3bc | |||
0df847da15 | |||
6af59cb0ea | |||
2ad0fcb377 | |||
f34034ae58 | |||
0e2167884d | |||
e445c41454 | |||
454d1a995c | |||
62575c9a4f | |||
4898750fc1 | |||
48b4471382 | |||
f7b8f97873 | |||
4ae1b1cc26 | |||
b7a34498e3 | |||
10fd3115c2 | |||
df60793e3b | |||
a4952bc029 | |||
f6ca62384e | |||
d6141881f2 | |||
f93033c20b | |||
33fb17776a | |||
b864a455f2 | |||
b9c78a05aa | |||
26c36e932e | |||
bd096430cb | |||
6148314dcd | |||
a7b5bd18ba | |||
e01eb42e74 | |||
c1d76bfac7 | |||
12483fac92 | |||
1a62d87a42 | |||
2ccbefe01e | |||
438062d7fc | |||
0a1af85200 | |||
5bf077d64f | |||
dec0a2517f | |||
324d625324 | |||
e22b70acff | |||
a5c604c283 | |||
644164fab3 | |||
592e677caf | |||
cc7bdebc1c | |||
27d798270b | |||
50f1e33965 | |||
ffc3727a1e | |||
b093d5d10d | |||
9e589a9d93 | |||
f8d2bff283 | |||
0a2e711351 | |||
ba5258d716 | |||
c358400351 | |||
a3f817d71b | |||
c6e2607868 | |||
a29da8c95b | |||
a09aaf3495 | |||
ffc8e752a5 | |||
6ca07b87b9 | |||
49e45915f0 | |||
96e3a3de68 | |||
2aa5c2c41f | |||
4b3e3a37a3 | |||
2492165fcb | |||
c602b5a1e8 | |||
9bbb9711e4 | |||
680405e527 | |||
44595b44c5 | |||
b27c7702f9 | |||
378a3ae05f | |||
836a56b347 | |||
b36ac8f2f8 | |||
b1e7bb899a | |||
7c285750c7 | |||
e93a8b1d32 | |||
42f0b55de0 | |||
253b223e65 | |||
85bfdca578 | |||
4dd9d0d46b | |||
fd1ac5106d | |||
b572b4ecbd | |||
585e104608 | |||
d0aefa99eb | |||
728e95c52b | |||
83087e0f9d | |||
3fb1e37473 | |||
9890966fa4 | |||
aba0fb0000 | |||
0e86ba4b63 | |||
fc23c6721a | |||
f4a129a792 | |||
8deecc0137 | |||
c7966e81c2 | |||
2659c359e9 | |||
e389e51b2b | |||
0ab6b66d8f | |||
8608d8d873 | |||
d34a2c353f | |||
150b0b6b86 | |||
d0e0701a88 | |||
28b20c5ec3 | |||
9088ef182e | |||
c4d1aa452d | |||
66e52b7cfc | |||
e761954cf0 | |||
58829e3560 | |||
62652cf8c1 | |||
4482862a40 | |||
d80ba00590 | |||
81dd4a8450 | |||
101ed629a4 | |||
95ec2fcce7 | |||
73bc3389e5 | |||
bc38a6a795 | |||
e89866bedb | |||
ca09dbbbee | |||
fa4531fd17 | |||
d17c970f8c | |||
6ca62ef131 | |||
8e84e33638 | |||
b9be416937 | |||
0a8c9b22b0 | |||
527c44ed84 | |||
8ee015a847 | |||
e8cabd16d5 | |||
88e07b5ea4 | |||
f3ee8b50e3 | |||
68ad854b0d | |||
789b2e603a | |||
66398fbf77 | |||
daeb3e5187 | |||
1fd1a3a456 | |||
30ac2d220c | |||
ade7bde813 | |||
cba3e100a0 | |||
664d8d3573 | |||
d5ce509e3a | |||
2f10d19c98 | |||
4c787af26d | |||
8136170431 | |||
007916c2c1 | |||
208ffdc1da | |||
4468dc835c | |||
90a2352337 | |||
0a9d14fcb3 | |||
7863fb1087 | |||
072d2a919d | |||
ccbdc9f6d8 | |||
0f5ea16605 | |||
1cd70d7505 | |||
b0775b3f1e | |||
2894668b3e | |||
1096e653b0 | |||
9777d755d5 | |||
58529aa0b2 | |||
64b6c02a22 | |||
0780300fb3 | |||
b9106b633b | |||
23dfaa2933 | |||
cfd2cc4970 | |||
710349768f | |||
f4bd78b86d | |||
ddb7e4e179 | |||
00601f1835 | |||
c31225fdcf | |||
16b99ed0ba | |||
3b6d340603 | |||
99aea0c71c | |||
023e244958 | |||
659d890ecf | |||
8e9ed14b89 | |||
f4bf7316fe | |||
0527f9bf0d | |||
055edd886d | |||
5e70d4121a | |||
f9b5d8bc5e | |||
2917c045fb | |||
6e6ef862c5 | |||
a7fdca05c6 | |||
ddc33dc74a | |||
a562f492e3 | |||
58f0d0b945 | |||
67d1249b2b | |||
66e5e42fb1 | |||
1f01b6438f | |||
bea7ec33c1 | |||
b5561f35b9 | |||
b796cda060 | |||
4c308b7f2f | |||
d50eb9b41b | |||
9168301369 | |||
e8d930f659 | |||
aef88aa03e | |||
ec4370069a | |||
c79ece2b21 | |||
99076af18b | |||
a0e3ad2b70 | |||
e89e734ca2 | |||
9945241b77 | |||
215ed141e7 | |||
f189ee67a1 | |||
babc7d3baf | |||
8f4807020f | |||
31e1410191 | |||
24d7227e27 | |||
c130ca1bc6 | |||
4db960c0a6 | |||
d13ce2aec9 | |||
5e957ecda6 |
.gitattributes  (vendored, new file: 2 lines)

# Example of a `.gitattributes` file which reclassifies `.nu` files as Nushell:
*.nu linguist-language=Nushell
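A quick way to confirm that the attribute is picked up is `git check-attr`; a minimal sketch, using `toolkit.nu` (a file at this repo's root) purely as an example path, runnable from Nushell or any other shell:

```nu
# Ask git which linguist-language value applies to a .nu file in the repo.
git check-attr linguist-language toolkit.nu
# Expected output: toolkit.nu: linguist-language: Nushell
```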
.githooks/pre-commit  (new executable file: 5 lines)

#!/usr/bin/env nu

use ../toolkit.nu fmt

fmt --check --verbose
.githooks/pre-push  (new executable file: 6 lines)

#!/usr/bin/env nu

use ../toolkit.nu [fmt, clippy]

fmt --check --verbose
clippy --verbose
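These hooks only run if Git is told to look in `.githooks`; a minimal way to enable them locally (assuming a standard Git 2.9+ setup, since the repo may document its own preferred method) is:

```nu
# Point git at the repo-local hooks directory so pre-commit and pre-push run automatically.
git config core.hooksPath .githooks
```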
.github/AUTO_ISSUE_TEMPLATE/README.md  (vendored, new file: 1 line)

This directory is intended for templates to automatically create issues with the [create-an-issue](https://github.com/JasonEtco/create-an-issue) action.
.github/AUTO_ISSUE_TEMPLATE/nightly-build-fail.md  (vendored, new file: 16 lines)

---
name: Nightly build of release binaries failed
about: Used to submit issues related to binaries release workflow
title: 'Attention: Nightly build of release binaries failed'
labels: ['build-package', 'priority']
assignees: ''

---

**Nightly build of release binaries failed**

Hi there:

If you see me here that means there is a release failure for the nightly build

Please **click the status badge** to see more details: [](https://github.com/nushell/nushell/actions/workflows/nightly-build.yml)
.github/ISSUE_TEMPLATE/bug_report.yml  (vendored: 1 line added)

@@ -1,5 +1,6 @@
 name: Bug Report
 description: Create a report to help us improve
+labels: ["needs-triage"]
 body:
   - type: textarea
     id: description
.github/ISSUE_TEMPLATE/feature_request.yml  (vendored: 2 changed lines)

@@ -1,6 +1,6 @@
 name: Feature Request
 description: "When you want a new feature for something that doesn't already exist"
-labels: "enhancement"
+labels: ["needs-triage", "enhancement"]
 body:
   - type: textarea
     id: problem
.github/ISSUE_TEMPLATE/standard-library-bug-or-feature-report.md  (vendored, new file: 11 lines)

---
name: standard library bug or feature report
about: Used to submit issues related to the nu standard library
title: ''
labels: ['needs-triage', 'std-library']
assignees: ''

---

**Describe the bug or feature**
A clear and concise description of what the bug is.
.github/pull_request_template.md  (vendored: 34 changed lines)

@@ -1,24 +1,40 @@
+<!--
+if this PR closes one or more issues, you can automatically link the PR with
+them by using one of the [*linking keywords*](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword), e.g.
+- this PR should close #xxxx
+- fixes #xxxx
+
+you can also mention related issues, PRs or discussions!
+-->
+
 # Description
-
-_(Thank you for improving Nushell. Please, check our [contributing guide](../CONTRIBUTING.md) and talk to the core team before making major changes.)_
-
-_(Description of your pull request goes here. **Provide examples and/or screenshots** if your changes affect the user experience.)_
+<!--
+Thank you for improving Nushell. Please, check our [contributing guide](../CONTRIBUTING.md) and talk to the core team before making major changes.
+
+Description of your pull request goes here. **Provide examples and/or screenshots** if your changes affect the user experience.
+-->
 
 # User-Facing Changes
-
-_(List of all changes that impact the user experience here. This helps us keep track of breaking changes.)_
+<!-- List of all changes that impact the user experience here. This helps us keep track of breaking changes. -->
 
 # Tests + Formatting
-
+<!--
 Don't forget to add tests that cover your changes.
 
 Make sure you've run and fixed any issues with these commands:
 
 - `cargo fmt --all -- --check` to check standard code formatting (`cargo fmt --all` applies these changes)
-- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect` to check that you're using the standard code style
+- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used` to check that you're using the standard code style
 - `cargo test --workspace` to check that all tests pass
+- `cargo run -- -c "use std testing; testing run-tests --path crates/nu-std"` to run the tests for the standard library
+
+> **Note**
+> from `nushell` you can also use the `toolkit` as follows
+> ```bash
+> use toolkit.nu # or use an `env_change` hook to activate it automatically
+> toolkit check pr
+> ```
+-->
 
 # After Submitting
-
-If your PR had any user-facing changes, update [the documentation](https://github.com/nushell/nushell.github.io) after the PR is merged, if necessary. This will help us keep the docs up to date.
+<!-- If your PR had any user-facing changes, update [the documentation](https://github.com/nushell/nushell.github.io) after the PR is merged, if necessary. This will help us keep the docs up to date. -->
.github/workflows/ci.yml  (vendored: 100 changed lines)

@@ -1,13 +1,18 @@
 on:
   pull_request:
-  push: # Run CI on the main branch after every merge. This is important to fill the GitHub Actions cache in a way that pull requests can see it
+  push:
     branches:
       - main
 
 name: continuous-integration
 
+env:
+  NUSHELL_CARGO_TARGET: ci
+  NU_LOG_LEVEL: DEBUG
+  CLIPPY_OPTIONS: "-D warnings -D clippy::unwrap_used"
+
 jobs:
-  nu-fmt-clippy:
+  fmt-clippy:
     strategy:
       fail-fast: true
       matrix:

@@ -15,61 +20,58 @@ jobs:
         # builds to link against a too-new-for-many-Linux-installs glibc version. Consider
         # revisiting this when 20.04 is closer to EOL (April 2025)
         platform: [windows-latest, macos-latest, ubuntu-20.04]
-        style: [default, dataframe]
-        rust:
-          - stable
+        feature: [default, dataframe, extra]
         include:
-          - style: default
+          - feature: default
             flags: ""
-          - style: dataframe
-            flags: "--features=dataframe "
+          - feature: dataframe
+            flags: "--features=dataframe"
+          - feature: extra
+            flags: "--features=extra"
         exclude:
-          # only test dataframes on Ubuntu (the fastest platform)
           - platform: windows-latest
-            style: dataframe
+            feature: dataframe
           - platform: macos-latest
-            style: dataframe
+            feature: dataframe
 
     runs-on: ${{ matrix.platform }}
-    env:
-      NUSHELL_CARGO_TARGET: ci
 
     steps:
       - uses: actions/checkout@v3
 
       - name: Setup Rust toolchain and cache
-        uses: actions-rust-lang/setup-rust-toolchain@v1.3.5
+        uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
+        with:
+          rustflags: ""
 
       - name: cargo fmt
         run: cargo fmt --all -- --check
 
       - name: Clippy
-        run: cargo clippy --workspace ${{ matrix.flags }}--exclude nu_plugin_* -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect
+        run: cargo clippy --workspace ${{ matrix.flags }} --exclude nu_plugin_* -- $CLIPPY_OPTIONS
 
-  nu-tests:
-    env:
-      NUSHELL_CARGO_TARGET: ci
-
+  tests:
     strategy:
       fail-fast: true
       matrix:
         platform: [windows-latest, macos-latest, ubuntu-20.04]
-        style: [default, dataframe]
-        rust:
-          - stable
+        feature: [default, dataframe, extra]
         include:
-          - style: default
+          - feature: default
             flags: ""
-          - style: dataframe
+          - feature: dataframe
             flags: "--features=dataframe"
+          - feature: extra
+            flags: "--features=extra"
         exclude:
-          # only test dataframes on Ubuntu (the fastest platform)
           - platform: windows-latest
-            style: dataframe
+            feature: dataframe
           - platform: macos-latest
-            style: dataframe
+            feature: dataframe
+          - platform: windows-latest
+            feature: extra
+          - platform: macos-latest
+            feature: extra
 
     runs-on: ${{ matrix.platform }}
 
@@ -77,21 +79,18 @@ jobs:
       - uses: actions/checkout@v3
 
       - name: Setup Rust toolchain and cache
-        uses: actions-rust-lang/setup-rust-toolchain@v1.3.5
+        uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
+        with:
+          rustflags: ""
 
       - name: Tests
         run: cargo test --workspace --profile ci --exclude nu_plugin_* ${{ matrix.flags }}
 
-  python-virtualenv:
-    env:
-      NUSHELL_CARGO_TARGET: ci
-
+  std-lib-and-python-virtualenv:
     strategy:
       fail-fast: true
       matrix:
         platform: [ubuntu-20.04, macos-latest, windows-latest]
-        rust:
-          - stable
         py:
           - py
 
@@ -101,39 +100,34 @@ jobs:
       - uses: actions/checkout@v3
 
       - name: Setup Rust toolchain and cache
-        uses: actions-rust-lang/setup-rust-toolchain@v1.3.5
+        uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
+        with:
+          rustflags: ""
 
       - name: Install Nushell
-        run: cargo install --locked --path=. --profile ci --no-default-features
+        run: cargo install --path . --locked --no-default-features
+
+      - name: Standard library tests
+        run: nu -c 'use std testing; testing run-tests --path crates/nu-std'
 
       - name: Setup Python
         uses: actions/setup-python@v4
         with:
           python-version: "3.10"
 
-      - run: python -m pip install tox
-
-      # Get only the latest tagged version for stability reasons
       - name: Install virtualenv
-        run: git clone https://github.com/pypa/virtualenv.git && cd virtualenv && git checkout $(git describe --tags | cut -d - -f 1)
+        run: pip install virtualenv
         shell: bash
 
       - name: Test Nushell in virtualenv
-        run: cd virtualenv && tox -e ${{ matrix.py }} -- -k nushell
+        run: nu scripts/test_virtualenv.nu
         shell: bash
 
-  # Build+test plugins on their own, without the rest of Nu. This helps with CI parallelization and
-  # also helps test that the plugins build without any feature unification shenanigans
   plugins:
-    env:
-      NUSHELL_CARGO_TARGET: ci
-
     strategy:
       fail-fast: true
       matrix:
         platform: [windows-latest, macos-latest, ubuntu-20.04]
-        rust:
-          - stable
 
     runs-on: ${{ matrix.platform }}
 
@@ -141,10 +135,12 @@ jobs:
       - uses: actions/checkout@v3
 
       - name: Setup Rust toolchain and cache
-        uses: actions-rust-lang/setup-rust-toolchain@v1.3.5
+        uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
+        with:
+          rustflags: ""
 
       - name: Clippy
-        run: cargo clippy --package nu_plugin_* ${{ matrix.flags }} -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect
+        run: cargo clippy --package nu_plugin_* ${{ matrix.flags }} -- $CLIPPY_OPTIONS
 
       - name: Tests
         run: cargo test --profile ci --package nu_plugin_*
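The same checks can be reproduced locally before pushing; a sketch using the commands the updated PR template lists, run from the repository root (they work the same from Nushell or any other shell):

```nu
# Formatting, lints, and tests, mirroring the fmt-clippy and tests jobs.
cargo fmt --all -- --check
cargo clippy --workspace -- -D warnings -D clippy::unwrap_used
cargo test --workspace
# Standard library tests, as run by the std-lib-and-python-virtualenv job.
cargo run -- -c "use std testing; testing run-tests --path crates/nu-std"
```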
.github/workflows/manual.yml  (vendored, deleted: 41 lines removed)

# This is a basic workflow that is manually triggered
# Don't run it unless you know what you are doing

name: Manual Workflow for Winget Submission

# Controls when the action will run. Workflow runs when manually triggered using the UI
# or API.
on:
  workflow_dispatch:
    # Inputs the workflow accepts.
    inputs:
      ver:
        # Friendly description to be shown in the UI instead of 'ver'
        description: 'The nushell version to release'
        # Default value if no value is explicitly provided
        default: '0.66.0'
        # Input has to be provided for the workflow to run
        required: true
      uri:
        # Friendly description to be shown in the UI instead of 'uri'
        description: 'The nushell windows .msi package URI to publish'
        # Default value if no value is explicitly provided
        default: 'https://github.com/nushell/nushell/releases/download/0.66.0/nu-0.66.0-x86_64-pc-windows-msvc.msi'
        # Input has to be provided for the workflow to run
        required: true

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  # This workflow contains a single job
  rls-winget-pkg:
    name: Publish winget package manually
    # The type of runner that the job will run on
    runs-on: windows-latest

    # Steps represent a sequence of tasks that will be executed as part of the job
    steps:
      # Runs commands using the runners shell
      - name: Submit package to Windows Package Manager Community Repository Manually
        run: |
          iwr https://github.com/microsoft/winget-create/releases/download/v1.0.4.0/wingetcreate.exe -OutFile wingetcreate.exe
          .\wingetcreate.exe update Nushell.Nushell -s -v ${{ github.event.inputs.ver }} -u ${{ github.event.inputs.uri }} -t ${{ secrets.NUSHELL_PAT }}
.github/workflows/nightly-build.yml  (vendored, new file: 230 lines)

#
# REF:
# 1. https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstrategymatrixinclude
# 2. https://github.com/JasonEtco/create-an-issue
# 3. https://docs.github.com/en/actions/learn-github-actions/variables
# 4. https://github.com/actions/github-script
#
name: Nightly Build

on:
  push:
    branches:
      - nightly # Just for test purpose only with the nightly repo
  # This schedule will run only from the default branch
  schedule:
    - cron: '15 1 * * *' # run at 01:15 AM UTC

defaults:
  run:
    shell: bash

jobs:
  prepare:
    name: Prepare
    runs-on: ubuntu-latest
    # This job is required by the release job, so we should make it run both from Nushell repo and nightly repo
    # if: github.repository == 'nushell/nightly'
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        if: github.repository == 'nushell/nightly'
        with:
          ref: main
          fetch-depth: 0
          # Configure PAT here: https://github.com/settings/tokens for the push operation in the following steps
          token: ${{ secrets.WORKFLOW_TOKEN }}

      - name: Setup Nushell
        uses: hustcer/setup-nu@v3
        if: github.repository == 'nushell/nightly'
        with:
          version: 0.81.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      # Synchronize the main branch of nightly repo with the main branch of Nushell official repo
      - name: Prepare for Nightly Release
        shell: nu {0}
        if: github.repository == 'nushell/nightly'
        run: |
          cd $env.GITHUB_WORKSPACE
          git checkout main
          # We can't push if no user name and email are configured
          git config user.name 'hustcer'
          git config user.email 'hustcer@outlook.com'
          git fetch origin main
          git remote add src https://github.com/nushell/nushell.git
          git fetch src main
          # git pull --rebase src main
          # All the changes will be overwritten by the upstream main branch
          git reset --hard src/main
          git push origin main -f
          let sha_short = (git rev-parse --short src/main | str trim | str substring 0..7)
          let tag_name = $'nightly-($sha_short)'
          if (git ls-remote --tags origin $tag_name | is-empty) {
            git tag -a $tag_name -m $'Nightly build from ($sha_short)'
            git push origin --tags
          }

  release:
    name: Release
    needs: prepare
    strategy:
      fail-fast: false
      matrix:
        target:
          - aarch64-apple-darwin
          - x86_64-apple-darwin
          - x86_64-pc-windows-msvc
          - aarch64-pc-windows-msvc
          - x86_64-unknown-linux-gnu
          - x86_64-unknown-linux-musl
          - aarch64-unknown-linux-gnu
          - armv7-unknown-linux-gnueabihf
          - riscv64gc-unknown-linux-gnu
        extra: ['bin']
        include:
          - target: aarch64-apple-darwin
            os: macos-latest
            target_rustflags: ''
          - target: x86_64-apple-darwin
            os: macos-latest
            target_rustflags: ''
          - target: x86_64-pc-windows-msvc
            extra: 'bin'
            os: windows-latest
            target_rustflags: ''
          - target: x86_64-pc-windows-msvc
            extra: msi
            os: windows-latest
            target_rustflags: ''
          - target: aarch64-pc-windows-msvc
            extra: 'bin'
            os: windows-latest
            target_rustflags: ''
          - target: aarch64-pc-windows-msvc
            extra: msi
            os: windows-latest
            target_rustflags: ''
          - target: x86_64-unknown-linux-gnu
            os: ubuntu-20.04
            target_rustflags: ''
          - target: x86_64-unknown-linux-musl
            os: ubuntu-20.04
            target_rustflags: ''
          - target: aarch64-unknown-linux-gnu
            os: ubuntu-20.04
            target_rustflags: ''
          - target: armv7-unknown-linux-gnueabihf
            os: ubuntu-20.04
            target_rustflags: '--exclude=nu-cmd-dataframe'
          - target: riscv64gc-unknown-linux-gnu
            os: ubuntu-20.04
            target_rustflags: '--exclude=nu-cmd-dataframe'

    runs-on: ${{matrix.os}}

    steps:
      - uses: actions/checkout@v3
        with:
          ref: main

      - name: Update Rust Toolchain Target
        run: |
          echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
        with:
          rustflags: ''

      - name: Setup Nushell
        uses: hustcer/setup-nu@v3
        with:
          version: 0.81.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Release Nu Binary
        id: nu
        run: nu .github/workflows/release-pkg.nu
        env:
          OS: ${{ matrix.os }}
          REF: ${{ github.ref }}
          TARGET: ${{ matrix.target }}
          _EXTRA_: ${{ matrix.extra }}
          TARGET_RUSTFLAGS: ${{ matrix.target_rustflags }}

      - name: Create an Issue for Release Failure
        if: ${{ failure() }}
        uses: JasonEtco/create-an-issue@v2.9.1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          update_existing: true
          search_existing: open
          filename: .github/AUTO_ISSUE_TEMPLATE/nightly-build-fail.md

      - name: Set Outputs of Short SHA
        id: vars
        run: |
          echo "date=$(date -u +'%Y-%m-%d')" >> $GITHUB_OUTPUT
          sha_short=$(git rev-parse --short HEAD)
          echo "sha_short=${sha_short:0:7}" >> $GITHUB_OUTPUT

      # REF: https://github.com/marketplace/actions/gh-release
      # Create a release only in nushell/nightly repo
      - name: Publish Archive
        uses: softprops/action-gh-release@v0.1.13
        if: ${{ startsWith(github.repository, 'nushell/nightly') }}
        with:
          draft: false
          prerelease: true
          name: Nu-nightly-${{ steps.vars.outputs.date }}-${{ steps.vars.outputs.sha_short }}
          tag_name: nightly-${{ steps.vars.outputs.sha_short }}
          body: |
            This is a NIGHTLY build of Nushell.
            It is NOT recommended for production use.
          files: ${{ steps.nu.outputs.archive }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  cleanup:
    name: Cleanup
    # Should only run in nushell/nightly repo
    if: github.repository == 'nushell/nightly'
    runs-on: ubuntu-latest
    steps:
      # Sleep for 30 minutes, waiting for the release to be published
      - name: Waiting for Release
        run: sleep 1800

      - uses: actions/checkout@v3
        with:
          ref: main

      - name: Setup Nushell
        uses: hustcer/setup-nu@v3
        with:
          version: 0.81.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      # Keep the last a few releases
      - name: Delete Older Releases
        shell: nu {0}
        run: |
          let KEEP_COUNT = 10
          let deprecated = (http get https://api.github.com/repos/nushell/nightly/releases | sort-by -r created_at | select tag_name id | range $KEEP_COUNT..)
          for release in $deprecated {
            print $'Deleting tag ($release.tag_name)'
            git push origin --delete $release.tag_name
            print $'Deleting release ($release.tag_name)'
            let delete_url = $'https://api.github.com/repos/nushell/nightly/releases/($release.id)'
            let version = "X-GitHub-Api-Version: 2022-11-28"
            let accept = "Accept: application/vnd.github+json"
            let auth = "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}"
            # http delete $delete_url -H $version -H $auth -H $accept
            curl -L -X DELETE -H $accept -H $auth -H $version $delete_url
          }
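The cleanup job's keep-the-newest-10 logic comes down to sorting descending and dropping everything past the first `KEEP_COUNT` rows. A standalone Nushell sketch of the same `sort-by`/`range` pattern, using hypothetical mock records in place of the GitHub API response:

```nu
# Mock release records standing in for the API response (hypothetical data).
let releases = [
    { tag_name: 'nightly-aaa1111', created_at: '2023-07-01T01:15:00Z' }
    { tag_name: 'nightly-bbb2222', created_at: '2023-07-02T01:15:00Z' }
    { tag_name: 'nightly-ccc3333', created_at: '2023-07-03T01:15:00Z' }
]

let KEEP_COUNT = 2
# Newest first, then everything past the first KEEP_COUNT rows is considered deprecated.
let deprecated = ($releases | sort-by -r created_at | range $KEEP_COUNT..)
# Prints only nightly-aaa1111, the oldest release.
$deprecated | get tag_name
```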
140
.github/workflows/release-pkg.nu
vendored
140
.github/workflows/release-pkg.nu
vendored
@ -6,29 +6,37 @@
|
|||||||
# REF:
|
# REF:
|
||||||
# 1. https://github.com/volks73/cargo-wix
|
# 1. https://github.com/volks73/cargo-wix
|
||||||
|
|
||||||
|
# Instructions for manually creating an MSI for Winget Releases when they fail
|
||||||
# Added 2022-11-29 when Windows packaging wouldn't work
|
# Added 2022-11-29 when Windows packaging wouldn't work
|
||||||
# To run this manual for windows
|
# Updated again on 2023-02-23 because msis are still failing validation
|
||||||
# unset CARGO_TARGET_DIR if set
|
# To run this manual for windows here are the steps I take
|
||||||
# hide-env CARGO_TARGET_DIR
|
# checkout the release you want to publish
|
||||||
# let-env TARGET = 'x86_64-pc-windows-msvc'
|
# 1. git checkout 0.76.0
|
||||||
# let-env TARGET_RUSTFLAGS = ''
|
# unset CARGO_TARGET_DIR if set (I have to do this in the parent shell to get it to work)
|
||||||
# let-env GITHUB_WORKSPACE = 'C:\Users\dschroeder\source\repos\forks\nushell'
|
# 2. $env:CARGO_TARGET_DIR = ""
|
||||||
# let-env GITHUB_OUTPUT = 'C:\Users\dschroeder\source\repos\forks\nushell\output\out.txt'
|
# 2. hide-env CARGO_TARGET_DIR
|
||||||
# let-env OS = 'windows-latest'
|
# 3. $env.TARGET = 'x86_64-pc-windows-msvc'
|
||||||
# You need to run this twice. The first pass makes the output folder and builds everything
|
# 4. $env.TARGET_RUSTFLAGS = ''
|
||||||
# The second pass generates the msi file
|
# 5. $env.GITHUB_WORKSPACE = 'C:\Users\dschroeder\source\repos\forks\nushell'
|
||||||
# Pass 1 let-env _EXTRA_ = 'bin'
|
# 6. $env.GITHUB_OUTPUT = 'C:\Users\dschroeder\source\repos\forks\nushell\output\out.txt'
|
||||||
# Pass 2 let-env _EXTRA_ = 'msi'
|
# 7. $env.OS = 'windows-latest'
|
||||||
# make sure 7z.exe is in your path https://www.7-zip.org/download.html
|
# make sure 7z.exe is in your path https://www.7-zip.org/download.html
|
||||||
# let-env Path = ($env.Path | append 'c:\apps\7-zip')
|
# 8. $env.Path = ($env.Path | append 'c:\apps\7-zip')
|
||||||
# make sure aria2c.exe is in your path https://github.com/aria2/aria2
|
# make sure aria2c.exe is in your path https://github.com/aria2/aria2
|
||||||
# let-env Path = ($env.Path | append 'c:\path\to\aria2c')
|
# 9. $env.Path = ($env.Path | append 'c:\path\to\aria2c')
|
||||||
# make sure you have the wixtools installed https://wixtoolset.org/
|
# make sure you have the wixtools installed https://wixtoolset.org/
|
||||||
# let-env Path = ($env.Path | append 'C:\Users\dschroeder\AppData\Local\tauri\WixTools')
|
# 10. $env.Path = ($env.Path | append 'C:\Users\dschroeder\AppData\Local\tauri\WixTools')
|
||||||
# After msi is generated, if you have to update winget-pkgs repo, you'll need to patch the release
|
# You need to run the release-pkg twice. The first pass, with _EXTRA_ as 'bin', makes the output
|
||||||
|
# folder and builds everything. The second pass, that generates the msi file, with _EXTRA_ as 'msi'
|
||||||
|
# 11. $env._EXTRA_ = 'bin'
|
||||||
|
# 12. source .github\workflows\release-pkg.nu
|
||||||
|
# 13. cd ..
|
||||||
|
# 14. $env._EXTRA_ = 'msi'
|
||||||
|
# 15. source .github\workflows\release-pkg.nu
|
||||||
|
# After msi is generated, you have to update winget-pkgs repo, you'll need to patch the release
|
||||||
# by deleting the existing msi and uploading this new msi. Then you'll need to update the hash
|
# by deleting the existing msi and uploading this new msi. Then you'll need to update the hash
|
||||||
# on the winget-pkgs PR. To generate the hash, run this command
|
# on the winget-pkgs PR. To generate the hash, run this command
|
||||||
# open target\wix\nu-0.74.0-x86_64-pc-windows-msvc.msi | hash sha256
|
# 16. open target\wix\nu-0.74.0-x86_64-pc-windows-msvc.msi | hash sha256
|
||||||
# Then, just take the output and put it in the winget-pkgs PR for the hash on the msi
|
# Then, just take the output and put it in the winget-pkgs PR for the hash on the msi
|
||||||
|
|
||||||
|
|
||||||
@ -42,17 +50,17 @@ let flags = $env.TARGET_RUSTFLAGS
|
|||||||
let dist = $'($env.GITHUB_WORKSPACE)/output'
|
let dist = $'($env.GITHUB_WORKSPACE)/output'
|
||||||
let version = (open Cargo.toml | get package.version)
|
let version = (open Cargo.toml | get package.version)
|
||||||
|
|
||||||
$'Debugging info:'
|
print $'Debugging info:'
|
||||||
print { version: $version, bin: $bin, os: $os, target: $target, src: $src, flags: $flags, dist: $dist }; hr-line -b
|
print { version: $version, bin: $bin, os: $os, target: $target, src: $src, flags: $flags, dist: $dist }; hr-line -b
|
||||||
|
|
||||||
# $env
|
# $env
|
||||||
|
|
||||||
let USE_UBUNTU = 'ubuntu-20.04'
|
let USE_UBUNTU = 'ubuntu-20.04'
|
||||||
|
|
||||||
$'(char nl)Packaging ($bin) v($version) for ($target) in ($src)...'; hr-line -b
|
print $'(char nl)Packaging ($bin) v($version) for ($target) in ($src)...'; hr-line -b
|
||||||
if not ('Cargo.lock' | path exists) { cargo generate-lockfile }
|
if not ('Cargo.lock' | path exists) { cargo generate-lockfile }
|
||||||
|
|
||||||
$'Start building ($bin)...'; hr-line
|
print $'Start building ($bin)...'; hr-line
|
||||||
|
|
||||||
# ----------------------------------------------------------------------------
|
# ----------------------------------------------------------------------------
|
||||||
# Build for Ubuntu and macOS
|
# Build for Ubuntu and macOS
|
||||||
@ -62,23 +70,28 @@ if $os in [$USE_UBUNTU, 'macos-latest'] {
|
|||||||
sudo apt update
|
sudo apt update
|
||||||
sudo apt-get install libxcb-composite0-dev -y
|
sudo apt-get install libxcb-composite0-dev -y
|
||||||
}
|
}
|
||||||
if $target == 'aarch64-unknown-linux-gnu' {
|
match $target {
|
||||||
sudo apt-get install gcc-aarch64-linux-gnu -y
|
'aarch64-unknown-linux-gnu' => {
|
||||||
let-env CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER = 'aarch64-linux-gnu-gcc'
|
sudo apt-get install gcc-aarch64-linux-gnu -y
|
||||||
cargo-build-nu $flags
|
$env.CARGO_TARGET_AARCH64_UNKNOWN_LINUX_GNU_LINKER = 'aarch64-linux-gnu-gcc'
|
||||||
} else if $target == 'armv7-unknown-linux-gnueabihf' {
|
cargo-build-nu $flags
|
||||||
sudo apt-get install pkg-config gcc-arm-linux-gnueabihf -y
|
}
|
||||||
let-env CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER = 'arm-linux-gnueabihf-gcc'
|
'riscv64gc-unknown-linux-gnu' => {
|
||||||
cargo-build-nu $flags
|
sudo apt-get install gcc-riscv64-linux-gnu -y
|
||||||
} else if $target == 'riscv64gc-unknown-linux-gnu' {
|
$env.CARGO_TARGET_RISCV64GC_UNKNOWN_LINUX_GNU_LINKER = 'riscv64-linux-gnu-gcc'
|
||||||
sudo apt-get install gcc-riscv64-linux-gnu -y
|
cargo-build-nu $flags
|
||||||
let-env CARGO_TARGET_RISCV64GC_UNKNOWN_LINUX_GNU_LINKER = 'riscv64-linux-gnu-gcc'
|
}
|
||||||
cargo-build-nu $flags
|
'armv7-unknown-linux-gnueabihf' => {
|
||||||
} else {
|
sudo apt-get install pkg-config gcc-arm-linux-gnueabihf -y
|
||||||
# musl-tools to fix 'Failed to find tool. Is `musl-gcc` installed?'
|
$env.CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER = 'arm-linux-gnueabihf-gcc'
|
||||||
# Actually just for x86_64-unknown-linux-musl target
|
cargo-build-nu $flags
|
||||||
if $os == $USE_UBUNTU { sudo apt install musl-tools -y }
|
}
|
||||||
cargo-build-nu $flags
|
_ => {
|
||||||
|
# musl-tools to fix 'Failed to find tool. Is `musl-gcc` installed?'
|
||||||
|
# Actually just for x86_64-unknown-linux-musl target
|
||||||
|
if $os == $USE_UBUNTU { sudo apt install musl-tools -y }
|
||||||
|
cargo-build-nu $flags
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -86,11 +99,7 @@ if $os in [$USE_UBUNTU, 'macos-latest'] {
|
|||||||
# Build for Windows without static-link-openssl feature
|
# Build for Windows without static-link-openssl feature
|
||||||
# ----------------------------------------------------------------------------
|
# ----------------------------------------------------------------------------
|
||||||
if $os in ['windows-latest'] {
|
if $os in ['windows-latest'] {
|
||||||
if ($flags | str trim | is-empty) {
|
cargo-build-nu $flags
|
||||||
cargo build --release --all --target $target
|
|
||||||
} else {
|
|
||||||
cargo build --release --all --target $target $flags
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
# ----------------------------------------------------------------------------
|
# ----------------------------------------------------------------------------
|
||||||
@ -99,31 +108,36 @@ if $os in ['windows-latest'] {
|
|||||||
let suffix = if $os == 'windows-latest' { '.exe' }
|
let suffix = if $os == 'windows-latest' { '.exe' }
|
||||||
# nu, nu_plugin_* were all included
|
# nu, nu_plugin_* were all included
|
||||||
let executable = $'target/($target)/release/($bin)*($suffix)'
|
let executable = $'target/($target)/release/($bin)*($suffix)'
|
||||||
$'Current executable file: ($executable)'
|
print $'Current executable file: ($executable)'
|
||||||
|
|
||||||
cd $src; mkdir $dist;
|
cd $src; mkdir $dist;
|
||||||
rm -rf $'target/($target)/release/*.d' $'target/($target)/release/nu_pretty_hex*'
|
rm -rf $'target/($target)/release/*.d' $'target/($target)/release/nu_pretty_hex*'
|
||||||
$'(char nl)All executable files:'; hr-line
|
print $'(char nl)All executable files:'; hr-line
|
||||||
ls -f $executable
|
# We have to use `print` here to make sure the command output is displayed
|
||||||
|
print (ls -f $executable); sleep 1sec
|
||||||
|
|
||||||
$'(char nl)Copying release files...'; hr-line
|
print $'(char nl)Copying release files...'; hr-line
|
||||||
cp -v README.release.txt $'($dist)/README.txt'
|
"To use Nu plugins, use the register command to tell Nu where to find the plugin. For example:
|
||||||
|
|
||||||
|
> register ./nu_plugin_query" | save $'($dist)/README.txt'
|
||||||
[LICENSE $executable] | each {|it| cp -rv $it $dist } | flatten
|
[LICENSE $executable] | each {|it| cp -rv $it $dist } | flatten
|
||||||
|
# Sleep a few seconds to make sure the cp process finished successfully
|
||||||
|
sleep 3sec
|
||||||
|
|
||||||
$'(char nl)Check binary release version detail:'; hr-line
|
print $'(char nl)Check binary release version detail:'; hr-line
let ver = if $os == 'windows-latest' {
-    (do -i { ./output/nu.exe -c 'version' }) | str join
+    (do -i { .\output\nu.exe -c 'version' }) | str join
} else {
    (do -i { ./output/nu -c 'version' }) | str join
}
if ($ver | str trim | is-empty) {
-    $'(ansi r)Incompatible nu binary...(ansi reset)'
-} else { $ver }
+    print $'(ansi r)Incompatible nu binary...(ansi reset)'
+} else { print $ver }

# ----------------------------------------------------------------------------
# Create a release archive and send it to output for the following steps
# ----------------------------------------------------------------------------
-cd $dist; $'(char nl)Creating release archive...'; hr-line
+cd $dist; print $'(char nl)Creating release archive...'; hr-line
if $os in [$USE_UBUNTU, 'macos-latest'] {

    let files = (ls | get name)
@@ -133,7 +147,7 @@ if $os in [$USE_UBUNTU, 'macos-latest'] {
    mkdir $dest
    $files | each {|it| mv $it $dest } | ignore

-    $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls $dest
+    print $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls $dest

    tar -czf $archive $dest
    print $'archive: ---> ($archive)'; ls $archive
@@ -144,7 +158,7 @@ if $os in [$USE_UBUNTU, 'macos-latest'] {

    let releaseStem = $'($bin)-($version)-($target)'

-    $'(char nl)Download less related stuffs...'; hr-line
+    print $'(char nl)Download less related stuffs...'; hr-line
    aria2c https://github.com/jftuga/less-Windows/releases/download/less-v608/less.exe -o less.exe
    aria2c https://raw.githubusercontent.com/jftuga/less-Windows/master/LICENSE -o LICENSE-for-less.txt

@@ -152,18 +166,18 @@ if $os in [$USE_UBUNTU, 'macos-latest'] {
    if (get-env _EXTRA_) == 'msi' {

        let wixRelease = $'($src)/target/wix/($releaseStem).msi'
-        $'(char nl)Start creating Windows msi package...'
+        print $'(char nl)Start creating Windows msi package...'
        cd $src; hr-line
        # Wix need the binaries be stored in target/release/
        cp -r $'($dist)/*' target/release/
-        cargo install cargo-wix --version 0.3.3
+        cargo install cargo-wix --version 0.3.4
        cargo wix --no-build --nocapture --package nu --output $wixRelease
        print $'archive: ---> ($wixRelease)';
        echo $"archive=($wixRelease)" | save --append $env.GITHUB_OUTPUT

    } else {

-        $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls
+        print $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls
        let archive = $'($dist)/($releaseStem).zip'
        7z a $archive *
        print $'archive: ---> ($archive)';
@@ -176,9 +190,17 @@ if $os in [$USE_UBUNTU, 'macos-latest'] {

def 'cargo-build-nu' [ options: string ] {
    if ($options | str trim | is-empty) {
-        cargo build --release --all --target $target --features=static-link-openssl
+        if $os == 'windows-latest' {
+            cargo build --release --all --target $target
+        } else {
+            cargo build --release --all --target $target --features=static-link-openssl
+        }
    } else {
-        cargo build --release --all --target $target --features=static-link-openssl $options
+        if $os == 'windows-latest' {
+            cargo build --release --all --target $target $options
+        } else {
+            cargo build --release --all --target $target --features=static-link-openssl $options
+        }
    }
}
.github/workflows/release.yml (vendored, 21 changed lines)

@@ -23,6 +23,7 @@ jobs:
        - aarch64-apple-darwin
        - x86_64-apple-darwin
        - x86_64-pc-windows-msvc
+        - aarch64-pc-windows-msvc
        - x86_64-unknown-linux-gnu
        - x86_64-unknown-linux-musl
        - aarch64-unknown-linux-gnu
@@ -44,6 +45,14 @@ jobs:
          extra: msi
          os: windows-latest
          target_rustflags: ''
+        - target: aarch64-pc-windows-msvc
+          extra: 'bin'
+          os: windows-latest
+          target_rustflags: ''
+        - target: aarch64-pc-windows-msvc
+          extra: msi
+          os: windows-latest
+          target_rustflags: ''
        - target: x86_64-unknown-linux-gnu
          os: ubuntu-20.04
          target_rustflags: ''
@@ -55,27 +64,29 @@ jobs:
          target_rustflags: ''
        - target: armv7-unknown-linux-gnueabihf
          os: ubuntu-20.04
-          target_rustflags: ''
+          target_rustflags: '--exclude=nu-cmd-dataframe'
        - target: riscv64gc-unknown-linux-gnu
          os: ubuntu-20.04
-          target_rustflags: ''
+          target_rustflags: '--exclude=nu-cmd-dataframe'

    runs-on: ${{matrix.os}}

    steps:
-      - uses: actions/checkout@v3.1.0
+      - uses: actions/checkout@v3

      - name: Update Rust Toolchain Target
        run: |
          echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml

      - name: Setup Rust toolchain and cache
-        uses: actions-rust-lang/setup-rust-toolchain@v1.3.5
+        uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
+        with:
+          rustflags: ''

      - name: Setup Nushell
        uses: hustcer/setup-nu@v3
        with:
-          version: 0.72.1
+          version: 0.81.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/stale.yml (vendored, 28 changed lines)

@@ -1,28 +0,0 @@ (entire file deleted)
-name: 'Close stale issues and PRs'
-#on: [workflow_dispatch]
-on:
-  schedule:
-    - cron: '30 1 * * *'
-
-permissions:
-  issues: write
-  pull-requests: write
-
-jobs:
-  stale:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/stale@v6
-        with:
-          #debug-only: true
-          ascending: true
-          operations-per-run: 520
-          enable-statistics: true
-          repo-token: ${{ secrets.GITHUB_TOKEN }}
-          close-issue-message: 'This issue has been marked stale for more than 100000 days without activity. Closing this issue, but if you find that the issue is still valid, please reopen.'
-          close-pr-message: 'This PR has been marked stale for more than 100 days without activity. Closing this PR, but if you are still working on it, please reopen.'
-          days-before-issue-stale: 90
-          days-before-pr-stale: 45
-          days-before-issue-close: 100000
-          days-before-pr-close: 100
-          exempt-issue-labels: 'exempt,keep'
.github/workflows/typos.yml (vendored, 4 changed lines)

@@ -7,7 +7,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Actions Repository
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3

-      - name: Check spelling of book
+      - name: Check spelling
        uses: crate-ci/typos@master
.github/workflows/winget-submission.yml (vendored, 23 changed lines)

@@ -1,19 +1,26 @@
name: Submit Nushell package to Windows Package Manager Community Repository

on:
-  workflow_dispatch:
  release:
-    types: [published]
+    types: [released]
+  workflow_dispatch:
+    inputs:
+      tag_name:
+        description: 'Specific tag name'
+        required: true
+        type: string

jobs:

  winget:
    name: Publish winget package
-    runs-on: windows-latest
+    runs-on: ubuntu-latest
    steps:
      - name: Submit package to Windows Package Manager Community Repository
-        run: |
-          iwr https://github.com/microsoft/winget-create/releases/download/v1.0.4.0/wingetcreate.exe -OutFile wingetcreate.exe
-          $github = Get-Content '${{ github.event_path }}' | ConvertFrom-Json
-          $installerUrl = $github.release.assets | Where-Object -Property name -match 'windows-msvc.msi' | Select -ExpandProperty browser_download_url -First 1
-          .\wingetcreate.exe update Nushell.Nushell -s -v $github.release.tag_name -u $installerUrl -t ${{ secrets.NUSHELL_PAT }}
+        uses: vedantmgoyal2009/winget-releaser@v2
+        with:
+          identifier: Nushell.Nushell
+          version: ${{ inputs.tag_name || github.event.release.tag_name }}
+          release-tag: ${{ inputs.tag_name || github.event.release.tag_name }}
+          token: ${{ secrets.NUSHELL_PAT }}
+          fork-user: fdncred
.gitignore (vendored, 13 changed lines)

@@ -22,6 +22,9 @@ debian/nu/
# VSCode's IDE items
.vscode/*

+# JetBrains' Fleet IDE
+.fleet/*
+
# Visual Studio Extension SourceGear Rust items
VSWorkspaceSettings.json
unstable_cargo_features.txt
@@ -38,4 +41,12 @@ tarpaulin-report.html
.vs/*
*.rsproj
*.rsproj.user
*.sln
+*.code-workspace
+
+# direnv
+.direnv/
+.envrc
+
+# pre-commit-hooks
+.pre-commit-config.yaml
@@ -10,3 +10,4 @@ ba = "ba"
Plasticos = "Plasticos"
IIF = "IIF"
numer = "numer"
+ratatui = "ratatui"
CONTRIBUTING.md (192 changed lines)

@@ -2,7 +2,20 @@

Welcome to Nushell and thank you for considering contributing!

-## Review Process
+## Table of contents
+- [Proposing design changes](#proposing-design-changes)
+- [Developing](#developing)
+  - [Setup](#setup)
+  - [Tests](#tests)
+  - [Useful commands](#useful-commands)
+  - [Debugging tips](#debugging-tips)
+- [Git etiquette](#git-etiquette)
+- [Our Rust style](#our-rust-style)
+  - [Generally discouraged](#generally-discouraged)
+  - [Things we want to get better at](#things-we-want-to-get-better-at)
+- [License](#license)
+
+## Proposing design changes

First of all, before diving into the code, if you want to create a new feature, change something significantly, and especially if the change is user-facing, it is a good practice to first get an approval from the core team before starting to work on it.
This saves both your and our time if we realize the change needs to go another direction before spending time on it.
@@ -41,10 +54,13 @@ Tests can be found in different places:
* command examples
* crate-specific tests

-The most comprehensive test suite we have is the `nu-test-support` crate. For testing specific features, such as running Nushell in a REPL mode, we have so-called "testbins". For simple tests, you can find `run_test()` and `fail_test()` functions.
+Most of the tests are built upon the `nu-test-support` crate. For testing specific features, such as running Nushell in a REPL mode, we have so-called "testbins". For simple tests, you can find `run_test()` and `fail_test()` functions.
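To make the testing guidance above concrete, here is a minimal sketch of an integration-style test. It assumes the `nu!` helper macro exported by `nu-test-support`; the exact macro arguments and the fields of the returned outcome may differ between Nushell versions, so treat this as an illustration rather than the canonical API.

```rust
use nu_test_support::nu;

#[test]
fn length_counts_list_items() {
    // Run a small pipeline in a fresh Nushell instance and capture its output.
    // (`cwd:` and the `.out` field are assumptions based on common usage of
    // the nu-test-support helpers, not a guaranteed signature.)
    let actual = nu!(cwd: ".", "echo [1 2 3] | length");

    assert_eq!(actual.out, "3");
}
```

A test in this shape exercises the whole parse/evaluate pipeline rather than a single function, which is closest to how the command-level tests in the crates are written.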

### Useful Commands

+As Nushell is built using a cargo workspace consisting of multiple crates, keep in mind that you may need to pass additional flags compared to how you may be used to it from a single crate project.
+Read cargo's documentation for more details: https://doc.rust-lang.org/cargo/reference/workspaces.html
+
- Build and run Nushell:

  ```shell
@@ -59,7 +75,12 @@ The most comprehensive test suite we have is the `nu-test-support` crate. For te
- Run Clippy on Nushell:

  ```shell
-  cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect
+  cargo clippy --workspace -- -D warnings -D clippy::unwrap_used
+  ```
+  or via the `toolkit.nu` command:
+  ```shell
+  use toolkit.nu clippy
+  clippy
  ```

- Run all tests:
@@ -68,6 +89,17 @@ The most comprehensive test suite we have is the `nu-test-support` crate. For te
  cargo test --workspace
  ```

+  along with dataframe tests
+
+  ```shell
+  cargo test --workspace --features=dataframe
+  ```
+  or via the `toolkit.nu` command:
+  ```shell
+  use toolkit.nu test
+  test
+  ```
+
- Run all tests for a specific command

  ```shell
@@ -79,12 +111,30 @@ The most comprehensive test suite we have is the `nu-test-support` crate. For te
  ```shell
  cargo fmt --all -- --check
  ```
+  or via the `toolkit.nu` command:
+  ```shell
+  use toolkit.nu fmt
+  fmt --check
+  ```

- Format the code in the project

  ```shell
  cargo fmt --all
  ```
+  or via the `toolkit.nu` command:
+  ```shell
+  use toolkit.nu fmt
+  fmt
+  ```
+
+- Set up `git` hooks to check formatting and run `clippy` before committing and pushing:
+
+  ```shell
+  use toolkit.nu setup-git-hooks
+  setup-git-hooks
+  ```
+  _Unfortunately, this hook isn't available on Windows._

### Debugging Tips

@@ -99,3 +149,139 @@ The most comprehensive test suite we have is the `nu-test-support` crate. For te
  cargo run --release -- --log-level trace --log-target file
  open $"($nu.temp-path)/nu-($nu.pid).log"
  ```

+## Git etiquette
+
+As nushell thrives on its broad base of volunteer contributors and maintainers with different backgrounds, we have a few guidelines for how we best utilize git and GitHub for our contributions. We strive to balance three goals with those recommendations:
+
+1. The **volunteer maintainers and contributors** can easily follow the changes you propose, gauge the impact, and come to help you or make a decision.
+2. **You as a contributor** can focus most of your time on improving the quality of the nushell project and contributing your expertise to the code or documentation.
+3. Making sure we can trace back *why* decisions were made in the past.
+   This includes discarded approaches. Also we want to quickly identify regressions and fix things when something breaks.
+
+### How we merge PRs
+
+In general the maintainers **squash** all changes of your PR into a single commit when merging.
+
+This keeps a clean enough linear history, while not forcing you to conform to a too strict style while iterating in your PR or fixing small problems. As an added benefit the commits on the `main` branch are tied to the discussion that happened in the PR through their `#1234` issue number.
+
+> **Note**
+> **Pro advice:** In some circumstances, we can agree on rebase-merging a particularly large but connected PR as a series of atomic commits onto the `main` branch to ensure we can more easily revert or bisect particular aspects.
+
+### A good PR makes a change!
+
+As a result of this PR-centric strategy and the general goal that the reviewers should easily understand your change, the **PR title and description matter** a great deal!
+
+Make sure your description is **concise** but contains all relevant information and context.
+This means demonstrating what changes, ideally through nushell code or output **examples**.
+Furthermore, links to technical documentation or instructions for folks that want to play with your change make the review process much easier.
+
+> **Note**
+> Try to follow the suggestions in our PR message template to make sure we can quickly focus on the technical merits and impact on the users.
+
+#### A PR should limit itself to a single functional change or a related set of the same kind of changes.
+
+Mixing different changes in the same PR will make the review process much harder. A PR might get stuck on one aspect while we would actually like to land another change. Furthermore, if we are forced to revert a change, mixing and matching different aspects makes fixing bugs or regressions much harder.
+
+Thus, please try to **separate out unrelated changes**!
+**Don't** mix unrelated refactors with a potentially contested change.
+Stylistic fixes and housekeeping can be bundled up into singular PRs.
+
+#### Guidelines for the PR title
+
+The PR title should be concise but contain everything for a contributor to know if they should help out in the review of this particular change.
+
+**DON'T**
+- `Update file/in/some/deeply/nested/path.rs`
+  - Why are you making this change?
+- `Fix 2134`
+  - What has to be fixed?
+  - Hard to follow when not online on GitHub.
+- ``Ignore `~` expansion``
+  - In what context should this change take effect?
+- `[feature] refactor the whole parser and also make nushell indentation-sensitive, upgrade to using Cpython. Let me know what you think!`
+  - Be concise
+  - Maybe break up into smaller commits or PRs if the title already appears too long?
+
+**DO**
+- Mention the nushell feature or command that is affected.
+  - ``Fix URL parsing in `http get` (issue #1234)``
+- You can mention the issue number if other context is there.
+  - In general, mention all related issues in the description to crosslink (e.g. `Fixes #1234`, `Closes #6789`)
+- For internal changes mention the area or symbols affected if it helps to clarify
+  - ``Factor out `quote_string()` from parser to reuse in `explore` ``
+
+### Review process / Merge conflicts
+
+> **Note**
+> Keep in mind that the maintainers are volunteers that need to allocate their attention to several different areas and active PRs. We will try to get back to you as soon as possible.
+
+You can help us to make the review process a smooth experience:
+- Testing:
+  - We generally review in detail after all the tests pass. Let us know if there is a problem you want to discuss to fix a test failure or that forces us to accept a breaking change.
+  - If you fix a bug, it is highly recommended that you add a test that reproduces the original issue/panic in a minimal form.
+  - In general, added tests help us to understand which assumptions go into a particular addition/change.
+  - Try to also test corner cases where those assumptions might break. This can be more valuable than simply adding many similar tests.
+- Commit history inside a PR during code review:
+  - Good **atomic commits** can help follow larger changes, but we are not pedantic.
+  - We don't shame fixup commits while you try to figure out a problem. They can help others see what you tried and what didn't work. (see our [squash policy](#how-we-merge-prs))
+  - During active review constant **force pushing** just to amend changes can be confusing!
+    - GitHub's UI presents reviewers with fewer options to compare diffs
+    - fetched branches for experimentation become invalid!
+    - the notification a maintainer receives has a low signal-to-noise ratio
+  - Git pros *can* use their judgement to rebase/squash to clean up the history *if it aids the understanding* of a larger change during review
+- Merge conflicts:
+  - In general you should take care of resolving merge conflicts.
+    - Use your judgement whether to `git merge main` or to `git rebase main`
+    - Choose what simplifies having confidence in the conflict resolution and the review. **Merge commits in your branch are OK** in the squash model.
+  - Feel free to notify your reviewers or affected PR authors if your change might cause larger conflicts with another change.
+  - During the rollup of multiple PRs, we may choose to resolve merge conflicts and CI failures ourselves. (Allow maintainers to push to your branch to enable us to do this quickly.)
+
+## Our Rust style
+To make the collaboration on a project the scale of Nushell easy, we want to work towards a style of Rust code that can easily be understood by all of our contributors. We conservatively rely on most of [`clippy`'s suggestions](https://github.com/rust-lang/rust-clippy) to get to the holy grail of "idiomatic" code. Good code in our eyes is not the most clever use of all available language features or the one with the most unique personal touch, but code that is readable and strikes a balance between being concise, and also unsurprising and explicit in the places where it matters.
+One example of this philosophy is that we generally avoid fighting the borrow-checker in our data model but rather try to get to a correct and simple solution first and then figure out where we should reuse data to achieve the necessary performance. As we are still pre-1.0 this served us well to be able to quickly refactor or change larger parts of the code base.
+
+### Generally discouraged
+#### `+nightly` language features or things only available in the most recent `+stable`
+To make life easier for the people that maintain the Nushell packages in various distributions with their own release cycle of `rustc`, we typically rely on slightly older Rust versions. We do not make explicit guarantees how far back in the past we live but you can find out in our [`rust-toolchain.toml`](https://github.com/nushell/nushell/blob/main/rust-toolchain.toml)
+(As a rule of thumb this has typically been approximately 2 releases behind the newest stable compiler.)
+The use of nightly features is prohibited.
+
+#### Panicking
+As Nushell aims to provide a reliable foundational way for folks to interact with their computer, we cannot carelessly crash the execution of their work by panicking Nushell.
+Thus panicking is not an allowed error handling strategy for anything that could be triggered by user input OR behavior of the outside system. If Nushell panics this is a bug or we are against all odds already in an unrecoverable state (the system stopped cooperating, we went out of memory). The use of `.unwrap()` is thus outright banned and any uses of `.expect()` or related panicking macros like `unreachable!` should include a helpful description of which assumptions have been violated.
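As an illustration of that policy (the function names and checks below are invented for the sketch, not taken from the code base): failures that user input or the outside system can trigger are propagated as errors, while `.expect()` is reserved for internal invariants and names the assumption that would have to break.

```rust
use std::{fs, io};

// Anything the user or the outside system can break must surface as an error,
// never as a panic.
fn read_user_file(path: &str) -> Result<String, io::Error> {
    fs::read_to_string(path)
}

// `.expect()` is only acceptable for internal invariants, and the message
// should state which assumption would have to be violated for it to fire.
fn widest_column(widths: &[usize]) -> usize {
    *widths
        .iter()
        .max()
        .expect("column widths are never empty: the table was validated on construction")
}
```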
+
+#### `unsafe` code
+For any use of `unsafe` code we need to require even higher standards and additional review. If you add or alter `unsafe` blocks you have to be familiar with the promises you need to uphold as found in the [Rustonomicon](https://doc.rust-lang.org/nomicon/intro.html). All `unsafe` uses should include `// SAFETY:` comments explaining how the invariants are upheld and thus alerting you what to watch out for when making a change.
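A minimal sketch of the `// SAFETY:` convention described above; the function itself is made up, only the comment discipline is the point.

```rust
fn first_byte(bytes: &[u8]) -> Option<u8> {
    if bytes.is_empty() {
        return None;
    }
    // SAFETY: we just checked that `bytes` is non-empty, so index 0 is in
    // bounds and `get_unchecked(0)` cannot read past the end of the slice.
    Some(unsafe { *bytes.get_unchecked(0) })
}
```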
+##### FFI with system calls and the outside world
+As a shell Nushell needs to interact with system APIs in several places, for which FFI code with unsafe blocks may be necessary. In some cases this can be handled by safe API wrapper crates but in some cases we may choose to directly do those calls.
+If you do so you need to document the system behavior on top of the Rust memory model guarantees that you uphold. This means documenting whether a particular system call is safe to use in a particular context and that all failure cases are properly recovered.
+##### Implementing self-contained data structures
+Another motivation for reaching for `unsafe` code might be to try to implement a particular data structure that is not expressible with safe `std` library APIs. Doing so in the Nushell code base would have to clear a high bar for need based on profiling results. Also you should first do a survey of the [crate ecosystem](https://crates.io) to check that there doesn't exist a usable, well-vetted crate that already provides safe APIs to the desired data structure.
+##### Make things go faster by removing checks
+This is probably a bad idea if you feel tempted to do so. Don't.
+#### Macros
+Another advanced feature that people feel tempted to use to work around perceived limitations of Rust's syntax, and that we are not particularly fans of, is custom macros.
+They have clear downsides not only in terms of readability if they locally introduce a different syntax. Most tooling apart from the compiler will struggle more with them. This limits for example consistent automatic formatting or automated refactors with `rust-analyzer`.
+Macros written with `macro_rules!` are less likely to be fluently readable than regular code. This can lead people to introduce funky behavior when using a macro, be it because a macro is not following proper hygiene rules or because it introduces excessive work at compile time.
+
+So we generally discourage the addition of macros. In a lot of cases your macro may start to do something that can be expressed with functions or generics in a much more reusable fashion.
+The only exceptions we may allow need to demonstrate that the macro can fix something that is otherwise extremely unreadable, error-prone, or consistently worse at compile time.
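A small sketch of that advice (the helper below is invented for illustration): instead of a `macro_rules!` wrapper, a plain generic function stays readable, formats cleanly, and cooperates with `rust-analyzer`.

```rust
// A generic function covers the cases a small formatting macro would,
// without introducing new syntax for readers or tooling to learn.
fn labelled<T: std::fmt::Display>(label: &str, value: T) -> String {
    format!("{label}: {value}")
}

fn main() {
    println!("{}", labelled("rows", 42));
    println!("{}", labelled("status", "ok"));
}
```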
+### Things we want to get better at
+These are things we did pretty liberally to get Nushell off the ground, that make things harder for a high quality stable product. You may run across them but shouldn't take them as an endorsed example.
+#### Liberal use of third-party dependencies
+The amazing variety of crates on [crates.io](https://crates.io) allowed us to quickly get Nushell into a feature rich state, but it left us with a bunch of baggage to clean up.
+Each dependency introduces a compile time cost and duplicated code can add to the overall binary size. Also vetting more code for correct and secure implementations takes unreasonably more time, as this is also a continuous process of reacting to updates or potential vulnerabilities.
+
+Thus we only want to accept dependencies that are essential and well tested implementations of a particular requirement of Nushell's codebase.
+Also, as a project for the move to 1.0 we will try to unify among a set of dependencies if they possibly implement similar things in an area. We don't need three different crates with a potentially perfect fit for three problems but rather one reliable crate with a maximized overlap between what it provides and what we need.
+We will favor crates that are well tested and used and promise to be more stable and still frequently maintained.
+#### Deeply nested code
+As Nushell uses a lot of enums in its internal data representation there are a lot of `match` expressions. Combined with the need to handle a lot of edge cases and be defensive about any errors, this has led to some absolutely hard to read deeply nested code (e.g. in the parser but also in the implementation of several commands).
+This can be observed both as a "rightward drift" where the main part of the code is found after many levels of indentation, or by long function bodies with several layers of branching with seemingly repeated branching inside the higher branch level.
+This can also be exacerbated by "quick" bugfixes/enhancements that may just try to add a special case to catch a previously unexpected condition. The likelihood of introducing a bug in a sea of code duplication is high.
+To combat this, consider using the early-`return` pattern to reject invalid data early in one place instead of building a tree through Rust's expression constructs with a lot of duplicated paths. Unpacking data into a type that expresses that the necessary things already have been checked, and using functions to properly deal with separate and common behavior, can also help.
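A compact before/after sketch of the early-`return` advice; the `Row` type and the checks are invented for illustration.

```rust
struct Row {
    name: Option<String>,
    size: Option<u64>,
}

// Nested style: the interesting work hides two branches deep.
fn describe_nested(row: &Row) -> Option<String> {
    if let Some(name) = &row.name {
        if let Some(size) = row.size {
            Some(format!("{name}: {size} bytes"))
        } else {
            None
        }
    } else {
        None
    }
}

// Early-return style: reject missing data up front and keep the main path flat.
fn describe_flat(row: &Row) -> Option<String> {
    let name = row.name.as_ref()?;
    let size = row.size?;
    Some(format!("{name}: {size} bytes"))
}
```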
+
+## License
+
+We use the [MIT License](https://github.com/nushell/nushell/blob/main/LICENSE) in all of our Nushell projects. If you are including or referencing a crate that uses the [GPL License](https://www.gnu.org/licenses/gpl-3.0.en.html#license-text) unfortunately we will not be able to accept your PR.
Cargo.lock (generated, 3414 changed lines)
File diff suppressed because it is too large.
Cargo.toml (120 changed lines)

@@ -1,5 +1,6 @@
[package]
authors = ["The Nushell Project Developers"]
+build = "scripts/build.rs"
default-run = "nu"
description = "A new type of shell"
documentation = "https://www.nushell.sh/book/"
@@ -10,7 +11,7 @@ license = "MIT"
name = "nu"
repository = "https://github.com/nushell/nushell"
rust-version = "1.60"
-version = "0.75.0"
+version = "0.84.0"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@@ -27,6 +28,10 @@ members = [
    "crates/nu-engine",
    "crates/nu-parser",
    "crates/nu-system",
+    "crates/nu-cmd-base",
+    "crates/nu-cmd-extra",
+    "crates/nu-cmd-lang",
+    "crates/nu-cmd-dataframe",
    "crates/nu-command",
    "crates/nu-protocol",
    "crates/nu-plugin",
@@ -35,60 +40,69 @@ members = [
    "crates/nu_plugin_example",
    "crates/nu_plugin_query",
    "crates/nu_plugin_custom_values",
+    "crates/nu_plugin_formats",
+    "crates/nu-std",
    "crates/nu-utils",
]

[dependencies]
-chrono = { version = "0.4.23", features = ["serde"] }
-crossterm = "0.24.0"
-ctrlc = "3.2.1"
-log = "0.4"
-miette = { version = "5.5.0", features = ["fancy-no-backtrace"] }
-nu-ansi-term = "0.46.0"
-nu-cli = { path = "./crates/nu-cli", version = "0.75.0" }
-nu-color-config = { path = "./crates/nu-color-config", version = "0.75.0" }
-nu-command = { path = "./crates/nu-command", version = "0.75.0" }
-nu-engine = { path = "./crates/nu-engine", version = "0.75.0" }
-nu-json = { path = "./crates/nu-json", version = "0.75.0" }
-nu-parser = { path = "./crates/nu-parser", version = "0.75.0" }
-nu-path = { path = "./crates/nu-path", version = "0.75.0" }
-nu-plugin = { path = "./crates/nu-plugin", optional = true, version = "0.75.0" }
-nu-pretty-hex = { path = "./crates/nu-pretty-hex", version = "0.75.0" }
-nu-protocol = { path = "./crates/nu-protocol", version = "0.75.0" }
-nu-system = { path = "./crates/nu-system", version = "0.75.0" }
-nu-table = { path = "./crates/nu-table", version = "0.75.0" }
-nu-term-grid = { path = "./crates/nu-term-grid", version = "0.75.0" }
-nu-utils = { path = "./crates/nu-utils", version = "0.75.0" }
-reedline = { version = "0.15.0", features = ["bashisms", "sqlite"] }
-rayon = "1.6.1"
-is_executable = "1.0.1"
-simplelog = "0.12.0"
-time = "0.3.12"
+nu-cli = { path = "./crates/nu-cli", version = "0.84.0" }
+nu-color-config = { path = "./crates/nu-color-config", version = "0.84.0" }
+nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.84.0" }
+nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.84.0" }
+nu-cmd-dataframe = { path = "./crates/nu-cmd-dataframe", version = "0.84.0", features = ["dataframe"], optional = true }
+nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.84.0", optional = true }
+nu-command = { path = "./crates/nu-command", version = "0.84.0" }
+nu-engine = { path = "./crates/nu-engine", version = "0.84.0" }
+nu-explore = { path = "./crates/nu-explore", version = "0.84.0" }
+nu-json = { path = "./crates/nu-json", version = "0.84.0" }
+nu-parser = { path = "./crates/nu-parser", version = "0.84.0" }
+nu-path = { path = "./crates/nu-path", version = "0.84.0" }
+nu-plugin = { path = "./crates/nu-plugin", optional = true, version = "0.84.0" }
+nu-pretty-hex = { path = "./crates/nu-pretty-hex", version = "0.84.0" }
+nu-protocol = { path = "./crates/nu-protocol", version = "0.84.0" }
+nu-system = { path = "./crates/nu-system", version = "0.84.0" }
+nu-table = { path = "./crates/nu-table", version = "0.84.0" }
+nu-term-grid = { path = "./crates/nu-term-grid", version = "0.84.0" }
+nu-std = { path = "./crates/nu-std", version = "0.84.0" }
+nu-utils = { path = "./crates/nu-utils", version = "0.84.0" }
+nu-ansi-term = "0.49.0"
+reedline = { version = "0.23.0", features = ["bashisms", "sqlite"]}
+
+crossterm = "0.26"
+ctrlc = "3.4"
+log = "0.4"
+miette = { version = "5.10", features = ["fancy-no-backtrace"] }
+mimalloc = { version = "0.1.37", default-features = false, optional = true}
+serde_json = "1.0"
+simplelog = "0.12"
+time = "0.3"

[target.'cfg(not(target_os = "windows"))'.dependencies]
# Our dependencies don't use OpenSSL on Windows
-openssl = { version = "0.10.38", features = ["vendored"], optional = true }
-signal-hook = { version = "0.3.14", default-features = false }
+openssl = { version = "0.10", features = ["vendored"], optional = true }
+signal-hook = { version = "0.3", default-features = false }


[target.'cfg(windows)'.build-dependencies]
-winres = "0.1"
+winresource = "0.1"

[target.'cfg(target_family = "unix")'.dependencies]
-nix = { version = "0.25", default-features = false, features = ["signal", "process", "fs", "term"] }
-atty = "0.2"
+nix = { version = "0.26", default-features = false, features = [
+    "signal",
+    "process",
+    "fs",
+    "term",
+] }
+is-terminal = "0.4.8"

[dev-dependencies]
-nu-test-support = { path = "./crates/nu-test-support", version = "0.75.0" }
-tempfile = "3.2.0"
-assert_cmd = "2.0.2"
-criterion = "0.4"
-pretty_assertions = "1.0.0"
-serial_test = "1.0.0"
-hamcrest2 = "0.3.0"
-rstest = { version = "0.15.0", default-features = false }
-itertools = "0.10.3"
+nu-test-support = { path = "./crates/nu-test-support", version = "0.84.0" }
+assert_cmd = "2.0"
+criterion = "0.5"
+pretty_assertions = "1.4"
+rstest = { version = "0.18", default-features = false }
+serial_test = "2.0"
+tempfile = "3.7"

[features]
plugin = [
@@ -99,26 +113,28 @@ plugin = [
    "nu-protocol/plugin",
    "nu-engine/plugin",
]
-# extra used to be more useful but now it's the same as default. Leaving it in for backcompat with existing build scripts
-extra = ["default"]
default = ["plugin", "which-support", "trash-support", "sqlite"]
stable = ["default"]
-wasi = []
+wasi = ["nu-cmd-lang/wasi"]
+# NOTE: individual features are also passed to `nu-cmd-lang` that uses them to generate the feature matrix in the `version` command

# Enable to statically link OpenSSL; otherwise the system version will be used. Not enabled by default because it takes a while to build
-static-link-openssl = ["dep:openssl"]
+static-link-openssl = ["dep:openssl", "nu-cmd-lang/static-link-openssl"]
+
+mimalloc = ["nu-cmd-lang/mimalloc", "dep:mimalloc"]

# Stable (Default)
-which-support = ["nu-command/which-support"]
-trash-support = ["nu-command/trash-support"]
+which-support = ["nu-command/which-support", "nu-cmd-lang/which-support"]
+trash-support = ["nu-command/trash-support", "nu-cmd-lang/trash-support"]

-# Extra
+# Extra feature for nushell
+extra = ["dep:nu-cmd-extra", "nu-cmd-lang/extra"]

# Dataframe feature for nushell
-dataframe = ["nu-command/dataframe"]
+dataframe = ["dep:nu-cmd-dataframe", "nu-cmd-lang/dataframe"]

# SQLite commands for nushell
-sqlite = ["nu-command/sqlite"]
+sqlite = ["nu-command/sqlite", "nu-cmd-lang/sqlite"]

[profile.release]
opt-level = "s" # Optimize for size
@@ -143,11 +159,13 @@ debug = false
[[bin]]
name = "nu"
path = "src/main.rs"
+bench = false

# To use a development version of a dependency please use a global override here
# changing versions in each sub-crate of the workspace is tedious
[patch.crates-io]
-# reedline = { git = "https://github.com/nushell/reedline.git", branch = "main" }
+# reedline = { git = "https://github.com/nushell/reedline.git", branch = "main"}
+# nu-ansi-term = {git = "https://github.com/nushell/nu-ansi-term.git", branch = "main"}

# Criterion benchmarking setup
# Run all benchmarks with `cargo bench`
Cross.toml (15 changed lines)

@@ -1,9 +1,18 @@
# Configuration for cross-rs: https://github.com/cross-rs/cross
# Run cross-rs like this:
-# cross build --target aarch64-unknown-linux-musl --release
+# cross build --target aarch64-unknown-linux-gnu --release
+# or
+# cross build --target aarch64-unknown-linux-musl --release --features=static-link-openssl

[target.aarch64-unknown-linux-gnu]
-dockerfile = "./docker/cross-rs/aarch64-unknown-linux-gnu.dockerfile"
+pre-build = [
+    "dpkg --add-architecture $CROSS_DEB_ARCH",
+    "apt-get update && apt-get install --assume-yes libssl-dev:$CROSS_DEB_ARCH clang"
+]

+# NOTE: for musl you will need to build with --features=static-link-openssl
[target.aarch64-unknown-linux-musl]
-dockerfile = "./docker/cross-rs/aarch64-unknown-linux-musl.dockerfile"
+pre-build = [
+    "dpkg --add-architecture $CROSS_DEB_ARCH",
+    "apt-get update && apt-get install --assume-yes clang"
+]
LICENSE (2 changed lines)

@@ -1,6 +1,6 @@
MIT License

-Copyright (c) 2019 - 2022 The Nushell Project Developers
+Copyright (c) 2019 - 2023 The Nushell Project Developers

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
README.md (56 changed lines)

@@ -1,27 +1,29 @@
# Nushell <!-- omit in toc -->
[](https://crates.io/crates/nu)
+[](https://github.com/nushell/nushell/actions)
+[](https://github.com/nushell/nushell/actions/workflows/nightly-build.yml)
[](https://discord.gg/NtAbbGn)
[](https://changelog.com/podcast/363)
[](https://twitter.com/nu_shell)
+[](https://github.com/nushell/nushell/graphs/commit-activity)
+[](https://github.com/nushell/nushell/graphs/contributors)
+[](https://codecov.io/gh/nushell/nushell)

A new type of shell.


## Table of Contents <!-- omit in toc -->

- [Status](#status)
- [Learning About Nu](#learning-about-nu)
- [Installation](#installation)
+- [Configuration](#configuration)
- [Philosophy](#philosophy)
- [Pipelines](#pipelines)
- [Opening files](#opening-files)
- [Plugins](#plugins)
- [Goals](#goals)
-- [Progress](#progress)
- [Officially Supported By](#officially-supported-by)
- [Contributing](#contributing)
- [License](#license)
@@ -32,7 +34,7 @@ This project has reached a minimum-viable-product level of quality. Many people

## Learning About Nu

-The [Nushell book](https://www.nushell.sh/book/) is the primary source of Nushell documentation. You can find [a full list of Nu commands in the book](https://www.nushell.sh/book/command_reference.html), and we have many examples of using Nu in our [cookbook](https://www.nushell.sh/cookbook/).
+The [Nushell book](https://www.nushell.sh/book/) is the primary source of Nushell documentation. You can find [a full list of Nu commands in the book](https://www.nushell.sh/commands/), and we have many examples of using Nu in our [cookbook](https://www.nushell.sh/cookbook/).

We're also active on [Discord](https://discord.gg/NtAbbGn) and [Twitter](https://twitter.com/nu_shell); come and chat with us!

@@ -54,6 +56,22 @@ Detailed installation instructions can be found in the [installation chapter of
[](https://repology.org/project/nushell/versions)

+
+## Configuration
+
+The default configurations can be found at [sample_config](crates/nu-utils/src/sample_config)
+which are the configuration files one gets when they start up Nushell for the first time.
+
+It sets all of the default configuration to run Nushell. From here one can
+then customize this file for their specific needs.
+
+To see where *config.nu* is located on your system, simply type this command.
+
+```rust
+$nu.config-path
+```
+
+Please see our [book](https://www.nushell.sh) for all of the Nushell documentation.
+
+
## Philosophy

@@ -174,7 +192,8 @@ These binaries interact with nu via a simple JSON-RPC protocol where the command
If the plugin is a filter, data streams to it one element at a time, and it can stream data back in return via stdin/stdout.
If the plugin is a sink, it is given the full vector of final data and is given free rein over stdin/stdout to use as it pleases.

-The [awesome-nu repo](https://github.com/nushell/awesome-nu#plugins) lists a variety of nu-plugins.
+The [awesome-nu repo](https://github.com/nushell/awesome-nu#plugins) lists a variety of nu-plugins, while the [showcase repo](https://github.com/nushell/showcase) *shows* off informative blog posts that have been written about Nushell along with videos that highlight technical
+topics that have been presented.

## Goals

@@ -190,27 +209,6 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat

- Finally, Nu views data functionally. Rather than using mutation, pipelines act as a means to load, change, and save data without mutable state.

-## Progress
-
-Nu is under heavy development and will naturally change as it matures. The chart below isn't meant to be exhaustive, but it helps give an idea for some of the areas of development and their relative maturity:
-
-| Features | Not started | Prototype | MVP | Preview | Mature | Notes |
-| ------------- | :---------: | :-------: | :-: | :-----: | :----: | -------------------------------------------------------------------- |
-| Aliases | | | | X | | Aliases allow for shortening large commands, while passing flags |
-| Notebook | | X | | | | Initial jupyter support, but it loses state and lacks features |
-| File ops | | | | X | | cp, mv, rm, mkdir have some support, but lacking others |
-| Environment | | | | X | | Temporary environment and scoped environment variables |
-| Shells | | | | X | | Basic value and file shells, but no opt-in/opt-out for commands |
-| Protocol | | | | X | | Streaming protocol is serviceable |
-| Plugins | | | X | | | Plugins work on one row at a time, lack batching and expression eval |
-| Errors | | | | X | | Error reporting works, but could use usability polish |
-| Documentation | | | X | | | Book updated to latest release, including usage examples |
-| Paging | | | | X | | Textview has paging, but we'd like paging for tables |
-| Functions | | | | X | | Functions and aliases are supported |
-| Variables | | | | X | | Nu supports variables and environment variables |
-| Completions | | | | X | | Completions for filepaths |
-| Type-checking | | | | x | | Commands check basic types, and input/output types |
-
## Officially Supported By

Please submit an issue or PR to be added to this list.
@@ -220,6 +218,8 @@ Please submit an issue or PR to be added to this list.
- [oh-my-posh](https://ohmyposh.dev)
- [Couchbase Shell](https://couchbase.sh)
- [virtualenv](https://github.com/pypa/virtualenv)
+- [atuin](https://github.com/ellie/atuin)
+- [clap](https://github.com/clap-rs/clap/tree/master/clap_complete_nushell)

## Contributing

@@ -1,3 +0,0 @@ (file deleted)
-To use Nu plugins, use the register command to tell Nu where to find the plugin. For example:
-
-> register ./nu_plugin_query
Binary image file changed (1.2 MiB before, 1.2 MiB after).
@ -2,16 +2,19 @@ use criterion::{criterion_group, criterion_main, BatchSize, Criterion};
|
|||||||
use nu_cli::eval_source;
|
use nu_cli::eval_source;
|
||||||
use nu_parser::parse;
|
use nu_parser::parse;
|
||||||
use nu_plugin::{EncodingType, PluginResponse};
|
use nu_plugin::{EncodingType, PluginResponse};
|
||||||
use nu_protocol::{PipelineData, Span, Value};
|
use nu_protocol::{engine::EngineState, PipelineData, Span, Value};
|
||||||
use nu_utils::{get_default_config, get_default_env};
|
use nu_utils::{get_default_config, get_default_env};
|
||||||
|
|
||||||
|
fn load_bench_commands() -> EngineState {
|
||||||
|
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
|
||||||
|
}
|
||||||
// FIXME: All benchmarks live in this 1 file to speed up build times when benchmarking.
|
// FIXME: All benchmarks live in this 1 file to speed up build times when benchmarking.
|
||||||
// When the *_benchmarks functions were in different files, `cargo bench` would build
|
// When the *_benchmarks functions were in different files, `cargo bench` would build
|
||||||
// an executable for every single one - incredibly slowly. Would be nice to figure out
|
// an executable for every single one - incredibly slowly. Would be nice to figure out
|
||||||
// a way to split things up again.
|
// a way to split things up again.
|
||||||
|
|
||||||
fn parser_benchmarks(c: &mut Criterion) {
|
fn parser_benchmarks(c: &mut Criterion) {
|
||||||
let mut engine_state = nu_command::create_default_context();
|
let mut engine_state = load_bench_commands();
|
||||||
// parsing config.nu breaks without PWD set
|
// parsing config.nu breaks without PWD set
|
||||||
engine_state.add_env_var(
|
engine_state.add_env_var(
|
||||||
"PWD".into(),
|
"PWD".into(),
|
||||||
@ -22,7 +25,7 @@ fn parser_benchmarks(c: &mut Criterion) {
|
|||||||
c.bench_function("parse_default_env_file", |b| {
|
c.bench_function("parse_default_env_file", |b| {
|
||||||
b.iter_batched(
|
b.iter_batched(
|
||||||
|| nu_protocol::engine::StateWorkingSet::new(&engine_state),
|
|| nu_protocol::engine::StateWorkingSet::new(&engine_state),
|
||||||
|mut working_set| parse(&mut working_set, None, default_env, false, &[]),
|
|mut working_set| parse(&mut working_set, None, default_env, false),
|
||||||
BatchSize::SmallInput,
|
BatchSize::SmallInput,
|
||||||
)
|
)
|
||||||
});
|
});
|
||||||
@ -31,14 +34,14 @@ fn parser_benchmarks(c: &mut Criterion) {
|
|||||||
c.bench_function("parse_default_config_file", |b| {
|
c.bench_function("parse_default_config_file", |b| {
|
||||||
b.iter_batched(
|
b.iter_batched(
|
||||||
|| nu_protocol::engine::StateWorkingSet::new(&engine_state),
|
|| nu_protocol::engine::StateWorkingSet::new(&engine_state),
|
||||||
|mut working_set| parse(&mut working_set, None, default_config, false, &[]),
|
|mut working_set| parse(&mut working_set, None, default_config, false),
|
||||||
BatchSize::SmallInput,
|
BatchSize::SmallInput,
|
||||||
)
|
)
|
||||||
});
|
});
|
||||||
|
|
||||||
c.bench_function("eval default_env.nu", |b| {
|
c.bench_function("eval default_env.nu", |b| {
|
||||||
b.iter(|| {
|
b.iter(|| {
|
||||||
let mut engine_state = nu_command::create_default_context();
|
let mut engine_state = load_bench_commands();
|
||||||
         let mut stack = nu_protocol::engine::Stack::new();
         eval_source(
             &mut engine_state,
@@ -46,13 +49,14 @@ fn parser_benchmarks(c: &mut Criterion) {
                 get_default_env().as_bytes(),
                 "default_env.nu",
                 PipelineData::empty(),
+                false,
             )
         })
     });

     c.bench_function("eval default_config.nu", |b| {
         b.iter(|| {
-            let mut engine_state = nu_command::create_default_context();
+            let mut engine_state = load_bench_commands();
             // parsing config.nu breaks without PWD set
             engine_state.add_env_var(
                 "PWD".into(),
@@ -65,6 +69,7 @@ fn parser_benchmarks(c: &mut Criterion) {
                 get_default_config().as_bytes(),
                 "default_config.nu",
                 PipelineData::empty(),
+                false,
             )
         })
     });
@@ -73,7 +78,7 @@ fn parser_benchmarks(c: &mut Criterion) {
 fn eval_benchmarks(c: &mut Criterion) {
     c.bench_function("eval default_env.nu", |b| {
         b.iter(|| {
-            let mut engine_state = nu_command::create_default_context();
+            let mut engine_state = load_bench_commands();
             let mut stack = nu_protocol::engine::Stack::new();
             eval_source(
                 &mut engine_state,
@@ -81,13 +86,14 @@ fn eval_benchmarks(c: &mut Criterion) {
                 get_default_env().as_bytes(),
                 "default_env.nu",
                 PipelineData::empty(),
+                false,
             )
         })
     });

     c.bench_function("eval default_config.nu", |b| {
         b.iter(|| {
-            let mut engine_state = nu_command::create_default_context();
+            let mut engine_state = load_bench_commands();
             // parsing config.nu breaks without PWD set
             engine_state.add_env_var(
                 "PWD".into(),
@@ -100,6 +106,7 @@ fn eval_benchmarks(c: &mut Criterion) {
                 get_default_config().as_bytes(),
                 "default_config.nu",
                 PipelineData::empty(),
+                false,
             )
         })
     });
@@ -120,17 +127,7 @@ fn encoding_test_data(row_cnt: usize, col_cnt: usize) -> Value {

 fn encoding_benchmarks(c: &mut Criterion) {
     let mut group = c.benchmark_group("Encoding");
-    let test_cnt_pairs = [
-        (100, 5),
-        (100, 10),
-        (100, 15),
-        (1000, 5),
-        (1000, 10),
-        (1000, 15),
-        (10000, 5),
-        (10000, 10),
-        (10000, 15),
-    ];
+    let test_cnt_pairs = [(100, 5), (100, 15), (10000, 5), (10000, 15)];
     for (row_cnt, col_cnt) in test_cnt_pairs.into_iter() {
         for fmt in ["json", "msgpack"] {
             group.bench_function(&format!("{fmt} encode {row_cnt} * {col_cnt}"), |b| {
@@ -147,17 +144,7 @@ fn encoding_benchmarks(c: &mut Criterion) {

 fn decoding_benchmarks(c: &mut Criterion) {
     let mut group = c.benchmark_group("Decoding");
-    let test_cnt_pairs = [
-        (100, 5),
-        (100, 10),
-        (100, 15),
-        (1000, 5),
-        (1000, 10),
-        (1000, 15),
-        (10000, 5),
-        (10000, 10),
-        (10000, 15),
-    ];
+    let test_cnt_pairs = [(100, 5), (100, 15), (10000, 5), (10000, 15)];
     for (row_cnt, col_cnt) in test_cnt_pairs.into_iter() {
         for fmt in ["json", "msgpack"] {
             group.bench_function(&format!("{fmt} decode for {row_cnt} * {col_cnt}"), |b| {
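Note: the hunks above adjust Criterion benchmarks. For readers unfamiliar with the harness, a minimal Criterion skeleton of the same shape is sketched below; `setup_sources` is a hypothetical stand-in for the benchmark file's own helpers (such as `load_bench_commands`), not part of nushell.

    // Sketch only: a Criterion benchmark of the same shape as the hunks above.
    // `setup_sources` is a placeholder for the engine/state setup done in the
    // real benchmark file.
    use criterion::{criterion_group, criterion_main, Criterion};

    fn setup_sources() -> Vec<String> {
        vec!["default_env.nu".into(), "default_config.nu".into()]
    }

    fn parser_benchmarks(c: &mut Criterion) {
        c.bench_function("parse sources", |b| {
            b.iter(|| {
                let sources = setup_sources();
                sources.iter().map(|s| s.len()).sum::<usize>()
            })
        });
    }

    criterion_group!(benches, parser_benchmarks);
    criterion_main!(benches);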
@@ -1,36 +0,0 @@
-@echo off
-@echo -------------------------------------------------------------------
-@echo Building nushell (nu.exe) with dataframes and all the plugins
-@echo -------------------------------------------------------------------
-@echo.
-
-echo Building nushell.exe
-cargo build --features=dataframe
-@echo.
-
-@cd crates\nu_plugin_example
-echo Building nu_plugin_example.exe
-cargo build
-@echo.
-
-@cd ..\..\crates\nu_plugin_gstat
-echo Building nu_plugin_gstat.exe
-cargo build
-@echo.
-
-@cd ..\..\crates\nu_plugin_inc
-echo Building nu_plugin_inc.exe
-cargo build
-@echo.
-
-@cd ..\..\crates\nu_plugin_query
-echo Building nu_plugin_query.exe
-cargo build
-@echo.
-
-@cd ..\..\crates\nu_plugin_custom_values
-echo Building nu_plugin_custom_values.exe
-cargo build
-@echo.
-
-@cd ..\..
build-all.nu (deleted, 23 lines)
@@ -1,23 +0,0 @@
-echo '-------------------------------------------------------------------'
-echo 'Building nushell (nu) with dataframes and all the plugins'
-echo '-------------------------------------------------------------------'
-
-echo $'(char nl)Building nushell'
-echo '----------------------------'
-cargo build --features=dataframe
-
-let plugins = [
-    nu_plugin_inc,
-    nu_plugin_gstat,
-    nu_plugin_query,
-    nu_plugin_example,
-    nu_plugin_custom_values,
-]
-
-for plugin in $plugins {
-    $'(char nl)Building ($plugin)'
-    '----------------------------'
-    cd $'crates/($plugin)'
-    cargo build
-    ignore
-}
codecov.yml (new file, 17 lines)
@@ -0,0 +1,17 @@
+coverage:
+  status:
+    project:
+      default:
+        target: 55%
+        threshold: 2%
+    patch:
+      default:
+        informational: true
+
+comment:
+  layout: reach, diff, files
+  behavior: default
+  require_base: yes
+  require_head: yes
+  after_n_builds: 1 # Disabled windows else: 2
+
@@ -5,35 +5,40 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cli"
 edition = "2021"
 license = "MIT"
 name = "nu-cli"
-version = "0.75.0"
+version = "0.84.0"

+[lib]
+bench = false
+
 [dev-dependencies]
-nu-test-support = { path = "../nu-test-support", version = "0.75.0" }
-nu-command = { path = "../nu-command", version = "0.75.0" }
-rstest = { version = "0.15.0", default-features = false }
+nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.84.0" }
+nu-test-support = { path = "../nu-test-support", version = "0.84.0" }
+rstest = { version = "0.18.1", default-features = false }

 [dependencies]
-nu-engine = { path = "../nu-engine", version = "0.75.0" }
-nu-path = { path = "../nu-path", version = "0.75.0" }
-nu-parser = { path = "../nu-parser", version = "0.75.0" }
-nu-protocol = { path = "../nu-protocol", version = "0.75.0" }
-nu-utils = { path = "../nu-utils", version = "0.75.0" }
-nu-ansi-term = "0.46.0"
-nu-color-config = { path = "../nu-color-config", version = "0.75.0" }
-reedline = { version = "0.15.0", features = ["bashisms", "sqlite"] }
+nu-cmd-base = { path = "../nu-cmd-base", version = "0.84.0" }
+nu-command = { path = "../nu-command", version = "0.84.0" }
+nu-engine = { path = "../nu-engine", version = "0.84.0" }
+nu-path = { path = "../nu-path", version = "0.84.0" }
+nu-parser = { path = "../nu-parser", version = "0.84.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.84.0" }
+nu-utils = { path = "../nu-utils", version = "0.84.0" }
+nu-color-config = { path = "../nu-color-config", version = "0.84.0" }
+nu-ansi-term = "0.49.0"
+reedline = { version = "0.23.0", features = ["bashisms", "sqlite"]}

-atty = "0.2.14"
-chrono = { default-features = false, features = ["std"], version = "0.4.23" }
-crossterm = "0.24.0"
-fancy-regex = "0.11.0"
-fuzzy-matcher = "0.3.7"
-is_executable = "1.0.1"
-once_cell = "1.17.0"
+chrono = { default-features = false, features = ["std"], version = "0.4" }
+crossterm = "0.26"
+fancy-regex = "0.11"
+fuzzy-matcher = "0.3"
+is_executable = "1.0"
+is-terminal = "0.4.8"
 log = "0.4"
-miette = { version = "5.5.0", features = ["fancy-no-backtrace"] }
+miette = { version = "5.10", features = ["fancy-no-backtrace"] }
+once_cell = "1.18"
 percent-encoding = "2"
-sysinfo = "0.27.7"
-thiserror = "1.0.31"
+sysinfo = "0.29"
+unicode-segmentation = "1.10"

 [features]
 plugin = []
@@ -1,6 +1,6 @@
 MIT License

-Copyright (c) 2019 - 2022 The Nushell Project Developers
+Copyright (c) 2019 - 2023 The Nushell Project Developers

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
crates/nu-cli/src/commands/commandline.rs (new file, 129 lines)
@@ -0,0 +1,129 @@
+use nu_engine::CallExt;
+use nu_protocol::ast::Call;
+use nu_protocol::engine::{Command, EngineState, Stack};
+use nu_protocol::Category;
+use nu_protocol::IntoPipelineData;
+use nu_protocol::{PipelineData, ShellError, Signature, SyntaxShape, Type, Value};
+use unicode_segmentation::UnicodeSegmentation;
+
+#[derive(Clone)]
+pub struct Commandline;
+
+impl Command for Commandline {
+    fn name(&self) -> &str {
+        "commandline"
+    }
+
+    fn signature(&self) -> Signature {
+        Signature::build("commandline")
+            .input_output_types(vec![
+                (Type::Nothing, Type::Nothing),
+                (Type::String, Type::String),
+            ])
+            .switch(
+                "cursor",
+                "Set or get the current cursor position",
+                Some('c'),
+            )
+            .switch(
+                "append",
+                "appends the string to the end of the buffer",
+                Some('a'),
+            )
+            .switch(
+                "insert",
+                "inserts the string into the buffer at the cursor position",
+                Some('i'),
+            )
+            .switch(
+                "replace",
+                "replaces the current contents of the buffer (default)",
+                Some('r'),
+            )
+            .optional(
+                "cmd",
+                SyntaxShape::String,
+                "the string to perform the operation with",
+            )
+            .category(Category::Core)
+    }
+
+    fn usage(&self) -> &str {
+        "View or modify the current command line input buffer."
+    }
+
+    fn search_terms(&self) -> Vec<&str> {
+        vec!["repl", "interactive"]
+    }
+
+    fn run(
+        &self,
+        engine_state: &EngineState,
+        stack: &mut Stack,
+        call: &Call,
+        _input: PipelineData,
+    ) -> Result<PipelineData, ShellError> {
+        if let Some(cmd) = call.opt::<Value>(engine_state, stack, 0)? {
+            let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
+
+            if call.has_flag("cursor") {
+                let cmd_str = cmd.as_string()?;
+                match cmd_str.parse::<i64>() {
+                    Ok(n) => {
+                        repl.cursor_pos = if n <= 0 {
+                            0usize
+                        } else {
+                            repl.buffer
+                                .grapheme_indices(true)
+                                .map(|(i, _c)| i)
+                                .nth(n as usize)
+                                .unwrap_or(repl.buffer.len())
+                        }
+                    }
+                    Err(_) => {
+                        return Err(ShellError::CantConvert {
+                            to_type: "int".to_string(),
+                            from_type: "string".to_string(),
+                            span: cmd.span()?,
+                            help: Some(format!(
+                                r#"string "{cmd_str}" does not represent a valid integer"#
+                            )),
+                        })
+                    }
+                }
+            } else if call.has_flag("append") {
+                repl.buffer.push_str(&cmd.as_string()?);
+            } else if call.has_flag("insert") {
+                let cmd_str = cmd.as_string()?;
+                let cursor_pos = repl.cursor_pos;
+                repl.buffer.insert_str(cursor_pos, &cmd_str);
+                repl.cursor_pos += cmd_str.len();
+            } else {
+                repl.buffer = cmd.as_string()?;
+                repl.cursor_pos = repl.buffer.len();
+            }
+            Ok(Value::Nothing { span: call.head }.into_pipeline_data())
+        } else {
+            let repl = engine_state.repl_state.lock().expect("repl state mutex");
+            if call.has_flag("cursor") {
+                let char_pos = repl
+                    .buffer
+                    .grapheme_indices(true)
+                    .chain(std::iter::once((repl.buffer.len(), "")))
+                    .position(|(i, _c)| i == repl.cursor_pos)
+                    .expect("Cursor position isn't on a grapheme boundary");
+                Ok(Value::String {
+                    val: char_pos.to_string(),
+                    span: call.head,
+                }
+                .into_pipeline_data())
+            } else {
+                Ok(Value::String {
+                    val: repl.buffer.to_string(),
+                    span: call.head,
+                }
+                .into_pipeline_data())
+            }
+        }
+    }
+}
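Note: the cursor handling in the new `commandline` command works in grapheme clusters, not bytes. A self-contained sketch of that mapping, using only the `unicode-segmentation` crate (the same calls the command uses) and nothing else from nushell:

    // Sketch: convert a grapheme index to a byte offset and back.
    use unicode_segmentation::UnicodeSegmentation;

    fn byte_offset_of_grapheme(buffer: &str, n: usize) -> usize {
        buffer
            .grapheme_indices(true)
            .map(|(i, _)| i)
            .nth(n)
            .unwrap_or(buffer.len())
    }

    fn grapheme_index_of_byte(buffer: &str, byte_pos: usize) -> Option<usize> {
        buffer
            .grapheme_indices(true)
            .chain(std::iter::once((buffer.len(), "")))
            .position(|(i, _)| i == byte_pos)
    }

    fn main() {
        let buffer = "héllo";
        let byte = byte_offset_of_grapheme(buffer, 2); // start of the third grapheme
        assert_eq!(byte, 3); // "é" is two bytes long
        assert_eq!(grapheme_index_of_byte(buffer, byte), Some(2));
    }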
crates/nu-cli/src/commands/default_context.rs (new file, 33 lines)
@@ -0,0 +1,33 @@
+use nu_protocol::engine::{EngineState, StateWorkingSet};
+
+use crate::commands::*;
+
+pub fn add_cli_context(mut engine_state: EngineState) -> EngineState {
+    let delta = {
+        let mut working_set = StateWorkingSet::new(&engine_state);
+
+        macro_rules! bind_command {
+            ( $( $command:expr ),* $(,)? ) => {
+                $( working_set.add_decl(Box::new($command)); )*
+            };
+        }
+
+        bind_command! {
+            Commandline,
+            History,
+            HistorySession,
+            Keybindings,
+            KeybindingsDefault,
+            KeybindingsList,
+            KeybindingsListen,
+        };
+
+        working_set.render()
+    };
+
+    if let Err(err) = engine_state.merge_delta(delta) {
+        eprintln!("Error creating CLI command context: {err:?}");
+    }
+
+    engine_state
+}
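Note: `bind_command!` above is an ordinary declarative-macro repetition. A stripped-down, std-only sketch of the same pattern, registering names into a `Vec` instead of a `StateWorkingSet`:

    // Sketch of the bind_command! pattern; the Vec stands in for the working set.
    fn main() {
        let mut registered: Vec<String> = Vec::new();

        macro_rules! bind_command {
            ( $( $command:expr ),* $(,)? ) => {
                $( registered.push($command.to_string()); )*
            };
        }

        bind_command! {
            "commandline",
            "history",
            "keybindings",
        };

        assert_eq!(registered.len(), 3);
    }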
@@ -18,7 +18,7 @@ impl Command for History {
     }

     fn usage(&self) -> &str {
-        "Get the command history"
+        "Get the command history."
     }

     fn signature(&self) -> nu_protocol::Signature {
@@ -43,7 +43,7 @@ impl Command for History {
         _stack: &mut Stack,
         call: &Call,
         _input: PipelineData,
-    ) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
+    ) -> Result<PipelineData, ShellError> {
         let head = call.head;

         // todo for sqlite history this command should be an alias to `open ~/.config/nushell/history.sqlite3 | get history`
@@ -91,7 +91,7 @@ impl Command for History {
             match engine_state.config.history_file_format {
                 HistoryFileFormat::PlainText => Ok(history_reader
                     .and_then(|h| {
-                        h.search(SearchQuery::everything(SearchDirection::Forward))
+                        h.search(SearchQuery::everything(SearchDirection::Forward, None))
                             .ok()
                     })
                     .map(move |entries| {
@@ -114,7 +114,7 @@ impl Command for History {
                     .into_pipeline_data(ctrlc)),
                 HistoryFileFormat::Sqlite => Ok(history_reader
                     .and_then(|h| {
-                        h.search(SearchQuery::everything(SearchDirection::Forward))
+                        h.search(SearchQuery::everything(SearchDirection::Forward, None))
                             .ok()
                     })
                     .map(move |entries| {
@@ -1,6 +1,8 @@
 use nu_protocol::ast::Call;
 use nu_protocol::engine::{Command, EngineState, Stack};
-use nu_protocol::{Category, Example, IntoPipelineData, PipelineData, Signature, Type, Value};
+use nu_protocol::{
+    Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Type, Value,
+};

 #[derive(Clone)]
 pub struct HistorySession;
@@ -11,7 +13,7 @@ impl Command for HistorySession {
     }

     fn usage(&self) -> &str {
-        "Get the command history session"
+        "Get the command history session."
     }

     fn signature(&self) -> nu_protocol::Signature {
@@ -34,7 +36,7 @@ impl Command for HistorySession {
         _stack: &mut Stack,
         call: &Call,
         _input: PipelineData,
-    ) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
+    ) -> Result<PipelineData, ShellError> {
         Ok(Value::int(engine_state.history_session_id, call.head).into_pipeline_data())
     }
 }
@@ -2,7 +2,7 @@ use nu_engine::get_full_help;
 use nu_protocol::{
     ast::Call,
     engine::{Command, EngineState, Stack},
-    Category, IntoPipelineData, PipelineData, Signature, Type, Value,
+    Category, IntoPipelineData, PipelineData, ShellError, Signature, Type, Value,
 };

 #[derive(Clone)]
@@ -20,7 +20,7 @@ impl Command for Keybindings {
     }

     fn usage(&self) -> &str {
-        "Keybindings related commands"
+        "Keybindings related commands."
     }

     fn extra_usage(&self) -> &str {
@@ -37,7 +37,7 @@ impl Command for Keybindings {
         stack: &mut Stack,
         call: &Call,
         _input: PipelineData,
-    ) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
+    ) -> Result<PipelineData, ShellError> {
         Ok(Value::String {
             val: get_full_help(
                 &Keybindings.signature(),
@@ -1,7 +1,7 @@
 use nu_protocol::{
     ast::Call,
     engine::{Command, EngineState, Stack},
-    Category, Example, IntoPipelineData, PipelineData, Signature, Type, Value,
+    Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Type, Value,
 };
 use reedline::get_reedline_default_keybindings;

@@ -20,7 +20,7 @@ impl Command for KeybindingsDefault {
     }

     fn usage(&self) -> &str {
-        "List default keybindings"
+        "List default keybindings."
     }

     fn examples(&self) -> Vec<Example> {
@@ -37,7 +37,7 @@ impl Command for KeybindingsDefault {
         _stack: &mut Stack,
         call: &Call,
         _input: PipelineData,
-    ) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
+    ) -> Result<PipelineData, ShellError> {
         let records = get_reedline_default_keybindings()
             .into_iter()
             .map(|(mode, modifier, code, event)| {
@@ -1,7 +1,7 @@
 use nu_protocol::{
     ast::Call,
     engine::{Command, EngineState, Stack},
-    Category, Example, IntoPipelineData, PipelineData, Signature, Span, Type, Value,
+    Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, Type, Value,
 };
 use reedline::{
     get_reedline_edit_commands, get_reedline_keybinding_modifiers, get_reedline_keycodes,
@@ -28,7 +28,7 @@ impl Command for KeybindingsList {
     }

     fn usage(&self) -> &str {
-        "List available options that can be used to create keybindings"
+        "List available options that can be used to create keybindings."
     }

     fn examples(&self) -> Vec<Example> {
@@ -57,16 +57,16 @@ impl Command for KeybindingsList {
         _stack: &mut Stack,
         call: &Call,
         _input: PipelineData,
-    ) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
+    ) -> Result<PipelineData, ShellError> {
         let records = if call.named_len() == 0 {
-            let all_options = vec!["modifiers", "keycodes", "edits", "modes", "events"];
+            let all_options = ["modifiers", "keycodes", "edits", "modes", "events"];
             all_options
                 .iter()
-                .flat_map(|argument| get_records(argument, &call.head))
+                .flat_map(|argument| get_records(argument, call.head))
                 .collect()
         } else {
             call.named_iter()
-                .flat_map(|(argument, _, _)| get_records(argument.item.as_str(), &call.head))
+                .flat_map(|(argument, _, _)| get_records(argument.item.as_str(), call.head))
                 .collect()
         };

@@ -78,7 +78,7 @@ impl Command for KeybindingsList {
     }
 }

-fn get_records(entry_type: &str, span: &Span) -> Vec<Value> {
+fn get_records(entry_type: &str, span: Span) -> Vec<Value> {
     let values = match entry_type {
         "modifiers" => get_reedline_keybinding_modifiers().sorted(),
         "keycodes" => get_reedline_keycodes().sorted(),
@@ -95,15 +95,15 @@ fn get_records(entry_type: &str, span: &Span) -> Vec<Value> {
         .collect()
 }

-fn convert_to_record(edit: &str, entry_type: &str, span: &Span) -> Value {
-    let entry_type = Value::string(entry_type, *span);
+fn convert_to_record(edit: &str, entry_type: &str, span: Span) -> Value {
+    let entry_type = Value::string(entry_type, span);

-    let name = Value::string(edit, *span);
+    let name = Value::string(edit, span);

     Value::Record {
         cols: vec!["type".to_string(), "name".to_string()],
         vals: vec![entry_type, name],
-        span: *span,
+        span,
     }
 }
@@ -102,7 +102,13 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
 // are printed, it's a good chance your terminal is eating
 // those events.
 fn print_events_helper(event: Event) -> Result<Value, ShellError> {
-    if let Event::Key(KeyEvent { code, modifiers }) = event {
+    if let Event::Key(KeyEvent {
+        code,
+        modifiers,
+        kind,
+        state,
+    }) = event
+    {
         match code {
             KeyCode::Char(c) => {
                 let record = Value::Record {
@@ -111,12 +117,16 @@ fn print_events_helper(event: Event) -> Result<Value, ShellError> {
                         "code".into(),
                         "modifier".into(),
                         "flags".into(),
+                        "kind".into(),
+                        "state".into(),
                     ],
                     vals: vec![
                         Value::string(format!("{c}"), Span::unknown()),
                         Value::string(format!("{:#08x}", u32::from(c)), Span::unknown()),
                         Value::string(format!("{modifiers:?}"), Span::unknown()),
                         Value::string(format!("{modifiers:#08b}"), Span::unknown()),
+                        Value::string(format!("{kind:?}"), Span::unknown()),
+                        Value::string(format!("{state:?}"), Span::unknown()),
                     ],
                     span: Span::unknown(),
                 };
@@ -124,11 +134,19 @@ fn print_events_helper(event: Event) -> Result<Value, ShellError> {
             }
             _ => {
                 let record = Value::Record {
-                    cols: vec!["code".into(), "modifier".into(), "flags".into()],
+                    cols: vec![
+                        "code".into(),
+                        "modifier".into(),
+                        "flags".into(),
+                        "kind".into(),
+                        "state".into(),
+                    ],
                     vals: vec![
                         Value::string(format!("{code:?}"), Span::unknown()),
                         Value::string(format!("{modifiers:?}"), Span::unknown()),
                         Value::string(format!("{modifiers:#08b}"), Span::unknown()),
+                        Value::string(format!("{kind:?}"), Span::unknown()),
+                        Value::string(format!("{state:?}"), Span::unknown()),
                     ],
                     span: Span::unknown(),
                 };
@@ -144,14 +162,3 @@ fn print_events_helper(event: Event) -> Result<Value, ShellError> {
         Ok(record)
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use crate::KeybindingsListen;
-
-    #[test]
-    fn examples_work_as_expected() {
-        use crate::test_examples;
-        test_examples(KeybindingsListen {})
-    }
-}
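Note: the `kind` and `state` fields destructured above are the extra fields on crossterm's `KeyEvent` in the crossterm version this branch appears to move to (0.26, per the nu-cli Cargo.toml hunk earlier). A minimal sketch of reading one key event and printing the same four fields, assuming only the crossterm crate:

    // Sketch: read a single key event and print code/modifiers/kind/state.
    use crossterm::event::{read, Event, KeyEvent};
    use crossterm::terminal::{disable_raw_mode, enable_raw_mode};

    fn main() {
        enable_raw_mode().expect("enable raw mode");
        if let Event::Key(KeyEvent {
            code,
            modifiers,
            kind,
            state,
        }) = read().expect("read event")
        {
            println!("code={code:?} modifiers={modifiers:?} kind={kind:?} state={state:?}");
        }
        disable_raw_mode().expect("disable raw mode");
    }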
@@ -1,9 +1,18 @@
+mod commandline;
+mod default_context;
+mod history;
+mod history_session;
 mod keybindings;
 mod keybindings_default;
 mod keybindings_list;
 mod keybindings_listen;

+pub use commandline::Commandline;
+pub use history::History;
+pub use history_session::HistorySession;
 pub use keybindings::Keybindings;
 pub use keybindings_default::KeybindingsDefault;
 pub use keybindings_list::KeybindingsList;
 pub use keybindings_listen::KeybindingsListen;

+pub use default_context::add_cli_context;
@@ -37,7 +37,8 @@ impl CommandCompletion {
     ) -> Vec<String> {
         let mut executables = vec![];

-        let paths = self.engine_state.get_env_var("PATH");
+        // os agnostic way to get the PATH env var
+        let paths = self.engine_state.get_path_env_var();

         if let Some(paths) = paths {
             if let Ok(paths) = paths.as_list() {
@@ -87,8 +88,8 @@ impl CommandCompletion {

         let filter_predicate = |command: &[u8]| match_algorithm.matches_u8(command, partial);

-        let results = working_set
-            .find_commands_by_predicate(filter_predicate)
+        let mut results = working_set
+            .find_commands_by_predicate(filter_predicate, true)
             .into_iter()
             .map(move |x| Suggestion {
                 value: String::from_utf8_lossy(&x.0).to_string(),
@@ -96,20 +97,8 @@ impl CommandCompletion {
                 extra: None,
                 span: reedline::Span::new(span.start - offset, span.end - offset),
                 append_whitespace: true,
-            });
-        let results_aliases = working_set
-            .find_aliases_by_predicate(filter_predicate)
-            .into_iter()
-            .map(move |x| Suggestion {
-                value: String::from_utf8_lossy(&x).to_string(),
-                description: None,
-                extra: None,
-                span: reedline::Span::new(span.start - offset, span.end - offset),
-                append_whitespace: true,
-            });
-
-        let mut results = results.chain(results_aliases).collect::<Vec<_>>();
+            })
+            .collect::<Vec<_>>();

         let partial = working_set.get_span_contents(span);
         let partial = String::from_utf8_lossy(partial).to_string();
@@ -164,11 +153,11 @@ impl Completer for CommandCompletion {
             .flattened
             .iter()
             .rev()
-            .skip_while(|x| x.0.end + offset > pos)
+            .skip_while(|x| x.0.end > pos)
             .take_while(|x| {
                 matches!(
                     x.1,
-                    FlatShape::InternalCall
+                    FlatShape::InternalCall(_)
                         | FlatShape::External
                         | FlatShape::ExternalArg
                         | FlatShape::Literal
@@ -196,8 +185,9 @@ impl Completer for CommandCompletion {

         let config = working_set.get_config();
         let commands = if matches!(self.flat_shape, nu_parser::FlatShape::External)
-            || matches!(self.flat_shape, nu_parser::FlatShape::InternalCall)
+            || matches!(self.flat_shape, nu_parser::FlatShape::InternalCall(_))
             || ((span.end - span.start) == 0)
+            || is_passthrough_command(working_set.delta.get_file_contents())
         {
             // we're in a gap or at a command
             if working_set.get_span_contents(span).is_empty() && !self.force_completion_after_space
@@ -215,13 +205,114 @@ impl Completer for CommandCompletion {
             vec![]
         };

-        subcommands
-            .into_iter()
-            .chain(commands.into_iter())
-            .collect::<Vec<_>>()
+        subcommands.into_iter().chain(commands).collect::<Vec<_>>()
     }

     fn get_sort_by(&self) -> SortBy {
         SortBy::LevenshteinDistance
     }
 }
+
+pub fn find_non_whitespace_index(contents: &[u8], start: usize) -> usize {
+    match contents.get(start..) {
+        Some(contents) => {
+            contents
+                .iter()
+                .take_while(|x| x.is_ascii_whitespace())
+                .count()
+                + start
+        }
+        None => start,
+    }
+}
+
+pub fn is_passthrough_command(working_set_file_contents: &[(Vec<u8>, usize, usize)]) -> bool {
+    for (contents, _, _) in working_set_file_contents {
+        let last_pipe_pos_rev = contents.iter().rev().position(|x| x == &b'|');
+        let last_pipe_pos = last_pipe_pos_rev.map(|x| contents.len() - x).unwrap_or(0);
+
+        let cur_pos = find_non_whitespace_index(contents, last_pipe_pos);
+
+        let result = match contents.get(cur_pos..) {
+            Some(contents) => contents.starts_with(b"sudo "),
+            None => false,
+        };
+        if result {
+            return true;
+        }
+    }
+    false
+}
+
+#[cfg(test)]
+mod command_completions_tests {
+    use super::*;
+
+    #[test]
+    fn test_find_non_whitespace_index() {
+        let commands = vec![
+            ("    hello", 4),
+            ("sudo ", 0),
+            ("  sudo ", 2),
+            ("  sudo  ", 2),
+            (" hello ", 1),
+            ("   hello ", 3),
+            ("    hello | sudo ", 4),
+            ("     sudo|sudo", 5),
+            ("sudo | sudo ", 0),
+            (" hello sud", 1),
+        ];
+        for (idx, ele) in commands.iter().enumerate() {
+            let index = find_non_whitespace_index(&Vec::from(ele.0.as_bytes()), 0);
+            assert_eq!(index, ele.1, "Failed on index {}", idx);
+        }
+    }
+
+    #[test]
+    fn test_is_last_command_passthrough() {
+        let commands = vec![
+            ("    hello", false),
+            ("    sudo ", true),
+            ("sudo ", true),
+            ("  hello", false),
+            ("  sudo", false),
+            ("  sudo ", true),
+            ("  sudo  ", true),
+            ("  sudo  ", true),
+            ("  hello ", false),
+            ("  hello | sudo ", true),
+            ("  sudo|sudo", false),
+            ("sudo | sudo ", true),
+            ("  hello sud", false),
+            ("  sudo | sud ", false),
+            ("  sudo|sudo ", true),
+            (" sudo | sudo ls | sudo ", true),
+        ];
+        for (idx, ele) in commands.iter().enumerate() {
+            let input = ele.0.as_bytes();
+
+            let mut engine_state = EngineState::new();
+            engine_state.add_file("test.nu".into(), vec![]);
+
+            let delta = {
+                let mut working_set = StateWorkingSet::new(&engine_state);
+                let _ = working_set.add_file("child.nu".into(), input);
+                working_set.render()
+            };
+
+            let result = engine_state.merge_delta(delta);
+            assert!(
+                result.is_ok(),
+                "Merge delta has failed: {}",
+                result.err().unwrap()
+            );
+
+            let is_passthrough_command = is_passthrough_command(engine_state.get_file_contents());
+            assert_eq!(
+                is_passthrough_command, ele.1,
+                "index for '{}': {}",
+                ele.0, idx
+            );
+        }
+    }
+}
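Note: the new `is_passthrough_command` helper amounts to "skip whitespace after the last `|` and check for a leading `sudo `". A std-only sketch of that check, independent of nushell's engine types:

    // Sketch of the passthrough check: text after the last pipe, minus leading
    // whitespace, must start with "sudo ".
    fn last_segment_is_sudo(line: &[u8]) -> bool {
        let after_pipe = line
            .iter()
            .rposition(|b| *b == b'|')
            .map(|i| i + 1)
            .unwrap_or(0);
        let start = after_pipe
            + line[after_pipe..]
                .iter()
                .take_while(|b| b.is_ascii_whitespace())
                .count();
        line[start..].starts_with(b"sudo ")
    }

    fn main() {
        assert!(last_segment_is_sudo(b"ls | sudo rm file"));
        assert!(!last_segment_is_sudo(b"ls | sudoedit file"));
        assert!(!last_segment_is_sudo(b"echo sudo"));
    }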
@@ -67,7 +67,7 @@ impl NuCompleter {
     ) -> Option<Vec<Suggestion>> {
         let stack = self.stack.clone();
         let block = self.engine_state.get_block(block_id);
-        let mut callee_stack = stack.gather_captures(&block.captures);
+        let mut callee_stack = stack.gather_captures(&self.engine_state, &block.captures);

         // Line
         if let Some(pos_arg) = block.signature.required_positional.get(0) {
@@ -111,23 +111,15 @@ impl NuCompleter {
     }

     fn completion_helper(&mut self, line: &str, pos: usize) -> Vec<Suggestion> {
-        // pos: is the position of the cursor in the shell input.
-        // e.g. lets say you have an alias -> `alias ll = ls -l` and you type in the shell:
-        // > ll -a | c
-        // and your cursor is right after `c` then `pos` = 9
-
         let mut working_set = StateWorkingSet::new(&self.engine_state);
-        let mut offset = working_set.next_span_start();
-        let (mut new_line, alias_offset) = try_find_alias(line.as_bytes(), &working_set);
-        // new_line: vector containing all alias "translations" so if it was `ll` now is `ls -l`.
-        // alias_offset:vector the offset between the name and the alias)
-        let initial_line = line.to_string(); // Entire line in the shell input.
-        let alias_total_offset: usize = alias_offset.iter().sum(); // the sum of all alias offsets.
-        new_line.insert(alias_total_offset + pos, b'a');
+        let offset = working_set.next_span_start();
+        let initial_line = line.to_string();
+        let mut line = line.to_string();
+        line.insert(pos, 'a');
         let pos = offset + pos;
         let config = self.engine_state.get_config();

-        let (output, _err) = parse(&mut working_set, Some("completer"), &new_line, false, &[]);
+        let output = parse(&mut working_set, Some("completer"), line.as_bytes(), false);

         for pipeline in output.pipelines.into_iter() {
             for pipeline_element in pipeline.elements {
@@ -136,12 +128,16 @@ impl NuCompleter {
                     | PipelineElement::Redirection(_, _, expr)
                     | PipelineElement::And(_, expr)
                     | PipelineElement::Or(_, expr)
+                    | PipelineElement::SameTargetRedirection { cmd: (_, expr), .. }
                     | PipelineElement::SeparateRedirection { out: (_, expr), .. } => {
                         let flattened: Vec<_> = flatten_expression(&working_set, &expr);
-                        let span_offset: usize = alias_offset.iter().sum();
                         let mut spans: Vec<String> = vec![];

                         for (flat_idx, flat) in flattened.iter().enumerate() {
+                            let is_passthrough_command = spans
+                                .first()
+                                .filter(|content| *content == &String::from("sudo"))
+                                .is_some();
                             // Read the current spam to string
                             let current_span = working_set.get_span_contents(flat.0).to_vec();
                             let current_span_str = String::from_utf8_lossy(&current_span);
@@ -157,32 +153,17 @@ impl NuCompleter {
                             }

                             // Complete based on the last span
-                            if pos + span_offset >= flat.0.start && pos + span_offset < flat.0.end {
+                            if pos >= flat.0.start && pos < flat.0.end {
                                 // Context variables
                                 let most_left_var =
                                     most_left_variable(flat_idx, &working_set, flattened.clone());

                                 // Create a new span
-                                // if flat_idx == 0
-                                let mut span_start = flat.0.start;
-                                let mut span_end = flat.0.end - 1 - span_offset;
-
-                                if flat_idx != 0 {
-                                    span_start = flat.0.start - span_offset;
-                                    span_end = flat.0.end - 1 - span_offset;
-                                }
-
-                                if span_end < span_start {
-                                    span_start = flat.0.start;
-                                    span_end = flat.0.end - 1;
-                                    offset += span_offset
-                                }
-
-                                let new_span = Span::new(span_start, span_end);
+                                let new_span = Span::new(flat.0.start, flat.0.end - 1);

                                 // Parses the prefix. Completion should look up to the cursor position, not after.
                                 let mut prefix = working_set.get_span_contents(flat.0).to_vec();
-                                let index = pos - (flat.0.start - span_offset);
+                                let index = pos - flat.0.start;
                                 prefix.drain(index..);

                                 // Variables completion
@@ -193,10 +174,6 @@ impl NuCompleter {
                                         most_left_var.unwrap_or((vec![], vec![])),
                                     );

-                                    if offset > new_span.start {
-                                        offset -= span_offset;
-                                    }
-
                                     return self.process_completion(
                                         &mut completer,
                                         &working_set,
@@ -236,8 +213,9 @@ impl NuCompleter {
                                 }

                                 // specially check if it is currently empty - always complete commands
-                                if flat_idx == 0
-                                    && working_set.get_span_contents(new_span).is_empty()
+                                if (is_passthrough_command && flat_idx == 1)
+                                    || (flat_idx == 0
+                                        && working_set.get_span_contents(new_span).is_empty())
                                 {
                                     let mut completer = CommandCompletion::new(
                                         self.engine_state.clone(),
@@ -258,7 +236,7 @@ impl NuCompleter {
                                 }

                                 // Completions that depends on the previous expression (e.g: use, source-env)
-                                if flat_idx > 0 {
+                                if (is_passthrough_command && flat_idx > 1) || flat_idx > 0 {
                                     if let Some(previous_expr) = flattened.get(flat_idx - 1) {
                                         // Read the content for the previous expression
                                         let prev_expr_str =
@@ -405,85 +383,6 @@ impl ReedlineCompleter for NuCompleter {
     }
 }

-type MatchedAlias = Vec<(Vec<u8>, Vec<u8>)>;
-
-// Handler the completion when giving lines contains at least one alias. (e.g: `g checkout`)
-// that `g` is an alias of `git`
-fn try_find_alias(line: &[u8], working_set: &StateWorkingSet) -> (Vec<u8>, Vec<usize>) {
-    // An vector represents the offsets of alias
-    // e.g: the offset is 2 for the alias `g` of `git`
-    let mut alias_offset = vec![];
-    let mut output = vec![];
-    if let Some(matched_alias) = search_alias(line, working_set) {
-        let mut lens = matched_alias.len();
-        for (input_vec, line_vec) in matched_alias {
-            alias_offset.push(line_vec.len() - input_vec.len());
-            output.extend(line_vec);
-            if lens > 1 {
-                output.push(b' ');
-                lens -= 1;
-            }
-        }
-
-        if !line.is_empty() {
-            let last = line.last().expect("input is empty");
-            if last == &b' ' {
-                output.push(b' ');
-            }
-        }
-    } else {
-        output = line.to_vec();
-    }
-
-    (output, alias_offset)
-}
-
-fn search_alias(input: &[u8], working_set: &StateWorkingSet) -> Option<MatchedAlias> {
-    let mut vec_names = vec![];
-    let mut vec_alias = vec![];
-    let mut pos = 0;
-    let mut is_alias = false;
-    for (index, character) in input.iter().enumerate() {
-        if *character == b' ' {
-            let range = &input[pos..index];
-            vec_names.push(range.to_owned());
-            pos = index + 1;
-        }
-    }
-    // Push the rest to names vector.
-    if pos < input.len() {
-        vec_names.push(input[pos..].to_owned());
-    }
-
-    for name in &vec_names {
-        if let Some(alias_id) = working_set.find_alias(&name[..]) {
-            let alias_span = working_set.get_alias(alias_id);
-            let mut span_vec = vec![];
-            is_alias = true;
-            for alias in alias_span {
-                let name = working_set.get_span_contents(*alias);
-                if !name.is_empty() {
-                    span_vec.push(name);
-                }
-            }
-            // Join span of vector together for complex alias, e.g: `f` is an alias for `git remote -v`
-            let full_aliases = span_vec.join(&[b' '][..]);
-            vec_alias.push(full_aliases);
-        } else {
-            vec_alias.push(name.to_owned());
-        }
-    }
-
-    if is_alias {
-        // Zip names and alias vectors, the original inputs and its aliases mapping.
-        // e.g:(['g'], ['g','i','t'])
-        let output = vec_names.into_iter().zip(vec_alias).collect();
-        Some(output)
-    } else {
-        None
-    }
-}
-
 // reads the most left variable returning it's name (e.g: $myvar)
 // and the depth (a.b.c)
 fn most_left_variable(
@@ -504,7 +403,7 @@ fn most_left_variable(
     let result = working_set.get_span_contents(item.0).to_vec();

     match item.1 {
-        FlatShape::Variable => {
+        FlatShape::Variable(_) => {
             variables_found.push(result);
             found_var = true;

@@ -595,3 +494,64 @@ pub fn map_value_completions<'a>(
     })
     .collect()
 }
+
+#[cfg(test)]
+mod completer_tests {
+    use super::*;
+
+    #[test]
+    fn test_completion_helper() {
+        let mut engine_state =
+            nu_command::add_shell_command_context(nu_cmd_lang::create_default_context());
+
+        // Custom additions
+        let delta = {
+            let working_set = nu_protocol::engine::StateWorkingSet::new(&engine_state);
+            working_set.render()
+        };
+
+        let result = engine_state.merge_delta(delta);
+        assert!(
+            result.is_ok(),
+            "Error merging delta: {:?}",
+            result.err().unwrap()
+        );
+
+        let mut completer = NuCompleter::new(engine_state.into(), Stack::new());
+        let dataset = vec![
+            ("sudo", false, "", Vec::new()),
+            ("sudo l", true, "l", vec!["ls", "let", "lines", "loop"]),
+            (" sudo", false, "", Vec::new()),
+            (" sudo le", true, "le", vec!["let", "length"]),
+            (
+                "ls | c",
+                true,
+                "c",
+                vec!["cd", "config", "const", "cp", "cal"],
+            ),
+            ("ls | sudo m", true, "m", vec!["mv", "mut", "move"]),
+        ];
+        for (line, has_result, begins_with, expected_values) in dataset {
+            let result = completer.completion_helper(line, line.len());
+            // Test whether the result is empty or not
+            assert_eq!(!result.is_empty(), has_result, "line: {}", line);
+
+            // Test whether the result begins with the expected value
+            result
+                .iter()
+                .for_each(|x| assert!(x.value.starts_with(begins_with)));
+
+            // Test whether the result contains all the expected values
+            assert_eq!(
+                result
+                    .iter()
+                    .map(|x| expected_values.contains(&x.value.as_str()))
+                    .filter(|x| *x)
+                    .count(),
+                expected_values.len(),
+                "line: {}",
+                line
+            );
+        }
+    }
+}
|
|||||||
PipelineData, Span, Type, Value,
|
PipelineData, Span, Type, Value,
|
||||||
};
|
};
|
||||||
use reedline::Suggestion;
|
use reedline::Suggestion;
|
||||||
|
use std::collections::HashMap;
|
||||||
use std::sync::Arc;
|
use std::sync::Arc;
|
||||||
|
|
||||||
use super::completer::map_value_completions;
|
use super::completer::map_value_completions;
|
||||||
@ -66,7 +67,7 @@ impl Completer for CustomCompletion {
|
|||||||
],
|
],
|
||||||
redirect_stdout: true,
|
redirect_stdout: true,
|
||||||
redirect_stderr: true,
|
redirect_stderr: true,
|
||||||
parser_info: vec![],
|
parser_info: HashMap::new(),
|
||||||
},
|
},
|
||||||
PipelineData::empty(),
|
PipelineData::empty(),
|
||||||
);
|
);
|
||||||
|
@@ -8,7 +8,7 @@ use std::fs;
 use std::path::Path;
 use std::sync::Arc;

-use super::{partial_from, prepend_base_dir};
+use super::{partial_from, prepend_base_dir, SortBy};

 const SEP: char = std::path::MAIN_SEPARATOR;

@@ -60,12 +60,20 @@ impl Completer for DirectoryCompletion {

         // Sort items
         let mut sorted_items = items;
-        sorted_items.sort_by(|a, b| a.value.cmp(&b.value));
-        sorted_items.sort_by(|a, b| {
-            let a_distance = levenshtein_distance(&prefix_str, &a.value);
-            let b_distance = levenshtein_distance(&prefix_str, &b.value);
-            a_distance.cmp(&b_distance)
-        });
+
+        match self.get_sort_by() {
+            SortBy::Ascending => {
+                sorted_items.sort_by(|a, b| a.value.cmp(&b.value));
+            }
+            SortBy::LevenshteinDistance => {
+                sorted_items.sort_by(|a, b| {
+                    let a_distance = levenshtein_distance(&prefix_str, &a.value);
+                    let b_distance = levenshtein_distance(&prefix_str, &b.value);
+                    a_distance.cmp(&b_distance)
+                });
+            }
+            _ => (),
+        }

         // Separate the results between hidden and non hidden
         let mut hidden: Vec<Suggestion> = vec![];
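Note: both path completers now branch on `get_sort_by()` instead of sorting twice unconditionally. A std-only sketch of the two strategies, with a small edit-distance function standing in for the real `levenshtein_distance` helper:

    // Sketch: sort candidates lexicographically or by edit distance to the prefix.
    fn levenshtein(a: &str, b: &str) -> usize {
        let (a, b): (Vec<char>, Vec<char>) = (a.chars().collect(), b.chars().collect());
        let mut prev: Vec<usize> = (0..=b.len()).collect();
        for (i, ca) in a.iter().enumerate() {
            let mut cur = vec![i + 1];
            for (j, cb) in b.iter().enumerate() {
                let cost = if ca == cb { 0 } else { 1 };
                cur.push((prev[j] + cost).min(prev[j + 1] + 1).min(cur[j] + 1));
            }
            prev = cur;
        }
        prev[b.len()]
    }

    fn main() {
        let prefix = "co";
        let mut ascending = vec!["cp", "config", "const", "cd"];
        ascending.sort();
        let mut by_distance = ascending.clone();
        by_distance.sort_by_key(|s| levenshtein(prefix, s));
        println!("ascending:   {ascending:?}");
        println!("by distance: {by_distance:?}");
    }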
@@ -7,6 +7,8 @@ use reedline::Suggestion;
 use std::path::{is_separator, Path};
 use std::sync::Arc;

+use super::SortBy;
+
 const SEP: char = std::path::MAIN_SEPARATOR;

 #[derive(Clone)]
@@ -55,12 +57,20 @@ impl Completer for FileCompletion {

         // Sort items
         let mut sorted_items = items;
-        sorted_items.sort_by(|a, b| a.value.cmp(&b.value));
-        sorted_items.sort_by(|a, b| {
-            let a_distance = levenshtein_distance(&prefix_str, &a.value);
-            let b_distance = levenshtein_distance(&prefix_str, &b.value);
-            a_distance.cmp(&b_distance)
-        });
+
+        match self.get_sort_by() {
+            SortBy::Ascending => {
+                sorted_items.sort_by(|a, b| a.value.cmp(&b.value));
+            }
+            SortBy::LevenshteinDistance => {
+                sorted_items.sort_by(|a, b| {
+                    let a_distance = levenshtein_distance(&prefix_str, &a.value);
+                    let b_distance = levenshtein_distance(&prefix_str, &b.value);
+                    a_distance.cmp(&b_distance)
+                });
+            }
+            _ => (),
+        }

         // Separate the results between hidden and non hidden
         let mut hidden: Vec<Suggestion> = vec![];
@@ -141,6 +151,16 @@ pub fn file_path_completion(
         || path.contains('#')
         || path.contains('(')
         || path.contains(')')
+        || path.starts_with('0')
+        || path.starts_with('1')
+        || path.starts_with('2')
+        || path.starts_with('3')
+        || path.starts_with('4')
+        || path.starts_with('5')
+        || path.starts_with('6')
+        || path.starts_with('7')
+        || path.starts_with('8')
+        || path.starts_with('9')
     {
         path = format!("`{path}`");
     }
@@ -1,5 +1,5 @@
 use crate::completions::{Completer, CompletionOptions};
-use nu_engine::eval_variable;
+use nu_engine::{column::get_columns, eval_variable};
 use nu_protocol::{
     engine::{EngineState, Stack, StateWorkingSet},
     Span, Value,
@@ -179,11 +179,7 @@ impl Completer for VariableCompletion {
         let mut removed_overlays = vec![];
         // Working set scope vars
         for scope_frame in working_set.delta.scope.iter().rev() {
-            for overlay_frame in scope_frame
-                .active_overlays(&mut removed_overlays)
-                .iter()
-                .rev()
-            {
+            for overlay_frame in scope_frame.active_overlays(&mut removed_overlays).rev() {
                 for v in &overlay_frame.vars {
                     if options.match_algorithm.matches_u8_insensitive(
                         options.case_sensitive,
@@ -204,12 +200,7 @@ impl Completer for VariableCompletion {

         // Permanent state vars
         // for scope in &self.engine_state.scope {
-        for overlay_frame in self
-            .engine_state
-            .active_overlays(&removed_overlays)
-            .iter()
-            .rev()
-        {
+        for overlay_frame in self.engine_state.active_overlays(&removed_overlays).rev() {
             for v in &overlay_frame.vars {
                 if options.match_algorithm.matches_u8_insensitive(
                     options.case_sensitive,
@@ -276,7 +267,19 @@ fn nested_suggestions(

             output
         }
+        Value::List { vals, span: _ } => {
+            for column_name in get_columns(vals.as_slice()) {
+                output.push(Suggestion {
+                    value: column_name,
+                    description: None,
+                    extra: None,
+                    span: current_span,
+                    append_whitespace: false,
+                });
+            }
+
+            output
+        }
         _ => output,
     }
 }
@@ -291,7 +294,7 @@ fn recursive_value(val: Value, sublevels: Vec<Vec<u8>>) -> Value {
             vals,
             span: _,
         } => {
-            for item in cols.into_iter().zip(vals.into_iter()) {
+            for item in cols.into_iter().zip(vals) {
                 // Check if index matches with sublevel
                 if item.0.as_bytes().to_vec() == next_sublevel {
                     // If matches try to fetch recursively the next
@@ -304,6 +307,38 @@ fn recursive_value(val: Value, sublevels: Vec<Vec<u8>>) -> Value {
                 span: Span::unknown(),
             };
         }
+        Value::LazyRecord { val, span: _ } => {
+            for col in val.column_names() {
+                if col.as_bytes().to_vec() == next_sublevel {
+                    return recursive_value(
+                        val.get_column_value(col).unwrap_or_default(),
+                        sublevels.into_iter().skip(1).collect(),
+                    );
+                }
+            }
+
+            // Current sublevel value not found
+            return Value::Nothing {
+                span: Span::unknown(),
+            };
+        }
+        Value::List { vals, span } => {
+            for col in get_columns(vals.as_slice()) {
+                if col.as_bytes().to_vec() == next_sublevel {
+                    return recursive_value(
+                        Value::List { vals, span }
+                            .get_data_by_key(&col)
+                            .unwrap_or_default(),
+                        sublevels.into_iter().skip(1).collect(),
+                    );
+                }
+            }
+
+            // Current sublevel value not found
+            return Value::Nothing {
+                span: Span::unknown(),
+            };
+        }
         _ => return val,
     }
 }
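Note: `recursive_value` now also descends into lazy records and lists of records. The underlying technique is a recursive lookup along a path of keys; a std-only sketch over nested maps (field names here are illustrative only):

    // Sketch: walk a path of keys through nested records.
    use std::collections::BTreeMap;

    #[derive(Debug, Clone)]
    enum Val {
        Int(i64),
        Record(BTreeMap<String, Val>),
    }

    fn descend(val: &Val, path: &[&str]) -> Option<Val> {
        match path.split_first() {
            None => Some(val.clone()),
            Some((key, rest)) => match val {
                Val::Record(fields) => fields.get(*key).and_then(|v| descend(v, rest)),
                _ => None,
            },
        }
    }

    fn main() {
        let mut history = BTreeMap::new();
        history.insert("max_size".to_string(), Val::Int(10_000));
        let mut config = BTreeMap::new();
        config.insert("history".to_string(), Val::Record(history));
        let root = Val::Record(config);

        assert!(descend(&root, &["history", "max_size"]).is_some());
        assert!(descend(&root, &["history", "missing"]).is_none());
    }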
@@ -1,13 +1,12 @@
-use crate::util::{eval_source, report_error};
-#[cfg(feature = "plugin")]
-use nu_parser::ParseError;
+use crate::util::eval_source;
 #[cfg(feature = "plugin")]
 use nu_path::canonicalize_with;
 use nu_protocol::engine::{EngineState, Stack, StateWorkingSet};
-#[cfg(feature = "plugin")]
-use nu_protocol::Spanned;
+use nu_protocol::report_error;
 use nu_protocol::{HistoryFileFormat, PipelineData};
 #[cfg(feature = "plugin")]
+use nu_protocol::{ParseError, Spanned};
+#[cfg(feature = "plugin")]
 use nu_utils::utils::perf;
 use std::path::PathBuf;

@@ -41,6 +40,7 @@ pub fn read_plugin_file(
                 &contents,
                 &plugin_filename,
                 PipelineData::empty(),
+                false,
             );
         }
     }
@@ -51,6 +51,7 @@ pub fn read_plugin_file(
         file!(),
         line!(),
         column!(),
+        engine_state.get_config().use_ansi_coloring,
     );
 }

@@ -93,6 +94,7 @@ pub fn eval_config_contents(
             &contents,
             &config_filename,
             PipelineData::empty(),
+            false,
         );

         // Merge the environment in case env vars changed in the config
@@ -1,9 +1,9 @@
-use crate::util::report_error;
 use log::info;
 use miette::Result;
 use nu_engine::{convert_env_values, eval_block};
 use nu_parser::parse;
 use nu_protocol::engine::Stack;
+use nu_protocol::report_error;
 use nu_protocol::{
 engine::{EngineState, StateWorkingSet},
 PipelineData, Spanned, Value,
@@ -34,9 +34,9 @@ pub fn evaluate_commands(

 let mut working_set = StateWorkingSet::new(engine_state);

-let (output, err) = parse(&mut working_set, None, commands.item.as_bytes(), false, &[]);
-if let Some(err) = err {
-report_error(&working_set, &err);
+let output = parse(&mut working_set, None, commands.item.as_bytes(), false);
+if let Some(err) = working_set.parse_errors.first() {
+report_error(&working_set, err);

 std::process::exit(1);
 }
@@ -1,14 +1,16 @@
-use crate::util::{eval_source, report_error};
+use crate::util::eval_source;
 use log::info;
 use log::trace;
 use miette::{IntoDiagnostic, Result};
+use nu_engine::eval_block_with_early_return;
 use nu_engine::{convert_env_values, current_dir};
 use nu_parser::parse;
 use nu_path::canonicalize_with;
+use nu_protocol::report_error;
 use nu_protocol::{
 ast::Call,
 engine::{EngineState, Stack, StateWorkingSet},
-Config, PipelineData, ShellError, Span, Type, Value,
+Config, PipelineData, ShellError, Span, Value,
 };
 use nu_utils::stdout_write_all_and_flush;

@@ -92,27 +94,75 @@ pub fn evaluate_file(
 "FILE_PWD".to_string(),
 Value::string(parent.to_string_lossy(), Span::unknown()),
 );
+stack.add_env_var(
+"CURRENT_FILE".to_string(),
+Value::string(file_path.to_string_lossy(), Span::unknown()),
+);
+
+let source_filename = file_path
+.file_name()
+.expect("internal error: script missing filename");
+
 let mut working_set = StateWorkingSet::new(engine_state);
 trace!("parsing file: {}", file_path_str);
-let _ = parse(&mut working_set, Some(file_path_str), &file, false, &[]);
+let block = parse(&mut working_set, Some(file_path_str), &file, false);

-if working_set.find_decl(b"main", &Type::Any).is_some() {
+for block in &mut working_set.delta.blocks {
+if block.signature.name == "main" {
+block.signature.name = source_filename.to_string_lossy().to_string();
+} else if block.signature.name.starts_with("main ") {
+block.signature.name = source_filename.to_string_lossy().to_string()
++ " "
++ &String::from_utf8_lossy(&block.signature.name.as_bytes()[5..]);
+}
+}
+
+let _ = engine_state.merge_delta(working_set.delta);
+
+if engine_state.find_decl(b"main", &[]).is_some() {
 let args = format!("main {}", args.join(" "));
+
+let pipeline_data = eval_block_with_early_return(
+engine_state,
+stack,
+&block,
+PipelineData::empty(),
+false,
+false,
+)
+.unwrap_or_else(|e| {
+let working_set = StateWorkingSet::new(engine_state);
+report_error(&working_set, &e);
+std::process::exit(1);
+});
+
+let result = pipeline_data.print(engine_state, stack, true, false);
+
+match result {
+Err(err) => {
+let working_set = StateWorkingSet::new(engine_state);
+
+report_error(&working_set, &err);
+std::process::exit(1);
+}
+Ok(exit_code) => {
+if exit_code != 0 {
+std::process::exit(exit_code as i32);
+}
+}
+}
+
 if !eval_source(
 engine_state,
 stack,
-&file,
-file_path_str,
-PipelineData::empty(),
+args.as_bytes(),
+"<commandline>",
+input,
+true,
 ) {
 std::process::exit(1);
 }
-if !eval_source(engine_state, stack, args.as_bytes(), "<commandline>", input) {
-std::process::exit(1);
-}
-} else if !eval_source(engine_state, stack, &file, file_path_str, input) {
+} else if !eval_source(engine_state, stack, &file, file_path_str, input, true) {
 std::process::exit(1);
 }

@@ -137,7 +187,7 @@ pub(crate) fn print_table_or_error(

 if let PipelineData::Value(Value::Error { error }, ..) = &pipeline_data {
 let working_set = StateWorkingSet::new(engine_state);
-report_error(&working_set, error);
+report_error(&working_set, &**error);
 std::process::exit(1);
 }

@@ -146,12 +196,10 @@ pub(crate) fn print_table_or_error(
 if command.get_block_id().is_some() {
 print_or_exit(pipeline_data, engine_state, config);
 } else {
-let table = command.run(
-engine_state,
-stack,
-&Call::new(Span::new(0, 0)),
-pipeline_data,
-);
+// The final call on table command, it's ok to set redirect_output to false.
+let mut call = Call::new(Span::new(0, 0));
+call.redirect_stdout = false;
+let table = command.run(engine_state, stack, &call, pipeline_data);

 match table {
 Ok(table) => {
@@ -187,7 +235,7 @@ fn print_or_exit(pipeline_data: PipelineData, engine_state: &mut EngineState, co
 if let Value::Error { error } = item {
 let working_set = StateWorkingSet::new(engine_state);

-report_error(&working_set, &error);
+report_error(&working_set, &*error);

 std::process::exit(1);
 }
@@ -1,6 +1,7 @@
 mod commands;
 mod completions;
 mod config_files;
+mod eval_cmds;
 mod eval_file;
 mod menus;
 mod nu_highlight;
@@ -13,18 +14,19 @@ mod syntax_highlight;
 mod util;
 mod validation;

-pub use commands::evaluate_commands;
+pub use commands::add_cli_context;
 pub use completions::{FileCompletion, NuCompleter};
 pub use config_files::eval_config_contents;
+pub use eval_cmds::evaluate_commands;
 pub use eval_file::evaluate_file;
 pub use menus::{DescriptionMenu, NuHelpCompleter};
+pub use nu_cmd_base::util::get_init_cwd;
 pub use nu_highlight::NuHighlight;
 pub use print::Print;
 pub use prompt::NushellPrompt;
 pub use repl::evaluate_repl;
-pub use repl::{eval_env_change_hook, eval_hook};
 pub use syntax_highlight::NuHighlighter;
-pub use util::{eval_source, gather_parent_env_vars, get_init_cwd, report_error, report_error_new};
+pub use util::{eval_source, gather_parent_env_vars};
 pub use validation::NuValidator;

 #[cfg(feature = "plugin")]
@@ -646,7 +646,10 @@ impl Menu for DescriptionMenu {
 |lb| {
 lb.replace_range(start..end, replacement);
 let mut offset = lb.insertion_point();
-offset += lb.len().saturating_sub(end.saturating_sub(start));
+offset += lb
+.len()
+.saturating_sub(end.saturating_sub(start))
+.saturating_sub(start);
 lb.set_insertion_point(offset);
 },
 UndoBehavior::CreateUndoPoint,
@@ -57,7 +57,9 @@ impl NuHelpCompleter {
 let _ = write!(long_desc, "Usage:\r\n > {}\r\n", sig.call_signature());

 if !sig.named.is_empty() {
-long_desc.push_str(&get_flags_section(sig))
+long_desc.push_str(&get_flags_section(sig, |v| {
+v.into_string_parsable(", ", &self.0.config)
+}))
 }

 if !sig.required_positional.is_empty()
@@ -69,10 +71,18 @@ impl NuHelpCompleter {
 let _ = write!(long_desc, " {}: {}\r\n", positional.name, positional.desc);
 }
 for positional in &sig.optional_positional {
+let opt_suffix = if let Some(value) = &positional.default_value {
+format!(
+" (optional, default: {})",
+&value.into_string_parsable(", ", &self.0.config),
+)
+} else {
+(" (optional)").to_string()
+};
 let _ = write!(
 long_desc,
-" (optional) {}: {}\r\n",
-positional.name, positional.desc
+" (optional) {}: {}{}\r\n",
+positional.name, positional.desc, opt_suffix
 );
 }

@@ -94,10 +94,10 @@ fn convert_to_suggestions(
 Some(span @ Value::Record { .. }) => {
 let start = span
 .get_data_by_key("start")
-.and_then(|val| val.as_integer().ok());
+.and_then(|val| val.as_int().ok());
 let end = span
 .get_data_by_key("end")
-.and_then(|val| val.as_integer().ok());
+.and_then(|val| val.as_int().ok());
 match (start, end) {
 (Some(start), Some(end)) => {
 let start = start.min(end);
@@ -53,7 +53,9 @@ impl Command for NuHighlight {
 span: head,
 }
 }
-Err(err) => Value::Error { error: err },
+Err(err) => Value::Error {
+error: Box::new(err),
+},
 },
 ctrlc,
 )
@@ -16,7 +16,11 @@ impl Command for Print {

 fn signature(&self) -> Signature {
 Signature::build("print")
-.input_output_types(vec![(Type::Nothing, Type::Nothing)])
+.input_output_types(vec![
+(Type::Nothing, Type::Nothing),
+(Type::Any, Type::Nothing),
+])
+.allow_variants_without_examples(true)
 .rest("rest", SyntaxShape::Any, "the values to print")
 .switch(
 "no-newline",
@@ -28,7 +32,7 @@ impl Command for Print {
 }

 fn usage(&self) -> &str {
-"Print the given values to stdout"
+"Print the given values to stdout."
 }

 fn extra_usage(&self) -> &str {
@@ -47,15 +51,20 @@ Since this command has no output, there is no point in piping it with other comm
 engine_state: &EngineState,
 stack: &mut Stack,
 call: &Call,
-_input: PipelineData,
+input: PipelineData,
 ) -> Result<PipelineData, ShellError> {
 let args: Vec<Value> = call.rest(engine_state, stack, 0)?;
 let no_newline = call.has_flag("no-newline");
 let to_stderr = call.has_flag("stderr");

-for arg in args {
-arg.into_pipeline_data()
-.print(engine_state, stack, no_newline, to_stderr)?;
+// This will allow for easy printing of pipelines as well
+if !args.is_empty() {
+for arg in args {
+arg.into_pipeline_data()
+.print(engine_state, stack, no_newline, to_stderr)?;
+}
+} else if !input.is_nothing() {
+input.print(engine_state, stack, no_newline, to_stderr)?;
 }

 Ok(PipelineData::empty())
@@ -106,11 +106,12 @@ impl Prompt for NushellPrompt {
 prompt_string.replace('\n', "\r\n").into()
 } else {
 let default = DefaultPrompt::default();
-default
+let prompt = default
 .render_prompt_left()
 .to_string()
-.replace('\n', "\r\n")
-.into()
+.replace('\n', "\r\n");
+
+prompt.into()
 }
 }

@@ -131,22 +132,22 @@ impl Prompt for NushellPrompt {
 match edit_mode {
 PromptEditMode::Default => match &self.default_prompt_indicator {
 Some(indicator) => indicator,
-None => "〉",
+None => "> ",
 }
 .into(),
 PromptEditMode::Emacs => match &self.default_prompt_indicator {
 Some(indicator) => indicator,
-None => "〉",
+None => "> ",
 }
 .into(),
 PromptEditMode::Vi(vi_mode) => match vi_mode {
 PromptViMode::Normal => match &self.default_vi_normal_prompt_indicator {
 Some(indicator) => indicator,
-None => ": ",
+None => "> ",
 },
 PromptViMode::Insert => match &self.default_vi_insert_prompt_indicator {
 Some(indicator) => indicator,
-None => "〉",
+None => ": ",
 },
 }
 .into(),
@@ -1,7 +1,7 @@
-use crate::util::report_error;
 use crate::NushellPrompt;
 use log::trace;
 use nu_engine::eval_subexpression;
+use nu_protocol::report_error;
 use nu_protocol::{
 engine::{EngineState, Stack, StateWorkingSet},
 Config, PipelineData, Value,
@@ -97,12 +97,11 @@ pub(crate) fn add_menus(
 {
 let (block, _) = {
 let mut working_set = StateWorkingSet::new(&engine_state);
-let (output, _) = parse(
+let output = parse(
 &mut working_set,
 Some(name), // format!("entry #{}", entry_num)
 definition.as_bytes(),
 true,
-&[],
 );

 (output, working_set.render())
@@ -132,7 +131,7 @@ fn add_menu(
 config: &Config,
 ) -> Result<Reedline, ShellError> {
 if let Value::Record { cols, vals, span } = &menu.menu_type {
-let layout = extract_value("layout", cols, vals, span)?.into_string("", config);
+let layout = extract_value("layout", cols, vals, *span)?.into_string("", config);

 match layout.as_str() {
 "columnar" => add_columnar_menu(line_editor, menu, engine_state, stack, config),
@@ -156,7 +155,7 @@ fn add_menu(
 macro_rules! add_style {
 // first arm match add!(1,2), add!(2,3) etc
 ($name:expr, $cols: expr, $vals:expr, $span:expr, $config: expr, $menu:expr, $f:expr) => {
-$menu = match extract_value($name, $cols, $vals, $span) {
+$menu = match extract_value($name, $cols, $vals, *$span) {
 Ok(text) => {
 let style = match text {
 Value::String { val, .. } => lookup_ansi_color_style(&val),
@@ -182,25 +181,25 @@ pub(crate) fn add_columnar_menu(
 let mut columnar_menu = ColumnarMenu::default().with_name(&name);

 if let Value::Record { cols, vals, span } = &menu.menu_type {
-columnar_menu = match extract_value("columns", cols, vals, span) {
+columnar_menu = match extract_value("columns", cols, vals, *span) {
 Ok(columns) => {
-let columns = columns.as_integer()?;
+let columns = columns.as_int()?;
 columnar_menu.with_columns(columns as u16)
 }
 Err(_) => columnar_menu,
 };

-columnar_menu = match extract_value("col_width", cols, vals, span) {
+columnar_menu = match extract_value("col_width", cols, vals, *span) {
 Ok(col_width) => {
-let col_width = col_width.as_integer()?;
+let col_width = col_width.as_int()?;
 columnar_menu.with_column_width(Some(col_width as usize))
 }
 Err(_) => columnar_menu.with_column_width(None),
 };

-columnar_menu = match extract_value("col_padding", cols, vals, span) {
+columnar_menu = match extract_value("col_padding", cols, vals, *span) {
 Ok(col_padding) => {
-let col_padding = col_padding.as_integer()?;
+let col_padding = col_padding.as_int()?;
 columnar_menu.with_column_padding(col_padding as usize)
 }
 Err(_) => columnar_menu,
@@ -284,9 +283,9 @@ pub(crate) fn add_list_menu(
 let mut list_menu = ListMenu::default().with_name(&name);

 if let Value::Record { cols, vals, span } = &menu.menu_type {
-list_menu = match extract_value("page_size", cols, vals, span) {
+list_menu = match extract_value("page_size", cols, vals, *span) {
 Ok(page_size) => {
-let page_size = page_size.as_integer()?;
+let page_size = page_size.as_int()?;
 list_menu.with_page_size(page_size as usize)
 }
 Err(_) => list_menu,
@@ -370,41 +369,41 @@ pub(crate) fn add_description_menu(
 let mut description_menu = DescriptionMenu::default().with_name(&name);

 if let Value::Record { cols, vals, span } = &menu.menu_type {
-description_menu = match extract_value("columns", cols, vals, span) {
+description_menu = match extract_value("columns", cols, vals, *span) {
 Ok(columns) => {
-let columns = columns.as_integer()?;
+let columns = columns.as_int()?;
 description_menu.with_columns(columns as u16)
 }
 Err(_) => description_menu,
 };

-description_menu = match extract_value("col_width", cols, vals, span) {
+description_menu = match extract_value("col_width", cols, vals, *span) {
 Ok(col_width) => {
-let col_width = col_width.as_integer()?;
+let col_width = col_width.as_int()?;
 description_menu.with_column_width(Some(col_width as usize))
 }
 Err(_) => description_menu.with_column_width(None),
 };

-description_menu = match extract_value("col_padding", cols, vals, span) {
+description_menu = match extract_value("col_padding", cols, vals, *span) {
 Ok(col_padding) => {
-let col_padding = col_padding.as_integer()?;
+let col_padding = col_padding.as_int()?;
 description_menu.with_column_padding(col_padding as usize)
 }
 Err(_) => description_menu,
 };

-description_menu = match extract_value("selection_rows", cols, vals, span) {
+description_menu = match extract_value("selection_rows", cols, vals, *span) {
 Ok(selection_rows) => {
-let selection_rows = selection_rows.as_integer()?;
+let selection_rows = selection_rows.as_int()?;
 description_menu.with_selection_rows(selection_rows as u16)
 }
 Err(_) => description_menu,
 };

-description_menu = match extract_value("description_rows", cols, vals, span) {
+description_menu = match extract_value("description_rows", cols, vals, *span) {
 Ok(description_rows) => {
-let description_rows = description_rows.as_integer()?;
+let description_rows = description_rows.as_int()?;
 description_menu.with_description_rows(description_rows as usize)
 }
 Err(_) => description_menu,
@@ -524,6 +523,12 @@ fn add_menu_keybindings(keybindings: &mut Keybindings) {
 KeyCode::F(1),
 ReedlineEvent::Menu("help_menu".to_string()),
 );
+
+keybindings.add_binding(
+KeyModifiers::CONTROL,
+KeyCode::Char('q'),
+ReedlineEvent::SearchHistory,
+);
 }

 pub enum KeybindingsMode {
@@ -626,9 +631,12 @@ fn add_parsed_keybinding(
 "shift" => KeyModifiers::SHIFT,
 "alt" => KeyModifiers::ALT,
 "none" => KeyModifiers::NONE,
-"control | shift" => KeyModifiers::CONTROL | KeyModifiers::SHIFT,
-"control | alt" => KeyModifiers::CONTROL | KeyModifiers::ALT,
-"control | alt | shift" => KeyModifiers::CONTROL | KeyModifiers::ALT | KeyModifiers::SHIFT,
+"shift_alt" | "alt_shift" => KeyModifiers::SHIFT | KeyModifiers::ALT,
+"control_shift" | "shift_control" => KeyModifiers::CONTROL | KeyModifiers::SHIFT,
+"control_alt" | "alt_control" => KeyModifiers::CONTROL | KeyModifiers::ALT,
+"control_alt_shift" | "control_shift_alt" => {
+KeyModifiers::CONTROL | KeyModifiers::ALT | KeyModifiers::SHIFT
+}
 _ => {
 return Err(ShellError::UnsupportedConfigValue(
 "CONTROL, SHIFT, ALT or NONE".to_string(),
@@ -717,26 +725,26 @@ impl<'config> EventType<'config> {
 fn try_from_columns(
 cols: &'config [String],
 vals: &'config [Value],
-span: &'config Span,
+span: Span,
 ) -> Result<Self, ShellError> {
 extract_value("send", cols, vals, span)
 .map(Self::Send)
 .or_else(|_| extract_value("edit", cols, vals, span).map(Self::Edit))
 .or_else(|_| extract_value("until", cols, vals, span).map(Self::Until))
-.map_err(|_| ShellError::MissingConfigValue("send, edit or until".to_string(), *span))
+.map_err(|_| ShellError::MissingConfigValue("send, edit or until".to_string(), span))
 }
 }

 fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>, ShellError> {
 match value {
 Value::Record { cols, vals, span } => {
-match EventType::try_from_columns(cols, vals, span)? {
+match EventType::try_from_columns(cols, vals, *span)? {
 EventType::Send(value) => event_from_record(
 value.into_string("", config).to_lowercase().as_str(),
 cols,
 vals,
 config,
-span,
+*span,
 )
 .map(Some),
 EventType::Edit(value) => {
@@ -745,7 +753,7 @@ fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>,
 cols,
 vals,
 config,
-span,
+*span,
 )?;
 Ok(Some(ReedlineEvent::Edit(vec![edit])))
 }
@@ -808,7 +816,7 @@ fn event_from_record(
 cols: &[String],
 vals: &[Value],
 config: &Config,
-span: &Span,
+span: Span,
 ) -> Result<ReedlineEvent, ShellError> {
 let event = match name {
 "none" => ReedlineEvent::None,
@@ -851,7 +859,7 @@ fn event_from_record(
 return Err(ShellError::UnsupportedConfigValue(
 "Reedline event".to_string(),
 v.to_string(),
-*span,
+span,
 ))
 }
 };
@@ -864,7 +872,7 @@ fn edit_from_record(
 cols: &[String],
 vals: &[Value],
 config: &Config,
-span: &Span,
+span: Span,
 ) -> Result<EditCommand, ShellError> {
 let edit = match name {
 "movetostart" => EditCommand::MoveToStart,
@@ -882,7 +890,7 @@ fn edit_from_record(
 "movebigwordrightstart" => EditCommand::MoveBigWordRightStart,
 "movetoposition" => {
 let value = extract_value("value", cols, vals, span)?;
-EditCommand::MoveToPosition(value.as_integer()? as usize)
+EditCommand::MoveToPosition(value.as_int()? as usize)
 }
 "insertchar" => {
 let value = extract_value("value", cols, vals, span)?;
@@ -966,7 +974,7 @@ fn edit_from_record(
 return Err(ShellError::UnsupportedConfigValue(
 "reedline EditCommand".to_string(),
 e.to_string(),
-*span,
+span,
 ))
 }
 };
@@ -993,7 +1001,7 @@ mod test {
 let vals = vec![Value::test_string("Enter")];

 let span = Span::test_data();
-let b = EventType::try_from_columns(&cols, &vals, &span).unwrap();
+let b = EventType::try_from_columns(&cols, &vals, span).unwrap();
 assert!(matches!(b, EventType::Send(_)));

 let event = Value::Record {
@@ -1013,7 +1021,7 @@ mod test {
 let vals = vec![Value::test_string("Clear")];

 let span = Span::test_data();
-let b = EventType::try_from_columns(&cols, &vals, &span).unwrap();
+let b = EventType::try_from_columns(&cols, &vals, span).unwrap();
 assert!(matches!(b, EventType::Edit(_)));

 let event = Value::Record {
@@ -1039,7 +1047,7 @@ mod test {
 ];

 let span = Span::test_data();
-let b = EventType::try_from_columns(&cols, &vals, &span).unwrap();
+let b = EventType::try_from_columns(&cols, &vals, span).unwrap();
 assert!(matches!(b, EventType::Send(_)));

 let event = Value::Record {
@@ -1089,7 +1097,7 @@ mod test {
 }];

 let span = Span::test_data();
-let b = EventType::try_from_columns(&cols, &vals, &span).unwrap();
+let b = EventType::try_from_columns(&cols, &vals, span).unwrap();
 assert!(matches!(b, EventType::Until(_)));

 let event = Value::Record {
@@ -1157,7 +1165,7 @@ mod test {
 let vals = vec![Value::test_string("Enter")];

 let span = Span::test_data();
-let b = EventType::try_from_columns(&cols, &vals, &span);
+let b = EventType::try_from_columns(&cols, &vals, span);
 assert!(matches!(b, Err(ShellError::MissingConfigValue(_, _))));
 }
 }
File diff suppressed because it is too large
@@ -18,10 +18,7 @@ impl Highlighter for NuHighlighter {
 trace!("highlighting: {}", line);

 let mut working_set = StateWorkingSet::new(&self.engine_state);
-let block = {
-let (block, _) = parse(&mut working_set, None, line.as_bytes(), false, &[]);
-block
-};
+let block = parse(&mut working_set, None, line.as_bytes(), false);
 let (shapes, global_span_offset) = {
 let shapes = flatten_block(&working_set, &block);
 (shapes, self.engine_state.next_span_start())
@@ -62,7 +59,7 @@ impl Highlighter for NuHighlighter {
 ($shape:expr, $span:expr, $text:expr) => {{
 let spans = split_span_by_highlight_positions(
 line,
-&$span,
+$span,
 &matching_brackets_pos,
 global_span_offset,
 );
@@ -79,29 +76,28 @@ impl Highlighter for NuHighlighter {
 }};
 }

-macro_rules! add_colored_token {
-($shape:expr, $text:expr) => {
-output.push((get_shape_color($shape.to_string(), &self.config), $text))
-};
-}
+let mut add_colored_token = |shape: &FlatShape, text: String| {
+output.push((get_shape_color(shape.to_string(), &self.config), text));
+};

 match shape.1 {
-FlatShape::Garbage => add_colored_token!(shape.1, next_token),
-FlatShape::Nothing => add_colored_token!(shape.1, next_token),
-FlatShape::Binary => add_colored_token!(shape.1, next_token),
-FlatShape::Bool => add_colored_token!(shape.1, next_token),
-FlatShape::Int => add_colored_token!(shape.1, next_token),
-FlatShape::Float => add_colored_token!(shape.1, next_token),
-FlatShape::Range => add_colored_token!(shape.1, next_token),
-FlatShape::InternalCall => add_colored_token!(shape.1, next_token),
-FlatShape::External => add_colored_token!(shape.1, next_token),
-FlatShape::ExternalArg => add_colored_token!(shape.1, next_token),
-FlatShape::Literal => add_colored_token!(shape.1, next_token),
-FlatShape::Operator => add_colored_token!(shape.1, next_token),
-FlatShape::Signature => add_colored_token!(shape.1, next_token),
-FlatShape::String => add_colored_token!(shape.1, next_token),
-FlatShape::StringInterpolation => add_colored_token!(shape.1, next_token),
-FlatShape::DateTime => add_colored_token!(shape.1, next_token),
+FlatShape::Garbage => add_colored_token(&shape.1, next_token),
+FlatShape::Nothing => add_colored_token(&shape.1, next_token),
+FlatShape::Binary => add_colored_token(&shape.1, next_token),
+FlatShape::Bool => add_colored_token(&shape.1, next_token),
+FlatShape::Int => add_colored_token(&shape.1, next_token),
+FlatShape::Float => add_colored_token(&shape.1, next_token),
+FlatShape::Range => add_colored_token(&shape.1, next_token),
+FlatShape::InternalCall(_) => add_colored_token(&shape.1, next_token),
+FlatShape::External => add_colored_token(&shape.1, next_token),
+FlatShape::ExternalArg => add_colored_token(&shape.1, next_token),
+FlatShape::Keyword => add_colored_token(&shape.1, next_token),
+FlatShape::Literal => add_colored_token(&shape.1, next_token),
+FlatShape::Operator => add_colored_token(&shape.1, next_token),
+FlatShape::Signature => add_colored_token(&shape.1, next_token),
+FlatShape::String => add_colored_token(&shape.1, next_token),
+FlatShape::StringInterpolation => add_colored_token(&shape.1, next_token),
+FlatShape::DateTime => add_colored_token(&shape.1, next_token),
 FlatShape::List => {
 add_colored_token_with_bracket_highlight!(shape.1, shape.0, next_token)
 }
@@ -115,17 +111,23 @@ impl Highlighter for NuHighlighter {
 FlatShape::Block => {
 add_colored_token_with_bracket_highlight!(shape.1, shape.0, next_token)
 }
+FlatShape::Closure => {
+add_colored_token_with_bracket_highlight!(shape.1, shape.0, next_token)
+}
+
-FlatShape::Filepath => add_colored_token!(shape.1, next_token),
-FlatShape::Directory => add_colored_token!(shape.1, next_token),
-FlatShape::GlobPattern => add_colored_token!(shape.1, next_token),
-FlatShape::Variable => add_colored_token!(shape.1, next_token),
-FlatShape::Flag => add_colored_token!(shape.1, next_token),
-FlatShape::Pipe => add_colored_token!(shape.1, next_token),
-FlatShape::And => add_colored_token!(shape.1, next_token),
-FlatShape::Or => add_colored_token!(shape.1, next_token),
-FlatShape::Redirection => add_colored_token!(shape.1, next_token),
-FlatShape::Custom(..) => add_colored_token!(shape.1, next_token),
+FlatShape::Filepath => add_colored_token(&shape.1, next_token),
+FlatShape::Directory => add_colored_token(&shape.1, next_token),
+FlatShape::GlobPattern => add_colored_token(&shape.1, next_token),
+FlatShape::Variable(_) | FlatShape::VarDecl(_) => {
+add_colored_token(&shape.1, next_token)
+}
+FlatShape::Flag => add_colored_token(&shape.1, next_token),
+FlatShape::Pipe => add_colored_token(&shape.1, next_token),
+FlatShape::And => add_colored_token(&shape.1, next_token),
+FlatShape::Or => add_colored_token(&shape.1, next_token),
+FlatShape::Redirection => add_colored_token(&shape.1, next_token),
+FlatShape::Custom(..) => add_colored_token(&shape.1, next_token),
+FlatShape::MatchPattern => add_colored_token(&shape.1, next_token),
 }
 last_seen_span = shape.0.end;
 }
@@ -141,7 +143,7 @@ impl Highlighter for NuHighlighter {

 fn split_span_by_highlight_positions(
 line: &str,
-span: &Span,
+span: Span,
 highlight_positions: &Vec<usize>,
 global_span_offset: usize,
 ) -> Vec<(Span, bool)> {
@@ -235,6 +237,7 @@ fn find_matching_block_end_in_block(
 | PipelineElement::Redirection(_, _, e)
 | PipelineElement::And(_, e)
 | PipelineElement::Or(_, e)
+| PipelineElement::SameTargetRedirection { cmd: (_, e), .. }
 | PipelineElement::SeparateRedirection { out: (_, e), .. } => {
 if e.span.contains(global_cursor_offset) {
 if let Some(pos) = find_matching_block_end_in_expr(
@@ -308,6 +311,8 @@ fn find_matching_block_end_in_expr(
 Expr::ImportPattern(_) => None,
 Expr::Overlay(_) => None,
 Expr::Signature(_) => None,
+Expr::MatchPattern(_) => None,
+Expr::MatchBlock(_) => None,
 Expr::Nothing => None,
 Expr::Garbage => None,

@@ -1,16 +1,16 @@
-use crate::repl::eval_hook;
+use nu_command::hook::eval_hook;
-use nu_engine::eval_block;
+use nu_engine::{eval_block, eval_block_with_early_return};
 use nu_parser::{escape_quote_string, lex, parse, unescape_unquote_string, Token, TokenContents};
 use nu_protocol::engine::StateWorkingSet;
-use nu_protocol::CliError;
 use nu_protocol::{
 engine::{EngineState, Stack},
 print_if_stream, PipelineData, ShellError, Span, Value,
 };
+use nu_protocol::{report_error, report_error_new};
 #[cfg(windows)]
 use nu_utils::enable_vt_processing;
 use nu_utils::utils::perf;
-use std::path::{Path, PathBuf};
+use std::path::Path;

 // This will collect environment variables from std::env and adds them to a stack.
 //
@@ -105,7 +105,7 @@ fn gather_env_vars(
 span: full_span,
 } = token
 {
-let contents = engine_state.get_span_contents(&full_span);
+let contents = engine_state.get_span_contents(full_span);
 let (parts, _) = lex(contents, full_span.start, &[], &[b'='], true);

 let name = if let Some(Token {
@@ -113,7 +113,8 @@ fn gather_env_vars(
 span,
 }) = parts.get(0)
 {
-let bytes = engine_state.get_span_contents(span);
+let mut working_set = StateWorkingSet::new(engine_state);
+let bytes = working_set.get_span_contents(*span);

 if bytes.len() < 2 {
 report_capture_error(
@@ -125,9 +126,12 @@ fn gather_env_vars(
 continue;
 }

-let (bytes, parse_error) = unescape_unquote_string(bytes, *span);
+let (bytes, err) = unescape_unquote_string(bytes, *span);
+if let Some(err) = err {
+working_set.error(err);
+}

-if parse_error.is_some() {
+if working_set.parse_errors.first().is_some() {
 report_capture_error(
 engine_state,
 &String::from_utf8_lossy(contents),
@@ -153,7 +157,8 @@ fn gather_env_vars(
 span,
 }) = parts.get(2)
 {
-let bytes = engine_state.get_span_contents(span);
+let mut working_set = StateWorkingSet::new(engine_state);
+let bytes = working_set.get_span_contents(*span);

 if bytes.len() < 2 {
 report_capture_error(
@@ -165,9 +170,12 @@ fn gather_env_vars(
 continue;
 }

-let (bytes, parse_error) = unescape_unquote_string(bytes, *span);
+let (bytes, err) = unescape_unquote_string(bytes, *span);
+if let Some(err) = err {
+working_set.error(err);
+}

-if parse_error.is_some() {
+if working_set.parse_errors.first().is_some() {
 report_capture_error(
 engine_state,
 &String::from_utf8_lossy(contents),
@@ -203,21 +211,21 @@ pub fn eval_source(
 source: &[u8],
 fname: &str,
 input: PipelineData,
+allow_return: bool,
 ) -> bool {
 let start_time = std::time::Instant::now();

 let (block, delta) = {
 let mut working_set = StateWorkingSet::new(engine_state);
-let (output, err) = parse(
+let output = parse(
 &mut working_set,
 Some(fname), // format!("entry #{}", entry_num)
 source,
 false,
-&[],
 );
-if let Some(err) = err {
+if let Some(err) = working_set.parse_errors.first() {
 set_last_exit_code(stack, 1);
-report_error(&working_set, &err);
+report_error(&working_set, err);
 return false;
 }

@@ -230,7 +238,13 @@ pub fn eval_source(
 return false;
 }

-match eval_block(engine_state, stack, &block, input, false, false) {
+let b = if allow_return {
+eval_block_with_early_return(engine_state, stack, &block, input, false, false)
+} else {
+eval_block(engine_state, stack, &block, input, false, false)
+};
+
+match b {
 Ok(pipeline_data) => {
 let config = engine_state.get_config();
 let result;
@@ -290,6 +304,7 @@ pub fn eval_source(
 file!(),
 line!(),
 column!(),
+engine_state.get_config().use_ansi_coloring,
 );

 true
@@ -302,43 +317,6 @@ fn set_last_exit_code(stack: &mut Stack, exit_code: i64) {
 );
 }

-pub fn report_error(
-working_set: &StateWorkingSet,
-error: &(dyn miette::Diagnostic + Send + Sync + 'static),
-) {
-eprintln!("Error: {:?}", CliError(error, working_set));
-// reset vt processing, aka ansi because illbehaved externals can break it
-#[cfg(windows)]
-{
-let _ = nu_utils::enable_vt_processing();
-}
-}
-
-pub fn report_error_new(
-engine_state: &EngineState,
-error: &(dyn miette::Diagnostic + Send + Sync + 'static),
-) {
-let working_set = StateWorkingSet::new(engine_state);
-
-report_error(&working_set, error);
-}
-
-pub fn get_init_cwd() -> PathBuf {
-std::env::current_dir().unwrap_or_else(|_| {
-std::env::var("PWD")
-.map(Into::into)
-.unwrap_or_else(|_| nu_path::home_dir().unwrap_or_default())
-})
-}
-
-pub fn get_guaranteed_cwd(engine_state: &EngineState, stack: &Stack) -> PathBuf {
-nu_engine::env::current_dir(engine_state, stack).unwrap_or_else(|e| {
-let working_set = StateWorkingSet::new(engine_state);
-report_error(&working_set, &e);
-get_init_cwd()
-})
-}
-
 #[cfg(test)]
 mod test {
 use super::*;
@@ -1,5 +1,8 @@
-use nu_parser::{parse, ParseError};
-use nu_protocol::engine::{EngineState, StateWorkingSet};
+use nu_parser::parse;
+use nu_protocol::{
+engine::{EngineState, StateWorkingSet},
+ParseError,
+};
 use reedline::{ValidationResult, Validator};
 use std::sync::Arc;

@@ -10,9 +13,12 @@ pub struct NuValidator {
 impl Validator for NuValidator {
 fn validate(&self, line: &str) -> ValidationResult {
 let mut working_set = StateWorkingSet::new(&self.engine_state);
-let (_, err) = parse(&mut working_set, None, line.as_bytes(), false, &[]);
+parse(&mut working_set, None, line.as_bytes(), false);

-if matches!(err, Some(ParseError::UnexpectedEof(..))) {
+if matches!(
+working_set.parse_errors.first(),
+Some(ParseError::UnexpectedEof(..))
+) {
 ValidationResult::Incomplete
 } else {
 ValidationResult::Complete
@ -143,7 +143,7 @@ fn external_completer_trailing_space() {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn external_completer_no_trailing_space() {
|
fn external_completer_no_trailing_space() {
|
||||||
let block = "let external_completer = {|spans| $spans}";
|
let block = "{|spans| $spans}";
|
||||||
let input = "gh alias".to_string();
|
let input = "gh alias".to_string();
|
||||||
|
|
||||||
let suggestions = run_external_completion(block, &input);
|
let suggestions = run_external_completion(block, &input);
|
||||||
@ -154,7 +154,7 @@ fn external_completer_no_trailing_space() {
|
|||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn external_completer_pass_flags() {
|
fn external_completer_pass_flags() {
|
||||||
let block = "let external_completer = {|spans| $spans}";
|
let block = "{|spans| $spans}";
|
||||||
let input = "gh api --".to_string();
|
let input = "gh api --".to_string();
|
||||||
|
|
||||||
let suggestions = run_external_completion(block, &input);
|
let suggestions = run_external_completion(block, &input);
|
||||||
@ -178,11 +178,11 @@ fn file_completions() {
|
|||||||
|
|
||||||
// Create the expected values
|
// Create the expected values
|
||||||
let expected_paths: Vec<String> = vec![
|
let expected_paths: Vec<String> = vec![
|
||||||
|
folder(dir.join("another")),
|
||||||
|
file(dir.join("custom_completion.nu")),
|
||||||
file(dir.join("nushell")),
|
file(dir.join("nushell")),
|
||||||
folder(dir.join("test_a")),
|
folder(dir.join("test_a")),
|
||||||
folder(dir.join("test_b")),
|
folder(dir.join("test_b")),
|
||||||
folder(dir.join("another")),
|
|
||||||
file(dir.join("custom_completion.nu")),
|
|
||||||
file(dir.join(".hidden_file")),
|
file(dir.join(".hidden_file")),
|
||||||
folder(dir.join(".hidden_folder")),
|
folder(dir.join(".hidden_folder")),
|
||||||
];
|
];
|
||||||
@@ -212,21 +212,21 @@ fn command_ls_with_filecompletion() {

     #[cfg(windows)]
     let expected_paths: Vec<String> = vec![
+        "another\\".to_string(),
+        "custom_completion.nu".to_string(),
         "nushell".to_string(),
         "test_a\\".to_string(),
         "test_b\\".to_string(),
-        "another\\".to_string(),
-        "custom_completion.nu".to_string(),
         ".hidden_file".to_string(),
         ".hidden_folder\\".to_string(),
     ];
     #[cfg(not(windows))]
     let expected_paths: Vec<String> = vec![
+        "another/".to_string(),
+        "custom_completion.nu".to_string(),
         "nushell".to_string(),
         "test_a/".to_string(),
         "test_b/".to_string(),
-        "another/".to_string(),
-        "custom_completion.nu".to_string(),
         ".hidden_file".to_string(),
         ".hidden_folder/".to_string(),
     ];
@@ -244,21 +244,21 @@ fn command_open_with_filecompletion() {

     #[cfg(windows)]
     let expected_paths: Vec<String> = vec![
+        "another\\".to_string(),
+        "custom_completion.nu".to_string(),
         "nushell".to_string(),
         "test_a\\".to_string(),
         "test_b\\".to_string(),
-        "another\\".to_string(),
-        "custom_completion.nu".to_string(),
         ".hidden_file".to_string(),
         ".hidden_folder\\".to_string(),
     ];
     #[cfg(not(windows))]
     let expected_paths: Vec<String> = vec![
+        "another/".to_string(),
+        "custom_completion.nu".to_string(),
         "nushell".to_string(),
         "test_a/".to_string(),
         "test_b/".to_string(),
-        "another/".to_string(),
-        "custom_completion.nu".to_string(),
         ".hidden_file".to_string(),
         ".hidden_folder/".to_string(),
     ];
@@ -277,21 +277,21 @@ fn command_rm_with_globcompletion() {

     #[cfg(windows)]
     let expected_paths: Vec<String> = vec![
+        "another\\".to_string(),
+        "custom_completion.nu".to_string(),
         "nushell".to_string(),
         "test_a\\".to_string(),
         "test_b\\".to_string(),
-        "another\\".to_string(),
-        "custom_completion.nu".to_string(),
         ".hidden_file".to_string(),
         ".hidden_folder\\".to_string(),
     ];
     #[cfg(not(windows))]
     let expected_paths: Vec<String> = vec![
+        "another/".to_string(),
+        "custom_completion.nu".to_string(),
         "nushell".to_string(),
         "test_a/".to_string(),
         "test_b/".to_string(),
-        "another/".to_string(),
-        "custom_completion.nu".to_string(),
         ".hidden_file".to_string(),
         ".hidden_folder/".to_string(),
     ];
@@ -310,21 +310,21 @@ fn command_cp_with_globcompletion() {

     #[cfg(windows)]
     let expected_paths: Vec<String> = vec![
+        "another\\".to_string(),
+        "custom_completion.nu".to_string(),
         "nushell".to_string(),
         "test_a\\".to_string(),
         "test_b\\".to_string(),
-        "another\\".to_string(),
-        "custom_completion.nu".to_string(),
         ".hidden_file".to_string(),
         ".hidden_folder\\".to_string(),
     ];
     #[cfg(not(windows))]
     let expected_paths: Vec<String> = vec![
+        "another/".to_string(),
+        "custom_completion.nu".to_string(),
         "nushell".to_string(),
         "test_a/".to_string(),
         "test_b/".to_string(),
-        "another/".to_string(),
-        "custom_completion.nu".to_string(),
         ".hidden_file".to_string(),
         ".hidden_folder/".to_string(),
     ];
@@ -343,21 +343,21 @@ fn command_save_with_filecompletion() {

     #[cfg(windows)]
     let expected_paths: Vec<String> = vec![
+        "another\\".to_string(),
+        "custom_completion.nu".to_string(),
         "nushell".to_string(),
         "test_a\\".to_string(),
         "test_b\\".to_string(),
-        "another\\".to_string(),
-        "custom_completion.nu".to_string(),
         ".hidden_file".to_string(),
         ".hidden_folder\\".to_string(),
     ];
     #[cfg(not(windows))]
     let expected_paths: Vec<String> = vec![
+        "another/".to_string(),
+        "custom_completion.nu".to_string(),
         "nushell".to_string(),
         "test_a/".to_string(),
         "test_b/".to_string(),
-        "another/".to_string(),
-        "custom_completion.nu".to_string(),
         ".hidden_file".to_string(),
         ".hidden_folder/".to_string(),
     ];
@@ -376,21 +376,21 @@ fn command_touch_with_filecompletion() {

     #[cfg(windows)]
     let expected_paths: Vec<String> = vec![
+        "another\\".to_string(),
+        "custom_completion.nu".to_string(),
         "nushell".to_string(),
         "test_a\\".to_string(),
         "test_b\\".to_string(),
-        "another\\".to_string(),
-        "custom_completion.nu".to_string(),
         ".hidden_file".to_string(),
         ".hidden_folder\\".to_string(),
     ];
     #[cfg(not(windows))]
     let expected_paths: Vec<String> = vec![
+        "another/".to_string(),
+        "custom_completion.nu".to_string(),
         "nushell".to_string(),
         "test_a/".to_string(),
         "test_b/".to_string(),
-        "another/".to_string(),
-        "custom_completion.nu".to_string(),
         ".hidden_file".to_string(),
         ".hidden_folder/".to_string(),
     ];
@@ -409,21 +409,21 @@ fn command_watch_with_filecompletion() {

     #[cfg(windows)]
     let expected_paths: Vec<String> = vec![
+        "another\\".to_string(),
+        "custom_completion.nu".to_string(),
         "nushell".to_string(),
         "test_a\\".to_string(),
         "test_b\\".to_string(),
-        "another\\".to_string(),
-        "custom_completion.nu".to_string(),
         ".hidden_file".to_string(),
         ".hidden_folder\\".to_string(),
     ];
     #[cfg(not(windows))]
     let expected_paths: Vec<String> = vec![
+        "another/".to_string(),
+        "custom_completion.nu".to_string(),
         "nushell".to_string(),
         "test_a/".to_string(),
         "test_b/".to_string(),
-        "another/".to_string(),
-        "custom_completion.nu".to_string(),
         ".hidden_file".to_string(),
         ".hidden_folder/".to_string(),
     ];
@@ -499,9 +499,9 @@ fn folder_with_directorycompletions() {

     // Create the expected values
     let expected_paths: Vec<String> = vec![
+        folder(dir.join("another")),
         folder(dir.join("test_a")),
         folder(dir.join("test_b")),
-        folder(dir.join("another")),
         folder(dir.join(".hidden_folder")),
     ];

@@ -524,17 +524,22 @@ fn variables_completions() {
     // Test completions for $nu
     let suggestions = completer.complete("$nu.", 4);

-    assert_eq!(9, suggestions.len());
+    assert_eq!(14, suggestions.len());

     let expected: Vec<String> = vec![
         "config-path".into(),
+        "current-exe".into(),
+        "default-config-dir".into(),
         "env-path".into(),
         "history-path".into(),
         "home-path".into(),
+        "is-interactive".into(),
+        "is-login".into(),
         "loginshell-path".into(),
         "os-info".into(),
         "pid".into(),
-        "scope".into(),
+        "plugin-path".into(),
+        "startup-time".into(),
         "temp-path".into(),
     ];

@@ -551,6 +556,18 @@ fn variables_completions() {
     // Match results
     match_suggestions(expected, suggestions);

+    // Test completions for $nu.os-info
+    let suggestions = completer.complete("$nu.os-info.", 12);
+    assert_eq!(4, suggestions.len());
+    let expected: Vec<String> = vec![
+        "arch".into(),
+        "family".into(),
+        "kernel_version".into(),
+        "name".into(),
+    ];
+    // Match results
+    match_suggestions(expected, suggestions);
+
     // Test completions for custom var
     let suggestions = completer.complete("$actor.", 7);

@@ -574,9 +591,12 @@ fn variables_completions() {
     // Test completions for $env
     let suggestions = completer.complete("$env.", 5);

-    assert_eq!(2, suggestions.len());
+    assert_eq!(3, suggestions.len());

-    let expected: Vec<String> = vec!["PWD".into(), "TEST".into()];
+    #[cfg(windows)]
+    let expected: Vec<String> = vec!["PWD".into(), "Path".into(), "TEST".into()];
+    #[cfg(not(windows))]
+    let expected: Vec<String> = vec!["PATH".into(), "PWD".into(), "TEST".into()];

     // Match results
     match_suggestions(expected, suggestions);
@@ -657,8 +677,8 @@ fn run_external_completion(block: &str, input: &str) -> Vec<Suggestion> {
     let (dir, _, mut engine_state, mut stack) = new_engine();
     let (_, delta) = {
         let mut working_set = StateWorkingSet::new(&engine_state);
-        let (block, err) = parse(&mut working_set, None, block.as_bytes(), false, &[]);
-        assert!(err.is_none());
+        let block = parse(&mut working_set, None, block.as_bytes(), false);
+        assert!(working_set.parse_errors.is_empty());

         (block, working_set.render())
     };
@@ -692,21 +712,21 @@ fn unknown_command_completion() {

     #[cfg(windows)]
     let expected_paths: Vec<String> = vec![
+        "another\\".to_string(),
+        "custom_completion.nu".to_string(),
         "nushell".to_string(),
         "test_a\\".to_string(),
         "test_b\\".to_string(),
-        "another\\".to_string(),
-        "custom_completion.nu".to_string(),
         ".hidden_file".to_string(),
         ".hidden_folder\\".to_string(),
     ];
     #[cfg(not(windows))]
     let expected_paths: Vec<String> = vec![
+        "another/".to_string(),
+        "custom_completion.nu".to_string(),
         "nushell".to_string(),
         "test_a/".to_string(),
         "test_b/".to_string(),
-        "another/".to_string(),
-        "custom_completion.nu".to_string(),
         ".hidden_file".to_string(),
         ".hidden_folder/".to_string(),
     ];
@@ -752,21 +772,21 @@ fn filecompletions_triggers_after_cursor() {

     #[cfg(windows)]
     let expected_paths: Vec<String> = vec![
+        "another\\".to_string(),
+        "custom_completion.nu".to_string(),
         "nushell".to_string(),
         "test_a\\".to_string(),
         "test_b\\".to_string(),
-        "another\\".to_string(),
-        "custom_completion.nu".to_string(),
         ".hidden_file".to_string(),
         ".hidden_folder\\".to_string(),
     ];
     #[cfg(not(windows))]
     let expected_paths: Vec<String> = vec![
+        "another/".to_string(),
+        "custom_completion.nu".to_string(),
         "nushell".to_string(),
         "test_a/".to_string(),
         "test_b/".to_string(),
-        "another/".to_string(),
-        "custom_completion.nu".to_string(),
         ".hidden_file".to_string(),
         ".hidden_folder/".to_string(),
     ];
@@ -816,8 +836,9 @@ fn extern_complete_flags(mut extern_completer: NuCompleter) {
     match_suggestions(expected, suggestions);
 }

+#[ignore = "was reverted, still needs fixing"]
 #[rstest]
-fn alias_offset_bug_7748() {
+fn alias_offset_bug_7648() {
     let (dir, _, mut engine, mut stack) = new_engine();

     // Create an alias
@@ -826,15 +847,15 @@ fn alias_offset_bug_7748() {

     let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);

-    // Issue #7748
+    // Issue #7648
     // Nushell crashes when an alias name is shorter than the alias command
     // and the alias command is a external command
     // This happens because of offset is not correct.
     // This crashes before PR #7779
     let _suggestions = completer.complete("e", 1);
-    //println!(" --------- suggestions: {:?}", suggestions);
 }

+#[ignore = "was reverted, still needs fixing"]
 #[rstest]
 fn alias_offset_bug_7754() {
     let (dir, _, mut engine, mut stack) = new_engine();
@@ -850,6 +871,14 @@ fn alias_offset_bug_7754() {

     // and the alias command contains pipes.
     // This crashes before PR #7756
     let _suggestions = completer.complete("ll -a | c", 9);
-    //println!(" --------- suggestions: {:?}", suggestions);
+}
+
+#[test]
+fn get_path_env_var_8003() {
+    // Create a new engine
+    let (_, _, engine, _) = new_engine();
+    // Get the path env var in a platform agnostic way
+    let the_path = engine.get_path_env_var();
+    // Make sure it's not empty
+    assert!(the_path.is_some());
 }
@@ -1,6 +1,5 @@
 use std::path::PathBuf;

-use nu_command::create_default_context;
 use nu_engine::eval_block;
 use nu_parser::parse;
 use nu_protocol::{
@@ -11,6 +10,10 @@ use nu_test_support::fs;
 use reedline::Suggestion;
 const SEP: char = std::path::MAIN_SEPARATOR;

+fn create_default_context() -> EngineState {
+    nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
+}
+
 // creates a new engine with the current path into the completions fixtures folder
 pub fn new_engine() -> (PathBuf, String, EngineState, Stack) {
     // Target folder inside assets
@@ -43,6 +46,22 @@ pub fn new_engine() -> (PathBuf, String, EngineState, Stack) {
             span: nu_protocol::Span::new(0, dir_str.len()),
         },
     );
+    #[cfg(windows)]
+    stack.add_env_var(
+        "Path".to_string(),
+        Value::String {
+            val: "c:\\some\\path;c:\\some\\other\\path".to_string(),
+            span: nu_protocol::Span::new(0, dir_str.len()),
+        },
+    );
+    #[cfg(not(windows))]
+    stack.add_env_var(
+        "PATH".to_string(),
+        Value::String {
+            val: "/some/path:/some/other/path".to_string(),
+            span: nu_protocol::Span::new(0, dir_str.len()),
+        },
+    );

     // Merge environment into the permanent state
     let merge_result = engine_state.merge_env(&mut stack, &dir);
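A side note on the hunk above: the test helper seeds whichever spelling of the path variable the platform expects ("Path" on Windows, "PATH" elsewhere). A minimal standalone sketch of that split; the constant name is illustrative and not part of the diff:

// Illustrative only: pick the platform's spelling of the path variable at compile time.
#[cfg(windows)]
const PATH_VAR: &str = "Path";
#[cfg(not(windows))]
const PATH_VAR: &str = "PATH";

fn main() {
    // A test harness would seed this variable before merging the environment.
    println!("seeding {} for the completion tests", PATH_VAR);
}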
@@ -130,9 +149,9 @@ pub fn merge_input(
     let (block, delta) = {
         let mut working_set = StateWorkingSet::new(engine_state);

-        let (block, err) = parse(&mut working_set, None, input, false, &[]);
+        let block = parse(&mut working_set, None, input, false);

-        assert!(err.is_none());
+        assert!(working_set.parse_errors.is_empty());

         (block, working_set.render())
     };
crates/nu-cmd-base/Cargo.toml (new file, 16 lines)
@@ -0,0 +1,16 @@
+[package]
+authors = ["The Nushell Project Developers"]
+description = "The foundation tools to build Nushell commands."
+edition = "2021"
+license = "MIT"
+name = "nu-cmd-base"
+repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-base"
+version = "0.84.0"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[dependencies]
+nu-engine = { path = "../nu-engine", version = "0.84.0" }
+nu-path = { path = "../nu-path", version = "0.84.0" }
+nu-protocol = { version = "0.84.0", path = "../nu-protocol" }
+indexmap = { version = "2.0" }
crates/nu-cmd-base/src/formats/mod.rs (new file, 1 line)
@@ -0,0 +1 @@
+pub mod to;
crates/nu-cmd-base/src/formats/to/delimited.rs (new file, 20 lines)
@@ -0,0 +1,20 @@
+use indexmap::{indexset, IndexSet};
+use nu_protocol::Value;
+
+pub fn merge_descriptors(values: &[Value]) -> Vec<String> {
+    let mut ret: Vec<String> = vec![];
+    let mut seen: IndexSet<String> = indexset! {};
+    for value in values {
+        let data_descriptors = match value {
+            Value::Record { cols, .. } => cols.to_owned(),
+            _ => vec!["".to_string()],
+        };
+        for desc in data_descriptors {
+            if !desc.is_empty() && !seen.contains(&desc) {
+                seen.insert(desc.to_string());
+                ret.push(desc.to_string());
+            }
+        }
+    }
+    ret
+}
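merge_descriptors above deduplicates column names while preserving the order in which they were first seen, which is what indexmap's IndexSet provides. A minimal standalone sketch of the same pattern, with illustrative names that are not part of the diff:

use indexmap::IndexSet;

// Collect strings in first-seen order, skipping empties and repeats,
// mirroring the dedup logic used by merge_descriptors.
fn dedup_in_order(inputs: &[&str]) -> Vec<String> {
    let mut seen: IndexSet<String> = IndexSet::new();
    for s in inputs {
        if !s.is_empty() {
            // insert returns false when the value was already present
            seen.insert((*s).to_string());
        }
    }
    seen.into_iter().collect()
}

fn main() {
    let merged = dedup_in_order(&["a", "b", "", "a", "c"]);
    assert_eq!(merged, vec!["a", "b", "c"]);
}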
crates/nu-cmd-base/src/formats/to/mod.rs (new file, 1 line)
@@ -0,0 +1 @@
+pub mod delimited;
@@ -76,7 +76,9 @@ where
             }),
         );
         if let Err(error) = r {
-            return Value::Error { error };
+            return Value::Error {
+                error: Box::new(error),
+            };
         }
     }
     v
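The change above follows the error payload of the value type becoming boxed. A standalone sketch of that general Rust pattern, using illustrative stand-in types rather than the real nu-protocol definitions: boxing a large error inside an enum variant keeps every value of the enum small and cheap to move.

#[derive(Debug)]
struct BigError {
    message: String,
    help: String,
    labels: Vec<String>,
}

#[derive(Debug)]
enum Value {
    Int(i64),
    Error { error: Box<BigError> },
}

fn fail(msg: &str) -> Value {
    Value::Error {
        error: Box::new(BigError {
            message: msg.to_string(),
            help: String::new(),
            labels: Vec::new(),
        }),
    }
}

fn main() {
    // The boxed variant keeps size_of::<Value>() close to the small variants.
    println!("{}", std::mem::size_of::<Value>());
    let _ = fail("something went wrong");
}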
crates/nu-cmd-base/src/lib.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
+pub mod formats;
+pub mod input_handler;
+pub mod util;
crates/nu-cmd-base/src/util.rs (new file, 57 lines)
@@ -0,0 +1,57 @@
+use nu_protocol::report_error;
+use nu_protocol::{
+    ast::RangeInclusion,
+    engine::{EngineState, Stack, StateWorkingSet},
+    Range, ShellError, Span, Value,
+};
+use std::path::PathBuf;
+
+pub fn get_init_cwd() -> PathBuf {
+    std::env::current_dir().unwrap_or_else(|_| {
+        std::env::var("PWD")
+            .map(Into::into)
+            .unwrap_or_else(|_| nu_path::home_dir().unwrap_or_default())
+    })
+}
+
+pub fn get_guaranteed_cwd(engine_state: &EngineState, stack: &Stack) -> PathBuf {
+    nu_engine::env::current_dir(engine_state, stack).unwrap_or_else(|e| {
+        let working_set = StateWorkingSet::new(engine_state);
+        report_error(&working_set, &e);
+        crate::util::get_init_cwd()
+    })
+}
+
+type MakeRangeError = fn(&str, Span) -> ShellError;
+
+pub fn process_range(range: &Range) -> Result<(isize, isize), MakeRangeError> {
+    let start = match &range.from {
+        Value::Int { val, .. } => isize::try_from(*val).unwrap_or_default(),
+        Value::Nothing { .. } => 0,
+        _ => {
+            return Err(|msg, span| ShellError::TypeMismatch {
+                err_message: msg.to_string(),
+                span,
+            })
+        }
+    };
+
+    let end = match &range.to {
+        Value::Int { val, .. } => {
+            if matches!(range.inclusion, RangeInclusion::Inclusive) {
+                isize::try_from(*val).unwrap_or(isize::max_value())
+            } else {
+                isize::try_from(*val).unwrap_or(isize::max_value()) - 1
+            }
+        }
+        Value::Nothing { .. } => isize::max_value(),
+        _ => {
+            return Err(|msg, span| ShellError::TypeMismatch {
+                err_message: msg.to_string(),
+                span,
+            })
+        }
+    };
+
+    Ok((start, end))
+}
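process_range above returns its error as a plain function pointer, so the caller supplies the message and span it knows about. A minimal standalone sketch of that deferred-error shape; every type and name below is an illustrative stand-in, not the nu-protocol API:

#[derive(Debug)]
struct Span {
    start: usize,
    end: usize,
}

#[derive(Debug)]
enum ShellError {
    TypeMismatch { err_message: String, span: Span },
}

type MakeRangeError = fn(&str, Span) -> ShellError;

// Return *how* to build the error; the caller adds the message and span later.
fn parse_bound(text: &str) -> Result<isize, MakeRangeError> {
    match text.parse::<isize>() {
        Ok(v) => Ok(v),
        Err(_) => Err(|msg, span| ShellError::TypeMismatch {
            err_message: msg.to_string(),
            span,
        }),
    }
}

fn main() {
    if let Err(make_err) = parse_bound("not-a-number") {
        let err = make_err("expected an integer bound", Span { start: 0, end: 12 });
        println!("{err:?}");
    }
}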
crates/nu-cmd-dataframe/Cargo.toml (new file, 65 lines)
@@ -0,0 +1,65 @@
+[package]
+authors = ["The Nushell Project Developers"]
+description = "Nushell's dataframe commands based on polars."
+edition = "2021"
+license = "MIT"
+name = "nu-cmd-dataframe"
+repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-dataframe"
+version = "0.84.0"
+
+# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
+
+[lib]
+bench = false
+
+[dependencies]
+nu-engine = { path = "../nu-engine", version = "0.84.0" }
+nu-parser = { path = "../nu-parser", version = "0.84.0" }
+nu-protocol = { path = "../nu-protocol", version = "0.84.0" }
+
+# Potential dependencies for extras
+chrono = { version = "0.4", features = ["std", "unstable-locales"], default-features = false }
+fancy-regex = "0.11"
+indexmap = { version = "2.0" }
+num = { version = "0.4", optional = true }
+serde = { version = "1.0", features = ["derive"] }
+sqlparser = { version = "0.34", features = ["serde"], optional = true }
+polars-io = { version = "0.30.0", features = ["avro"] }
+
+[dependencies.polars]
+features = [
+    "arg_where",
+    "checked_arithmetic",
+    "concat_str",
+    "cross_join",
+    "csv",
+    "cum_agg",
+    "default",
+    "dtype-categorical",
+    "dtype-datetime",
+    "dtype-struct",
+    "dynamic_groupby",
+    "ipc",
+    "is_in",
+    "json",
+    "lazy",
+    "object",
+    "parquet",
+    "random",
+    "rolling_window",
+    "rows",
+    "serde",
+    "serde-lazy",
+    "strings",
+    "to_dummies"
+]
+optional = true
+version = "0.30.0"
+
+[features]
+dataframe = ["num", "polars", "sqlparser"]
+default = []
+
+[dev-dependencies]
+nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.84.0" }
+nu-test-support = { path = "../nu-test-support", version = "0.84.0" }
crates/nu-cmd-dataframe/LICENSE (new file, 21 lines)
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2019 - 2023 The Nushell Project Developers
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
@@ -12,19 +12,21 @@ pub struct AppendDF;

 impl Command for AppendDF {
     fn name(&self) -> &str {
-        "append"
+        "dfr append"
     }

     fn usage(&self) -> &str {
-        "Appends a new dataframe"
+        "Appends a new dataframe."
     }

     fn signature(&self) -> Signature {
         Signature::build(self.name())
             .required("other", SyntaxShape::Any, "dataframe to be appended")
             .switch("col", "appends in col orientation", Some('c'))
-            .input_type(Type::Custom("dataframe".into()))
-            .output_type(Type::Custom("dataframe".into()))
+            .input_output_type(
+                Type::Custom("dataframe".into()),
+                Type::Custom("dataframe".into()),
+            )
             .category(Category::Custom("dataframe".into()))
     }

@@ -32,8 +34,8 @@ impl Command for AppendDF {
         vec![
             Example {
                 description: "Appends a dataframe as new columns",
-                example: r#"let a = ([[a b]; [1 2] [3 4]] | into df);
-    $a | append $a"#,
+                example: r#"let a = ([[a b]; [1 2] [3 4]] | dfr into-df);
+    $a | dfr append $a"#,
                 result: Some(
                     NuDataFrame::try_from_columns(vec![
                         Column::new(
@@ -59,8 +61,8 @@ impl Command for AppendDF {
             },
             Example {
                 description: "Appends a dataframe merging at the end of columns",
-                example: r#"let a = ([[a b]; [1 2] [3 4]] | into df);
-    $a | append $a --col"#,
+                example: r#"let a = ([[a b]; [1 2] [3 4]] | dfr into-df);
+    $a | dfr append $a --col"#,
                 result: Some(
                     NuDataFrame::try_from_columns(vec![
                         Column::new(
@@ -10,24 +10,23 @@ pub struct ColumnsDF;

 impl Command for ColumnsDF {
     fn name(&self) -> &str {
-        "columns"
+        "dfr columns"
     }

     fn usage(&self) -> &str {
-        "Show dataframe columns"
+        "Show dataframe columns."
     }

     fn signature(&self) -> Signature {
         Signature::build(self.name())
-            .input_type(Type::Custom("dataframe".into()))
-            .output_type(Type::Any)
+            .input_output_type(Type::Custom("dataframe".into()), Type::Any)
             .category(Category::Custom("dataframe".into()))
     }

     fn examples(&self) -> Vec<Example> {
         vec![Example {
             description: "Dataframe columns",
-            example: "[[a b]; [1 2] [3 4]] | into df | columns",
+            example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr columns",
             result: Some(Value::List {
                 vals: vec![Value::test_string("a"), Value::test_string("b")],
                 span: Span::test_data(),
@@ -13,25 +13,27 @@ pub struct DropDF;

 impl Command for DropDF {
     fn name(&self) -> &str {
-        "drop"
+        "dfr drop"
     }

     fn usage(&self) -> &str {
-        "Creates a new dataframe by dropping the selected columns"
+        "Creates a new dataframe by dropping the selected columns."
     }

     fn signature(&self) -> Signature {
         Signature::build(self.name())
             .rest("rest", SyntaxShape::Any, "column names to be dropped")
-            .input_type(Type::Custom("dataframe".into()))
-            .output_type(Type::Custom("dataframe".into()))
+            .input_output_type(
+                Type::Custom("dataframe".into()),
+                Type::Custom("dataframe".into()),
+            )
             .category(Category::Custom("dataframe".into()))
     }

     fn examples(&self) -> Vec<Example> {
         vec![Example {
             description: "drop column a",
-            example: "[[a b]; [1 2] [3 4]] | into df | drop a",
+            example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr drop a",
             result: Some(
                 NuDataFrame::try_from_columns(vec![Column::new(
                     "b".to_string(),
@@ -14,18 +14,18 @@ pub struct DropDuplicates;

 impl Command for DropDuplicates {
     fn name(&self) -> &str {
-        "drop-duplicates"
+        "dfr drop-duplicates"
     }

     fn usage(&self) -> &str {
-        "Drops duplicate values in dataframe"
+        "Drops duplicate values in dataframe."
     }

     fn signature(&self) -> Signature {
         Signature::build(self.name())
             .optional(
                 "subset",
-                SyntaxShape::Table,
+                SyntaxShape::Table(vec![]),
                 "subset of columns to drop duplicates",
             )
             .switch("maintain", "maintain order", Some('m'))
@@ -34,15 +34,17 @@ impl Command for DropDuplicates {
                 "keeps last duplicate value (by default keeps first)",
                 Some('l'),
             )
-            .input_type(Type::Custom("dataframe".into()))
-            .output_type(Type::Custom("dataframe".into()))
+            .input_output_type(
+                Type::Custom("dataframe".into()),
+                Type::Custom("dataframe".into()),
+            )
             .category(Category::Custom("dataframe".into()))
     }

     fn examples(&self) -> Vec<Example> {
         vec![Example {
             description: "drop duplicates",
-            example: "[[a b]; [1 2] [3 4] [1 2]] | into df | drop-duplicates",
+            example: "[[a b]; [1 2] [3 4] [1 2]] | dfr into-df | dfr drop-duplicates",
             result: Some(
                 NuDataFrame::try_from_columns(vec![
                     Column::new(
@@ -97,7 +99,7 @@ fn command(
     };

     df.as_ref()
-        .unique(subset_slice, keep_strategy)
+        .unique(subset_slice, keep_strategy, None)
         .map_err(|e| {
             ShellError::GenericError(
                 "Error dropping duplicates".into(),
@@ -13,22 +13,24 @@ pub struct DropNulls;

 impl Command for DropNulls {
     fn name(&self) -> &str {
-        "drop-nulls"
+        "dfr drop-nulls"
     }

     fn usage(&self) -> &str {
-        "Drops null values in dataframe"
+        "Drops null values in dataframe."
     }

     fn signature(&self) -> Signature {
         Signature::build(self.name())
             .optional(
                 "subset",
-                SyntaxShape::Table,
+                SyntaxShape::Table(vec![]),
                 "subset of columns to drop nulls",
             )
-            .input_type(Type::Custom("dataframe".into()))
-            .output_type(Type::Custom("dataframe".into()))
+            .input_output_type(
+                Type::Custom("dataframe".into()),
+                Type::Custom("dataframe".into()),
+            )
             .category(Category::Custom("dataframe".into()))
     }

@@ -36,10 +38,10 @@ impl Command for DropNulls {
         vec![
             Example {
                 description: "drop null values in dataframe",
-                example: r#"let df = ([[a b]; [1 2] [3 0] [1 2]] | into df);
+                example: r#"let df = ([[a b]; [1 2] [3 0] [1 2]] | dfr into-df);
     let res = ($df.b / $df.b);
-    let a = ($df | with-column $res --name res);
-    $a | drop-nulls"#,
+    let a = ($df | dfr with-column $res --name res);
+    $a | dfr drop-nulls"#,
                 result: Some(
                     NuDataFrame::try_from_columns(vec![
                         Column::new(
@@ -61,8 +63,8 @@ impl Command for DropNulls {
             },
             Example {
                 description: "drop null values in dataframe",
-                example: r#"let s = ([1 2 0 0 3 4] | into df);
-    ($s / $s) | drop-nulls"#,
+                example: r#"let s = ([1 2 0 0 3 4] | dfr into-df);
+    ($s / $s) | dfr drop-nulls"#,
                 result: Some(
                     NuDataFrame::try_from_columns(vec![Column::new(
                         "div_0_0".to_string(),
@@ -10,24 +10,26 @@ pub struct DataTypes;

 impl Command for DataTypes {
     fn name(&self) -> &str {
-        "dtypes"
+        "dfr dtypes"
     }

     fn usage(&self) -> &str {
-        "Show dataframe data types"
+        "Show dataframe data types."
     }

     fn signature(&self) -> Signature {
         Signature::build(self.name())
-            .input_type(Type::Custom("dataframe".into()))
-            .output_type(Type::Custom("dataframe".into()))
+            .input_output_type(
+                Type::Custom("dataframe".into()),
+                Type::Custom("dataframe".into()),
+            )
             .category(Category::Custom("dataframe".into()))
     }

     fn examples(&self) -> Vec<Example> {
         vec![Example {
             description: "Dataframe dtypes",
-            example: "[[a b]; [1 2] [3 4]] | into df | dtypes",
+            example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr dtypes",
             result: Some(
                 NuDataFrame::try_from_columns(vec![
                     Column::new(
@@ -11,17 +11,19 @@ pub struct Dummies;

 impl Command for Dummies {
     fn name(&self) -> &str {
-        "dummies"
+        "dfr dummies"
     }

     fn usage(&self) -> &str {
-        "Creates a new dataframe with dummy variables"
+        "Creates a new dataframe with dummy variables."
     }

     fn signature(&self) -> Signature {
         Signature::build(self.name())
-            .input_type(Type::Custom("dataframe".into()))
-            .output_type(Type::Custom("dataframe".into()))
+            .input_output_type(
+                Type::Custom("dataframe".into()),
+                Type::Custom("dataframe".into()),
+            )
             .category(Category::Custom("dataframe".into()))
     }

@@ -29,7 +31,7 @@ impl Command for Dummies {
         vec![
             Example {
                 description: "Create new dataframe with dummy variables from a dataframe",
-                example: "[[a b]; [1 2] [3 4]] | into df | dummies",
+                example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr dummies",
                 result: Some(
                     NuDataFrame::try_from_columns(vec![
                         Column::new(
@@ -55,7 +57,7 @@ impl Command for Dummies {
             },
             Example {
                 description: "Create new dataframe with dummy variables from a series",
-                example: "[1 2 2 3 3] | into df | dummies",
+                example: "[1 2 2 3 3] | dfr into-df | dfr dummies",
                 result: Some(
                     NuDataFrame::try_from_columns(vec![
                         Column::new(
@@ -116,7 +118,7 @@ fn command(
     let df = NuDataFrame::try_from_pipeline(input, call.head)?;

     df.as_ref()
-        .to_dummies()
+        .to_dummies(None)
         .map_err(|e| {
             ShellError::GenericError(
                 "Error calculating dummies".into(),
@@ -15,11 +15,11 @@ pub struct FilterWith;

 impl Command for FilterWith {
     fn name(&self) -> &str {
-        "filter-with"
+        "dfr filter-with"
     }

     fn usage(&self) -> &str {
-        "Filters dataframe using a mask or expression as reference"
+        "Filters dataframe using a mask or expression as reference."
     }

     fn signature(&self) -> Signature {
@@ -29,8 +29,10 @@ impl Command for FilterWith {
                 SyntaxShape::Any,
                 "boolean mask used to filter data",
             )
-            .input_type(Type::Custom("dataframe".into()))
-            .output_type(Type::Custom("dataframe".into()))
+            .input_output_type(
+                Type::Custom("dataframe".into()),
+                Type::Custom("dataframe".into()),
+            )
             .category(Category::Custom("dataframe or lazyframe".into()))
     }

@@ -38,8 +40,8 @@ impl Command for FilterWith {
         vec![
             Example {
                 description: "Filter dataframe using a bool mask",
-                example: r#"let mask = ([true false] | into df);
-    [[a b]; [1 2] [3 4]] | into df | filter-with $mask"#,
+                example: r#"let mask = ([true false] | dfr into-df);
+    [[a b]; [1 2] [3 4]] | dfr into-df | dfr filter-with $mask"#,
                 result: Some(
                     NuDataFrame::try_from_columns(vec![
                         Column::new("a".to_string(), vec![Value::test_int(1)]),
@@ -51,7 +53,7 @@ impl Command for FilterWith {
             },
             Example {
                 description: "Filter dataframe using an expression",
-                example: "[[a b]; [1 2] [3 4]] | into df | filter-with ((col a) > 1)",
+                example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr filter-with ((dfr col a) > 1)",
                 result: Some(
                     NuDataFrame::try_from_columns(vec![
                         Column::new("a".to_string(), vec![Value::test_int(3)]),
@@ -1,4 +1,4 @@
-use super::super::values::{Column, NuDataFrame};
+use super::super::values::{Column, NuDataFrame, NuExpression};
 use nu_engine::CallExt;
 use nu_protocol::{
     ast::Call,
@@ -11,11 +11,11 @@ pub struct FirstDF;

 impl Command for FirstDF {
     fn name(&self) -> &str {
-        "first"
+        "dfr first"
     }

     fn usage(&self) -> &str {
-        "Show only the first number of rows."
+        "Show only the first number of rows or create a first expression"
     }

     fn signature(&self) -> Signature {
@@ -25,8 +25,16 @@ impl Command for FirstDF {
                 SyntaxShape::Int,
                 "starting from the front, the number of rows to return",
             )
-            .input_type(Type::Custom("dataframe".into()))
-            .output_type(Type::Custom("dataframe".into()))
+            .input_output_types(vec![
+                (
+                    Type::Custom("expression".into()),
+                    Type::Custom("expression".into()),
+                ),
+                (
+                    Type::Custom("dataframe".into()),
+                    Type::Custom("dataframe".into()),
+                ),
+            ])
             .category(Category::Custom("dataframe".into()))
     }

@@ -34,7 +42,7 @@ impl Command for FirstDF {
         vec![
             Example {
                 description: "Return the first row of a dataframe",
-                example: "[[a b]; [1 2] [3 4]] | into df | first",
+                example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr first",
                 result: Some(
                     NuDataFrame::try_from_columns(vec![
                         Column::new("a".to_string(), vec![Value::test_int(1)]),
@@ -46,7 +54,7 @@ impl Command for FirstDF {
             },
             Example {
                 description: "Return the first two rows of a dataframe",
-                example: "[[a b]; [1 2] [3 4]] | into df | first 2",
+                example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr first 2",
                 result: Some(
                     NuDataFrame::try_from_columns(vec![
                         Column::new(
@@ -62,6 +70,11 @@ impl Command for FirstDF {
                     .into_value(Span::test_data()),
                 ),
             },
+            Example {
+                description: "Creates a first expression from a column",
+                example: "dfr col a | dfr first",
+                result: None,
+            },
         ]
     }

@@ -72,8 +85,19 @@ impl Command for FirstDF {
         call: &Call,
         input: PipelineData,
     ) -> Result<PipelineData, ShellError> {
-        let df = NuDataFrame::try_from_pipeline(input, call.head)?;
-        command(engine_state, stack, call, df)
+        let value = input.into_value(call.head);
+        if NuDataFrame::can_downcast(&value) {
+            let df = NuDataFrame::try_from_value(value)?;
+            command(engine_state, stack, call, df)
+        } else {
+            let expr = NuExpression::try_from_value(value)?;
+            let expr: NuExpression = expr.into_polars().first().into();
+
+            Ok(PipelineData::Value(
+                NuExpression::into_value(expr, call.head),
+                None,
+            ))
+        }
     }
 }

@@ -95,11 +119,25 @@ fn command(

 #[cfg(test)]
 mod test {
-    use super::super::super::test_dataframe::test_dataframe;
+    use super::super::super::test_dataframe::{build_test_engine_state, test_dataframe_example};
     use super::*;
+    use crate::dataframe::lazy::aggregate::LazyAggregate;
+    use crate::dataframe::lazy::groupby::ToLazyGroupBy;

     #[test]
-    fn test_examples() {
-        test_dataframe(vec![Box::new(FirstDF {})])
+    fn test_examples_dataframe() {
+        let mut engine_state = build_test_engine_state(vec![Box::new(FirstDF {})]);
+        test_dataframe_example(&mut engine_state, &FirstDF.examples()[0]);
+        test_dataframe_example(&mut engine_state, &FirstDF.examples()[1]);
+    }
+
+    #[test]
+    fn test_examples_expression() {
+        let mut engine_state = build_test_engine_state(vec![
+            Box::new(FirstDF {}),
+            Box::new(LazyAggregate {}),
+            Box::new(ToLazyGroupBy {}),
+        ]);
+        test_dataframe_example(&mut engine_state, &FirstDF.examples()[2]);
     }
 }
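The run() change above routes one command to two implementations depending on what arrives in the pipeline: a concrete dataframe or a lazy expression. A standalone sketch of that dispatch shape, using simplified stand-in types rather than the real NuDataFrame/NuExpression API:

// Simplified stand-ins for the two pipeline value kinds handled above.
enum PipelineValue {
    DataFrame(Vec<Vec<i64>>),
    Expression(String),
}

fn first_rows(df: &[Vec<i64>], n: usize) -> Vec<Vec<i64>> {
    df.iter().take(n).cloned().collect()
}

fn first_expression(expr: &str) -> String {
    format!("({expr}).first()")
}

// Same shape as the run() above: take the dataframe path when possible,
// otherwise treat the input as an expression and wrap it.
fn run_first(input: PipelineValue) -> PipelineValue {
    match input {
        PipelineValue::DataFrame(df) => PipelineValue::DataFrame(first_rows(&df, 1)),
        PipelineValue::Expression(e) => PipelineValue::Expression(first_expression(&e)),
    }
}

fn main() {
    let out = run_first(PipelineValue::DataFrame(vec![vec![1, 2], vec![3, 4]]));
    if let PipelineValue::DataFrame(rows) = out {
        assert_eq!(rows, vec![vec![1, 2]]);
    }
}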
@@ -14,25 +14,27 @@ pub struct GetDF;

 impl Command for GetDF {
     fn name(&self) -> &str {
-        "get"
+        "dfr get"
     }

     fn usage(&self) -> &str {
-        "Creates dataframe with the selected columns"
+        "Creates dataframe with the selected columns."
     }

     fn signature(&self) -> Signature {
         Signature::build(self.name())
             .rest("rest", SyntaxShape::Any, "column names to sort dataframe")
-            .input_type(Type::Custom("dataframe".into()))
-            .output_type(Type::Custom("dataframe".into()))
+            .input_output_type(
+                Type::Custom("dataframe".into()),
+                Type::Custom("dataframe".into()),
+            )
             .category(Category::Custom("dataframe".into()))
     }

     fn examples(&self) -> Vec<Example> {
         vec![Example {
             description: "Returns the selected column",
-            example: "[[a b]; [1 2] [3 4]] | into df | get a",
+            example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr get a",
             result: Some(
                 NuDataFrame::try_from_columns(vec![Column::new(
                     "a".to_string(),
crates/nu-cmd-dataframe/src/dataframe/eager/last.rs (new file, 120 lines)
@@ -0,0 +1,120 @@
+use super::super::values::{utils::DEFAULT_ROWS, Column, NuDataFrame, NuExpression};
+use nu_engine::CallExt;
+use nu_protocol::{
+    ast::Call,
+    engine::{Command, EngineState, Stack},
+    Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
+};
+
+#[derive(Clone)]
+pub struct LastDF;
+
+impl Command for LastDF {
+    fn name(&self) -> &str {
+        "dfr last"
+    }
+
+    fn usage(&self) -> &str {
+        "Creates new dataframe with tail rows or creates a last expression."
+    }
+
+    fn signature(&self) -> Signature {
+        Signature::build(self.name())
+            .optional("rows", SyntaxShape::Int, "Number of rows for tail")
+            .input_output_types(vec![
+                (
+                    Type::Custom("expression".into()),
+                    Type::Custom("expression".into()),
+                ),
+                (
+                    Type::Custom("dataframe".into()),
+                    Type::Custom("dataframe".into()),
+                ),
+            ])
+            .category(Category::Custom("dataframe".into()))
+    }
+
+    fn examples(&self) -> Vec<Example> {
+        vec![
+            Example {
+                description: "Create new dataframe with last rows",
+                example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr last 1",
+                result: Some(
+                    NuDataFrame::try_from_columns(vec![
+                        Column::new("a".to_string(), vec![Value::test_int(3)]),
+                        Column::new("b".to_string(), vec![Value::test_int(4)]),
+                    ])
+                    .expect("simple df for test should not fail")
+                    .into_value(Span::test_data()),
+                ),
+            },
+            Example {
+                description: "Creates a last expression from a column",
+                example: "dfr col a | dfr last",
+                result: None,
+            },
+        ]
+    }
+
+    fn run(
+        &self,
+        engine_state: &EngineState,
+        stack: &mut Stack,
+        call: &Call,
+        input: PipelineData,
+    ) -> Result<PipelineData, ShellError> {
+        let value = input.into_value(call.head);
+        if NuDataFrame::can_downcast(&value) {
+            let df = NuDataFrame::try_from_value(value)?;
+            command(engine_state, stack, call, df)
+        } else {
+            let expr = NuExpression::try_from_value(value)?;
+            let expr: NuExpression = expr.into_polars().last().into();
+
+            Ok(PipelineData::Value(
+                NuExpression::into_value(expr, call.head),
+                None,
+            ))
+        }
+    }
+}
+
+fn command(
+    engine_state: &EngineState,
+    stack: &mut Stack,
+    call: &Call,
+    df: NuDataFrame,
+) -> Result<PipelineData, ShellError> {
+    let rows: Option<usize> = call.opt(engine_state, stack, 0)?;
+    let rows = rows.unwrap_or(DEFAULT_ROWS);
+
+    let res = df.as_ref().tail(Some(rows));
+    Ok(PipelineData::Value(
+        NuDataFrame::dataframe_into_value(res, call.head),
+        None,
+    ))
+}
+
+#[cfg(test)]
+mod test {
+    use super::super::super::test_dataframe::{build_test_engine_state, test_dataframe_example};
+    use super::*;
+    use crate::dataframe::lazy::aggregate::LazyAggregate;
+    use crate::dataframe::lazy::groupby::ToLazyGroupBy;
+
+    #[test]
+    fn test_examples_dataframe() {
+        let mut engine_state = build_test_engine_state(vec![Box::new(LastDF {})]);
+        test_dataframe_example(&mut engine_state, &LastDF.examples()[0]);
+    }
+
+    #[test]
+    fn test_examples_expression() {
+        let mut engine_state = build_test_engine_state(vec![
+            Box::new(LastDF {}),
+            Box::new(LazyAggregate {}),
+            Box::new(ToLazyGroupBy {}),
+        ]);
+        test_dataframe_example(&mut engine_state, &LastDF.examples()[1]);
+    }
+}
@@ -11,11 +11,11 @@ pub struct ListDF;

 impl Command for ListDF {
     fn name(&self) -> &str {
-        "ls-df"
+        "dfr ls"
     }

     fn usage(&self) -> &str {
-        "Lists stored dataframes"
+        "Lists stored dataframes."
     }

     fn signature(&self) -> Signature {
@@ -25,8 +25,8 @@ impl Command for ListDF {
     fn examples(&self) -> Vec<Example> {
         vec![Example {
             description: "Creates a new dataframe and shows it in the dataframe list",
-            example: r#"let test = ([[a b];[1 2] [3 4]] | into df);
-    ls-df"#,
+            example: r#"let test = ([[a b];[1 2] [3 4]] | dfr into-df);
+    ls"#,
             result: None,
         }]
     }
@@ -15,24 +15,24 @@ pub struct MeltDF;
 
 impl Command for MeltDF {
     fn name(&self) -> &str {
-        "melt"
+        "dfr melt"
     }
 
     fn usage(&self) -> &str {
-        "Unpivot a DataFrame from wide to long format"
+        "Unpivot a DataFrame from wide to long format."
    }
 
     fn signature(&self) -> Signature {
         Signature::build(self.name())
             .required_named(
                 "columns",
-                SyntaxShape::Table,
+                SyntaxShape::Table(vec![]),
                 "column names for melting",
                 Some('c'),
             )
             .required_named(
                 "values",
-                SyntaxShape::Table,
+                SyntaxShape::Table(vec![]),
                 "column names used as value columns",
                 Some('v'),
             )
@@ -48,8 +48,10 @@ impl Command for MeltDF {
                 "optional name for value column",
                 Some('l'),
             )
-            .input_type(Type::Custom("dataframe".into()))
-            .output_type(Type::Custom("dataframe".into()))
+            .input_output_type(
+                Type::Custom("dataframe".into()),
+                Type::Custom("dataframe".into()),
+            )
             .category(Category::Custom("dataframe".into()))
     }
 
@@ -57,7 +59,7 @@ impl Command for MeltDF {
         vec![Example {
             description: "melt dataframe",
             example:
-                "[[a b c d]; [x 1 4 a] [y 2 5 b] [z 3 6 c]] | into df | melt -c [b c] -v [a d]",
+                "[[a b c d]; [x 1 4 a] [y 2 5 b] [z 3 6 c]] | dfr into-df | dfr melt -c [b c] -v [a d]",
             result: Some(
                 NuDataFrame::try_from_columns(vec![
                     Column::new(
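The `.input_type(..)` / `.output_type(..)` pair replaced above by a single `.input_output_type(..)` call recurs in most of the command signatures below. A minimal sketch of the new builder form, assuming the `Signature`, `Type`, and `Category` items from `nu_protocol` that these files already import; the command name is made up for illustration:

    use nu_protocol::{Category, Signature, Type};

    // Hypothetical command name, for illustration only.
    fn example_signature() -> Signature {
        Signature::build("dfr example")
            // previously two calls: .input_type(..) followed by .output_type(..)
            .input_output_type(
                Type::Custom("dataframe".into()),
                Type::Custom("dataframe".into()),
            )
            .category(Category::Custom("dataframe".into()))
    }

Declaring both types in one call keeps the input/output pairing in a single place instead of two unrelated builder calls.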
@@ -22,8 +22,10 @@ mod sql_expr;
 mod summary;
 mod take;
 mod to_arrow;
+mod to_avro;
 mod to_csv;
 mod to_df;
+mod to_json_lines;
 mod to_nu;
 mod to_parquet;
 mod with_column;
@@ -54,8 +56,10 @@ pub use sql_expr::parse_sql_expr;
 pub use summary::Summary;
 pub use take::TakeDF;
 pub use to_arrow::ToArrow;
+pub use to_avro::ToAvro;
 pub use to_csv::ToCSV;
 pub use to_df::ToDataFrame;
+pub use to_json_lines::ToJsonLines;
 pub use to_nu::ToNu;
 pub use to_parquet::ToParquet;
 pub use with_column::WithColumn;
@@ -94,10 +98,12 @@ pub fn add_eager_decls(working_set: &mut StateWorkingSet) {
         SliceDF,
         TakeDF,
         ToArrow,
+        ToAvro,
         ToCSV,
         ToDataFrame,
         ToNu,
         ToParquet,
+        ToJsonLines,
         WithColumn
     );
 }
@@ -9,20 +9,22 @@ use nu_protocol::{
 use std::{fs::File, io::BufReader, path::PathBuf};
 
 use polars::prelude::{
-    CsvEncoding, CsvReader, IpcReader, JsonReader, LazyCsvReader, LazyFrame, ParallelStrategy,
-    ParquetReader, ScanArgsIpc, ScanArgsParquet, SerReader,
+    CsvEncoding, CsvReader, IpcReader, JsonFormat, JsonReader, LazyCsvReader, LazyFileListReader,
+    LazyFrame, ParallelStrategy, ParquetReader, ScanArgsIpc, ScanArgsParquet, SerReader,
 };
 
+use polars_io::avro::AvroReader;
+
 #[derive(Clone)]
 pub struct OpenDataFrame;
 
 impl Command for OpenDataFrame {
     fn name(&self) -> &str {
-        "open-df"
+        "dfr open"
     }
 
     fn usage(&self) -> &str {
-        "Opens csv, json, arrow, or parquet file to create dataframe"
+        "Opens CSV, JSON, JSON lines, arrow, avro, or parquet file to create dataframe."
     }
 
     fn signature(&self) -> Signature {
@@ -36,7 +38,7 @@ impl Command for OpenDataFrame {
             .named(
                 "type",
                 SyntaxShape::String,
-                "File type: csv, tsv, json, parquet, arrow. If omitted, derive from file extension",
+                "File type: csv, tsv, json, parquet, arrow, avro. If omitted, derive from file extension",
                 Some('t'),
             )
             .named(
@@ -68,15 +70,14 @@ impl Command for OpenDataFrame {
                 "Columns to be selected from csv file. CSV and Parquet file",
                 None,
             )
-            .input_type(Type::Any)
-            .output_type(Type::Custom("dataframe".into()))
+            .input_output_type(Type::Any, Type::Custom("dataframe".into()))
             .category(Category::Custom("dataframe".into()))
     }
 
     fn examples(&self) -> Vec<Example> {
         vec![Example {
             description: "Takes a file name and creates a dataframe",
-            example: "open test.csv",
+            example: "dfr open test.csv",
             result: None,
         }]
     }
@@ -118,6 +119,8 @@ fn command(
         "parquet" => from_parquet(engine_state, stack, call),
         "ipc" | "arrow" => from_ipc(engine_state, stack, call),
         "json" => from_json(engine_state, stack, call),
+        "jsonl" => from_jsonl(engine_state, stack, call),
+        "avro" => from_avro(engine_state, stack, call),
        _ => Err(ShellError::FileNotFoundCustom(
             format!("{msg}. Supported values: csv, tsv, parquet, ipc, arrow, json"),
             blamed,
@@ -145,6 +148,8 @@ fn from_parquet(
         rechunk: false,
         row_count: None,
         low_memory: false,
+        cloud_options: None,
+        use_statistics: false,
     };
 
     let df: NuLazyFrame = LazyFrame::scan_parquet(file, args)
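The two fields added to `ScanArgsParquet` above can be seen in isolation below. This is illustrative only: it assumes `ScanArgsParquet` implements `Default` so the remaining fields can be filled in, and the path is hypothetical.

    use polars::prelude::{LazyFrame, PolarsResult, ScanArgsParquet};

    fn scan_parquet_sketch(path: &str) -> PolarsResult<LazyFrame> {
        let args = ScanArgsParquet {
            low_memory: false,
            // the two fields introduced in the hunk above
            cloud_options: None,
            use_statistics: false,
            ..Default::default()
        };
        LazyFrame::scan_parquet(path, args)
    }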
@@ -197,6 +202,46 @@ fn from_parquet(
     }
 }
 
+fn from_avro(
+    engine_state: &EngineState,
+    stack: &mut Stack,
+    call: &Call,
+) -> Result<Value, ShellError> {
+    let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
+    let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?;
+
+    let r = File::open(&file.item).map_err(|e| {
+        ShellError::GenericError(
+            "Error opening file".into(),
+            e.to_string(),
+            Some(file.span),
+            None,
+            Vec::new(),
+        )
+    })?;
+    let reader = AvroReader::new(r);
+
+    let reader = match columns {
+        None => reader,
+        Some(columns) => reader.with_columns(Some(columns)),
+    };
+
+    let df: NuDataFrame = reader
+        .finish()
+        .map_err(|e| {
+            ShellError::GenericError(
+                "Avro reader error".into(),
+                format!("{e:?}"),
+                Some(call.head),
+                None,
+                Vec::new(),
+            )
+        })?
+        .into();
+
+    Ok(df.into_value(call.head))
+}
+
 fn from_ipc(
     engine_state: &EngineState,
     stack: &mut Stack,
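A standalone sketch of the Avro path added above, using only the calls that appear in the diff (`AvroReader::new`, `with_columns`, `finish`); the file name and column list are hypothetical, and the nushell error plumbing is left out.

    use std::fs::File;

    use polars::prelude::{DataFrame, PolarsResult, SerReader};
    use polars_io::avro::AvroReader;

    fn read_avro_sketch() -> PolarsResult<DataFrame> {
        // Open the file up front; error handling is simplified here.
        let file = File::open("example.avro").expect("could not open example.avro");
        AvroReader::new(file)
            // Optionally restrict the read to a subset of columns.
            .with_columns(Some(vec!["a".to_string(), "b".to_string()]))
            .finish()
    }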
@@ -297,6 +342,44 @@ fn from_json(
     Ok(df.into_value(call.head))
 }
 
+fn from_jsonl(
+    engine_state: &EngineState,
+    stack: &mut Stack,
+    call: &Call,
+) -> Result<Value, ShellError> {
+    let infer_schema: Option<usize> = call.get_flag(engine_state, stack, "infer-schema")?;
+    let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
+    let file = File::open(&file.item).map_err(|e| {
+        ShellError::GenericError(
+            "Error opening file".into(),
+            e.to_string(),
+            Some(file.span),
+            None,
+            Vec::new(),
+        )
+    })?;
+
+    let buf_reader = BufReader::new(file);
+    let reader = JsonReader::new(buf_reader)
+        .with_json_format(JsonFormat::JsonLines)
+        .infer_schema_len(infer_schema);
+
+    let df: NuDataFrame = reader
+        .finish()
+        .map_err(|e| {
+            ShellError::GenericError(
+                "Json lines reader error".into(),
+                format!("{e:?}"),
+                Some(call.head),
+                None,
+                Vec::new(),
+            )
+        })?
+        .into();
+
+    Ok(df.into_value(call.head))
+}
+
 fn from_csv(
     engine_state: &EngineState,
     stack: &mut Stack,
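Likewise, a minimal sketch of the JSON-lines reader used by `from_jsonl` above, with the same `JsonReader` + `JsonFormat::JsonLines` calls; the file name and schema-inference length are hypothetical.

    use std::{fs::File, io::BufReader};

    use polars::prelude::{DataFrame, JsonFormat, JsonReader, PolarsResult, SerReader};

    fn read_jsonl_sketch() -> PolarsResult<DataFrame> {
        let file = File::open("example.jsonl").expect("could not open example.jsonl");
        JsonReader::new(BufReader::new(file))
            // Switch the reader from a single JSON document to one object per line.
            .with_json_format(JsonFormat::JsonLines)
            // Infer the schema from the first 100 lines (hypothetical value).
            .infer_schema_len(Some(100))
            .finish()
    }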
@@ -18,7 +18,7 @@ pub struct QueryDf;
 
 impl Command for QueryDf {
     fn name(&self) -> &str {
-        "query df"
+        "dfr query"
     }
 
     fn usage(&self) -> &str {
@@ -28,8 +28,10 @@ impl Command for QueryDf {
     fn signature(&self) -> Signature {
         Signature::build(self.name())
             .required("sql", SyntaxShape::String, "sql query")
-            .input_type(Type::Custom("dataframe".into()))
-            .output_type(Type::Custom("dataframe".into()))
+            .input_output_type(
+                Type::Custom("dataframe".into()),
+                Type::Custom("dataframe".into()),
+            )
             .category(Category::Custom("dataframe".into()))
     }
 
@@ -40,7 +42,7 @@ impl Command for QueryDf {
     fn examples(&self) -> Vec<Example> {
         vec![Example {
             description: "Query dataframe using SQL",
-            example: "[[a b]; [1 2] [3 4]] | into df | query df 'select a from df'",
+            example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr query 'select a from df'",
             result: Some(
                 NuDataFrame::try_from_columns(vec![Column::new(
                     "a".to_string(),
@@ -14,11 +14,11 @@ pub struct RenameDF;
 
 impl Command for RenameDF {
     fn name(&self) -> &str {
-        "rename"
+        "dfr rename"
     }
 
     fn usage(&self) -> &str {
-        "Rename a dataframe column"
+        "Rename a dataframe column."
     }
 
     fn signature(&self) -> Signature {
@@ -33,8 +33,10 @@ impl Command for RenameDF {
                 SyntaxShape::Any,
                 "New names for the selected column(s). A string or list of strings",
             )
-            .input_type(Type::Custom("dataframe".into()))
-            .output_type(Type::Custom("dataframe".into()))
+            .input_output_type(
+                Type::Custom("dataframe".into()),
+                Type::Custom("dataframe".into()),
+            )
             .category(Category::Custom("dataframe or lazyframe".into()))
     }
 
@@ -42,7 +44,7 @@ impl Command for RenameDF {
         vec![
             Example {
                 description: "Renames a series",
-                example: "[5 6 7 8] | into df | rename '0' new_name",
+                example: "[5 6 7 8] | dfr into-df | dfr rename '0' new_name",
                 result: Some(
                     NuDataFrame::try_from_columns(vec![Column::new(
                         "new_name".to_string(),
@@ -59,7 +61,7 @@ impl Command for RenameDF {
             },
             Example {
                 description: "Renames a dataframe column",
-                example: "[[a b]; [1 2] [3 4]] | into df | rename a a_new",
+                example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr rename a a_new",
                 result: Some(
                     NuDataFrame::try_from_columns(vec![
                         Column::new(
@@ -77,7 +79,7 @@ impl Command for RenameDF {
             },
             Example {
                 description: "Renames two dataframe columns",
-                example: "[[a b]; [1 2] [3 4]] | into df | rename [a b] [a_new b_new]",
+                example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr rename [a b] [a_new b_new]",
                 result: Some(
                     NuDataFrame::try_from_columns(vec![
                         Column::new(
@@ -156,10 +158,10 @@ fn command_lazy(
 
     if columns.len() != new_names.len() {
         let value: Value = call.req(engine_state, stack, 1)?;
-        return Err(ShellError::IncompatibleParametersSingle(
-            "New name list has different size to column list".into(),
-            value.span()?,
-        ));
+        return Err(ShellError::IncompatibleParametersSingle {
+            msg: "New name list has different size to column list".into(),
+            span: value.span()?,
+        });
     }
 
     let lazy = lazy.into_polars();
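The error change in `command_lazy` above reflects `ShellError::IncompatibleParametersSingle` moving from a tuple variant to a struct variant with named fields. A small sketch of constructing the new form, with a caller-supplied span:

    use nu_protocol::{ShellError, Span};

    fn mismatched_lengths_error(span: Span) -> ShellError {
        // msg and span are now named fields rather than positional tuple items.
        ShellError::IncompatibleParametersSingle {
            msg: "New name list has different size to column list".into(),
            span,
        }
    }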
@@ -12,11 +12,11 @@ pub struct SampleDF;
 
 impl Command for SampleDF {
     fn name(&self) -> &str {
-        "sample"
+        "dfr sample"
     }
 
     fn usage(&self) -> &str {
-        "Create sample dataframe"
+        "Create sample dataframe."
     }
 
     fn signature(&self) -> Signature {
@@ -41,8 +41,10 @@ impl Command for SampleDF {
             )
             .switch("replace", "sample with replace", Some('e'))
             .switch("shuffle", "shuffle sample", Some('u'))
-            .input_type(Type::Custom("dataframe".into()))
-            .output_type(Type::Custom("dataframe".into()))
+            .input_output_type(
+                Type::Custom("dataframe".into()),
+                Type::Custom("dataframe".into()),
+            )
             .category(Category::Custom("dataframe".into()))
     }
 
@@ -50,12 +52,12 @@ impl Command for SampleDF {
         vec![
             Example {
                 description: "Sample rows from dataframe",
-                example: "[[a b]; [1 2] [3 4]] | into df | sample -n 1",
+                example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr sample -n 1",
                 result: None, // No expected value because sampling is random
             },
             Example {
                 description: "Shows sample row using fraction and replace",
-                example: "[[a b]; [1 2] [3 4] [5 6]] | into df | sample -f 0.5 -e",
+                example: "[[a b]; [1 2] [3 4] [5 6]] | dfr into-df | dfr sample -f 0.5 -e",
                 result: None, // No expected value because sampling is random
             },
         ]
@@ -13,24 +13,26 @@ pub struct ShapeDF;
 
 impl Command for ShapeDF {
     fn name(&self) -> &str {
-        "shape"
+        "dfr shape"
     }
 
     fn usage(&self) -> &str {
-        "Shows column and row size for a dataframe"
+        "Shows column and row size for a dataframe."
     }
 
     fn signature(&self) -> Signature {
         Signature::build(self.name())
-            .input_type(Type::Custom("dataframe".into()))
-            .output_type(Type::Custom("dataframe".into()))
+            .input_output_type(
+                Type::Custom("dataframe".into()),
+                Type::Custom("dataframe".into()),
+            )
             .category(Category::Custom("dataframe".into()))
     }
 
     fn examples(&self) -> Vec<Example> {
         vec![Example {
             description: "Shows row and column shape",
-            example: "[[a b]; [1 2] [3 4]] | into df | shape",
+            example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr shape",
             result: Some(
                 NuDataFrame::try_from_columns(vec![
                     Column::new("rows".to_string(), vec![Value::test_int(2)]),
@@ -14,26 +14,28 @@ pub struct SliceDF;
 
 impl Command for SliceDF {
     fn name(&self) -> &str {
-        "slice"
+        "dfr slice"
     }
 
     fn usage(&self) -> &str {
-        "Creates new dataframe from a slice of rows"
+        "Creates new dataframe from a slice of rows."
     }
 
     fn signature(&self) -> Signature {
         Signature::build(self.name())
             .required("offset", SyntaxShape::Int, "start of slice")
             .required("size", SyntaxShape::Int, "size of slice")
-            .input_type(Type::Custom("dataframe".into()))
-            .output_type(Type::Custom("dataframe".into()))
+            .input_output_type(
+                Type::Custom("dataframe".into()),
+                Type::Custom("dataframe".into()),
+            )
             .category(Category::Custom("dataframe".into()))
     }
 
     fn examples(&self) -> Vec<Example> {
         vec![Example {
             description: "Create new dataframe from a slice of the rows",
-            example: "[[a b]; [1 2] [3 4]] | into df | slice 0 1",
+            example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr slice 0 1",
             result: Some(
                 NuDataFrame::try_from_columns(vec![
                     Column::new("a".to_string(), vec![Value::test_int(1)]),
@@ -18,7 +18,7 @@ impl SQLContext {
     pub fn new() -> Self {
         Self {
             table_map: HashMap::new(),
-            dialect: GenericDialect::default(),
+            dialect: GenericDialect,
         }
     }
 
@@ -30,7 +30,7 @@ impl SQLContext {
         // Determine involved dataframe
        // Implicit join require some more work in query parsers, Explicit join are preferred for now.
         let tbl = select_stmt.from.get(0).ok_or_else(|| {
-            PolarsError::NotFound(ErrString::from("No table found in select statement"))
+            PolarsError::ComputeError(ErrString::from("No table found in select statement"))
         })?;
         let mut alias_map = HashMap::new();
         let tbl_name = match &tbl.relation {
@@ -39,7 +39,9 @@ impl SQLContext {
                 .0
                 .get(0)
                 .ok_or_else(|| {
-                    PolarsError::NotFound(ErrString::from("No table found in select statement"))
+                    PolarsError::ComputeError(ErrString::from(
+                        "No table found in select statement",
+                    ))
                 })?
                 .value
                 .to_string();
@@ -148,7 +150,7 @@ impl SQLContext {
             let agg_df = df.groupby(group_by).agg(agg_projection);
             let mut final_proj_pos = groupby_pos
                 .into_iter()
-                .chain(agg_proj_pos.into_iter())
+                .chain(agg_proj_pos)
                 .collect::<Vec<_>>();
 
             final_proj_pos.sort_by(|(proj_pa, _), (proj_pb, _)| proj_pa.cmp(proj_pb));
@@ -161,8 +163,8 @@ impl SQLContext {
                         .collect()
                         .unwrap_or_default()
                         .schema()
-                        .get_index(shm_p)
-                        .unwrap_or((&"".to_string(), &DataType::Null))
+                        .get_at_index(shm_p)
+                        .unwrap_or((&"".into(), &DataType::Null))
                         .0)
                 })
                 .collect::<Vec<_>>();
@@ -181,7 +183,7 @@ impl SQLContext {
         } else {
             let ast = ast
                 .get(0)
-                .ok_or_else(|| PolarsError::NotFound(ErrString::from("No statement found")))?;
+                .ok_or_else(|| PolarsError::ComputeError(ErrString::from("No statement found")))?;
             Ok(match ast {
                 Statement::Query(query) => {
                     let rs = match &*query.body {
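Several hunks above swap `PolarsError::NotFound` for `PolarsError::ComputeError` when an expected statement or table is missing. A sketch of the same `Option` to error mapping idiom in isolation (the statements slice is hypothetical):

    use polars::prelude::{PolarsError, PolarsResult};

    fn first_statement(statements: &[String]) -> PolarsResult<&String> {
        statements
            .get(0)
            // ComputeError takes anything convertible into polars' ErrString.
            .ok_or_else(|| PolarsError::ComputeError("No statement found".into()))
    }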
@@ -3,7 +3,7 @@ use polars::prelude::{col, lit, DataType, Expr, LiteralValue, PolarsResult as Re
 
 use sqlparser::ast::{
     BinaryOperator as SQLBinaryOperator, DataType as SQLDataType, Expr as SqlExpr,
-    Function as SQLFunction, Value as SqlValue, WindowSpec,
+    Function as SQLFunction, Value as SqlValue, WindowType,
 };
 
 fn map_sql_polars_datatype(data_type: &SQLDataType) -> Result<DataType> {
@@ -29,8 +29,8 @@ fn map_sql_polars_datatype(data_type: &SQLDataType) -> Result<DataType> {
         SQLDataType::Boolean => DataType::Boolean,
         SQLDataType::Date => DataType::Date,
         SQLDataType::Time(_, _) => DataType::Time,
-        SQLDataType::Timestamp(_, _) => DataType::Datetime(TimeUnit::Milliseconds, None),
-        SQLDataType::Interval => DataType::Duration(TimeUnit::Milliseconds),
+        SQLDataType::Timestamp(_, _) => DataType::Datetime(TimeUnit::Microseconds, None),
+        SQLDataType::Interval => DataType::Duration(TimeUnit::Microseconds),
         SQLDataType::Array(inner_type) => match inner_type {
             Some(inner_type) => DataType::List(Box::new(map_sql_polars_datatype(inner_type)?)),
             None => {
@@ -125,18 +125,26 @@ pub fn parse_sql_expr(expr: &SqlExpr) -> Result<Expr> {
     })
 }
 
-fn apply_window_spec(expr: Expr, window_spec: &Option<WindowSpec>) -> Result<Expr> {
-    Ok(match &window_spec {
-        Some(window_spec) => {
-            // Process for simple window specification, partition by first
-            let partition_by = window_spec
-                .partition_by
-                .iter()
-                .map(parse_sql_expr)
-                .collect::<Result<Vec<_>>>()?;
-            expr.over(partition_by)
-            // Order by and Row range may not be supported at the moment
-        }
+fn apply_window_spec(expr: Expr, window_type: &Option<WindowType>) -> Result<Expr> {
+    Ok(match &window_type {
+        Some(wtype) => match wtype {
+            WindowType::WindowSpec(window_spec) => {
+                // Process for simple window specification, partition by first
+                let partition_by = window_spec
+                    .partition_by
+                    .iter()
+                    .map(parse_sql_expr)
+                    .collect::<Result<Vec<_>>>()?;
+                expr.over(partition_by)
+                // Order by and Row range may not be supported at the moment
+            }
+            // TODO: make NamedWindow work
+            WindowType::NamedWindow(_named) => {
+                return Err(PolarsError::ComputeError(
+                    format!("Expression: {expr:?} was not supported in polars-sql yet!").into(),
+                ))
+            }
+        },
         None => expr,
     })
 }
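The `apply_window_spec` rewrite above follows sqlparser's move from `Option<WindowSpec>` to `Option<WindowType>`, which distinguishes inline specifications from named windows. A minimal sketch of matching over the new enum, assuming only the `WindowType` variants shown in the diff:

    use sqlparser::ast::WindowType;

    fn describe_window(window_type: &Option<WindowType>) -> &'static str {
        match window_type {
            Some(WindowType::WindowSpec(_)) => "inline window specification",
            Some(WindowType::NamedWindow(_)) => "named window (not handled yet)",
            None => "no window clause",
        }
    }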
@@ -19,7 +19,7 @@ pub struct Summary;
 
 impl Command for Summary {
     fn name(&self) -> &str {
-        "summary"
+        "dfr summary"
     }
 
     fn usage(&self) -> &str {
@@ -29,11 +29,13 @@ impl Command for Summary {
     fn signature(&self) -> Signature {
         Signature::build(self.name())
             .category(Category::Custom("dataframe".into()))
-            .input_type(Type::Custom("dataframe".into()))
-            .output_type(Type::Custom("dataframe".into()))
+            .input_output_type(
+                Type::Custom("dataframe".into()),
+                Type::Custom("dataframe".into()),
+            )
             .named(
                 "quantiles",
-                SyntaxShape::Table,
+                SyntaxShape::Table(vec![]),
                 "provide optional quantiles",
                 Some('q'),
             )
@@ -42,7 +44,7 @@ impl Command for Summary {
     fn examples(&self) -> Vec<Example> {
         vec![Example {
             description: "list dataframe descriptives",
-            example: "[[a b]; [1 1] [1 1]] | into df | summary",
+            example: "[[a b]; [1 1] [1 1]] | dfr into-df | dfr summary",
             result: Some(
                 NuDataFrame::try_from_columns(vec![
                     Column::new(
@@ -15,11 +15,11 @@ pub struct TakeDF;
 
 impl Command for TakeDF {
     fn name(&self) -> &str {
-        "take"
+        "dfr take"
     }
 
     fn usage(&self) -> &str {
-        "Creates new dataframe using the given indices"
+        "Creates new dataframe using the given indices."
     }
 
     fn signature(&self) -> Signature {
@@ -29,8 +29,10 @@ impl Command for TakeDF {
                 SyntaxShape::Any,
                 "list of indices used to take data",
             )
-            .input_type(Type::Custom("dataframe".into()))
-            .output_type(Type::Custom("dataframe".into()))
+            .input_output_type(
+                Type::Custom("dataframe".into()),
+                Type::Custom("dataframe".into()),
+            )
             .category(Category::Custom("dataframe".into()))
     }
 
@@ -38,9 +40,9 @@ impl Command for TakeDF {
         vec![
             Example {
                 description: "Takes selected rows from dataframe",
-                example: r#"let df = ([[a b]; [4 1] [5 2] [4 3]] | into df);
-    let indices = ([0 2] | into df);
-    $df | take $indices"#,
+                example: r#"let df = ([[a b]; [4 1] [5 2] [4 3]] | dfr into-df);
+    let indices = ([0 2] | dfr into-df);
+    $df | dfr take $indices"#,
                 result: Some(
                     NuDataFrame::try_from_columns(vec![
                         Column::new(
@@ -58,9 +60,9 @@ impl Command for TakeDF {
             },
             Example {
                 description: "Takes selected rows from series",
-                example: r#"let series = ([4 1 5 2 4 3] | into df);
-    let indices = ([0 2] | into df);
-    $series | take $indices"#,
+                example: r#"let series = ([4 1 5 2 4 3] | dfr into-df);
+    let indices = ([0 2] | dfr into-df);
+    $series | dfr take $indices"#,
                 result: Some(
                     NuDataFrame::try_from_columns(vec![Column::new(
                         "0".to_string(),
Some files were not shown because too many files have changed in this diff.