Mirror of https://github.com/nushell/nushell.git
Synced 2025-07-01 15:11:52 +02:00
Compare commits
959 Commits
Author | SHA1 | Date | |
---|---|---|---|
3016d7a64c | |||
9a07b41c9d | |||
565c6409d9 | |||
27793e7452 | |||
7066cc5004 | |||
71aacf5032 | |||
3ee2fc60f9 | |||
669659f974 | |||
4cda183103 | |||
9a9fdd7a35 | |||
626d597527 | |||
8c112c9efd | |||
98525043ed | |||
872aa78373 | |||
8948c350d4 | |||
9ff92c6878 | |||
38a42905ae | |||
39cf43ef06 | |||
262914cf92 | |||
4c4609d646 | |||
65e5abaa3e | |||
f24877ba08 | |||
ab08328a30 | |||
345edbbe10 | |||
e69a02d379 | |||
0126620c19 | |||
387328fe73 | |||
eaedb30a8c | |||
c6cb406a53 | |||
d3895d71db | |||
7b95e37bbe | |||
bf425874b8 | |||
f6e248f343 | |||
ecaed7f0ae | |||
0aae485395 | |||
523d57b2d8 | |||
2a721bad52 | |||
f4d9ddd3ad | |||
daeb56fe90 | |||
43687207b4 | |||
c0ff0f12f0 | |||
2697ea9a25 | |||
55b67e17bb | |||
123547444c | |||
88f1f386bb | |||
461f69ac5d | |||
e09b4817e1 | |||
96744e3155 | |||
4372d00ea9 | |||
7884de1941 | |||
098527b263 | |||
995989dad4 | |||
67a63162b2 | |||
6be91a68f3 | |||
f7d647ac3c | |||
a2a1c1656f | |||
99ba365c4a | |||
28f58057b6 | |||
f17f857b1f | |||
cf68334fa0 | |||
6ff3a4180b | |||
1058707a29 | |||
6e590fe0a2 | |||
b23fe30530 | |||
123bf2d736 | |||
7c1eb6b0c8 | |||
63ccc62a24 | |||
c0bac5a440 | |||
68a3d7c430 | |||
d11080e9ab | |||
a4ef7c1ac4 | |||
6a1691f378 | |||
a71abdaf15 | |||
1350f1eff7 | |||
752d25b004 | |||
68fcd71898 | |||
fb4251aba7 | |||
28f0f32ae7 | |||
06c590d894 | |||
0487e9ffcb | |||
1c49ca503a | |||
360ebeb0bc | |||
7f1e025cc9 | |||
1b220815b9 | |||
671bd08bcd | |||
bce2627e45 | |||
903afda6d9 | |||
317653d5d2 | |||
a20b24a712 | |||
74d62581b9 | |||
c68324762d | |||
525acf9d9e | |||
cb67de675e | |||
fd7eef1499 | |||
a603b067e5 | |||
b0600449e5 | |||
4073783d6e | |||
779a3c075e | |||
d77eeeb5dd | |||
7a181960b7 | |||
1bf016bae3 | |||
da4c918392 | |||
db4b3a561d | |||
d74c59eace | |||
06b0bea77f | |||
3ec0655577 | |||
005b8b02b2 | |||
5042f19d1b | |||
bacc1f6317 | |||
5536c9c7bc | |||
58c6fea60b | |||
e7f1bf8535 | |||
84517138bc | |||
fb7f6fc08b | |||
3633772d52 | |||
4de9a3eb34 | |||
b2092df27e | |||
09f513bb53 | |||
857c522808 | |||
9fa2b77611 | |||
f8a8eca836 | |||
20aa59085b | |||
4b91ed57dd | |||
366348dea0 | |||
b6b36e00c6 | |||
c79432f33c | |||
08931e976e | |||
c2992d5d8b | |||
342907c04d | |||
f5f21aca2d | |||
9342ad6ae1 | |||
cb4ca157fd | |||
d1c807230b | |||
4e5d3db952 | |||
b8d37a7541 | |||
c7a8aac883 | |||
0d518bf813 | |||
e083b75aef | |||
2e5a857983 | |||
0b6f15b69e | |||
16f3d9b4e1 | |||
3e0fa8ff85 | |||
d40019a141 | |||
d9b324c5d3 | |||
00176c5666 | |||
f16ac886a8 | |||
c77ec0b667 | |||
6779b248e4 | |||
0a355db5c0 | |||
cf9813cbf8 | |||
c371d1a535 | |||
4e0a65c822 | |||
6530403ff8 | |||
c08f46f836 | |||
175dab4898 | |||
798ae7b251 | |||
f879c00f9d | |||
d03ad6a257 | |||
14e4d05a9f | |||
86dd045554 | |||
0e023eaa84 | |||
427857a78e | |||
eea3f79c3c | |||
b004e80f77 | |||
25b62c2ac3 | |||
859f7b3dc7 | |||
39b020037d | |||
1334de72b0 | |||
56acebb826 | |||
d646903161 | |||
e43d893ea3 | |||
f03d81b0c8 | |||
7f1fd7699e | |||
a3f1116ea8 | |||
ef1d70eb67 | |||
cffce7f4b2 | |||
a4809d2f08 | |||
f286286510 | |||
2a65d43c13 | |||
0aabe84460 | |||
323207ca1d | |||
e35376f1e7 | |||
7e37e4bb5f | |||
a44ad949f1 | |||
4105255a5a | |||
ff5815c0a3 | |||
3af4f34f11 | |||
092d496ff5 | |||
415ebf207f | |||
aaac273cd0 | |||
90d65bb987 | |||
4c5a8c1804 | |||
188aca8fe6 | |||
c59d6d31bc | |||
64f34e0287 | |||
6edf91dcae | |||
bc872a1a2a | |||
ea1bd9f8f9 | |||
4458aae3d4 | |||
e7a4af14cd | |||
90095c72f6 | |||
f12f590d82 | |||
c8f30fa3bf | |||
ff290a5c3d | |||
56067da39c | |||
5d63f47c85 | |||
ee6547dbb7 | |||
19e76332fa | |||
55bf4d847f | |||
a4199ea312 | |||
afb7e1cf66 | |||
61d5aed0a2 | |||
41a7c351cb | |||
7ac3e97bfe | |||
924986576d | |||
7071617f18 | |||
e72a4116ec | |||
a109283118 | |||
42b03917fb | |||
12a07052f9 | |||
0cd927d634 | |||
e4c2c123ab | |||
d25be66929 | |||
6eb6086823 | |||
e88a531945 | |||
a093e66822 | |||
387c5462e9 | |||
724818030d | |||
8cad12a05c | |||
41119d3f88 | |||
0ebbc8f71c | |||
bd07f7b302 | |||
1867bb1a88 | |||
62272975f2 | |||
7bb9ee55c4 | |||
2c1560e281 | |||
d5206cbd68 | |||
c74cff213e | |||
2ea5819b02 | |||
1115190a49 | |||
7132c5ad02 | |||
1920ece759 | |||
6f59abaf43 | |||
5f7425a7b4 | |||
1ab9ec3ebc | |||
f2095ed0cc | |||
7e26b4fcc2 | |||
ad95e4cc27 | |||
ee5a18167c | |||
75c9e3e5df | |||
77f10eb270 | |||
42bb42a2e1 | |||
f597380112 | |||
b92b4120dc | |||
de5ad5de19 | |||
64695cd67c | |||
21b3eeed99 | |||
a86a7e6c29 | |||
15421dc45e | |||
9522052063 | |||
ba880277bf | |||
40241e9be6 | |||
34f3da7150 | |||
534287ed65 | |||
913c2b8d1c | |||
aeffa188f0 | |||
543a25599c | |||
7ad0e5541e | |||
c1cc1c82cc | |||
b0b4c3dffd | |||
f3de373c59 | |||
df1fecd2cb | |||
9620e27e4f | |||
072ecec8ca | |||
748d82cec1 | |||
5e5d1ea81b | |||
bb570e42be | |||
3a050864df | |||
8cfa96b4c0 | |||
6f384da57e | |||
f8e63328d8 | |||
5d98a727ca | |||
109f629cb6 | |||
ff6a67d293 | |||
03ae01f11e | |||
697f3c03f1 | |||
1cecb37628 | |||
89436e978b | |||
cd0a52cf00 | |||
c6043eb500 | |||
729373aba0 | |||
f59a6990dc | |||
0f9094ead2 | |||
70f7db14d4 | |||
0cba269d80 | |||
ec2593efb8 | |||
c2283596ac | |||
c9399f5142 | |||
c9c93f5b4d | |||
2264682443 | |||
247c33b6d6 | |||
a6da8ce769 | |||
020e121391 | |||
7d5bd0d6be | |||
87717b9ddd | |||
3c6fac059e | |||
9092fc1b12 | |||
5b557a888e | |||
398b756aee | |||
533c1a89af | |||
84742275a1 | |||
92d968b8c8 | |||
50102bf69b | |||
156232fe08 | |||
0a3761a594 | |||
44dc890124 | |||
6ead98effb | |||
d5f76c02f0 | |||
fd77114d82 | |||
78f52e8b66 | |||
5b01685fc3 | |||
c2b684464f | |||
fd56768fdc | |||
070afa4528 | |||
23fec8eb0d | |||
c9841f67e9 | |||
76482cc1b2 | |||
d43f4253e8 | |||
0fba08808c | |||
b3a52a247f | |||
4763801cb2 | |||
ecb3b3a364 | |||
3e5f81ae14 | |||
ca05553fc6 | |||
fa5d7babb9 | |||
94b27267fd | |||
d1390ac95b | |||
d717e8faeb | |||
a95a4505ef | |||
b03f1efac4 | |||
5d5088b5d5 | |||
c1c73811d5 | |||
51bf8d9f6a | |||
858c93d2e5 | |||
31146a7591 | |||
05d7d6d6ad | |||
fb3350ebc3 | |||
2ffe30ecf0 | |||
f8dc3421b0 | |||
c1a30ac60f | |||
fc06afd051 | |||
c9aa6ba0f3 | |||
f8c82588b6 | |||
67eec92e76 | |||
b227eea668 | |||
58d002d469 | |||
5d283755e3 | |||
35e8db160d | |||
7d8df4ba9e | |||
f36c055c50 | |||
76bdda1178 | |||
5c07e82fc0 | |||
6ea5bdcf47 | |||
15c7e1b725 | |||
e5d9f405db | |||
80881c75f9 | |||
250ee62e16 | |||
54d73748e4 | |||
d08e254d16 | |||
7f771babb7 | |||
b9b9eaaca5 | |||
0303d709e6 | |||
0e1322e6d6 | |||
e290fa0e68 | |||
112306aab5 | |||
98952082ae | |||
8386bc0919 | |||
182b0ab4fb | |||
fa83458a6d | |||
54398f9546 | |||
077d1c8125 | |||
1ff8c2d81d | |||
d77f1753c2 | |||
85c6047b71 | |||
d37893cca0 | |||
95ac436d26 | |||
c2a46070aa | |||
57808ca7cc | |||
6cfe35eb7e | |||
b2734db015 | |||
823e578c46 | |||
95a745e622 | |||
776df7cd93 | |||
d5677625a7 | |||
64288b4350 | |||
a42fd3611a | |||
e36f69bf3c | |||
5ad7b8f029 | |||
ffb80b8873 | |||
177e800a07 | |||
a7e8970383 | |||
a324a50bb7 | |||
0578cf85ac | |||
1b54dd5418 | |||
8ad5d8bb6a | |||
869b01205c | |||
f5b2f5a9ee | |||
adfa4d00c0 | |||
12effd9b4e | |||
c26fca7419 | |||
da59dfe7d2 | |||
08715e6308 | |||
07d7899a97 | |||
494a5a5286 | |||
f41c93b2d3 | |||
5063e01c12 | |||
d1137cc700 | |||
3966c0a9fd | |||
dbdb1f6600 | |||
84cdc0d521 | |||
ab59dab129 | |||
e0c8a3d14c | |||
e93e51d672 | |||
4205edbc70 | |||
80bee40807 | |||
461837773b | |||
52d4259f58 | |||
274a8366c6 | |||
a1dfc35968 | |||
5886a74ccc | |||
4367aa9f58 | |||
e9c298713e | |||
c110ddff66 | |||
a806717f35 | |||
2b5f1ee5b3 | |||
77a1c3c7b2 | |||
82b3ae826f | |||
1b3092ae7c | |||
942ff7df4d | |||
51abe4c262 | |||
e8e0526f57 | |||
0b25385109 | |||
415b1273b4 | |||
6bee80dcd7 | |||
588a078872 | |||
93096a07aa | |||
523d0bca16 | |||
fe92051bb3 | |||
ee648ecb7d | |||
91920373b5 | |||
33a7bc405f | |||
cd75640a90 | |||
0f600bc3f5 | |||
aed4b626b8 | |||
92503e6571 | |||
44c0db46e1 | |||
1fd3bc1ba6 | |||
59ea28cf06 | |||
435abadd8a | |||
86cd387439 | |||
edbf3aaccb | |||
b03ef56bcb | |||
55316a9f27 | |||
d3ec3dc66b | |||
7ebae0b5f7 | |||
f45aed257f | |||
60da7abbc7 | |||
7a3cbf43e8 | |||
45b02ce2ab | |||
c039e4b3d0 | |||
9b202d560d | |||
1874082a2c | |||
1359b26da2 | |||
b9bc527d27 | |||
51cdd9fbb2 | |||
d838871063 | |||
4d16d92847 | |||
fc01701a41 | |||
51d5d0eac8 | |||
81d00f71a9 | |||
77fbf3e2d2 | |||
f565661f42 | |||
1a864ea6f4 | |||
c1738620e3 | |||
56e35fc3f9 | |||
29591c97a7 | |||
697dee6750 | |||
0ca8fcf58c | |||
a46048f362 | |||
0569a9c92e | |||
c1ca10ffd1 | |||
15c22db8f4 | |||
1c52b112c8 | |||
275dba82d5 | |||
cf7040a215 | |||
72cb4b6032 | |||
d4cbab454e | |||
3645178ff1 | |||
005180f269 | |||
72f7b9b7cc | |||
3f61ca19f0 | |||
d8c59eddb3 | |||
ac43372618 | |||
dccb2b48f3 | |||
2e68e6ddbf | |||
3dfe1a4f0e | |||
c87bac04c0 | |||
4b301710d3 | |||
7d67ca3652 | |||
01d8961eb7 | |||
7ac5a01e2f | |||
e2fb0e5b82 | |||
a11e41332c | |||
f3656f7822 | |||
38f4ab0bc9 | |||
f35741d50e | |||
d93315d8f5 | |||
8429aec57f | |||
f043a8a8ff | |||
c6016d7659 | |||
78b4472b32 | |||
cb754befe9 | |||
0588a4fc19 | |||
ff3a0a0de3 | |||
c799f77577 | |||
d3182a6737 | |||
b5e09b8a30 | |||
05efd735b9 | |||
5e0499fcf9 | |||
74d3f3c1d6 | |||
de6edf18d9 | |||
a35ecb4837 | |||
a01ef85bda | |||
6445c4e7de | |||
066c9118ae | |||
52e8b0afb2 | |||
db3f3eaf5a | |||
878f0cf6e1 | |||
7caf27b665 | |||
22b375ccd4 | |||
6a2539534f | |||
f310a9be8c | |||
d0dc6986dd | |||
11480c77be | |||
4fd2b702ee | |||
030e55acbf | |||
c5e1b64b40 | |||
999f7b229f | |||
de1c7bb39f | |||
54bc662e0e | |||
5f2089a15b | |||
adb99938f7 | |||
b907939916 | |||
27e6271402 | |||
0a8f27f6f2 | |||
1662e61ecb | |||
b58819d51e | |||
9692240b4f | |||
d204defb68 | |||
9e7f84afb0 | |||
ed8dee04b6 | |||
4171c654a5 | |||
22ee041002 | |||
7162d4d9aa | |||
9c70c68914 | |||
93b4aa5fcf | |||
71b0e12b57 | |||
09b3dab35d | |||
88a87158c2 | |||
faf84e69b4 | |||
5d8763ed1d | |||
58124e66a4 | |||
25af32d3a8 | |||
5a746c0ed6 | |||
89ccdc2b06 | |||
76ee00e013 | |||
4e5a1ced13 | |||
1f62024a15 | |||
6181ea5fc1 | |||
1751ac12f4 | |||
6cff54ed0d | |||
ec3e4ce120 | |||
f97443aff6 | |||
c925537c48 | |||
c5545c59c6 | |||
55044aa7d6 | |||
4be7004289 | |||
814a5caf9a | |||
81ece18d5e | |||
0ba81f1d51 | |||
c81fa397b6 | |||
8c36e9df44 | |||
1402508416 | |||
dc3c34275d | |||
f77fe04425 | |||
20ac30b6e2 | |||
4007256cfd | |||
b634f1b010 | |||
8a77d1ed92 | |||
69a17fb247 | |||
9f144798d3 | |||
0b651b6372 | |||
ce09186e2e | |||
0c67d742f0 | |||
2ef34a3b4b | |||
2d72f892fe | |||
ee4e0a933b | |||
765b303689 | |||
e427c68731 | |||
ff6c0fcb81 | |||
bcf3537395 | |||
4efccb2b1c | |||
67b5e1bde9 | |||
eb4fd144eb | |||
d51e82f33b | |||
7827b1fb87 | |||
7dbda76fad | |||
0dbd014d8b | |||
d064d187ab | |||
399319476a | |||
4f4e8c984e | |||
129ae0bf3e | |||
471c58448e | |||
a03c1c266c | |||
afdb68dc71 | |||
bc1b2fa5bd | |||
8c507dc984 | |||
4a82ee6c11 | |||
e8da57b05e | |||
f481879ed3 | |||
7c1487e18d | |||
2cc4191ec9 | |||
844cb1213b | |||
783f2a9342 | |||
eb6870cab5 | |||
0d367af24a | |||
5c15a4dd6e | |||
eeade99452 | |||
679879f79b | |||
c4bbc3edad | |||
6c6d215197 | |||
4b9ec03110 | |||
b9ecfeb890 | |||
4dbbacc35d | |||
28ef14399c | |||
20aaaaf90c | |||
7c274ad4d8 | |||
30c331e882 | |||
fa2e6e5d53 | |||
7eaa6d01ab | |||
d34581db4a | |||
85d6529f0d | |||
50039164f1 | |||
c64017de7b | |||
9e445fd4c5 | |||
cc4f8bbd82 | |||
16453b6986 | |||
d6b9153ac5 | |||
80a183dde2 | |||
f2af12af2c | |||
7ad4c679b3 | |||
9a0c6f2e02 | |||
78d0e1d0b8 | |||
dc739f703a | |||
ac7263d957 | |||
9c52b93975 | |||
80220b722b | |||
d1dc610769 | |||
cc767463e6 | |||
8f4ea69c22 | |||
6c026242d4 | |||
4a26719b0c | |||
d2f513da36 | |||
feef612388 | |||
65074ec449 | |||
a19cac2673 | |||
5326e51e4f | |||
b39cca91e5 | |||
57825a5c45 | |||
19cee5fda1 | |||
95a4649cc9 | |||
e96039fb1b | |||
65e2733571 | |||
bc437da5c7 | |||
65c4083ae6 | |||
6a2fd91a01 | |||
b6d31e0e45 | |||
d2c87ad4b4 | |||
a26a01c8d0 | |||
414216edfa | |||
6df001f72d | |||
4880721b73 | |||
1bb953877e | |||
ef7ade59f3 | |||
1c677c9577 | |||
1072bd06c3 | |||
4aa9102523 | |||
0c7a8e3634 | |||
addf8ca942 | |||
7cfd4d2cfa | |||
4ae53d93fb | |||
29e809ad77 | |||
e1c6be0682 | |||
bf40f035f6 | |||
989a14709c | |||
8d8b44342b | |||
7980ad9f7f | |||
af15f794b4 | |||
a6f62e05ae | |||
f8939de14f | |||
01ade02ac1 | |||
39d93b536a | |||
32f67557af | |||
f05eed8e8d | |||
f0a265dbee | |||
bc7736bc99 | |||
19d732f313 | |||
a9a82de5c4 | |||
9074015d1c | |||
2b77544e58 | |||
5ee74b6ab5 | |||
3a04bd9154 | |||
2c176a7f14 | |||
026e18399e | |||
bbf0b45c59 | |||
5bd7300cd5 | |||
ffb5051f6c | |||
ce4ea16c08 | |||
48c94c75fc | |||
73d3708006 | |||
bbea7da669 | |||
7f39609d9a | |||
a14e9e0a2e | |||
3e14dc3eb8 | |||
ba6d8ad261 | |||
2a08865851 | |||
0a3bfe7f73 | |||
451a9c64d3 | |||
88d79c84cd | |||
abcb0877e2 | |||
9e1e2a4320 | |||
d53b0a99d0 | |||
1fb4f9e455 | |||
6e9b6f22c9 | |||
e90b099622 | |||
84c10de864 | |||
d618b60d9e | |||
c761f7f844 | |||
7b89fab327 | |||
eddff46155 | |||
baa50ec9b2 | |||
513186c390 | |||
0c37463bfa | |||
94fc33bbee | |||
ce378a68a6 | |||
fa40740e77 | |||
762fdb98ac | |||
5f795b1aec | |||
6811700b90 | |||
248aca7a44 | |||
17abbdf6e0 | |||
40eca52ed5 | |||
7907dda8f7 | |||
8501024546 | |||
21d30d1e4d | |||
eeaa65c8af | |||
6754b8534e | |||
2ffff959fc | |||
fed4233db4 | |||
2f47263380 | |||
4d5386635e | |||
872eb2c3df | |||
e62a77a885 | |||
9bca63ebef | |||
ae54dc862c | |||
5e951b2be9 | |||
f78d57a703 | |||
f021be623e | |||
b6189879e3 | |||
c7c6445b03 | |||
535aec0648 | |||
664dd291eb | |||
a9216deaa4 | |||
99caad7d60 | |||
7486850357 | |||
bb06661d24 | |||
6a374182a7 | |||
f433b3102f | |||
456e2a8ee3 | |||
1ee3bf784c | |||
54394fe9af | |||
7a728340de | |||
5f1e8a6af8 | |||
e566a073dc | |||
9a4dad6ca1 | |||
eca9f461da | |||
08aaa9494c | |||
352f913c39 | |||
aeeb5dd405 | |||
278bf7ffa9 | |||
b15c824932 | |||
5ad3bfa31b | |||
e5145358eb | |||
3a20fbfe94 | |||
dac32557cd | |||
fedd879b2e | |||
a46c21cffb | |||
6cdfee3573 | |||
af79eb2943 | |||
e9d4730099 | |||
844f541719 | |||
d28f728787 | |||
f35808cb89 | |||
7d6b23ee2f | |||
e68ae4c8d1 | |||
faad6ca355 | |||
c77c1bd297 | |||
5b4b4446b7 | |||
93f20b406e | |||
02318cf3a7 | |||
35fc387505 | |||
fd4ba0443d | |||
b943cbedff | |||
3fd1a26ec0 | |||
3f2c76df28 | |||
7d3312e96e | |||
c59d9dc306 | |||
1f06f8405c | |||
38f454d5ab | |||
487f1a97ea | |||
0f05475e2e | |||
cc805f3f01 | |||
5ac5b90aed | |||
3d73287ea4 | |||
7ebdced256 | |||
ad12018199 | |||
27dcc3ecc3 | |||
e25a795cf6 | |||
16c15e83a3 | |||
cea67cb30b | |||
1e3e034021 | |||
8da27a1a09 | |||
030e749fe7 | |||
a785e64bc9 | |||
d4eeef4bd1 | |||
c8a07d477f | |||
af82eeca72 | |||
3d698b74d8 | |||
d2abb8603a | |||
894e0f7658 | |||
a5087c4966 | |||
ac4ab452d4 | |||
5378727049 | |||
0786ddddbd | |||
7617084ca3 | |||
28941f1a06 | |||
43ceb3edec | |||
bffd8e4dd2 | |||
66023f6243 | |||
10fc32e3ef | |||
3148acd3a4 | |||
e6ce8a89be | |||
74f8081290 | |||
2ae1de2470 | |||
028a327ce8 | |||
318862aad6 | |||
9f4510f2e1 | |||
98c7ab96b6 | |||
13114e972b | |||
4a1b3e26ef | |||
1e3248dfe8 | |||
2aa4cd5cc5 | |||
fb908df17d | |||
cdf09abcc0 | |||
fe2c498a81 | |||
fe7122280d | |||
2e0fb7c1a6 | |||
f33b60c001 | |||
7e48607820 | |||
68a821c84a | |||
c5e59efa4d | |||
ec5b9b9f37 | |||
e88a51e930 | |||
35f8d8548a | |||
7a123d3eb1 | |||
3ed45c7ba8 | |||
8b160f9850 | |||
696b2cda4a | |||
435348aa61 | |||
0a5f41abc2 | |||
2b97bc54c5 | |||
ad49c17eba | |||
f1e88d95c1 | |||
6eac9bfd0f | |||
5d94b16d71 | |||
3bd46fe27a | |||
7b1c7debcb | |||
e77a0a48aa | |||
aa37572ddc | |||
a0cecf7658 | |||
23170ff368 | |||
f9ffd9ae29 | |||
d5fa7b8a55 | |||
f94df58486 | |||
6f5bd62a97 | |||
bb3cc9e153 | |||
202dfdaee2 | |||
0674d4960b | |||
85c2035016 | |||
02be83efbf | |||
b964347895 | |||
56ed1eb807 | |||
570175f95d | |||
839010b89d | |||
7694d09d14 | |||
4accc67843 | |||
c070e2d6f7 | |||
d302d63030 | |||
a2e117f8b0 | |||
b1974fae39 | |||
7248de1167 | |||
a9582e1c62 | |||
bb6335830a | |||
c8f3799c20 | |||
bd3a61a2f7 | |||
077643cadf | |||
fa2d9a8a58 | |||
58f98a4260 | |||
066790552a | |||
dcb1a1996c | |||
6b4d06d8a7 | |||
f615038938 | |||
71b74a284a | |||
2b431f994f | |||
7e096e61d7 | |||
9d7a1097f2 | |||
a98b3124c5 | |||
572698bf3e | |||
7162289d77 | |||
14bf25da14 | |||
a455e2e5eb | |||
840b4b854b | |||
ec4941c8ac | |||
dd86f14a5a | |||
63103580d2 | |||
d25df9c00b | |||
778a00efa1 | |||
fea822792f | |||
f6033ac5af | |||
583ef8674e | |||
94bec72079 | |||
28ed21864d | |||
e16ce1df36 | |||
87abfee268 | |||
ba0f069c31 | |||
154856066f | |||
f91713b714 | |||
f1bf485b2a | |||
955de76116 |
@@ -26,3 +26,8 @@ rustflags = ["-C", "link-args=-stack:10000000", "-C", "target-feature=+crt-stati
# [target.aarch64-apple-darwin]
# linker = "clang"
# rustflags = ["-C", "link-arg=-fuse-ld=mold"]

[target.aarch64-apple-darwin]
# We can guarantee that this target will always run on a CPU with _at least_
# these capabilities, so let's optimize for them
rustflags = ["-Ctarget-cpu=apple-m1"]
4 .github/pull_request_template.md (vendored)
@@ -24,8 +24,8 @@ Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:

- `cargo fmt --all -- --check` to check standard code formatting (`cargo fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect -A clippy::result_large_err` to check that you're using the standard code style
- `cargo test --workspace` to check that all tests pass
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used` to check that you're using the standard code style
- `cargo test --workspace` to check that all tests pass (on Windows make sure to [enable developer mode](https://learn.microsoft.com/en-us/windows/apps/get-started/developer-mode-features-and-debugging))
- `cargo run -- -c "use std testing; testing run-tests --path crates/nu-std"` to run the tests for the standard library

> **Note**
25 .github/workflows/audit.yml (vendored, new file)
@@ -0,0 +1,25 @@
name: Security audit
on:
  pull_request:
    paths:
      - '**/Cargo.toml'
      - '**/Cargo.lock'
  push:
    branches:
      - main

env:
  RUST_BACKTRACE: 1
  CARGO_TERM_COLOR: always
  CLICOLOR: 1

jobs:
  security_audit:
    runs-on: ubuntu-latest
    # Prevent sudden announcement of a new advisory from failing ci:
    continue-on-error: true
    steps:
      - uses: actions/checkout@v4
      - uses: rustsec/audit-check@v1.4.1
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
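The job above checks the dependency tree against the RustSec advisory database whenever Cargo.toml or Cargo.lock changes. A rough local equivalent, assuming the `cargo-audit` subcommand is installed from crates.io, is:

```nushell
# One-time install of the advisory scanner (local setup, not part of the workflow)
cargo install cargo-audit --locked

# Scan Cargo.lock against the RustSec advisory database
cargo audit
```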
82 .github/workflows/ci.yml (vendored)
@@ -7,9 +7,10 @@ on:
name: continuous-integration

env:
  NUSHELL_CARGO_TARGET: ci
  NUSHELL_CARGO_PROFILE: ci
  NU_LOG_LEVEL: DEBUG
  CLIPPY_OPTIONS: "-D warnings -D clippy::unwrap_used -A clippy::needless_collect -A clippy::result_large_err"
  # If changing these settings also change toolkit.nu
  CLIPPY_OPTIONS: "-D warnings -D clippy::unwrap_used"

jobs:
  fmt-clippy:
@@ -37,19 +38,24 @@
    runs-on: ${{ matrix.platform }}

    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v4

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
        uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
        with:
          rustflags: ""

      - name: cargo fmt
        run: cargo fmt --all -- --check

      # If changing these settings also change toolkit.nu
      - name: Clippy
        run: cargo clippy --workspace ${{ matrix.flags }} --exclude nu_plugin_* -- $CLIPPY_OPTIONS

      # In tests we don't have to deny unwrap
      - name: Clippy of tests
        run: cargo clippy --tests --workspace ${{ matrix.flags }} --exclude nu_plugin_* -- -D warnings

  tests:
    strategy:
      fail-fast: true
@@ -57,6 +63,10 @@
        platform: [windows-latest, macos-latest, ubuntu-20.04]
        feature: [default, dataframe, extra]
        include:
          # linux CI cannot handle clipboard feature
          - default-flags: ""
          - platform: ubuntu-20.04
            default-flags: "--no-default-features --features=default-no-clipboard"
          - feature: default
            flags: ""
          - feature: dataframe
@@ -76,15 +86,26 @@
    runs-on: ${{ matrix.platform }}

    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v4

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
        uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
        with:
          rustflags: ""

      - name: Tests
        run: cargo test --workspace --profile ci --exclude nu_plugin_* ${{ matrix.flags }}
        run: cargo test --workspace --profile ci --exclude nu_plugin_* ${{ matrix.default-flags }} ${{ matrix.flags }}

      - name: Check for clean repo
        shell: bash
        run: |
          if [ -n "$(git status --porcelain)" ]; then
            echo "there are changes";
            git status --porcelain
            exit 1
          else
            echo "no changes in working directory";
          fi

  std-lib-and-python-virtualenv:
    strategy:
@@ -97,10 +118,10 @@
    runs-on: ${{ matrix.platform }}

    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v4

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
        uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
        with:
          rustflags: ""

@@ -108,30 +129,32 @@
        run: cargo install --path . --locked --no-default-features

      - name: Standard library tests
        run: nu -c 'use std testing; testing run-tests --path crates/nu-std'
        run: nu -c 'use crates/nu-std/testing.nu; testing run-tests --path crates/nu-std'

      - name: Setup Python
        uses: actions/setup-python@v4
        uses: actions/setup-python@v5
        with:
          python-version: "3.10"

      - run: python -m pip install tox

      - name: Install virtualenv
        run: git clone https://github.com/pypa/virtualenv.git
        run: pip install virtualenv
        shell: bash

      - name: Test Nushell in virtualenv
        run: |
          cd virtualenv
          # if we encounter problems with bleeding edge tests pin to the latest tag
          # git checkout $(git describe --tags | cut -d - -f 1)
          # We need to disable failing on coverage levels.
          nu -c "open pyproject.toml | upsert tool.coverage.report.fail_under 1 | save patchproject.toml"
          mv patchproject.toml pyproject.toml
          tox -e ${{ matrix.py }} -- -k nushell
        run: nu scripts/test_virtualenv.nu
        shell: bash

      - name: Check for clean repo
        shell: bash
        run: |
          if [ -n "$(git status --porcelain)" ]; then
            echo "there are changes";
            git status --porcelain
            exit 1
          else
            echo "no changes in working directory";
          fi

  plugins:
    strategy:
      fail-fast: true
@@ -141,10 +164,10 @@
    runs-on: ${{ matrix.platform }}

    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v4

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
        uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
        with:
          rustflags: ""

@@ -153,3 +176,14 @@

      - name: Tests
        run: cargo test --profile ci --package nu_plugin_*

      - name: Check for clean repo
        shell: bash
        run: |
          if [ -n "$(git status --porcelain)" ]; then
            echo "there are changes";
            git status --porcelain
            exit 1
          else
            echo "no changes in working directory";
          fi
162 .github/workflows/nightly-build.yml (vendored)
@@ -13,7 +13,7 @@ on:
      - nightly # Just for test purpose only with the nightly repo
  # This schedule will run only from the default branch
  schedule:
    - cron: '15 1 * * *' # run at 01:15 AM UTC
    - cron: '15 0 * * *' # run at 00:15 AM UTC

defaults:
  run:
@@ -27,7 +27,7 @@ jobs:
    # if: github.repository == 'nushell/nightly'
    steps:
      - name: Checkout
        uses: actions/checkout@v3
        uses: actions/checkout@v4
        if: github.repository == 'nushell/nightly'
        with:
          ref: main
@@ -36,12 +36,10 @@
          token: ${{ secrets.WORKFLOW_TOKEN }}

      - name: Setup Nushell
        uses: hustcer/setup-nu@v3
        uses: hustcer/setup-nu@v3.9
        if: github.repository == 'nushell/nightly'
        with:
          version: 0.81.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          version: 0.90.1

      # Synchronize the main branch of nightly repo with the main branch of Nushell official repo
      - name: Prepare for Nightly Release
@@ -53,22 +51,21 @@
          # We can't push if no user name and email are configured
          git config user.name 'hustcer'
          git config user.email 'hustcer@outlook.com'
          git fetch origin main
          git pull origin main
          git remote add src https://github.com/nushell/nushell.git
          git fetch src main
          # git pull --rebase src main
          # All the changes will be overwritten by the upstream main branch
          git reset --hard src/main
          git push origin main -f
          let sha_short = (git rev-parse --short src/main | str trim | str substring 0..7)
          let sha_short = (git rev-parse --short origin/main | str trim | str substring 0..7)
          let tag_name = $'nightly-($sha_short)'
          if (git ls-remote --tags origin $tag_name | is-empty) {
            git tag -a $tag_name -m $'Nightly build from ($sha_short)'
            git push origin --tags
          }

  release:
    name: Release
  standard:
    name: Std
    needs: prepare
    strategy:
      fail-fast: false
@@ -118,38 +115,39 @@
            target_rustflags: ''
          - target: armv7-unknown-linux-gnueabihf
            os: ubuntu-20.04
            target_rustflags: '--exclude=nu-cmd-dataframe'
            target_rustflags: ''
          - target: riscv64gc-unknown-linux-gnu
            os: ubuntu-20.04
            target_rustflags: '--exclude=nu-cmd-dataframe'
            os: ubuntu-latest
            target_rustflags: ''

    runs-on: ${{matrix.os}}

    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v4
        with:
          ref: main
          fetch-depth: 0

      - name: Update Rust Toolchain Target
        run: |
          echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
        uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
        # WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
        with:
          rustflags: ''

      - name: Setup Nushell
        uses: hustcer/setup-nu@v3
        uses: hustcer/setup-nu@v3.9
        with:
          version: 0.81.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          version: 0.90.1

      - name: Release Nu Binary
        id: nu
        run: nu .github/workflows/release-pkg.nu
        env:
          RELEASE_TYPE: standard
          OS: ${{ matrix.os }}
          REF: ${{ github.ref }}
          TARGET: ${{ matrix.target }}
@@ -158,7 +156,7 @@

      - name: Create an Issue for Release Failure
        if: ${{ failure() }}
        uses: JasonEtco/create-an-issue@v2.9.1
        uses: JasonEtco/create-an-issue@v2.9.2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
@@ -176,7 +174,119 @@
      # REF: https://github.com/marketplace/actions/gh-release
      # Create a release only in nushell/nightly repo
      - name: Publish Archive
        uses: softprops/action-gh-release@v0.1.13
        uses: softprops/action-gh-release@v0.1.15
        if: ${{ startsWith(github.repository, 'nushell/nightly') }}
        with:
          prerelease: true
          files: ${{ steps.nu.outputs.archive }}
          tag_name: nightly-${{ steps.vars.outputs.sha_short }}
          name: Nu-nightly-${{ steps.vars.outputs.date }}-${{ steps.vars.outputs.sha_short }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  full:
    name: Full
    needs: prepare
    strategy:
      fail-fast: false
      matrix:
        target:
          - aarch64-apple-darwin
          - x86_64-apple-darwin
          - x86_64-pc-windows-msvc
          - aarch64-pc-windows-msvc
          - x86_64-unknown-linux-gnu
          - x86_64-unknown-linux-musl
          - aarch64-unknown-linux-gnu
        extra: ['bin']
        include:
          - target: aarch64-apple-darwin
            os: macos-latest
            target_rustflags: '--features=dataframe,extra'
          - target: x86_64-apple-darwin
            os: macos-latest
            target_rustflags: '--features=dataframe,extra'
          - target: x86_64-pc-windows-msvc
            extra: 'bin'
            os: windows-latest
            target_rustflags: '--features=dataframe,extra'
          - target: x86_64-pc-windows-msvc
            extra: msi
            os: windows-latest
            target_rustflags: '--features=dataframe,extra'
          - target: aarch64-pc-windows-msvc
            extra: 'bin'
            os: windows-latest
            target_rustflags: '--features=dataframe,extra'
          - target: aarch64-pc-windows-msvc
            extra: msi
            os: windows-latest
            target_rustflags: '--features=dataframe,extra'
          - target: x86_64-unknown-linux-gnu
            os: ubuntu-20.04
            target_rustflags: '--features=dataframe,extra'
          - target: x86_64-unknown-linux-musl
            os: ubuntu-20.04
            target_rustflags: '--features=dataframe,extra'
          - target: aarch64-unknown-linux-gnu
            os: ubuntu-20.04
            target_rustflags: '--features=dataframe,extra'

    runs-on: ${{matrix.os}}

    steps:
      - uses: actions/checkout@v4
        with:
          ref: main
          fetch-depth: 0

      - name: Update Rust Toolchain Target
        run: |
          echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
        # WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
        with:
          rustflags: ''

      - name: Setup Nushell
        uses: hustcer/setup-nu@v3.9
        with:
          version: 0.90.1

      - name: Release Nu Binary
        id: nu
        run: nu .github/workflows/release-pkg.nu
        env:
          RELEASE_TYPE: full
          OS: ${{ matrix.os }}
          REF: ${{ github.ref }}
          TARGET: ${{ matrix.target }}
          _EXTRA_: ${{ matrix.extra }}
          TARGET_RUSTFLAGS: ${{ matrix.target_rustflags }}

      - name: Create an Issue for Release Failure
        if: ${{ failure() }}
        uses: JasonEtco/create-an-issue@v2.9.2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          update_existing: true
          search_existing: open
          filename: .github/AUTO_ISSUE_TEMPLATE/nightly-build-fail.md

      - name: Set Outputs of Short SHA
        id: vars
        run: |
          echo "date=$(date -u +'%Y-%m-%d')" >> $GITHUB_OUTPUT
          sha_short=$(git rev-parse --short HEAD)
          echo "sha_short=${sha_short:0:7}" >> $GITHUB_OUTPUT

      # REF: https://github.com/marketplace/actions/gh-release
      # Create a release only in nushell/nightly repo
      - name: Publish Archive
        uses: softprops/action-gh-release@v0.1.15
        if: ${{ startsWith(github.repository, 'nushell/nightly') }}
        with:
          draft: false
@@ -200,16 +310,14 @@
      - name: Waiting for Release
        run: sleep 1800

      - uses: actions/checkout@v3
      - uses: actions/checkout@v4
        with:
          ref: main

      - name: Setup Nushell
        uses: hustcer/setup-nu@v3
        uses: hustcer/setup-nu@v3.9
        with:
          version: 0.81.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          version: 0.90.1

      # Keep the last a few releases
      - name: Delete Older Releases
75 .github/workflows/release-pkg.nu (vendored)
@@ -9,34 +9,36 @@
# Instructions for manually creating an MSI for Winget Releases when they fail
# Added 2022-11-29 when Windows packaging wouldn't work
# Updated again on 2023-02-23 because msis are still failing validation
# Update on 2023-10-18 to use RELEASE_TYPE env var to determine if full or not
# To run this manual for windows here are the steps I take
# checkout the release you want to publish
# 1. git checkout 0.76.0
# 1. git checkout 0.86.0
# unset CARGO_TARGET_DIR if set (I have to do this in the parent shell to get it to work)
# 2. $env:CARGO_TARGET_DIR = ""
# 2. hide-env CARGO_TARGET_DIR
# 3. $env.TARGET = 'x86_64-pc-windows-msvc'
# 4. $env.TARGET_RUSTFLAGS = ''
# 5. $env.GITHUB_WORKSPACE = 'C:\Users\dschroeder\source\repos\forks\nushell'
# 6. $env.GITHUB_OUTPUT = 'C:\Users\dschroeder\source\repos\forks\nushell\output\out.txt'
# 5. $env.GITHUB_WORKSPACE = 'D:\nushell'
# 6. $env.GITHUB_OUTPUT = 'D:\nushell\output\out.txt'
# 7. $env.OS = 'windows-latest'
# 8. $env.RELEASE_TYPE = '' # There is full and '' for normal releases
# make sure 7z.exe is in your path https://www.7-zip.org/download.html
# 8. $env.Path = ($env.Path | append 'c:\apps\7-zip')
# 9. $env.Path = ($env.Path | append 'c:\apps\7-zip')
# make sure aria2c.exe is in your path https://github.com/aria2/aria2
# 9. $env.Path = ($env.Path | append 'c:\path\to\aria2c')
# 10. $env.Path = ($env.Path | append 'c:\path\to\aria2c')
# make sure you have the wixtools installed https://wixtoolset.org/
# 10. $env.Path = ($env.Path | append 'C:\Users\dschroeder\AppData\Local\tauri\WixTools')
# 11. $env.Path = ($env.Path | append 'C:\Users\dschroeder\AppData\Local\tauri\WixTools')
# You need to run the release-pkg twice. The first pass, with _EXTRA_ as 'bin', makes the output
# folder and builds everything. The second pass, that generates the msi file, with _EXTRA_ as 'msi'
# 11. $env._EXTRA_ = 'bin'
# 12. source .github\workflows\release-pkg.nu
# 13. cd ..
# 14. $env._EXTRA_ = 'msi'
# 15. source .github\workflows\release-pkg.nu
# 12. $env._EXTRA_ = 'bin'
# 13. source .github\workflows\release-pkg.nu
# 14. cd ..
# 15. $env._EXTRA_ = 'msi'
# 16. source .github\workflows\release-pkg.nu
# After msi is generated, you have to update winget-pkgs repo, you'll need to patch the release
# by deleting the existing msi and uploading this new msi. Then you'll need to update the hash
# on the winget-pkgs PR. To generate the hash, run this command
# 16. open target\wix\nu-0.74.0-x86_64-pc-windows-msvc.msi | hash sha256
# 17. open target\wix\nu-0.74.0-x86_64-pc-windows-msvc.msi | hash sha256
# Then, just take the output and put it in the winget-pkgs PR for the hash on the msi


@@ -51,11 +53,26 @@ let dist = $'($env.GITHUB_WORKSPACE)/output'
let version = (open Cargo.toml | get package.version)

print $'Debugging info:'
print { version: $version, bin: $bin, os: $os, target: $target, src: $src, flags: $flags, dist: $dist }; hr-line -b
print { version: $version, bin: $bin, os: $os, releaseType: $env.RELEASE_TYPE, target: $target, src: $src, flags: $flags, dist: $dist }; hr-line -b

# Rename the full release name so that we won't break the existing scripts for standard release downloading, such as:
# curl -s https://api.github.com/repos/chmln/sd/releases/latest | grep browser_download_url | cut -d '"' -f 4 | grep x86_64-unknown-linux-musl
const FULL_RLS_NAMING = {
    x86_64-apple-darwin: 'x86_64-darwin-full',
    aarch64-apple-darwin: 'aarch64-darwin-full',
    x86_64-unknown-linux-gnu: 'x86_64-linux-gnu-full',
    x86_64-pc-windows-msvc: 'x86_64-windows-msvc-full',
    x86_64-unknown-linux-musl: 'x86_64-linux-musl-full',
    aarch64-unknown-linux-gnu: 'aarch64-linux-gnu-full',
    aarch64-pc-windows-msvc: 'aarch64-windows-msvc-full',
    riscv64gc-unknown-linux-gnu: 'riscv64-linux-gnu-full',
    armv7-unknown-linux-gnueabihf: 'armv7-linux-gnueabihf-full',
}

# $env

let USE_UBUNTU = 'ubuntu-20.04'
let FULL_NAME = $FULL_RLS_NAMING | get -i $target | default 'unknown-target-full'

print $'(char nl)Packaging ($bin) v($version) for ($target) in ($src)...'; hr-line -b
if not ('Cargo.lock' | path exists) { cargo generate-lockfile }
@@ -65,8 +82,8 @@ print $'Start building ($bin)...'; hr-line
# ----------------------------------------------------------------------------
# Build for Ubuntu and macOS
# ----------------------------------------------------------------------------
if $os in [$USE_UBUNTU, 'macos-latest'] {
    if $os == $USE_UBUNTU {
if $os in [$USE_UBUNTU, 'macos-latest', 'ubuntu-latest'] {
    if $os starts-with ubuntu {
        sudo apt update
        sudo apt-get install libxcb-composite0-dev -y
    }
@@ -89,7 +106,7 @@ if $os in [$USE_UBUNTU, 'macos-latest'] {
        _ => {
            # musl-tools to fix 'Failed to find tool. Is `musl-gcc` installed?'
            # Actually just for x86_64-unknown-linux-musl target
            if $os == $USE_UBUNTU { sudo apt install musl-tools -y }
            if $os starts-with ubuntu { sudo apt install musl-tools -y }
            cargo-build-nu $flags
        }
    }
@@ -119,10 +136,8 @@ print (ls -f $executable); sleep 1sec
print $'(char nl)Copying release files...'; hr-line
"To use Nu plugins, use the register command to tell Nu where to find the plugin. For example:

> register ./nu_plugin_query" | save $'($dist)/README.txt'
> register ./nu_plugin_query" | save $'($dist)/README.txt' -f
[LICENSE $executable] | each {|it| cp -rv $it $dist } | flatten
# Sleep a few seconds to make sure the cp process finished successfully
sleep 3sec

print $'(char nl)Check binary release version detail:'; hr-line
let ver = if $os == 'windows-latest' {
@@ -131,17 +146,17 @@ let ver = if $os == 'windows-latest' {
    (do -i { ./output/nu -c 'version' }) | str join
}
if ($ver | str trim | is-empty) {
    print $'(ansi r)Incompatible nu binary...(ansi reset)'
    print $'(ansi r)Incompatible Nu binary: The binary cross compiled is not runnable on current arch...(ansi reset)'
} else { print $ver }

# ----------------------------------------------------------------------------
# Create a release archive and send it to output for the following steps
# ----------------------------------------------------------------------------
cd $dist; print $'(char nl)Creating release archive...'; hr-line
if $os in [$USE_UBUNTU, 'macos-latest'] {
if $os in [$USE_UBUNTU, 'macos-latest', 'ubuntu-latest'] {

    let files = (ls | get name)
    let dest = $'($bin)-($version)-($target)'
    let dest = if $env.RELEASE_TYPE == 'full' { $'($bin)-($version)-($FULL_NAME)' } else { $'($bin)-($version)-($target)' }
    let archive = $'($dist)/($dest).tar.gz'

    mkdir $dest
@@ -156,7 +171,7 @@ if $os in [$USE_UBUNTU, 'macos-latest'] {

} else if $os == 'windows-latest' {

    let releaseStem = $'($bin)-($version)-($target)'
    let releaseStem = if $env.RELEASE_TYPE == 'full' { $'($bin)-($version)-($FULL_NAME)' } else { $'($bin)-($version)-($target)' }

    print $'(char nl)Download less related stuffs...'; hr-line
    aria2c https://github.com/jftuga/less-Windows/releases/download/less-v608/less.exe -o less.exe
@@ -172,18 +187,22 @@ if $os in [$USE_UBUNTU, 'macos-latest'] {
        cp -r $'($dist)/*' target/release/
        cargo install cargo-wix --version 0.3.4
        cargo wix --no-build --nocapture --package nu --output $wixRelease
        print $'archive: ---> ($wixRelease)';
        echo $"archive=($wixRelease)" | save --append $env.GITHUB_OUTPUT
        # Workaround for https://github.com/softprops/action-gh-release/issues/280
        let archive = ($wixRelease | str replace --all '\' '/')
        print $'archive: ---> ($archive)';
        echo $"archive=($archive)" | save --append $env.GITHUB_OUTPUT

    } else {

        print $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls
        let archive = $'($dist)/($releaseStem).zip'
        7z a $archive *
        print $'archive: ---> ($archive)';
        let pkg = (ls -f $archive | get name)
        if not ($pkg | is-empty) {
            echo $"archive=($pkg | get 0)" | save --append $env.GITHUB_OUTPUT
            # Workaround for https://github.com/softprops/action-gh-release/issues/280
            let archive = ($pkg | get 0 | str replace --all '\' '/')
            print $'archive: ---> ($archive)'
            echo $"archive=($archive)" | save --append $env.GITHUB_OUTPUT
        }
    }
}
@@ -206,7 +225,7 @@ def 'cargo-build-nu' [ options: string ] {

# Print a horizontal line marker
def 'hr-line' [
    --blank-line(-b): bool
    --blank-line(-b)
] {
    print $'(ansi g)---------------------------------------------------------------------------->(ansi reset)'
    if $blank_line { char nl }
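The last hunk drops the `: bool` annotation from `--blank-line(-b)`: in current Nushell a flag declared without a type is a switch whose presence is read as a boolean in the body. A minimal sketch of that pattern, simplified from `hr-line` with a plain `print` instead of the ANSI line:

```nushell
# Switch flag: no type annotation; dashes in the flag name become underscores in the variable
def 'hr-line' [--blank-line(-b)] {
    print '---------------------------------------------------------------------------->'
    if $blank_line { print '' }
}

hr-line -b   # prints the marker line followed by a blank line
```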
113 .github/workflows/release.yml (vendored)
@@ -14,8 +14,8 @@ defaults:
    shell: bash

jobs:
  all:
    name: All
  standard:
    name: Std

    strategy:
      matrix:
@@ -64,36 +64,36 @@
            target_rustflags: ''
          - target: armv7-unknown-linux-gnueabihf
            os: ubuntu-20.04
            target_rustflags: '--exclude=nu-cmd-dataframe'
            target_rustflags: ''
          - target: riscv64gc-unknown-linux-gnu
            os: ubuntu-20.04
            target_rustflags: '--exclude=nu-cmd-dataframe'
            os: ubuntu-latest
            target_rustflags: ''

    runs-on: ${{matrix.os}}

    steps:
      - uses: actions/checkout@v3
      - uses: actions/checkout@v4

      - name: Update Rust Toolchain Target
        run: |
          echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.5.0
        uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
        # WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
        with:
          rustflags: ''

      - name: Setup Nushell
        uses: hustcer/setup-nu@v3
        uses: hustcer/setup-nu@v3.9
        with:
          version: 0.81.0
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          version: 0.90.1

      - name: Release Nu Binary
        id: nu
        run: nu .github/workflows/release-pkg.nu
        env:
          RELEASE_TYPE: standard
          OS: ${{ matrix.os }}
          REF: ${{ github.ref }}
          TARGET: ${{ matrix.target }}
@@ -102,7 +102,96 @@

      # REF: https://github.com/marketplace/actions/gh-release
      - name: Publish Archive
        uses: softprops/action-gh-release@v0.1.13
        uses: softprops/action-gh-release@v0.1.15
        if: ${{ startsWith(github.ref, 'refs/tags/') }}
        with:
          draft: true
          files: ${{ steps.nu.outputs.archive }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  full:
    name: Full

    strategy:
      fail-fast: false
      matrix:
        target:
          - aarch64-apple-darwin
          - x86_64-apple-darwin
          - x86_64-pc-windows-msvc
          - aarch64-pc-windows-msvc
          - x86_64-unknown-linux-gnu
          - x86_64-unknown-linux-musl
          - aarch64-unknown-linux-gnu
        extra: ['bin']
        include:
          - target: aarch64-apple-darwin
            os: macos-latest
            target_rustflags: '--features=dataframe,extra'
          - target: x86_64-apple-darwin
            os: macos-latest
            target_rustflags: '--features=dataframe,extra'
          - target: x86_64-pc-windows-msvc
            extra: 'bin'
            os: windows-latest
            target_rustflags: '--features=dataframe,extra'
          - target: x86_64-pc-windows-msvc
            extra: msi
            os: windows-latest
            target_rustflags: '--features=dataframe,extra'
          - target: aarch64-pc-windows-msvc
            extra: 'bin'
            os: windows-latest
            target_rustflags: '--features=dataframe,extra'
          - target: aarch64-pc-windows-msvc
            extra: msi
            os: windows-latest
            target_rustflags: '--features=dataframe,extra'
          - target: x86_64-unknown-linux-gnu
            os: ubuntu-20.04
            target_rustflags: '--features=dataframe,extra'
          - target: x86_64-unknown-linux-musl
            os: ubuntu-20.04
            target_rustflags: '--features=dataframe,extra'
          - target: aarch64-unknown-linux-gnu
            os: ubuntu-20.04
            target_rustflags: '--features=dataframe,extra'

    runs-on: ${{matrix.os}}

    steps:
      - uses: actions/checkout@v4

      - name: Update Rust Toolchain Target
        run: |
          echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
        # WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
        with:
          rustflags: ''

      - name: Setup Nushell
        uses: hustcer/setup-nu@v3.9
        with:
          version: 0.90.1

      - name: Release Nu Binary
        id: nu
        run: nu .github/workflows/release-pkg.nu
        env:
          RELEASE_TYPE: full
          OS: ${{ matrix.os }}
          REF: ${{ github.ref }}
          TARGET: ${{ matrix.target }}
          _EXTRA_: ${{ matrix.extra }}
          TARGET_RUSTFLAGS: ${{ matrix.target_rustflags }}

      # REF: https://github.com/marketplace/actions/gh-release
      - name: Publish Archive
        uses: softprops/action-gh-release@v0.1.15
        if: ${{ startsWith(github.ref, 'refs/tags/') }}
        with:
          draft: true
4 .github/workflows/typos.yml (vendored)
@@ -7,7 +7,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Actions Repository
        uses: actions/checkout@v3
        uses: actions/checkout@v4

      - name: Check spelling
        uses: crate-ci/typos@master
        uses: crate-ci/typos@v1.19.0
7 .github/workflows/winget-submission.yml (vendored)
@@ -7,19 +7,22 @@ on:
    inputs:
      tag_name:
        description: 'Specific tag name'
        required: true
        required: true
        type: string

jobs:

  winget:
    name: Publish winget package
    runs-on: windows-latest
    runs-on: ubuntu-latest
    steps:
      - name: Submit package to Windows Package Manager Community Repository
        uses: vedantmgoyal2009/winget-releaser@v2
        with:
          identifier: Nushell.Nushell
          # Exclude all `*-msvc-full.msi` full release files,
          # and only the default `*msvc.msi` files will be included
          installers-regex: 'msvc\.msi$'
          version: ${{ inputs.tag_name || github.event.release.tag_name }}
          release-tag: ${{ inputs.tag_name || github.event.release.tag_name }}
          token: ${{ secrets.NUSHELL_PAT }}
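The `installers-regex` above is meant to pick up only the standard installers; names produced by the full release end in `-msvc-full.msi` and do not match. A quick way to check this against the two name shapes used in release-pkg.nu (illustrative file names, assuming version 0.91.0):

```nushell
# Standard installer name: matches, so it is submitted to winget
'nu-0.91.0-x86_64-pc-windows-msvc.msi' =~ 'msvc\.msi$'        # => true

# Full-feature installer name: does not match, so it is excluded
'nu-0.91.0-x86_64-windows-msvc-full.msi' =~ 'msvc\.msi$'      # => false
```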
@@ -2,7 +2,25 @@

Welcome to Nushell and thank you for considering contributing!

## Review Process
## Table of contents
- [Proposing design changes](#proposing-design-changes)
- [Developing](#developing)
  - [Setup](#setup)
  - [Tests](#tests)
  - [Useful commands](#useful-commands)
  - [Debugging tips](#debugging-tips)
- [Git etiquette](#git-etiquette)
- [License](#license)

## Other helpful resources

More resources can be found in the nascent [developer documentation](devdocs/README.md) in this repo.

- [Developer FAQ](FAQ.md)
- [Platform support policy](PLATFORM_SUPPORT.md)
- [Our Rust style](devdocs/rust_style.md)

## Proposing design changes

First of all, before diving into the code, if you want to create a new feature, change something significantly, and especially if the change is user-facing, it is a good practice to first get an approval from the core team before starting to work on it.
This saves both your and our time if we realize the change needs to go another direction before spending time on it.
@@ -33,7 +51,7 @@ cargo build

### Tests

It is a good practice to cover your changes with a test. Also, try to think about corner cases and various ways how your changes could break. Cover those in the tests as well.
It is good practice to cover your changes with a test. Also, try to think about corner cases and various ways how your changes could break. Cover those in the tests as well.

Tests can be found in different places:
* `/tests`
@@ -41,80 +59,83 @@ Tests can be found in different places:
* command examples
* crate-specific tests

The most comprehensive test suite we have is the `nu-test-support` crate. For testing specific features, such as running Nushell in a REPL mode, we have so called "testbins". For simple tests, you can find `run_test()` and `fail_test()` functions.
Most of the tests are built upon the `nu-test-support` crate. For testing specific features, such as running Nushell in a REPL mode, we have so called "testbins". For simple tests, you can find `run_test()` and `fail_test()` functions.

### Useful Commands

As Nushell is built using a cargo workspace consisting of multiple crates keep in mind that you may need to pass additional flags compared to how you may be used to it from a single crate project.
Read cargo's documentation for more details: https://doc.rust-lang.org/cargo/reference/workspaces.html

- Build and run Nushell:

  ```shell
  ```nushell
  cargo run
  ```

- Build and run with dataframe support.
  ```shell
  ```nushell
  cargo run --features=dataframe
  ```

- Run Clippy on Nushell:

  ```shell
  cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A clippy::needless_collect -A clippy::result_large_err
  ```nushell
  cargo clippy --workspace -- -D warnings -D clippy::unwrap_used
  ```
  or via the `toolkit.nu` command:
  ```shell
  ```nushell
  use toolkit.nu clippy
  clippy
  ```

- Run all tests:

  ```shell
  ```nushell
  cargo test --workspace
  ```

  along with dataframe tests

  ```shell
  ```nushell
  cargo test --workspace --features=dataframe
  ```
  or via the `toolkit.nu` command:
  ```shell
  ```nushell
  use toolkit.nu test
  test
  ```

- Run all tests for a specific command

  ```shell
  ```nushell
  cargo test --package nu-cli --test main -- commands::<command_name_here>
  ```

- Check to see if there are code formatting issues

  ```shell
  ```nushell
  cargo fmt --all -- --check
  ```
  or via the `toolkit.nu` command:
  ```shell
  ```nushell
  use toolkit.nu fmt
  fmt --check
  ```

- Format the code in the project

  ```shell
  ```nushell
  cargo fmt --all
  ```
  or via the `toolkit.nu` command:
  ```shell
  ```nushell
  use toolkit.nu fmt
  fmt
  ```

- Set up `git` hooks to check formatting and run `clippy` before committing and pushing:

  ```shell
  ```nushell
  use toolkit.nu setup-git-hooks
  setup-git-hooks
  ```
@@ -124,12 +145,12 @@ The most comprehensive test suite we have is the `nu-test-support` crate. For te

- To view verbose logs when developing, enable the `trace` log level.

  ```shell
  ```nushell
  cargo run --release -- --log-level trace
  ```

- To redirect trace logs to a file, enable the `--log-target file` switch.
  ```shell
  ```nushell
  cargo run --release -- --log-level trace --log-target file
  open $"($nu.temp-path)/nu-($nu.pid).log"
  ```
@@ -220,7 +241,7 @@ You can help us to make the review process a smooth experience:
- Choose what simplifies having confidence in the conflict resolution and the review. **Merge commits in your branch are OK** in the squash model.
- Feel free to notify your reviewers or affected PR authors if your change might cause larger conflicts with another change.
- During the rollup of multiple PRs, we may choose to resolve merge conflicts and CI failures ourselves. (Allow maintainers to push to your branch to enable us to do this quickly.)

### License

## License

We use the [MIT License](https://github.com/nushell/nushell/blob/main/LICENSE) in all of our Nushell projects. If you are including or referencing a crate that uses the [GPL License](https://www.gnu.org/licenses/gpl-3.0.en.html#license-text) unfortunately we will not be able to accept your PR.
3650 Cargo.lock (generated)
File diff suppressed because it is too large
91
Cargo.toml
91
Cargo.toml
@ -10,8 +10,8 @@ homepage = "https://www.nushell.sh"
|
||||
license = "MIT"
|
||||
name = "nu"
|
||||
repository = "https://github.com/nushell/nushell"
|
||||
rust-version = "1.60"
|
||||
version = "0.83.1"
|
||||
rust-version = "1.74.1"
|
||||
version = "0.91.0"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
@ -33,48 +33,53 @@ members = [
|
||||
"crates/nu-cmd-lang",
|
||||
"crates/nu-cmd-dataframe",
|
||||
"crates/nu-command",
|
||||
"crates/nu-color-config",
|
||||
"crates/nu-explore",
|
||||
"crates/nu-json",
|
||||
"crates/nu-lsp",
|
||||
"crates/nu-pretty-hex",
|
||||
"crates/nu-protocol",
|
||||
"crates/nu-plugin",
|
||||
"crates/nu_plugin_inc",
|
||||
"crates/nu_plugin_gstat",
|
||||
"crates/nu_plugin_example",
|
||||
"crates/nu_plugin_stream_example",
|
||||
"crates/nu_plugin_query",
|
||||
"crates/nu_plugin_custom_values",
|
||||
"crates/nu_plugin_formats",
|
||||
"crates/nu-std",
|
||||
"crates/nu-table",
|
||||
"crates/nu-term-grid",
|
||||
"crates/nu-test-support",
|
||||
"crates/nu-utils",
|
||||
]
|
||||
|
||||
[dependencies]
|
||||
nu-cli = { path = "./crates/nu-cli", version = "0.83.1" }
|
||||
nu-color-config = { path = "./crates/nu-color-config", version = "0.83.1" }
|
||||
nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.83.1" }
|
||||
nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.83.1" }
|
||||
nu-cmd-dataframe = { path = "./crates/nu-cmd-dataframe", version = "0.83.1", optional = true }
|
||||
nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.83.1", optional = true }
|
||||
nu-command = { path = "./crates/nu-command", version = "0.83.1" }
|
||||
nu-engine = { path = "./crates/nu-engine", version = "0.83.1" }
|
||||
nu-explore = { path = "./crates/nu-explore", version = "0.83.1" }
|
||||
nu-json = { path = "./crates/nu-json", version = "0.83.1" }
|
||||
nu-parser = { path = "./crates/nu-parser", version = "0.83.1" }
|
||||
nu-path = { path = "./crates/nu-path", version = "0.83.1" }
|
||||
nu-plugin = { path = "./crates/nu-plugin", optional = true, version = "0.83.1" }
|
||||
nu-pretty-hex = { path = "./crates/nu-pretty-hex", version = "0.83.1" }
|
||||
nu-protocol = { path = "./crates/nu-protocol", version = "0.83.1" }
|
||||
nu-system = { path = "./crates/nu-system", version = "0.83.1" }
|
||||
nu-table = { path = "./crates/nu-table", version = "0.83.1" }
|
||||
nu-term-grid = { path = "./crates/nu-term-grid", version = "0.83.1" }
|
||||
nu-std = { path = "./crates/nu-std", version = "0.83.1" }
|
||||
nu-utils = { path = "./crates/nu-utils", version = "0.83.1" }
|
||||
nu-ansi-term = "0.49.0"
|
||||
reedline = { version = "0.22.0", features = ["bashisms", "sqlite"]}
|
||||
nu-cli = { path = "./crates/nu-cli", version = "0.91.0" }
|
||||
nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.91.0" }
|
||||
nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.91.0" }
|
||||
nu-cmd-dataframe = { path = "./crates/nu-cmd-dataframe", version = "0.91.0", features = [
|
||||
"dataframe",
|
||||
], optional = true }
|
||||
nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.91.0", optional = true }
|
||||
nu-command = { path = "./crates/nu-command", version = "0.91.0" }
|
||||
nu-engine = { path = "./crates/nu-engine", version = "0.91.0" }
|
||||
nu-explore = { path = "./crates/nu-explore", version = "0.91.0" }
|
||||
nu-lsp = { path = "./crates/nu-lsp/", version = "0.91.0" }
|
||||
nu-parser = { path = "./crates/nu-parser", version = "0.91.0" }
|
||||
nu-path = { path = "./crates/nu-path", version = "0.91.0" }
|
||||
nu-plugin = { path = "./crates/nu-plugin", optional = true, version = "0.91.0" }
|
||||
nu-protocol = { path = "./crates/nu-protocol", version = "0.91.0" }
|
||||
nu-std = { path = "./crates/nu-std", version = "0.91.0" }
|
||||
nu-utils = { path = "./crates/nu-utils", version = "0.91.0" }
|
||||
|
||||
mimalloc = { version = "0.1.37", default-features = false, optional = true}
|
||||
reedline = { version = "0.30.0", features = ["bashisms", "sqlite"] }
|
||||
|
||||
crossterm = "0.26"
|
||||
crossterm = "0.27"
|
||||
ctrlc = "3.4"
|
||||
log = "0.4"
|
||||
miette = { version = "5.10", features = ["fancy-no-backtrace"] }
|
||||
miette = { version = "7.1", features = ["fancy-no-backtrace", "fancy"] }
|
||||
mimalloc = { version = "0.1.37", default-features = false, optional = true }
|
||||
serde_json = "1.0"
|
||||
simplelog = "0.12"
|
||||
time = "0.3"
|
||||
@@ -82,28 +87,26 @@ time = "0.3"
|
||||
[target.'cfg(not(target_os = "windows"))'.dependencies]
|
||||
# Our dependencies don't use OpenSSL on Windows
|
||||
openssl = { version = "0.10", features = ["vendored"], optional = true }
|
||||
signal-hook = { version = "0.3", default-features = false }
|
||||
|
||||
[target.'cfg(windows)'.build-dependencies]
|
||||
winresource = "0.1"
|
||||
|
||||
[target.'cfg(target_family = "unix")'.dependencies]
|
||||
nix = { version = "0.26", default-features = false, features = [
|
||||
nix = { version = "0.27", default-features = false, features = [
|
||||
"signal",
|
||||
"process",
|
||||
"fs",
|
||||
"term",
|
||||
] }
|
||||
is-terminal = "0.4.8"
|
||||
|
||||
[dev-dependencies]
|
||||
nu-test-support = { path = "./crates/nu-test-support", version = "0.83.1" }
|
||||
tempfile = "3.7"
|
||||
nu-test-support = { path = "./crates/nu-test-support", version = "0.91.0" }
|
||||
assert_cmd = "2.0"
|
||||
criterion = "0.5"
|
||||
divan = "0.1.14"
|
||||
pretty_assertions = "1.4"
|
||||
serial_test = "2.0"
|
||||
rstest = { version = "0.17", default-features = false }
|
||||
rstest = { version = "0.18", default-features = false }
|
||||
serial_test = "3.0"
|
||||
tempfile = "3.10"
|
||||
|
||||
[features]
|
||||
plugin = [
|
||||
@@ -114,15 +117,26 @@ plugin = [
|
||||
"nu-protocol/plugin",
|
||||
"nu-engine/plugin",
|
||||
]
|
||||
default = ["plugin", "which-support", "trash-support", "sqlite"]
|
||||
default = ["default-no-clipboard", "system-clipboard"]
|
||||
# Enables convenient omitting of the system-clipboard feature, as it leads to problems in ci on linux
|
||||
# See https://github.com/nushell/nushell/pull/11535
|
||||
default-no-clipboard = [
|
||||
"plugin",
|
||||
"which-support",
|
||||
"trash-support",
|
||||
"sqlite",
|
||||
"mimalloc",
|
||||
]
|
||||
stable = ["default"]
|
||||
wasi = ["nu-cmd-lang/wasi"]
|
||||
# NOTE: individual features are also passed to `nu-cmd-lang` that uses them to generate the feature matrix in the `version` command
|
||||
|
||||
# Enable to statically link OpenSSL; otherwise the system version will be used. Not enabled by default because it takes a while to build
|
||||
# Enable to statically link OpenSSL (perl is required, to build OpenSSL https://docs.rs/openssl/latest/openssl/);
|
||||
# otherwise the system version will be used. Not enabled by default because it takes a while to build
|
||||
static-link-openssl = ["dep:openssl", "nu-cmd-lang/static-link-openssl"]
|
||||
|
||||
mimalloc = ["nu-cmd-lang/mimalloc", "dep:mimalloc"]
|
||||
system-clipboard = ["reedline/system_clipboard"]
|
||||
|
||||
# Stable (Default)
|
||||
which-support = ["nu-command/which-support", "nu-cmd-lang/which-support"]
|
||||
@@ -165,10 +179,9 @@ bench = false
|
||||
# To use a development version of a dependency please use a global override here
|
||||
# changing versions in each sub-crate of the workspace is tedious
|
||||
[patch.crates-io]
|
||||
# reedline = { git = "https://github.com/nushell/reedline.git", branch = "main"}
|
||||
# reedline = { git = "https://github.com/nushell/reedline", branch = "main" }
|
||||
# nu-ansi-term = {git = "https://github.com/nushell/nu-ansi-term.git", branch = "main"}
|
||||
|
||||
# Criterion benchmarking setup
|
||||
# Run all benchmarks with `cargo bench`
|
||||
# Run individual benchmarks like `cargo bench -- <regex>` e.g. `cargo bench -- parse`
|
||||
[[bench]]
|
||||
|
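For readers less familiar with Cargo features: the `[features]` table above only defines compile-time switches; crate code opts in with `cfg` attributes. A generic sketch of that mechanism follows (the function below is hypothetical, not something from the nushell tree).

```rust
// Compiled only when the `plugin` feature (part of the default set above) is enabled,
// e.g. a plain `cargo build` or `cargo build --features plugin`.
#[cfg(feature = "plugin")]
fn describe_build() -> &'static str {
    "plugin support compiled in"
}

// Fallback for builds without the feature, e.g. `cargo build --no-default-features`.
#[cfg(not(feature = "plugin"))]
fn describe_build() -> &'static str {
    "plugin support not compiled in"
}

fn main() {
    println!("{}", describe_build());
}
```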
15 Cross.toml
@@ -1,9 +1,18 @@
|
||||
# Configuration for cross-rs: https://github.com/cross-rs/cross
|
||||
# Run cross-rs like this:
|
||||
# cross build --target aarch64-unknown-linux-musl --release
|
||||
# cross build --target aarch64-unknown-linux-gnu --release
|
||||
# or
|
||||
# cross build --target aarch64-unknown-linux-musl --release --features=static-link-openssl
|
||||
|
||||
[target.aarch64-unknown-linux-gnu]
|
||||
dockerfile = "./docker/cross-rs/aarch64-unknown-linux-gnu.dockerfile"
|
||||
pre-build = [
|
||||
"dpkg --add-architecture $CROSS_DEB_ARCH",
|
||||
"apt-get update && apt-get install --assume-yes libssl-dev:$CROSS_DEB_ARCH clang"
|
||||
]
|
||||
|
||||
# NOTE: for musl you will need to build with --features=static-link-openssl
|
||||
[target.aarch64-unknown-linux-musl]
|
||||
dockerfile = "./docker/cross-rs/aarch64-unknown-linux-musl.dockerfile"
|
||||
pre-build = [
|
||||
"dpkg --add-architecture $CROSS_DEB_ARCH",
|
||||
"apt-get update && apt-get install --assume-yes clang"
|
||||
]
|
||||
|
@@ -7,7 +7,6 @@
|
||||
[](https://twitter.com/nu_shell)
|
||||
[](https://github.com/nushell/nushell/graphs/commit-activity)
|
||||
[](https://github.com/nushell/nushell/graphs/contributors)
|
||||
[](https://codecov.io/gh/nushell/nushell)
|
||||
|
||||
A new type of shell.
|
||||
|
||||
@@ -55,6 +54,7 @@ Detailed installation instructions can be found in the [installation chapter of
|
||||
|
||||
[](https://repology.org/project/nushell/versions)
|
||||
|
||||
For details about which platforms the Nushell team actively supports, see [our platform support policy](devdocs/PLATFORM_SUPPORT.md).
|
||||
|
||||
## Configuration
|
||||
|
||||
@@ -199,7 +199,7 @@ topics that have been presented.
|
||||
|
||||
Nu adheres closely to a set of goals that make up its design philosophy. As features are added, they are checked against these goals.
|
||||
|
||||
- First and foremost, Nu is cross-platform. Commands and techniques should work across platforms and Nu has first-class support for Windows, macOS, and Linux.
|
||||
- First and foremost, Nu is cross-platform. Commands and techniques should work across platforms and Nu has [first-class support for Windows, macOS, and Linux](devdocs/PLATFORM_SUPPORT.md).
|
||||
|
||||
- Nu ensures compatibility with existing platform-specific executables.
|
||||
|
||||
@@ -220,13 +220,15 @@ Please submit an issue or PR to be added to this list.
|
||||
- [virtualenv](https://github.com/pypa/virtualenv)
|
||||
- [atuin](https://github.com/ellie/atuin)
|
||||
- [clap](https://github.com/clap-rs/clap/tree/master/clap_complete_nushell)
|
||||
- [Dorothy](http://github.com/bevry/dorothy)
|
||||
- [Direnv](https://github.com/direnv/direnv/blob/master/docs/hook.md#nushell)
|
||||
|
||||
## Contributing
|
||||
|
||||
See [Contributing](CONTRIBUTING.md) for details. Thanks to all the people who already contributed!
|
||||
|
||||
<a href="https://github.com/nushell/nushell/graphs/contributors">
|
||||
<img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=500" />
|
||||
<img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=600" />
|
||||
</a>
|
||||
|
||||
## License
|
||||
|
@@ -1,6 +1,6 @@
# Criterion benchmarks
# Divan benchmarks

These are benchmarks using [Criterion](https://github.com/bheisler/criterion.rs), a microbenchmarking tool for Rust.
These are benchmarks using [Divan](https://github.com/nvzqz/divan), a microbenchmarking tool for Rust.

Run all benchmarks with `cargo bench`
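To make the Criterion-to-Divan switch concrete, here is a minimal, self-contained Divan benchmark. It is an illustrative sketch, not one of the nushell benches; the function name and workload are made up. Individual benchmarks can still be filtered with `cargo bench -- <regex>` as noted in `Cargo.toml`.

```rust
fn main() {
    // Divan discovers every #[divan::bench] item in the binary and runs it.
    divan::main();
}

#[divan::bench]
fn parse_small_pipeline() -> usize {
    // black_box keeps the compiler from optimising the measured work away.
    divan::black_box("ls | where size > 1kb | sort-by modified").len()
}
```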
@@ -1,174 +1,209 @@
|
||||
use criterion::{criterion_group, criterion_main, BatchSize, Criterion};
|
||||
use nu_cli::eval_source;
|
||||
use nu_parser::parse;
|
||||
use nu_plugin::{EncodingType, PluginResponse};
|
||||
use nu_protocol::{engine::EngineState, PipelineData, Span, Value};
|
||||
use nu_plugin::{Encoder, EncodingType, PluginCallResponse, PluginOutput};
|
||||
use nu_protocol::{
|
||||
engine::EngineState, eval_const::create_nu_constant, PipelineData, Span, Value, NU_VARIABLE_ID,
|
||||
};
|
||||
use nu_utils::{get_default_config, get_default_env};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
fn main() {
|
||||
// Run registered benchmarks.
|
||||
divan::main();
|
||||
}
|
||||
|
||||
fn load_bench_commands() -> EngineState {
|
||||
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
|
||||
}
|
||||
|
||||
fn canonicalize_path(engine_state: &EngineState, path: &Path) -> PathBuf {
|
||||
let cwd = engine_state.current_work_dir();
|
||||
|
||||
if path.exists() {
|
||||
match nu_path::canonicalize_with(path, cwd) {
|
||||
Ok(canon_path) => canon_path,
|
||||
Err(_) => path.to_owned(),
|
||||
}
|
||||
} else {
|
||||
path.to_owned()
|
||||
}
|
||||
}
|
||||
|
||||
fn get_home_path(engine_state: &EngineState) -> PathBuf {
|
||||
nu_path::home_dir()
|
||||
.map(|path| canonicalize_path(engine_state, &path))
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
fn setup_engine() -> EngineState {
|
||||
let mut engine_state = load_bench_commands();
|
||||
let home_path = get_home_path(&engine_state);
|
||||
|
||||
// parsing config.nu breaks without PWD set, so set a valid path
|
||||
engine_state.add_env_var(
|
||||
"PWD".into(),
|
||||
Value::string(home_path.to_string_lossy(), Span::test_data()),
|
||||
);
|
||||
|
||||
let nu_const = create_nu_constant(&engine_state, Span::unknown())
|
||||
.expect("Failed to create nushell constant.");
|
||||
engine_state.set_variable_const_val(NU_VARIABLE_ID, nu_const);
|
||||
|
||||
engine_state
|
||||
}
|
||||
|
||||
// FIXME: All benchmarks live in this 1 file to speed up build times when benchmarking.
|
||||
// When the *_benchmarks functions were in different files, `cargo bench` would build
|
||||
// an executable for every single one - incredibly slowly. Would be nice to figure out
|
||||
// a way to split things up again.
|
||||
|
||||
fn parser_benchmarks(c: &mut Criterion) {
|
||||
let mut engine_state = load_bench_commands();
|
||||
// parsing config.nu breaks without PWD set
|
||||
engine_state.add_env_var(
|
||||
"PWD".into(),
|
||||
Value::string("/some/dir".to_string(), Span::test_data()),
|
||||
);
|
||||
#[divan::bench_group()]
|
||||
mod parser_benchmarks {
|
||||
use super::*;
|
||||
|
||||
let default_env = get_default_env().as_bytes();
|
||||
c.bench_function("parse_default_env_file", |b| {
|
||||
b.iter_batched(
|
||||
|| nu_protocol::engine::StateWorkingSet::new(&engine_state),
|
||||
|mut working_set| parse(&mut working_set, None, default_env, false),
|
||||
BatchSize::SmallInput,
|
||||
)
|
||||
});
|
||||
#[divan::bench()]
|
||||
fn parse_default_config_file(bencher: divan::Bencher) {
|
||||
let engine_state = setup_engine();
|
||||
let default_env = get_default_config().as_bytes();
|
||||
|
||||
let default_config = get_default_config().as_bytes();
|
||||
c.bench_function("parse_default_config_file", |b| {
|
||||
b.iter_batched(
|
||||
|| nu_protocol::engine::StateWorkingSet::new(&engine_state),
|
||||
|mut working_set| parse(&mut working_set, None, default_config, false),
|
||||
BatchSize::SmallInput,
|
||||
)
|
||||
});
|
||||
bencher
|
||||
.with_inputs(|| nu_protocol::engine::StateWorkingSet::new(&engine_state))
|
||||
.bench_refs(|mut working_set| parse(&mut working_set, None, default_env, false))
|
||||
}
|
||||
|
||||
c.bench_function("eval default_env.nu", |b| {
|
||||
b.iter(|| {
|
||||
let mut engine_state = load_bench_commands();
|
||||
let mut stack = nu_protocol::engine::Stack::new();
|
||||
eval_source(
|
||||
&mut engine_state,
|
||||
&mut stack,
|
||||
get_default_env().as_bytes(),
|
||||
"default_env.nu",
|
||||
PipelineData::empty(),
|
||||
false,
|
||||
)
|
||||
})
|
||||
});
|
||||
#[divan::bench()]
|
||||
fn parse_default_env_file(bencher: divan::Bencher) {
|
||||
let engine_state = setup_engine();
|
||||
let default_env = get_default_env().as_bytes();
|
||||
|
||||
c.bench_function("eval default_config.nu", |b| {
|
||||
b.iter(|| {
|
||||
let mut engine_state = load_bench_commands();
|
||||
// parsing config.nu breaks without PWD set
|
||||
engine_state.add_env_var(
|
||||
"PWD".into(),
|
||||
Value::string("/some/dir".to_string(), Span::test_data()),
|
||||
);
|
||||
let mut stack = nu_protocol::engine::Stack::new();
|
||||
eval_source(
|
||||
&mut engine_state,
|
||||
&mut stack,
|
||||
get_default_config().as_bytes(),
|
||||
"default_config.nu",
|
||||
PipelineData::empty(),
|
||||
false,
|
||||
)
|
||||
})
|
||||
});
|
||||
bencher
|
||||
.with_inputs(|| nu_protocol::engine::StateWorkingSet::new(&engine_state))
|
||||
.bench_refs(|mut working_set| parse(&mut working_set, None, default_env, false))
|
||||
}
|
||||
}
|
||||
|
||||
fn eval_benchmarks(c: &mut Criterion) {
|
||||
c.bench_function("eval default_env.nu", |b| {
|
||||
b.iter(|| {
|
||||
let mut engine_state = load_bench_commands();
|
||||
let mut stack = nu_protocol::engine::Stack::new();
|
||||
eval_source(
|
||||
&mut engine_state,
|
||||
&mut stack,
|
||||
get_default_env().as_bytes(),
|
||||
"default_env.nu",
|
||||
PipelineData::empty(),
|
||||
false,
|
||||
)
|
||||
})
|
||||
});
|
||||
#[divan::bench_group()]
|
||||
mod eval_benchmarks {
|
||||
use super::*;
|
||||
|
||||
c.bench_function("eval default_config.nu", |b| {
|
||||
b.iter(|| {
|
||||
let mut engine_state = load_bench_commands();
|
||||
// parsing config.nu breaks without PWD set
|
||||
engine_state.add_env_var(
|
||||
"PWD".into(),
|
||||
Value::string("/some/dir".to_string(), Span::test_data()),
|
||||
);
|
||||
let mut stack = nu_protocol::engine::Stack::new();
|
||||
eval_source(
|
||||
&mut engine_state,
|
||||
&mut stack,
|
||||
get_default_config().as_bytes(),
|
||||
"default_config.nu",
|
||||
PipelineData::empty(),
|
||||
false,
|
||||
)
|
||||
})
|
||||
});
|
||||
#[divan::bench()]
|
||||
fn eval_default_env(bencher: divan::Bencher) {
|
||||
let default_env = get_default_env().as_bytes();
|
||||
let fname = "default_env.nu";
|
||||
bencher
|
||||
.with_inputs(|| (setup_engine(), nu_protocol::engine::Stack::new()))
|
||||
.bench_values(|(mut engine_state, mut stack)| {
|
||||
eval_source(
|
||||
&mut engine_state,
|
||||
&mut stack,
|
||||
default_env,
|
||||
fname,
|
||||
PipelineData::empty(),
|
||||
false,
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
#[divan::bench()]
|
||||
fn eval_default_config(bencher: divan::Bencher) {
|
||||
let default_env = get_default_config().as_bytes();
|
||||
let fname = "default_config.nu";
|
||||
bencher
|
||||
.with_inputs(|| (setup_engine(), nu_protocol::engine::Stack::new()))
|
||||
.bench_values(|(mut engine_state, mut stack)| {
|
||||
eval_source(
|
||||
&mut engine_state,
|
||||
&mut stack,
|
||||
default_env,
|
||||
fname,
|
||||
PipelineData::empty(),
|
||||
false,
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// generate a new table data with `row_cnt` rows, `col_cnt` columns.
|
||||
fn encoding_test_data(row_cnt: usize, col_cnt: usize) -> Value {
|
||||
let columns: Vec<String> = (0..col_cnt).map(|x| format!("col_{x}")).collect();
|
||||
let vals: Vec<Value> = (0..col_cnt as i64).map(Value::test_int).collect();
|
||||
|
||||
Value::List {
|
||||
vals: (0..row_cnt)
|
||||
.map(|_| Value::test_record(columns.clone(), vals.clone()))
|
||||
let record = Value::test_record(
|
||||
(0..col_cnt)
|
||||
.map(|x| (format!("col_{x}"), Value::test_int(x as i64)))
|
||||
.collect(),
|
||||
span: Span::test_data(),
|
||||
);
|
||||
|
||||
Value::list(vec![record; row_cnt], Span::test_data())
|
||||
}
|
||||
|
||||
#[divan::bench_group()]
|
||||
mod encoding_benchmarks {
|
||||
use super::*;
|
||||
|
||||
#[divan::bench(args = [(100, 5), (10000, 15)])]
|
||||
fn json_encode(bencher: divan::Bencher, (row_cnt, col_cnt): (usize, usize)) {
|
||||
let test_data = PluginOutput::CallResponse(
|
||||
0,
|
||||
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
|
||||
);
|
||||
let encoder = EncodingType::try_from_bytes(b"json").unwrap();
|
||||
bencher
|
||||
.with_inputs(|| (vec![]))
|
||||
.bench_values(|mut res| encoder.encode(&test_data, &mut res))
|
||||
}
|
||||
|
||||
#[divan::bench(args = [(100, 5), (10000, 15)])]
|
||||
fn msgpack_encode(bencher: divan::Bencher, (row_cnt, col_cnt): (usize, usize)) {
|
||||
let test_data = PluginOutput::CallResponse(
|
||||
0,
|
||||
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
|
||||
);
|
||||
let encoder = EncodingType::try_from_bytes(b"msgpack").unwrap();
|
||||
bencher
|
||||
.with_inputs(|| (vec![]))
|
||||
.bench_values(|mut res| encoder.encode(&test_data, &mut res))
|
||||
}
|
||||
}
|
||||
|
||||
fn encoding_benchmarks(c: &mut Criterion) {
|
||||
let mut group = c.benchmark_group("Encoding");
|
||||
let test_cnt_pairs = [(100, 5), (100, 15), (10000, 5), (10000, 15)];
|
||||
for (row_cnt, col_cnt) in test_cnt_pairs.into_iter() {
|
||||
for fmt in ["json", "msgpack"] {
|
||||
group.bench_function(&format!("{fmt} encode {row_cnt} * {col_cnt}"), |b| {
|
||||
let mut res = vec![];
|
||||
let test_data =
|
||||
PluginResponse::Value(Box::new(encoding_test_data(row_cnt, col_cnt)));
|
||||
let encoder = EncodingType::try_from_bytes(fmt.as_bytes()).unwrap();
|
||||
b.iter(|| encoder.encode_response(&test_data, &mut res))
|
||||
});
|
||||
}
|
||||
}
|
||||
group.finish();
|
||||
}
|
||||
#[divan::bench_group()]
|
||||
mod decoding_benchmarks {
|
||||
use super::*;
|
||||
|
||||
fn decoding_benchmarks(c: &mut Criterion) {
|
||||
let mut group = c.benchmark_group("Decoding");
|
||||
let test_cnt_pairs = [(100, 5), (100, 15), (10000, 5), (10000, 15)];
|
||||
for (row_cnt, col_cnt) in test_cnt_pairs.into_iter() {
|
||||
for fmt in ["json", "msgpack"] {
|
||||
group.bench_function(&format!("{fmt} decode for {row_cnt} * {col_cnt}"), |b| {
|
||||
let mut res = vec![];
|
||||
let test_data =
|
||||
PluginResponse::Value(Box::new(encoding_test_data(row_cnt, col_cnt)));
|
||||
let encoder = EncodingType::try_from_bytes(fmt.as_bytes()).unwrap();
|
||||
encoder.encode_response(&test_data, &mut res).unwrap();
|
||||
let mut binary_data = std::io::Cursor::new(res);
|
||||
b.iter(|| {
|
||||
binary_data.set_position(0);
|
||||
encoder.decode_response(&mut binary_data)
|
||||
})
|
||||
});
|
||||
}
|
||||
#[divan::bench(args = [(100, 5), (10000, 15)])]
|
||||
fn json_decode(bencher: divan::Bencher, (row_cnt, col_cnt): (usize, usize)) {
|
||||
let test_data = PluginOutput::CallResponse(
|
||||
0,
|
||||
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
|
||||
);
|
||||
let encoder = EncodingType::try_from_bytes(b"json").unwrap();
|
||||
let mut res = vec![];
|
||||
encoder.encode(&test_data, &mut res).unwrap();
|
||||
bencher
|
||||
.with_inputs(|| {
|
||||
let mut binary_data = std::io::Cursor::new(res.clone());
|
||||
binary_data.set_position(0);
|
||||
binary_data
|
||||
})
|
||||
.bench_values(|mut binary_data| -> Result<Option<PluginOutput>, _> {
|
||||
encoder.decode(&mut binary_data)
|
||||
})
|
||||
}
|
||||
group.finish();
|
||||
}
|
||||
|
||||
criterion_group!(
|
||||
benches,
|
||||
parser_benchmarks,
|
||||
eval_benchmarks,
|
||||
encoding_benchmarks,
|
||||
decoding_benchmarks
|
||||
);
|
||||
criterion_main!(benches);
|
||||
#[divan::bench(args = [(100, 5), (10000, 15)])]
|
||||
fn msgpack_decode(bencher: divan::Bencher, (row_cnt, col_cnt): (usize, usize)) {
|
||||
let test_data = PluginOutput::CallResponse(
|
||||
0,
|
||||
PluginCallResponse::value(encoding_test_data(row_cnt, col_cnt)),
|
||||
);
|
||||
let encoder = EncodingType::try_from_bytes(b"msgpack").unwrap();
|
||||
let mut res = vec![];
|
||||
encoder.encode(&test_data, &mut res).unwrap();
|
||||
bencher
|
||||
.with_inputs(|| {
|
||||
let mut binary_data = std::io::Cursor::new(res.clone());
|
||||
binary_data.set_position(0);
|
||||
binary_data
|
||||
})
|
||||
.bench_values(|mut binary_data| -> Result<Option<PluginOutput>, _> {
|
||||
encoder.decode(&mut binary_data)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
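A distilled version of the `Bencher` pattern used throughout the file above, for readers new to Divan: `with_inputs` builds a fresh input per sample outside the timed region, and `bench_values` (or `bench_refs`) times only the closure that consumes it. The workload below is made up; only the shape mirrors the benchmarks above.

```rust
fn main() {
    divan::main();
}

#[divan::bench]
fn sum_fresh_vec(bencher: divan::Bencher) {
    bencher
        // Generated outside the measured region, so setup cost is not timed.
        .with_inputs(|| (0..1_000u64).collect::<Vec<_>>())
        // Only this closure is timed; bench_values hands it ownership of the input.
        .bench_values(|values| values.into_iter().sum::<u64>());
}
```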
17 codecov.yml
@@ -1,17 +0,0 @@
coverage:
  status:
    project:
      default:
        target: 55%
        threshold: 2%
    patch:
      default:
        informational: true

comment:
  layout: reach, diff, files
  behavior: default
  require_base: yes
  require_head: yes
  after_n_builds: 1 # Disabled windows else: 2
@@ -5,40 +5,43 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cli"
|
||||
edition = "2021"
|
||||
license = "MIT"
|
||||
name = "nu-cli"
|
||||
version = "0.83.1"
|
||||
version = "0.91.0"
|
||||
|
||||
[lib]
|
||||
bench = false
|
||||
|
||||
[dev-dependencies]
|
||||
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.83.1" }
|
||||
nu-test-support = { path = "../nu-test-support", version = "0.83.1" }
|
||||
rstest = { version = "0.17.0", default-features = false }
|
||||
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.91.0" }
|
||||
nu-command = { path = "../nu-command", version = "0.91.0" }
|
||||
nu-test-support = { path = "../nu-test-support", version = "0.91.0" }
|
||||
rstest = { version = "0.18.1", default-features = false }
|
||||
|
||||
[dependencies]
|
||||
nu-cmd-base = { path = "../nu-cmd-base", version = "0.83.1" }
|
||||
nu-command = { path = "../nu-command", version = "0.83.1" }
|
||||
nu-engine = { path = "../nu-engine", version = "0.83.1" }
|
||||
nu-path = { path = "../nu-path", version = "0.83.1" }
|
||||
nu-parser = { path = "../nu-parser", version = "0.83.1" }
|
||||
nu-protocol = { path = "../nu-protocol", version = "0.83.1" }
|
||||
nu-utils = { path = "../nu-utils", version = "0.83.1" }
|
||||
nu-color-config = { path = "../nu-color-config", version = "0.83.1" }
|
||||
nu-ansi-term = "0.49.0"
|
||||
reedline = { version = "0.22.0", features = ["bashisms", "sqlite"]}
|
||||
nu-cmd-base = { path = "../nu-cmd-base", version = "0.91.0" }
|
||||
nu-engine = { path = "../nu-engine", version = "0.91.0" }
|
||||
nu-path = { path = "../nu-path", version = "0.91.0" }
|
||||
nu-parser = { path = "../nu-parser", version = "0.91.0" }
|
||||
nu-protocol = { path = "../nu-protocol", version = "0.91.0" }
|
||||
nu-utils = { path = "../nu-utils", version = "0.91.0" }
|
||||
nu-color-config = { path = "../nu-color-config", version = "0.91.0" }
|
||||
nu-ansi-term = "0.50.0"
|
||||
reedline = { version = "0.30.0", features = ["bashisms", "sqlite"] }
|
||||
|
||||
chrono = { default-features = false, features = ["std"], version = "0.4" }
|
||||
crossterm = "0.26"
|
||||
fancy-regex = "0.11"
|
||||
crossterm = "0.27"
|
||||
fancy-regex = "0.13"
|
||||
fuzzy-matcher = "0.3"
|
||||
is_executable = "1.0"
|
||||
is-terminal = "0.4.8"
|
||||
log = "0.4"
|
||||
miette = { version = "5.10", features = ["fancy-no-backtrace"] }
|
||||
miette = { version = "7.1", features = ["fancy-no-backtrace"] }
|
||||
lscolors = { version = "0.17", default-features = false, features = ["nu-ansi-term"] }
|
||||
once_cell = "1.18"
|
||||
percent-encoding = "2"
|
||||
sysinfo = "0.29"
|
||||
unicode-segmentation = "1.10"
|
||||
pathdiff = "0.2"
|
||||
sysinfo = "0.30"
|
||||
unicode-segmentation = "1.11"
|
||||
uuid = { version = "1.6.0", features = ["v4"] }
|
||||
which = "6.0.0"
|
||||
|
||||
[features]
|
||||
plugin = []
|
||||
|
@@ -1,129 +0,0 @@
|
||||
use nu_engine::CallExt;
|
||||
use nu_protocol::ast::Call;
|
||||
use nu_protocol::engine::{Command, EngineState, Stack};
|
||||
use nu_protocol::Category;
|
||||
use nu_protocol::IntoPipelineData;
|
||||
use nu_protocol::{PipelineData, ShellError, Signature, SyntaxShape, Type, Value};
|
||||
use unicode_segmentation::UnicodeSegmentation;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Commandline;
|
||||
|
||||
impl Command for Commandline {
|
||||
fn name(&self) -> &str {
|
||||
"commandline"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("commandline")
|
||||
.input_output_types(vec![
|
||||
(Type::Nothing, Type::Nothing),
|
||||
(Type::String, Type::String),
|
||||
])
|
||||
.switch(
|
||||
"cursor",
|
||||
"Set or get the current cursor position",
|
||||
Some('c'),
|
||||
)
|
||||
.switch(
|
||||
"append",
|
||||
"appends the string to the end of the buffer",
|
||||
Some('a'),
|
||||
)
|
||||
.switch(
|
||||
"insert",
|
||||
"inserts the string into the buffer at the cursor position",
|
||||
Some('i'),
|
||||
)
|
||||
.switch(
|
||||
"replace",
|
||||
"replaces the current contents of the buffer (default)",
|
||||
Some('r'),
|
||||
)
|
||||
.optional(
|
||||
"cmd",
|
||||
SyntaxShape::String,
|
||||
"the string to perform the operation with",
|
||||
)
|
||||
.category(Category::Core)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"View or modify the current command line input buffer."
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["repl", "interactive"]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
if let Some(cmd) = call.opt::<Value>(engine_state, stack, 0)? {
|
||||
let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
|
||||
|
||||
if call.has_flag("cursor") {
|
||||
let cmd_str = cmd.as_string()?;
|
||||
match cmd_str.parse::<i64>() {
|
||||
Ok(n) => {
|
||||
repl.cursor_pos = if n <= 0 {
|
||||
0usize
|
||||
} else {
|
||||
repl.buffer
|
||||
.grapheme_indices(true)
|
||||
.map(|(i, _c)| i)
|
||||
.nth(n as usize)
|
||||
.unwrap_or(repl.buffer.len())
|
||||
}
|
||||
}
|
||||
Err(_) => {
|
||||
return Err(ShellError::CantConvert {
|
||||
to_type: "int".to_string(),
|
||||
from_type: "string".to_string(),
|
||||
span: cmd.span()?,
|
||||
help: Some(format!(
|
||||
r#"string "{cmd_str}" does not represent a valid integer"#
|
||||
)),
|
||||
})
|
||||
}
|
||||
}
|
||||
} else if call.has_flag("append") {
|
||||
repl.buffer.push_str(&cmd.as_string()?);
|
||||
} else if call.has_flag("insert") {
|
||||
let cmd_str = cmd.as_string()?;
|
||||
let cursor_pos = repl.cursor_pos;
|
||||
repl.buffer.insert_str(cursor_pos, &cmd_str);
|
||||
repl.cursor_pos += cmd_str.len();
|
||||
} else {
|
||||
repl.buffer = cmd.as_string()?;
|
||||
repl.cursor_pos = repl.buffer.len();
|
||||
}
|
||||
Ok(Value::Nothing { span: call.head }.into_pipeline_data())
|
||||
} else {
|
||||
let repl = engine_state.repl_state.lock().expect("repl state mutex");
|
||||
if call.has_flag("cursor") {
|
||||
let char_pos = repl
|
||||
.buffer
|
||||
.grapheme_indices(true)
|
||||
.chain(std::iter::once((repl.buffer.len(), "")))
|
||||
.position(|(i, _c)| i == repl.cursor_pos)
|
||||
.expect("Cursor position isn't on a grapheme boundary");
|
||||
Ok(Value::String {
|
||||
val: char_pos.to_string(),
|
||||
span: call.head,
|
||||
}
|
||||
.into_pipeline_data())
|
||||
} else {
|
||||
Ok(Value::String {
|
||||
val: repl.buffer.to_string(),
|
||||
span: call.head,
|
||||
}
|
||||
.into_pipeline_data())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
189 crates/nu-cli/src/commands/commandline/commandline_.rs (new file)
@@ -0,0 +1,189 @@
|
||||
use nu_engine::CallExt;
|
||||
use nu_protocol::{
|
||||
ast::Call,
|
||||
engine::{Command, EngineState, Stack},
|
||||
Category, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Type, Value,
|
||||
};
|
||||
use unicode_segmentation::UnicodeSegmentation;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Commandline;
|
||||
|
||||
impl Command for Commandline {
|
||||
fn name(&self) -> &str {
|
||||
"commandline"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("commandline")
|
||||
.input_output_types(vec![
|
||||
(Type::Nothing, Type::Nothing),
|
||||
(Type::String, Type::String),
|
||||
])
|
||||
.switch(
|
||||
"cursor",
|
||||
"Set or get the current cursor position",
|
||||
Some('c'),
|
||||
)
|
||||
.switch(
|
||||
"cursor-end",
|
||||
"Set the current cursor position to the end of the buffer",
|
||||
Some('e'),
|
||||
)
|
||||
.switch(
|
||||
"append",
|
||||
"appends the string to the end of the buffer",
|
||||
Some('a'),
|
||||
)
|
||||
.switch(
|
||||
"insert",
|
||||
"inserts the string into the buffer at the cursor position",
|
||||
Some('i'),
|
||||
)
|
||||
.switch(
|
||||
"replace",
|
||||
"replaces the current contents of the buffer (default)",
|
||||
Some('r'),
|
||||
)
|
||||
.optional(
|
||||
"cmd",
|
||||
SyntaxShape::String,
|
||||
"the string to perform the operation with",
|
||||
)
|
||||
.category(Category::Core)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"View or modify the current command line input buffer."
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["repl", "interactive"]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
if let Some(cmd) = call.opt::<Value>(engine_state, stack, 0)? {
|
||||
let span = cmd.span();
|
||||
let cmd = cmd.coerce_into_string()?;
|
||||
let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
|
||||
|
||||
if call.has_flag(engine_state, stack, "cursor")? {
|
||||
nu_protocol::report_error_new(
|
||||
engine_state,
|
||||
&ShellError::GenericError {
|
||||
error: "`--cursor (-c)` is deprecated".into(),
|
||||
msg: "Setting the current cursor position by `--cursor (-c)` is deprecated"
|
||||
.into(),
|
||||
span: Some(call.arguments_span()),
|
||||
help: Some("Use `commandline set-cursor`".into()),
|
||||
inner: vec![],
|
||||
},
|
||||
);
|
||||
match cmd.parse::<i64>() {
|
||||
Ok(n) => {
|
||||
repl.cursor_pos = if n <= 0 {
|
||||
0usize
|
||||
} else {
|
||||
repl.buffer
|
||||
.grapheme_indices(true)
|
||||
.map(|(i, _c)| i)
|
||||
.nth(n as usize)
|
||||
.unwrap_or(repl.buffer.len())
|
||||
}
|
||||
}
|
||||
Err(_) => {
|
||||
return Err(ShellError::CantConvert {
|
||||
to_type: "int".to_string(),
|
||||
from_type: "string".to_string(),
|
||||
span,
|
||||
help: Some(format!(r#"string "{cmd}" does not represent a valid int"#)),
|
||||
})
|
||||
}
|
||||
}
|
||||
} else if call.has_flag(engine_state, stack, "append")? {
|
||||
nu_protocol::report_error_new(
|
||||
engine_state,
|
||||
&ShellError::GenericError {
|
||||
error: "`--append (-a)` is deprecated".into(),
|
||||
msg: "Appending the string to the end of the buffer by `--append (-a)` is deprecated".into(),
|
||||
span: Some(call.arguments_span()),
|
||||
help: Some("Use `commandline edit --append (-a)`".into()),
|
||||
inner: vec![],
|
||||
},
|
||||
);
|
||||
repl.buffer.push_str(&cmd);
|
||||
} else if call.has_flag(engine_state, stack, "insert")? {
|
||||
nu_protocol::report_error_new(
|
||||
engine_state,
|
||||
&ShellError::GenericError {
|
||||
error: "`--insert (-i)` is deprecated".into(),
|
||||
msg: "Inserting the string into the buffer at the cursor position by `--insert (-i)` is deprecated".into(),
|
||||
span: Some(call.arguments_span()),
|
||||
help: Some("Use `commandline edit --insert (-i)`".into()),
|
||||
inner: vec![],
|
||||
},
|
||||
);
|
||||
let cursor_pos = repl.cursor_pos;
|
||||
repl.buffer.insert_str(cursor_pos, &cmd);
|
||||
repl.cursor_pos += cmd.len();
|
||||
} else {
|
||||
nu_protocol::report_error_new(
|
||||
engine_state,
|
||||
&ShellError::GenericError {
|
||||
error: "`--replace (-r)` is deprecated".into(),
|
||||
msg: "Replacing the current contents of the buffer by `--replace (-r)` or positional argument is deprecated".into(),
|
||||
span: Some(call.arguments_span()),
|
||||
help: Some("Use `commandline edit --replace (-r)`".into()),
|
||||
inner: vec![],
|
||||
},
|
||||
);
|
||||
repl.buffer = cmd;
|
||||
repl.cursor_pos = repl.buffer.len();
|
||||
}
|
||||
Ok(Value::nothing(call.head).into_pipeline_data())
|
||||
} else {
|
||||
let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
|
||||
if call.has_flag(engine_state, stack, "cursor-end")? {
|
||||
nu_protocol::report_error_new(
|
||||
engine_state,
|
||||
&ShellError::GenericError {
|
||||
error: "`--cursor-end (-e)` is deprecated".into(),
|
||||
msg: "Setting the current cursor position to the end of the buffer by `--cursor-end (-e)` is deprecated".into(),
|
||||
span: Some(call.arguments_span()),
|
||||
help: Some("Use `commandline set-cursor --end (-e)`".into()),
|
||||
inner: vec![],
|
||||
},
|
||||
);
|
||||
repl.cursor_pos = repl.buffer.len();
|
||||
Ok(Value::nothing(call.head).into_pipeline_data())
|
||||
} else if call.has_flag(engine_state, stack, "cursor")? {
|
||||
nu_protocol::report_error_new(
|
||||
engine_state,
|
||||
&ShellError::GenericError {
|
||||
error: "`--cursor (-c)` is deprecated".into(),
|
||||
msg: "Getting the current cursor position by `--cursor (-c)` is deprecated"
|
||||
.into(),
|
||||
span: Some(call.arguments_span()),
|
||||
help: Some("Use `commandline get-cursor`".into()),
|
||||
inner: vec![],
|
||||
},
|
||||
);
|
||||
let char_pos = repl
|
||||
.buffer
|
||||
.grapheme_indices(true)
|
||||
.chain(std::iter::once((repl.buffer.len(), "")))
|
||||
.position(|(i, _c)| i == repl.cursor_pos)
|
||||
.expect("Cursor position isn't on a grapheme boundary");
|
||||
Ok(Value::string(char_pos.to_string(), call.head).into_pipeline_data())
|
||||
} else {
|
||||
Ok(Value::string(repl.buffer.to_string(), call.head).into_pipeline_data())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
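The four deprecation branches above all follow the same report-then-fall-through shape. Distilled into a standalone helper for clarity; this helper is hypothetical (it does not exist in the diff) and the flag/replacement strings are placeholders, but every API it touches appears in the code above.

```rust
use nu_protocol::{ast::Call, engine::EngineState, ShellError};

/// Emit a non-fatal deprecation warning for `flag`, pointing the user at `replacement`,
/// so the caller can still fall through to the old behaviour and keep existing scripts working.
fn warn_deprecated_flag(engine_state: &EngineState, call: &Call, flag: &str, replacement: &str) {
    nu_protocol::report_error_new(
        engine_state,
        &ShellError::GenericError {
            error: format!("`{flag}` is deprecated"),
            msg: format!("`{flag}` will be removed in a future release"),
            span: Some(call.arguments_span()),
            help: Some(format!("Use `{replacement}` instead")),
            inner: vec![],
        },
    );
}
```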
71 crates/nu-cli/src/commands/commandline/edit.rs (new file)
@@ -0,0 +1,71 @@
|
||||
use nu_engine::CallExt;
|
||||
use nu_protocol::{
|
||||
ast::Call,
|
||||
engine::{Command, EngineState, Stack},
|
||||
Category, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Type, Value,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SubCommand;
|
||||
|
||||
impl Command for SubCommand {
|
||||
fn name(&self) -> &str {
|
||||
"commandline edit"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(self.name())
|
||||
.input_output_types(vec![(Type::Nothing, Type::Nothing)])
|
||||
.switch(
|
||||
"append",
|
||||
"appends the string to the end of the buffer",
|
||||
Some('a'),
|
||||
)
|
||||
.switch(
|
||||
"insert",
|
||||
"inserts the string into the buffer at the cursor position",
|
||||
Some('i'),
|
||||
)
|
||||
.switch(
|
||||
"replace",
|
||||
"replaces the current contents of the buffer (default)",
|
||||
Some('r'),
|
||||
)
|
||||
.required(
|
||||
"str",
|
||||
SyntaxShape::String,
|
||||
"the string to perform the operation with",
|
||||
)
|
||||
.category(Category::Core)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Modify the current command line input buffer."
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["repl", "interactive"]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let str: String = call.req(engine_state, stack, 0)?;
|
||||
let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
|
||||
if call.has_flag(engine_state, stack, "append")? {
|
||||
repl.buffer.push_str(&str);
|
||||
} else if call.has_flag(engine_state, stack, "insert")? {
|
||||
let cursor_pos = repl.cursor_pos;
|
||||
repl.buffer.insert_str(cursor_pos, &str);
|
||||
repl.cursor_pos += str.len();
|
||||
} else {
|
||||
repl.buffer = str;
|
||||
repl.cursor_pos = repl.buffer.len();
|
||||
}
|
||||
Ok(Value::nothing(call.head).into_pipeline_data())
|
||||
}
|
||||
}
|
56 crates/nu-cli/src/commands/commandline/get_cursor.rs (new file)
@@ -0,0 +1,56 @@
|
||||
use nu_protocol::{
|
||||
ast::Call,
|
||||
engine::{Command, EngineState, Stack},
|
||||
Category, IntoPipelineData, PipelineData, ShellError, Signature, Type, Value,
|
||||
};
|
||||
use unicode_segmentation::UnicodeSegmentation;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SubCommand;
|
||||
|
||||
impl Command for SubCommand {
|
||||
fn name(&self) -> &str {
|
||||
"commandline get-cursor"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(self.name())
|
||||
.input_output_types(vec![(Type::Nothing, Type::Int)])
|
||||
.allow_variants_without_examples(true)
|
||||
.category(Category::Core)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Get the current cursor position."
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["repl", "interactive"]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
_stack: &mut Stack,
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let repl = engine_state.repl_state.lock().expect("repl state mutex");
|
||||
let char_pos = repl
|
||||
.buffer
|
||||
.grapheme_indices(true)
|
||||
.chain(std::iter::once((repl.buffer.len(), "")))
|
||||
.position(|(i, _c)| i == repl.cursor_pos)
|
||||
.expect("Cursor position isn't on a grapheme boundary");
|
||||
match i64::try_from(char_pos) {
|
||||
Ok(pos) => Ok(Value::int(pos, call.head).into_pipeline_data()),
|
||||
Err(e) => Err(ShellError::GenericError {
|
||||
error: "Failed to convert cursor position to int".to_string(),
|
||||
msg: e.to_string(),
|
||||
span: None,
|
||||
help: None,
|
||||
inner: vec![],
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
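The grapheme walk in `commandline get-cursor` above converts the buffer's internal byte offset into the grapheme index reported to the user. The same logic as a standalone helper, for clarity (a sketch; the function name is made up):

```rust
use unicode_segmentation::UnicodeSegmentation;

/// Convert a byte offset into `buffer` to a grapheme index, or None if the
/// offset does not fall on a grapheme boundary.
fn byte_offset_to_grapheme_index(buffer: &str, byte_pos: usize) -> Option<usize> {
    buffer
        .grapheme_indices(true)
        // The extra (len, "") entry lets a cursor sitting at the very end of the buffer resolve.
        .chain(std::iter::once((buffer.len(), "")))
        .position(|(i, _)| i == byte_pos)
}
```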
9 crates/nu-cli/src/commands/commandline/mod.rs (new file)
@@ -0,0 +1,9 @@
mod commandline_;
mod edit;
mod get_cursor;
mod set_cursor;

pub use commandline_::Commandline;
pub use edit::SubCommand as CommandlineEdit;
pub use get_cursor::SubCommand as CommandlineGetCursor;
pub use set_cursor::SubCommand as CommandlineSetCursor;
69 crates/nu-cli/src/commands/commandline/set_cursor.rs (new file)
@@ -0,0 +1,69 @@
|
||||
use nu_engine::CallExt;
|
||||
use nu_protocol::{
|
||||
ast::Call,
|
||||
engine::{Command, EngineState, Stack},
|
||||
Category, IntoPipelineData, PipelineData, ShellError, Signature, SyntaxShape, Type, Value,
|
||||
};
|
||||
use unicode_segmentation::UnicodeSegmentation;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SubCommand;
|
||||
|
||||
impl Command for SubCommand {
|
||||
fn name(&self) -> &str {
|
||||
"commandline set-cursor"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(self.name())
|
||||
.input_output_types(vec![(Type::Nothing, Type::Nothing)])
|
||||
.switch(
|
||||
"end",
|
||||
"set the current cursor position to the end of the buffer",
|
||||
Some('e'),
|
||||
)
|
||||
.optional("pos", SyntaxShape::Int, "Cursor position to be set")
|
||||
.category(Category::Core)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Set the current cursor position."
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["repl", "interactive"]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
|
||||
if let Some(pos) = call.opt::<i64>(engine_state, stack, 0)? {
|
||||
repl.cursor_pos = if pos <= 0 {
|
||||
0usize
|
||||
} else {
|
||||
repl.buffer
|
||||
.grapheme_indices(true)
|
||||
.map(|(i, _c)| i)
|
||||
.nth(pos as usize)
|
||||
.unwrap_or(repl.buffer.len())
|
||||
};
|
||||
Ok(Value::nothing(call.head).into_pipeline_data())
|
||||
} else if call.has_flag(engine_state, stack, "end")? {
|
||||
repl.cursor_pos = repl.buffer.len();
|
||||
Ok(Value::nothing(call.head).into_pipeline_data())
|
||||
} else {
|
||||
Err(ShellError::GenericError {
|
||||
error: "Required a positional argument or a flag".to_string(),
|
||||
msg: "".to_string(),
|
||||
span: None,
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
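`commandline set-cursor` above performs the inverse conversion: a user-facing grapheme index is mapped back to a byte offset before it is stored, with negative positions clamped to the start and past-the-end positions clamped to the buffer length. As a standalone sketch (hypothetical helper name, logic mirrored from the diff):

```rust
use unicode_segmentation::UnicodeSegmentation;

/// Map a user-facing grapheme index to a byte offset into `buffer`.
fn grapheme_index_to_byte_offset(buffer: &str, pos: i64) -> usize {
    if pos <= 0 {
        0
    } else {
        buffer
            .grapheme_indices(true)
            .map(|(i, _)| i)
            .nth(pos as usize)
            .unwrap_or(buffer.len())
    }
}
```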
@@ -14,6 +14,9 @@ pub fn add_cli_context(mut engine_state: EngineState) -> EngineState {

    bind_command! {
        Commandline,
        CommandlineEdit,
        CommandlineGetCursor,
        CommandlineSetCursor,
        History,
        HistorySession,
        Keybindings,
@@ -1,8 +1,9 @@
|
||||
use nu_engine::CallExt;
|
||||
use nu_protocol::ast::Call;
|
||||
use nu_protocol::engine::{Command, EngineState, Stack};
|
||||
use nu_protocol::{
|
||||
Category, Example, HistoryFileFormat, IntoInterruptiblePipelineData, PipelineData, ShellError,
|
||||
Signature, Span, Type, Value,
|
||||
record, Category, Example, HistoryFileFormat, IntoInterruptiblePipelineData, PipelineData,
|
||||
ShellError, Signature, Span, Type, Value,
|
||||
};
|
||||
use reedline::{
|
||||
FileBackedHistory, History as ReedlineHistory, HistoryItem, SearchDirection, SearchQuery,
|
||||
@@ -23,10 +24,7 @@ impl Command for History {
|
||||
|
||||
fn signature(&self) -> nu_protocol::Signature {
|
||||
Signature::build("history")
|
||||
.input_output_types(vec![
|
||||
(Type::Nothing, Type::Table(vec![])),
|
||||
(Type::Nothing, Type::Nothing),
|
||||
])
|
||||
.input_output_types(vec![(Type::Nothing, Type::Any)])
|
||||
.allow_variants_without_examples(true)
|
||||
.switch("clear", "Clears out the history entries", Some('c'))
|
||||
.switch(
|
||||
@@ -34,27 +32,31 @@ impl Command for History {
|
||||
"Show long listing of entries for sqlite history",
|
||||
Some('l'),
|
||||
)
|
||||
.category(Category::Misc)
|
||||
.category(Category::History)
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
_stack: &mut Stack,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let head = call.head;
|
||||
|
||||
let Some(history) = engine_state.history_config() else {
|
||||
return Ok(PipelineData::empty());
|
||||
};
|
||||
|
||||
// todo for sqlite history this command should be an alias to `open ~/.config/nushell/history.sqlite3 | get history`
|
||||
if let Some(config_path) = nu_path::config_dir() {
|
||||
let clear = call.has_flag("clear");
|
||||
let long = call.has_flag("long");
|
||||
let clear = call.has_flag(engine_state, stack, "clear")?;
|
||||
let long = call.has_flag(engine_state, stack, "long")?;
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
|
||||
let mut history_path = config_path;
|
||||
history_path.push("nushell");
|
||||
match engine_state.config.history_file_format {
|
||||
match history.file_format {
|
||||
HistoryFileFormat::Sqlite => {
|
||||
history_path.push("history.sqlite3");
|
||||
}
|
||||
@@ -68,49 +70,48 @@ impl Command for History {
|
||||
// TODO: FIXME also clear the auxiliary files when using sqlite
|
||||
Ok(PipelineData::empty())
|
||||
} else {
|
||||
let history_reader: Option<Box<dyn ReedlineHistory>> =
|
||||
match engine_state.config.history_file_format {
|
||||
HistoryFileFormat::Sqlite => SqliteBackedHistory::with_file(history_path)
|
||||
let history_reader: Option<Box<dyn ReedlineHistory>> = match history.file_format {
|
||||
HistoryFileFormat::Sqlite => {
|
||||
SqliteBackedHistory::with_file(history_path.clone(), None, None)
|
||||
.map(|inner| {
|
||||
let boxed: Box<dyn ReedlineHistory> = Box::new(inner);
|
||||
boxed
|
||||
})
|
||||
.ok(),
|
||||
.ok()
|
||||
}
|
||||
|
||||
HistoryFileFormat::PlainText => FileBackedHistory::with_file(
|
||||
engine_state.config.max_history_size as usize,
|
||||
history_path,
|
||||
)
|
||||
.map(|inner| {
|
||||
let boxed: Box<dyn ReedlineHistory> = Box::new(inner);
|
||||
boxed
|
||||
})
|
||||
.ok(),
|
||||
};
|
||||
HistoryFileFormat::PlainText => FileBackedHistory::with_file(
|
||||
history.max_size as usize,
|
||||
history_path.clone(),
|
||||
)
|
||||
.map(|inner| {
|
||||
let boxed: Box<dyn ReedlineHistory> = Box::new(inner);
|
||||
boxed
|
||||
})
|
||||
.ok(),
|
||||
};
|
||||
|
||||
match engine_state.config.history_file_format {
|
||||
match history.file_format {
|
||||
HistoryFileFormat::PlainText => Ok(history_reader
|
||||
.and_then(|h| {
|
||||
h.search(SearchQuery::everything(SearchDirection::Forward, None))
|
||||
.ok()
|
||||
})
|
||||
.map(move |entries| {
|
||||
entries
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(move |(idx, entry)| Value::Record {
|
||||
cols: vec!["command".to_string(), "index".to_string()],
|
||||
vals: vec![
|
||||
Value::String {
|
||||
val: entry.command_line,
|
||||
span: head,
|
||||
},
|
||||
Value::int(idx as i64, head),
|
||||
],
|
||||
span: head,
|
||||
})
|
||||
entries.into_iter().enumerate().map(move |(idx, entry)| {
|
||||
Value::record(
|
||||
record! {
|
||||
"command" => Value::string(entry.command_line, head),
|
||||
"index" => Value::int(idx as i64, head),
|
||||
},
|
||||
head,
|
||||
)
|
||||
})
|
||||
})
|
||||
.ok_or(ShellError::FileNotFound(head))?
|
||||
.ok_or(ShellError::FileNotFound {
|
||||
file: history_path.display().to_string(),
|
||||
span: head,
|
||||
})?
|
||||
.into_pipeline_data(ctrlc)),
|
||||
HistoryFileFormat::Sqlite => Ok(history_reader
|
||||
.and_then(|h| {
|
||||
@@ -122,12 +123,15 @@ impl Command for History {
|
||||
create_history_record(idx, entry, long, head)
|
||||
})
|
||||
})
|
||||
.ok_or(ShellError::FileNotFound(head))?
|
||||
.ok_or(ShellError::FileNotFound {
|
||||
file: history_path.display().to_string(),
|
||||
span: head,
|
||||
})?
|
||||
.into_pipeline_data(ctrlc)),
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Err(ShellError::FileNotFound(head))
|
||||
Err(ShellError::ConfigDirNotFound { span: Some(head) })
|
||||
}
|
||||
}
|
||||
|
||||
@@ -144,7 +148,7 @@
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
example: "history | wrap cmd | where cmd =~ cargo",
|
||||
example: "history | where command =~ cargo | get command",
|
||||
description: "Search all the commands from history that contains 'cargo'",
|
||||
result: None,
|
||||
},
|
||||
@@ -156,8 +160,8 @@ fn create_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span)
|
||||
//1. Format all the values
|
||||
//2. Create a record of either short or long columns and values
|
||||
|
||||
let item_id_value = Value::Int {
|
||||
val: match entry.id {
|
||||
let item_id_value = Value::int(
|
||||
match entry.id {
|
||||
Some(id) => {
|
||||
let ids = id.to_string();
|
||||
match ids.parse::<i64>() {
|
||||
@@ -167,21 +171,18 @@ fn create_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span)
|
||||
}
|
||||
None => 0i64,
|
||||
},
|
||||
span: head,
|
||||
};
|
||||
let start_timestamp_value = Value::String {
|
||||
val: match entry.start_timestamp {
|
||||
head,
|
||||
);
|
||||
let start_timestamp_value = Value::string(
|
||||
match entry.start_timestamp {
|
||||
Some(time) => time.to_string(),
|
||||
None => "".into(),
|
||||
},
|
||||
span: head,
|
||||
};
|
||||
let command_value = Value::String {
|
||||
val: entry.command_line,
|
||||
span: head,
|
||||
};
|
||||
let session_id_value = Value::Int {
|
||||
val: match entry.session_id {
|
||||
head,
|
||||
);
|
||||
let command_value = Value::string(entry.command_line, head);
|
||||
let session_id_value = Value::int(
|
||||
match entry.session_id {
|
||||
Some(sid) => {
|
||||
let sids = sid.to_string();
|
||||
match sids.parse::<i64>() {
|
||||
@@ -191,74 +192,56 @@ fn create_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span)
|
||||
}
|
||||
None => 0i64,
|
||||
},
|
||||
span: head,
|
||||
};
|
||||
let hostname_value = Value::String {
|
||||
val: match entry.hostname {
|
||||
head,
|
||||
);
|
||||
let hostname_value = Value::string(
|
||||
match entry.hostname {
|
||||
Some(host) => host,
|
||||
None => "".into(),
|
||||
},
|
||||
span: head,
|
||||
};
|
||||
let cwd_value = Value::String {
|
||||
val: match entry.cwd {
|
||||
head,
|
||||
);
|
||||
let cwd_value = Value::string(
|
||||
match entry.cwd {
|
||||
Some(cwd) => cwd,
|
||||
None => "".into(),
|
||||
},
|
||||
span: head,
|
||||
};
|
||||
let duration_value = Value::Duration {
|
||||
val: match entry.duration {
|
||||
head,
|
||||
);
|
||||
let duration_value = Value::duration(
|
||||
match entry.duration {
|
||||
Some(d) => d.as_nanos().try_into().unwrap_or(0),
|
||||
None => 0,
|
||||
},
|
||||
span: head,
|
||||
};
|
||||
head,
|
||||
);
|
||||
let exit_status_value = Value::int(entry.exit_status.unwrap_or(0), head);
|
||||
let index_value = Value::int(idx as i64, head);
|
||||
if long {
|
||||
Value::Record {
|
||||
cols: vec![
|
||||
"item_id".into(),
|
||||
"start_timestamp".into(),
|
||||
"command".to_string(),
|
||||
"session_id".into(),
|
||||
"hostname".into(),
|
||||
"cwd".into(),
|
||||
"duration".into(),
|
||||
"exit_status".into(),
|
||||
"idx".to_string(),
|
||||
],
|
||||
vals: vec![
|
||||
item_id_value,
|
||||
start_timestamp_value,
|
||||
command_value,
|
||||
session_id_value,
|
||||
hostname_value,
|
||||
cwd_value,
|
||||
duration_value,
|
||||
exit_status_value,
|
||||
index_value,
|
||||
],
|
||||
span: head,
|
||||
}
|
||||
Value::record(
|
||||
record! {
|
||||
"item_id" => item_id_value,
|
||||
"start_timestamp" => start_timestamp_value,
|
||||
"command" => command_value,
|
||||
"session_id" => session_id_value,
|
||||
"hostname" => hostname_value,
|
||||
"cwd" => cwd_value,
|
||||
"duration" => duration_value,
|
||||
"exit_status" => exit_status_value,
|
||||
"idx" => index_value,
|
||||
},
|
||||
head,
|
||||
)
|
||||
} else {
|
||||
Value::Record {
|
||||
cols: vec![
|
||||
"start_timestamp".into(),
|
||||
"command".to_string(),
|
||||
"cwd".into(),
|
||||
"duration".into(),
|
||||
"exit_status".into(),
|
||||
],
|
||||
vals: vec![
|
||||
start_timestamp_value,
|
||||
command_value,
|
||||
cwd_value,
|
||||
duration_value,
|
||||
exit_status_value,
|
||||
],
|
||||
span: head,
|
||||
}
|
||||
Value::record(
|
||||
record! {
|
||||
"start_timestamp" => start_timestamp_value,
|
||||
"command" => command_value,
|
||||
"cwd" => cwd_value,
|
||||
"duration" => duration_value,
|
||||
"exit_status" => exit_status_value,
|
||||
},
|
||||
head,
|
||||
)
|
||||
}
|
||||
}
|
@@ -18,7 +18,7 @@ impl Command for HistorySession {

    fn signature(&self) -> nu_protocol::Signature {
        Signature::build("history session")
            .category(Category::Misc)
            .category(Category::History)
            .input_output_types(vec![(Type::Nothing, Type::Int)])
    }
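Most of the churn in the history code above is one API migration: struct-literal variants such as `Value::Record { cols, vals, span }` and `Value::String { val, span }` give way to constructor functions plus the `record!` macro. A minimal sketch of the new style (the field values are illustrative, not taken from the history code):

```rust
use nu_protocol::{record, Span, Value};

fn history_row(span: Span) -> Value {
    // 0.91-era style: helper constructors and the record! macro instead of
    // building Value::Record { cols, vals, span } by hand.
    Value::record(
        record! {
            "command" => Value::string("cargo build", span),
            "index" => Value::int(0, span),
        },
        span,
    )
}
```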
5 crates/nu-cli/src/commands/history/mod.rs (new file)
@@ -0,0 +1,5 @@
mod history_;
mod history_session;

pub use history_::History;
pub use history_session::HistorySession;
@@ -24,7 +24,10 @@ impl Command for Keybindings {
|
||||
}
|
||||
|
||||
fn extra_usage(&self) -> &str {
|
||||
"You must use one of the following subcommands. Using this command as-is will only produce this help message."
|
||||
r#"You must use one of the following subcommands. Using this command as-is will only produce this help message.
|
||||
|
||||
For more information on input and keybindings, check:
|
||||
https://www.nushell.sh/book/line_editor.html"#
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
@@ -38,16 +41,16 @@ impl Command for Keybindings {
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
Ok(Value::String {
|
||||
val: get_full_help(
|
||||
Ok(Value::string(
|
||||
get_full_help(
|
||||
&Keybindings.signature(),
|
||||
&Keybindings.examples(),
|
||||
engine_state,
|
||||
stack,
|
||||
self.is_parser_keyword(),
|
||||
),
|
||||
span: call.head,
|
||||
}
|
||||
call.head,
|
||||
)
|
||||
.into_pipeline_data())
|
||||
}
|
||||
}
|
||||
|
@@ -1,7 +1,7 @@
|
||||
use nu_protocol::{
|
||||
ast::Call,
|
||||
engine::{Command, EngineState, Stack},
|
||||
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Type, Value,
|
||||
record, Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Type, Value,
|
||||
};
|
||||
use reedline::get_reedline_default_keybindings;
|
||||
|
||||
@@ -41,43 +41,18 @@ impl Command for KeybindingsDefault {
|
||||
let records = get_reedline_default_keybindings()
|
||||
.into_iter()
|
||||
.map(|(mode, modifier, code, event)| {
|
||||
let mode = Value::String {
|
||||
val: mode,
|
||||
span: call.head,
|
||||
};
|
||||
|
||||
let modifier = Value::String {
|
||||
val: modifier,
|
||||
span: call.head,
|
||||
};
|
||||
|
||||
let code = Value::String {
|
||||
val: code,
|
||||
span: call.head,
|
||||
};
|
||||
|
||||
let event = Value::String {
|
||||
val: event,
|
||||
span: call.head,
|
||||
};
|
||||
|
||||
Value::Record {
|
||||
cols: vec![
|
||||
"mode".to_string(),
|
||||
"modifier".to_string(),
|
||||
"code".to_string(),
|
||||
"event".to_string(),
|
||||
],
|
||||
vals: vec![mode, modifier, code, event],
|
||||
span: call.head,
|
||||
}
|
||||
Value::record(
|
||||
record! {
|
||||
"mode" => Value::string(mode, call.head),
|
||||
"modifier" => Value::string(modifier, call.head),
|
||||
"code" => Value::string(code, call.head),
|
||||
"event" => Value::string(event, call.head),
|
||||
},
|
||||
call.head,
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(Value::List {
|
||||
vals: records,
|
||||
span: call.head,
|
||||
}
|
||||
.into_pipeline_data())
|
||||
Ok(Value::list(records, call.head).into_pipeline_data())
|
||||
}
|
||||
}
|
||||
|
@@ -1,7 +1,8 @@
|
||||
use nu_protocol::{
|
||||
ast::Call,
|
||||
engine::{Command, EngineState, Stack},
|
||||
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, Type, Value,
|
||||
record, Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, Type,
|
||||
Value,
|
||||
};
|
||||
use reedline::{
|
||||
get_reedline_edit_commands, get_reedline_keybinding_modifiers, get_reedline_keycodes,
|
||||
@@ -35,7 +36,7 @@ impl Command for KeybindingsList {
|
||||
vec![
|
||||
Example {
|
||||
description: "Get list of key modifiers",
|
||||
example: "keybindings list -m",
|
||||
example: "keybindings list --modifiers",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
@@ -62,23 +63,19 @@ impl Command for KeybindingsList {
|
||||
let all_options = ["modifiers", "keycodes", "edits", "modes", "events"];
|
||||
all_options
|
||||
.iter()
|
||||
.flat_map(|argument| get_records(argument, &call.head))
|
||||
.flat_map(|argument| get_records(argument, call.head))
|
||||
.collect()
|
||||
} else {
|
||||
call.named_iter()
|
||||
.flat_map(|(argument, _, _)| get_records(argument.item.as_str(), &call.head))
|
||||
.flat_map(|(argument, _, _)| get_records(argument.item.as_str(), call.head))
|
||||
.collect()
|
||||
};
|
||||
|
||||
Ok(Value::List {
|
||||
vals: records,
|
||||
span: call.head,
|
||||
}
|
||||
.into_pipeline_data())
|
||||
Ok(Value::list(records, call.head).into_pipeline_data())
|
||||
}
|
||||
}
|
||||
|
||||
fn get_records(entry_type: &str, span: &Span) -> Vec<Value> {
|
||||
fn get_records(entry_type: &str, span: Span) -> Vec<Value> {
|
||||
let values = match entry_type {
|
||||
"modifiers" => get_reedline_keybinding_modifiers().sorted(),
|
||||
"keycodes" => get_reedline_keycodes().sorted(),
|
||||
@ -95,16 +92,14 @@ fn get_records(entry_type: &str, span: &Span) -> Vec<Value> {
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn convert_to_record(edit: &str, entry_type: &str, span: &Span) -> Value {
|
||||
let entry_type = Value::string(entry_type, *span);
|
||||
|
||||
let name = Value::string(edit, *span);
|
||||
|
||||
Value::Record {
|
||||
cols: vec!["type".to_string(), "name".to_string()],
|
||||
vals: vec![entry_type, name],
|
||||
span: *span,
|
||||
}
|
||||
fn convert_to_record(edit: &str, entry_type: &str, span: Span) -> Value {
|
||||
Value::record(
|
||||
record! {
|
||||
"type" => Value::string(entry_type, span),
|
||||
"name" => Value::string(edit, span),
|
||||
},
|
||||
span,
|
||||
)
|
||||
}
|
||||
|
||||
// Helper to sort a vec and return a vec
|
||||
|
@ -1,9 +1,11 @@
|
||||
use crossterm::execute;
|
||||
use crossterm::QueueableCommand;
|
||||
use crossterm::{event::Event, event::KeyCode, event::KeyEvent, terminal};
|
||||
use nu_protocol::ast::Call;
|
||||
use nu_protocol::engine::{Command, EngineState, Stack};
|
||||
use nu_protocol::{
|
||||
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, Type, Value,
|
||||
record, Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, Type,
|
||||
Value,
|
||||
};
|
||||
use std::io::{stdout, Write};
|
||||
|
||||
@ -19,6 +21,10 @@ impl Command for KeybindingsListen {
|
||||
"Get input from the user."
|
||||
}
|
||||
|
||||
fn extra_usage(&self) -> &str {
|
||||
"This is an internal debugging tool. For better output, try `input listen --types [key]`"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(self.name())
|
||||
.category(Category::Platform)
|
||||
@ -39,13 +45,13 @@ impl Command for KeybindingsListen {
|
||||
Ok(v) => Ok(v.into_pipeline_data()),
|
||||
Err(e) => {
|
||||
terminal::disable_raw_mode()?;
|
||||
Err(ShellError::GenericError(
|
||||
"Error with input".to_string(),
|
||||
"".to_string(),
|
||||
None,
|
||||
Some(e.to_string()),
|
||||
Vec::new(),
|
||||
))
|
||||
Err(ShellError::GenericError {
|
||||
error: "Error with input".into(),
|
||||
msg: "".into(),
|
||||
span: None,
|
||||
help: Some(e.to_string()),
|
||||
inner: vec![],
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -64,6 +70,32 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
|
||||
|
||||
stdout().flush()?;
|
||||
terminal::enable_raw_mode()?;
|
||||
|
||||
if config.use_kitty_protocol {
|
||||
if let Ok(false) = crossterm::terminal::supports_keyboard_enhancement() {
|
||||
println!("WARN: The terminal doesn't support use_kitty_protocol config.\r");
|
||||
}
|
||||
|
||||
// enable kitty protocol
|
||||
//
|
||||
// Note that, currently, only the following support this protocol:
|
||||
// * [kitty terminal](https://sw.kovidgoyal.net/kitty/)
|
||||
// * [foot terminal](https://codeberg.org/dnkl/foot/issues/319)
|
||||
// * [WezTerm terminal](https://wezfurlong.org/wezterm/config/lua/config/enable_kitty_keyboard.html)
|
||||
// * [notcurses library](https://github.com/dankamongmen/notcurses/issues/2131)
|
||||
// * [neovim text editor](https://github.com/neovim/neovim/pull/18181)
|
||||
// * [kakoune text editor](https://github.com/mawww/kakoune/issues/4103)
|
||||
// * [dte text editor](https://gitlab.com/craigbarnes/dte/-/issues/138)
|
||||
//
|
||||
// Refer to https://sw.kovidgoyal.net/kitty/keyboard-protocol/ if you're curious.
|
||||
let _ = execute!(
|
||||
stdout(),
|
||||
crossterm::event::PushKeyboardEnhancementFlags(
|
||||
crossterm::event::KeyboardEnhancementFlags::DISAMBIGUATE_ESCAPE_CODES
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
let mut stdout = std::io::BufWriter::new(std::io::stderr());
|
||||
|
||||
loop {
|
||||
@ -78,10 +110,9 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
|
||||
let v = print_events_helper(event)?;
|
||||
// Print out the record
|
||||
let o = match v {
|
||||
Value::Record { cols, vals, .. } => cols
|
||||
Value::Record { val, .. } => val
|
||||
.iter()
|
||||
.zip(vals.iter())
|
||||
.map(|(x, y)| format!("{}: {}", x, y.into_string("", config)))
|
||||
.map(|(x, y)| format!("{}: {}", x, y.to_expanded_string("", config)))
|
||||
.collect::<Vec<String>>()
|
||||
.join(", "),
|
||||
|
||||
@ -91,6 +122,14 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
|
||||
stdout.queue(crossterm::style::Print("\r\n"))?;
|
||||
stdout.flush()?;
|
||||
}
|
||||
|
||||
if config.use_kitty_protocol {
|
||||
let _ = execute!(
|
||||
std::io::stdout(),
|
||||
crossterm::event::PopKeyboardEnhancementFlags
|
||||
);
|
||||
}
|
||||
|
||||
terminal::disable_raw_mode()?;
|
||||
|
||||
Ok(Value::nothing(Span::unknown()))
|
||||
@ -111,54 +150,29 @@ fn print_events_helper(event: Event) -> Result<Value, ShellError> {
|
||||
{
|
||||
match code {
|
||||
KeyCode::Char(c) => {
|
||||
let record = Value::Record {
|
||||
cols: vec![
|
||||
"char".into(),
|
||||
"code".into(),
|
||||
"modifier".into(),
|
||||
"flags".into(),
|
||||
"kind".into(),
|
||||
"state".into(),
|
||||
],
|
||||
vals: vec![
|
||||
Value::string(format!("{c}"), Span::unknown()),
|
||||
Value::string(format!("{:#08x}", u32::from(c)), Span::unknown()),
|
||||
Value::string(format!("{modifiers:?}"), Span::unknown()),
|
||||
Value::string(format!("{modifiers:#08b}"), Span::unknown()),
|
||||
Value::string(format!("{kind:?}"), Span::unknown()),
|
||||
Value::string(format!("{state:?}"), Span::unknown()),
|
||||
],
|
||||
span: Span::unknown(),
|
||||
let record = record! {
|
||||
"char" => Value::string(format!("{c}"), Span::unknown()),
|
||||
"code" => Value::string(format!("{:#08x}", u32::from(c)), Span::unknown()),
|
||||
"modifier" => Value::string(format!("{modifiers:?}"), Span::unknown()),
|
||||
"flags" => Value::string(format!("{modifiers:#08b}"), Span::unknown()),
|
||||
"kind" => Value::string(format!("{kind:?}"), Span::unknown()),
|
||||
"state" => Value::string(format!("{state:?}"), Span::unknown()),
|
||||
};
|
||||
Ok(record)
|
||||
Ok(Value::record(record, Span::unknown()))
|
||||
}
|
||||
_ => {
|
||||
let record = Value::Record {
|
||||
cols: vec![
|
||||
"code".into(),
|
||||
"modifier".into(),
|
||||
"flags".into(),
|
||||
"kind".into(),
|
||||
"state".into(),
|
||||
],
|
||||
vals: vec![
|
||||
Value::string(format!("{code:?}"), Span::unknown()),
|
||||
Value::string(format!("{modifiers:?}"), Span::unknown()),
|
||||
Value::string(format!("{modifiers:#08b}"), Span::unknown()),
|
||||
Value::string(format!("{kind:?}"), Span::unknown()),
|
||||
Value::string(format!("{state:?}"), Span::unknown()),
|
||||
],
|
||||
span: Span::unknown(),
|
||||
let record = record! {
|
||||
"code" => Value::string(format!("{code:?}"), Span::unknown()),
|
||||
"modifier" => Value::string(format!("{modifiers:?}"), Span::unknown()),
|
||||
"flags" => Value::string(format!("{modifiers:#08b}"), Span::unknown()),
|
||||
"kind" => Value::string(format!("{kind:?}"), Span::unknown()),
|
||||
"state" => Value::string(format!("{state:?}"), Span::unknown()),
|
||||
};
|
||||
Ok(record)
|
||||
Ok(Value::record(record, Span::unknown()))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let record = Value::Record {
|
||||
cols: vec!["event".into()],
|
||||
vals: vec![Value::string(format!("{event:?}"), Span::unknown())],
|
||||
span: Span::unknown(),
|
||||
};
|
||||
Ok(record)
|
||||
let record = record! { "event" => Value::string(format!("{event:?}"), Span::unknown()) };
|
||||
Ok(Value::record(record, Span::unknown()))
|
||||
}
|
||||
}
|
||||
|
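The keybindings listen hunks above wrap the event loop in raw mode and, when use_kitty_protocol is enabled, push and pop crossterm keyboard-enhancement flags around it. A hedged sketch of that enable/listen/restore sequence, assuming a recent crossterm where these calls return std::io::Result; the event loop itself is elided:

use std::io::stdout;

use crossterm::event::{
    KeyboardEnhancementFlags, PopKeyboardEnhancementFlags, PushKeyboardEnhancementFlags,
};
use crossterm::{execute, terminal};

fn with_kitty_protocol() -> std::io::Result<()> {
    terminal::enable_raw_mode()?;

    // Not every terminal implements the protocol; warn instead of failing.
    if let Ok(false) = terminal::supports_keyboard_enhancement() {
        println!("WARN: the terminal doesn't support the kitty keyboard protocol\r");
    }

    // Ask for disambiguated escape codes while listening for events.
    execute!(
        stdout(),
        PushKeyboardEnhancementFlags(KeyboardEnhancementFlags::DISAMBIGUATE_ESCAPE_CODES)
    )?;

    // ... read and print events here ...

    // Undo both changes before handing the terminal back.
    execute!(stdout(), PopKeyboardEnhancementFlags)?;
    terminal::disable_raw_mode()?;
    Ok(())
}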
@@ -1,15 +1,13 @@
mod commandline;
mod default_context;
mod history;
mod history_session;
mod keybindings;
mod keybindings_default;
mod keybindings_list;
mod keybindings_listen;

pub use commandline::Commandline;
pub use history::History;
pub use history_session::HistorySession;
pub use commandline::{Commandline, CommandlineEdit, CommandlineGetCursor, CommandlineSetCursor};
pub use history::{History, HistorySession};
pub use keybindings::Keybindings;
pub use keybindings_default::KeybindingsDefault;
pub use keybindings_list::KeybindingsList;
@ -43,9 +43,9 @@ impl CommandCompletion {
|
||||
if let Some(paths) = paths {
|
||||
if let Ok(paths) = paths.as_list() {
|
||||
for path in paths {
|
||||
let path = path.as_string().unwrap_or_default();
|
||||
let path = path.coerce_str().unwrap_or_default();
|
||||
|
||||
if let Ok(mut contents) = std::fs::read_dir(path) {
|
||||
if let Ok(mut contents) = std::fs::read_dir(path.as_ref()) {
|
||||
while let Some(Ok(item)) = contents.next() {
|
||||
if self.engine_state.config.max_external_completion_results
|
||||
> executables.len() as i64
|
||||
@ -94,6 +94,7 @@ impl CommandCompletion {
|
||||
.map(move |x| Suggestion {
|
||||
value: String::from_utf8_lossy(&x.0).to_string(),
|
||||
description: x.1,
|
||||
style: None,
|
||||
extra: None,
|
||||
span: reedline::Span::new(span.start - offset, span.end - offset),
|
||||
append_whitespace: true,
|
||||
@ -110,6 +111,7 @@ impl CommandCompletion {
|
||||
.map(move |x| Suggestion {
|
||||
value: x,
|
||||
description: None,
|
||||
style: None,
|
||||
extra: None,
|
||||
span: reedline::Span::new(span.start - offset, span.end - offset),
|
||||
append_whitespace: true,
|
||||
@ -123,6 +125,7 @@ impl CommandCompletion {
|
||||
results.push(Suggestion {
|
||||
value: format!("^{}", external.value),
|
||||
description: None,
|
||||
style: None,
|
||||
extra: None,
|
||||
span: external.span,
|
||||
append_whitespace: true,
|
||||
@ -234,7 +237,7 @@ pub fn is_passthrough_command(working_set_file_contents: &[(Vec<u8>, usize, usiz
|
||||
let cur_pos = find_non_whitespace_index(contents, last_pipe_pos);
|
||||
|
||||
let result = match contents.get(cur_pos..) {
|
||||
Some(contents) => contents.starts_with(b"sudo "),
|
||||
Some(contents) => contents.starts_with(b"sudo ") || contents.starts_with(b"doas "),
|
||||
None => false,
|
||||
};
|
||||
if result {
|
||||
@ -250,7 +253,7 @@ mod command_completions_tests {
|
||||
|
||||
#[test]
|
||||
fn test_find_non_whitespace_index() {
|
||||
let commands = vec![
|
||||
let commands = [
|
||||
(" hello", 4),
|
||||
("sudo ", 0),
|
||||
(" sudo ", 2),
|
||||
@ -263,14 +266,14 @@ mod command_completions_tests {
|
||||
(" hello sud", 1),
|
||||
];
|
||||
for (idx, ele) in commands.iter().enumerate() {
|
||||
let index = find_non_whitespace_index(&Vec::from(ele.0.as_bytes()), 0);
|
||||
let index = find_non_whitespace_index(ele.0.as_bytes(), 0);
|
||||
assert_eq!(index, ele.1, "Failed on index {}", idx);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_last_command_passthrough() {
|
||||
let commands = vec![
|
||||
let commands = [
|
||||
(" hello", false),
|
||||
(" sudo ", true),
|
||||
("sudo ", true),
|
||||
|
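The command completion hunks above extend the passthrough test so doas is treated like sudo when deciding whether the last command needs the terminal. A standalone sketch of that byte-prefix check; the function name and the tests are illustrative only, not the crate's API:

/// Returns true if the trimmed command line starts with a passthrough wrapper.
fn is_passthrough(line: &[u8]) -> bool {
    // Skip leading whitespace, then compare raw bytes, mirroring the
    // starts_with(b"sudo ") || starts_with(b"doas ") test in the hunk above.
    let start = line
        .iter()
        .position(|b| !b.is_ascii_whitespace())
        .unwrap_or(line.len());
    let rest = &line[start..];
    rest.starts_with(b"sudo ") || rest.starts_with(b"doas ")
}

#[test]
fn passthrough_examples() {
    assert!(is_passthrough(b"  sudo vim /etc/hosts"));
    assert!(is_passthrough(b"doas ls"));
    assert!(!is_passthrough(b"echo sudo "));
}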
@ -1,7 +1,8 @@
|
||||
use crate::completions::{
|
||||
CommandCompletion, Completer, CompletionOptions, CustomCompletion, DirectoryCompletion,
|
||||
DotNuCompletion, FileCompletion, FlagCompletion, MatchAlgorithm, VariableCompletion,
|
||||
DotNuCompletion, FileCompletion, FlagCompletion, VariableCompletion,
|
||||
};
|
||||
use nu_color_config::{color_record_to_nustyle, lookup_ansi_color_style};
|
||||
use nu_engine::eval_block;
|
||||
use nu_parser::{flatten_expression, parse, FlatShape};
|
||||
use nu_protocol::{
|
||||
@ -39,15 +40,12 @@ impl NuCompleter {
|
||||
) -> Vec<Suggestion> {
|
||||
let config = self.engine_state.get_config();
|
||||
|
||||
let mut options = CompletionOptions {
|
||||
let options = CompletionOptions {
|
||||
case_sensitive: config.case_sensitive_completions,
|
||||
match_algorithm: config.completion_algorithm.into(),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
if config.completion_algorithm == "fuzzy" {
|
||||
options.match_algorithm = MatchAlgorithm::Fuzzy;
|
||||
}
|
||||
|
||||
// Fetch
|
||||
let mut suggestions =
|
||||
completer.fetch(working_set, prefix.clone(), new_span, offset, pos, &options);
|
||||
@ -67,20 +65,20 @@ impl NuCompleter {
|
||||
) -> Option<Vec<Suggestion>> {
|
||||
let stack = self.stack.clone();
|
||||
let block = self.engine_state.get_block(block_id);
|
||||
let mut callee_stack = stack.gather_captures(&block.captures);
|
||||
let mut callee_stack = stack.gather_captures(&self.engine_state, &block.captures);
|
||||
|
||||
// Line
|
||||
if let Some(pos_arg) = block.signature.required_positional.get(0) {
|
||||
if let Some(pos_arg) = block.signature.required_positional.first() {
|
||||
if let Some(var_id) = pos_arg.var_id {
|
||||
callee_stack.add_var(
|
||||
var_id,
|
||||
Value::List {
|
||||
vals: spans
|
||||
Value::list(
|
||||
spans
|
||||
.iter()
|
||||
.map(|it| Value::string(it, Span::unknown()))
|
||||
.collect(),
|
||||
span: Span::unknown(),
|
||||
},
|
||||
Span::unknown(),
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
@ -97,7 +95,7 @@ impl NuCompleter {
|
||||
match result {
|
||||
Ok(pd) => {
|
||||
let value = pd.into_value(span);
|
||||
if let Value::List { vals, span: _ } = value {
|
||||
if let Value::List { vals, .. } = value {
|
||||
let result =
|
||||
map_value_completions(vals.iter(), Span::new(span.start, span.end), offset);
|
||||
|
||||
@ -113,10 +111,16 @@ impl NuCompleter {
|
||||
fn completion_helper(&mut self, line: &str, pos: usize) -> Vec<Suggestion> {
|
||||
let mut working_set = StateWorkingSet::new(&self.engine_state);
|
||||
let offset = working_set.next_span_start();
|
||||
// TODO: Callers should be trimming the line themselves
|
||||
let line = if line.len() > pos { &line[..pos] } else { line };
|
||||
// Adjust offset so that the spans of the suggestions will start at the right
|
||||
// place even with `only_buffer_difference: true`
|
||||
let fake_offset = offset + line.len() - pos;
|
||||
let pos = offset + line.len();
|
||||
let initial_line = line.to_string();
|
||||
let mut line = line.to_string();
|
||||
line.insert(pos, 'a');
|
||||
let pos = offset + pos;
|
||||
line.push('a');
|
||||
|
||||
let config = self.engine_state.get_config();
|
||||
|
||||
let output = parse(&mut working_set, Some("completer"), line.as_bytes(), false);
|
||||
@ -125,35 +129,47 @@ impl NuCompleter {
|
||||
for pipeline_element in pipeline.elements {
|
||||
match pipeline_element {
|
||||
PipelineElement::Expression(_, expr)
|
||||
| PipelineElement::Redirection(_, _, expr)
|
||||
| PipelineElement::ErrPipedExpression(_, expr)
|
||||
| PipelineElement::OutErrPipedExpression(_, expr)
|
||||
| PipelineElement::Redirection(_, _, expr, _)
|
||||
| PipelineElement::And(_, expr)
|
||||
| PipelineElement::Or(_, expr)
|
||||
| PipelineElement::SameTargetRedirection { cmd: (_, expr), .. }
|
||||
| PipelineElement::SeparateRedirection { out: (_, expr), .. } => {
|
||||
| PipelineElement::SeparateRedirection {
|
||||
out: (_, expr, _), ..
|
||||
} => {
|
||||
let flattened: Vec<_> = flatten_expression(&working_set, &expr);
|
||||
let mut spans: Vec<String> = vec![];
|
||||
|
||||
for (flat_idx, flat) in flattened.iter().enumerate() {
|
||||
let is_passthrough_command = spans
|
||||
.first()
|
||||
.filter(|content| *content == &String::from("sudo"))
|
||||
.filter(|content| {
|
||||
content.as_str() == "sudo" || content.as_str() == "doas"
|
||||
})
|
||||
.is_some();
|
||||
// Read the current span to string
|
||||
let current_span = working_set.get_span_contents(flat.0).to_vec();
|
||||
let current_span_str = String::from_utf8_lossy(&current_span);
|
||||
|
||||
let is_last_span = pos >= flat.0.start && pos < flat.0.end;
|
||||
|
||||
// Skip the last 'a' as span item
|
||||
if flat_idx == flattened.len() - 1 {
|
||||
let mut chars = current_span_str.chars();
|
||||
chars.next_back();
|
||||
let current_span_str = chars.as_str().to_owned();
|
||||
spans.push(current_span_str.to_string());
|
||||
if is_last_span {
|
||||
let offset = pos - flat.0.start;
|
||||
if offset == 0 {
|
||||
spans.push(String::new())
|
||||
} else {
|
||||
let mut current_span_str = current_span_str.to_string();
|
||||
current_span_str.remove(offset);
|
||||
spans.push(current_span_str);
|
||||
}
|
||||
} else {
|
||||
spans.push(current_span_str.to_string());
|
||||
}
|
||||
|
||||
// Complete based on the last span
|
||||
if pos >= flat.0.start && pos < flat.0.end {
|
||||
if is_last_span {
|
||||
// Context variables
|
||||
let most_left_var =
|
||||
most_left_variable(flat_idx, &working_set, flattened.clone());
|
||||
@ -179,7 +195,7 @@ impl NuCompleter {
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
offset,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
}
|
||||
@ -193,7 +209,7 @@ impl NuCompleter {
|
||||
&working_set,
|
||||
prefix.clone(),
|
||||
new_span,
|
||||
offset,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
|
||||
@ -204,9 +220,12 @@ impl NuCompleter {
|
||||
// We got no results for internal completion
|
||||
// now we can check if external completer is set and use it
|
||||
if let Some(block_id) = config.external_completer {
|
||||
if let Some(external_result) = self
|
||||
.external_completion(block_id, &spans, offset, new_span)
|
||||
{
|
||||
if let Some(external_result) = self.external_completion(
|
||||
block_id,
|
||||
&spans,
|
||||
fake_offset,
|
||||
new_span,
|
||||
) {
|
||||
return external_result;
|
||||
}
|
||||
}
|
||||
@ -230,7 +249,7 @@ impl NuCompleter {
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
offset,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
}
|
||||
@ -243,29 +262,35 @@ impl NuCompleter {
|
||||
working_set.get_span_contents(previous_expr.0).to_vec();
|
||||
|
||||
// Completion for .nu files
|
||||
if prev_expr_str == b"use" || prev_expr_str == b"source-env"
|
||||
if prev_expr_str == b"use"
|
||||
|| prev_expr_str == b"overlay use"
|
||||
|| prev_expr_str == b"source-env"
|
||||
{
|
||||
let mut completer =
|
||||
DotNuCompletion::new(self.engine_state.clone());
|
||||
let mut completer = DotNuCompletion::new(
|
||||
self.engine_state.clone(),
|
||||
self.stack.clone(),
|
||||
);
|
||||
|
||||
return self.process_completion(
|
||||
&mut completer,
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
offset,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
} else if prev_expr_str == b"ls" {
|
||||
let mut completer =
|
||||
FileCompletion::new(self.engine_state.clone());
|
||||
let mut completer = FileCompletion::new(
|
||||
self.engine_state.clone(),
|
||||
self.stack.clone(),
|
||||
);
|
||||
|
||||
return self.process_completion(
|
||||
&mut completer,
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
offset,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
}
|
||||
@ -287,33 +312,37 @@ impl NuCompleter {
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
offset,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
}
|
||||
FlatShape::Directory => {
|
||||
let mut completer =
|
||||
DirectoryCompletion::new(self.engine_state.clone());
|
||||
let mut completer = DirectoryCompletion::new(
|
||||
self.engine_state.clone(),
|
||||
self.stack.clone(),
|
||||
);
|
||||
|
||||
return self.process_completion(
|
||||
&mut completer,
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
offset,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
}
|
||||
FlatShape::Filepath | FlatShape::GlobPattern => {
|
||||
let mut completer =
|
||||
FileCompletion::new(self.engine_state.clone());
|
||||
let mut completer = FileCompletion::new(
|
||||
self.engine_state.clone(),
|
||||
self.stack.clone(),
|
||||
);
|
||||
|
||||
return self.process_completion(
|
||||
&mut completer,
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
offset,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
}
|
||||
@ -332,7 +361,7 @@ impl NuCompleter {
|
||||
&working_set,
|
||||
prefix.clone(),
|
||||
new_span,
|
||||
offset,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
|
||||
@ -343,21 +372,26 @@ impl NuCompleter {
|
||||
// Try to complete using an external completer (if set)
|
||||
if let Some(block_id) = config.external_completer {
|
||||
if let Some(external_result) = self.external_completion(
|
||||
block_id, &spans, offset, new_span,
|
||||
block_id,
|
||||
&spans,
|
||||
fake_offset,
|
||||
new_span,
|
||||
) {
|
||||
return external_result;
|
||||
}
|
||||
}
|
||||
|
||||
// Check for file completion
|
||||
let mut completer =
|
||||
FileCompletion::new(self.engine_state.clone());
|
||||
let mut completer = FileCompletion::new(
|
||||
self.engine_state.clone(),
|
||||
self.stack.clone(),
|
||||
);
|
||||
out = self.process_completion(
|
||||
&mut completer,
|
||||
&working_set,
|
||||
prefix,
|
||||
new_span,
|
||||
offset,
|
||||
fake_offset,
|
||||
pos,
|
||||
);
|
||||
|
||||
@ -440,10 +474,11 @@ pub fn map_value_completions<'a>(
|
||||
) -> Vec<Suggestion> {
|
||||
list.filter_map(move |x| {
|
||||
// Match for string values
|
||||
if let Ok(s) = x.as_string() {
|
||||
if let Ok(s) = x.coerce_string() {
|
||||
return Some(Suggestion {
|
||||
value: s,
|
||||
description: None,
|
||||
style: None,
|
||||
extra: None,
|
||||
span: reedline::Span {
|
||||
start: span.start - offset,
|
||||
@ -454,10 +489,11 @@ pub fn map_value_completions<'a>(
|
||||
}
|
||||
|
||||
// Match for record values
|
||||
if let Ok((cols, vals)) = x.as_record() {
|
||||
if let Ok(record) = x.as_record() {
|
||||
let mut suggestion = Suggestion {
|
||||
value: String::from(""), // Initialize with empty string
|
||||
description: None,
|
||||
style: None,
|
||||
extra: None,
|
||||
span: reedline::Span {
|
||||
start: span.start - offset,
|
||||
@ -467,11 +503,11 @@ pub fn map_value_completions<'a>(
|
||||
};
|
||||
|
||||
// Iterate the cols looking for `value` and `description`
|
||||
cols.iter().zip(vals).for_each(|it| {
|
||||
record.iter().for_each(|it| {
|
||||
// Match `value` column
|
||||
if it.0 == "value" {
|
||||
// Convert the value to string
|
||||
if let Ok(val_str) = it.1.as_string() {
|
||||
if let Ok(val_str) = it.1.coerce_string() {
|
||||
// Update the suggestion value
|
||||
suggestion.value = val_str;
|
||||
}
|
||||
@ -480,11 +516,21 @@ pub fn map_value_completions<'a>(
|
||||
// Match `description` column
|
||||
if it.0 == "description" {
|
||||
// Convert the value to string
|
||||
if let Ok(desc_str) = it.1.as_string() {
|
||||
if let Ok(desc_str) = it.1.coerce_string() {
|
||||
// Update the suggestion value
|
||||
suggestion.description = Some(desc_str);
|
||||
}
|
||||
}
|
||||
|
||||
// Match `style` column
|
||||
if it.0 == "style" {
|
||||
// Convert the value to string
|
||||
suggestion.style = match it.1 {
|
||||
Value::String { val, .. } => Some(lookup_ansi_color_style(val)),
|
||||
Value::Record { .. } => Some(color_record_to_nustyle(it.1)),
|
||||
_ => None,
|
||||
};
|
||||
}
|
||||
});
|
||||
|
||||
return Some(suggestion);
|
||||
@ -518,7 +564,7 @@ mod completer_tests {
|
||||
);
|
||||
|
||||
let mut completer = NuCompleter::new(engine_state.into(), Stack::new());
|
||||
let dataset = vec![
|
||||
let dataset = [
|
||||
("sudo", false, "", Vec::new()),
|
||||
("sudo l", true, "l", vec!["ls", "let", "lines", "loop"]),
|
||||
(" sudo", false, "", Vec::new()),
|
||||
|
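The map_value_completions hunks above read custom-completer output as records with value, description and optional style columns, where style may be a color-name string or a color record. A sketch of building such a record with the nu-protocol constructors already used in this diff; the "green" style and the helper name are only examples:

use nu_protocol::{record, Span, Value};

// Illustrative: a record shaped like the ones map_value_completions accepts.
fn suggestion_record(value: &str, description: &str) -> Value {
    let span = Span::unknown();
    Value::record(
        record! {
            "value" => Value::string(value, span),
            "description" => Value::string(description, span),
            // A string here is passed to lookup_ansi_color_style; a record
            // would go through color_record_to_nustyle instead.
            "style" => Value::string("green", span),
        },
        span,
    )
}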
crates/nu-cli/src/completions/completion_common.rs (new file, 246 lines)
@@ -0,0 +1,246 @@
|
||||
use crate::completions::{matches, CompletionOptions};
|
||||
use nu_ansi_term::Style;
|
||||
use nu_engine::env_to_string;
|
||||
use nu_path::home_dir;
|
||||
use nu_protocol::engine::{EngineState, Stack};
|
||||
use nu_protocol::{engine::StateWorkingSet, Span};
|
||||
use nu_utils::get_ls_colors;
|
||||
use std::ffi::OsStr;
|
||||
use std::path::{is_separator, Component, Path, PathBuf, MAIN_SEPARATOR as SEP};
|
||||
|
||||
fn complete_rec(
|
||||
partial: &[String],
|
||||
cwd: &Path,
|
||||
options: &CompletionOptions,
|
||||
dir: bool,
|
||||
isdir: bool,
|
||||
) -> Vec<PathBuf> {
|
||||
let mut completions = vec![];
|
||||
|
||||
if let Ok(result) = cwd.read_dir() {
|
||||
for entry in result.filter_map(|e| e.ok()) {
|
||||
let entry_name = entry.file_name().to_string_lossy().into_owned();
|
||||
let path = entry.path();
|
||||
|
||||
if !dir || path.is_dir() {
|
||||
match partial.first() {
|
||||
Some(base) if matches(base, &entry_name, options) => {
|
||||
let partial = &partial[1..];
|
||||
if !partial.is_empty() || isdir {
|
||||
completions.extend(complete_rec(partial, &path, options, dir, isdir));
|
||||
if entry_name.eq(base) {
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
completions.push(path)
|
||||
}
|
||||
}
|
||||
None => completions.push(path),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
completions
|
||||
}
|
||||
|
||||
enum OriginalCwd {
|
||||
None,
|
||||
Home(PathBuf),
|
||||
Some(PathBuf),
|
||||
// referencing a single local file
|
||||
Local(PathBuf),
|
||||
}
|
||||
|
||||
impl OriginalCwd {
|
||||
fn apply(&self, p: &Path) -> String {
|
||||
let mut ret = match self {
|
||||
Self::None => p.to_string_lossy().into_owned(),
|
||||
Self::Some(base) => pathdiff::diff_paths(p, base)
|
||||
.unwrap_or(p.to_path_buf())
|
||||
.to_string_lossy()
|
||||
.into_owned(),
|
||||
Self::Home(home) => match p.strip_prefix(home) {
|
||||
Ok(suffix) => format!("~{}{}", SEP, suffix.to_string_lossy()),
|
||||
_ => p.to_string_lossy().into_owned(),
|
||||
},
|
||||
Self::Local(base) => Path::new(".")
|
||||
.join(pathdiff::diff_paths(p, base).unwrap_or(p.to_path_buf()))
|
||||
.to_string_lossy()
|
||||
.into_owned(),
|
||||
};
|
||||
|
||||
if p.is_dir() {
|
||||
ret.push(SEP);
|
||||
}
|
||||
ret
|
||||
}
|
||||
}
|
||||
|
||||
fn surround_remove(partial: &str) -> String {
|
||||
for c in ['`', '"', '\''] {
|
||||
if partial.starts_with(c) {
|
||||
let ret = partial.strip_prefix(c).unwrap_or(partial);
|
||||
return match ret.split(c).collect::<Vec<_>>()[..] {
|
||||
[inside] => inside.to_string(),
|
||||
[inside, outside] if inside.ends_with(is_separator) => format!("{inside}{outside}"),
|
||||
_ => ret.to_string(),
|
||||
};
|
||||
}
|
||||
}
|
||||
partial.to_string()
|
||||
}
|
||||
|
||||
pub fn complete_item(
|
||||
want_directory: bool,
|
||||
span: nu_protocol::Span,
|
||||
partial: &str,
|
||||
cwd: &str,
|
||||
options: &CompletionOptions,
|
||||
engine_state: &EngineState,
|
||||
stack: &Stack,
|
||||
) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
|
||||
let partial = surround_remove(partial);
|
||||
let isdir = partial.ends_with(is_separator);
|
||||
let cwd_pathbuf = Path::new(cwd).to_path_buf();
|
||||
let ls_colors = (engine_state.config.use_ls_colors_completions
|
||||
&& engine_state.config.use_ansi_coloring)
|
||||
.then(|| {
|
||||
let ls_colors_env_str = match stack.get_env_var(engine_state, "LS_COLORS") {
|
||||
Some(v) => env_to_string("LS_COLORS", &v, engine_state, stack).ok(),
|
||||
None => None,
|
||||
};
|
||||
get_ls_colors(ls_colors_env_str)
|
||||
});
|
||||
let mut original_cwd = OriginalCwd::None;
|
||||
let mut components_vec: Vec<Component> = Path::new(&partial).components().collect();
|
||||
|
||||
// Path components that end with a single "." get normalized away,
|
||||
// so if the partial path ends in a literal "." we must add it back in manually
|
||||
if partial.ends_with('.') && partial.len() > 1 {
|
||||
components_vec.push(Component::Normal(OsStr::new(".")));
|
||||
};
|
||||
let mut components = components_vec.into_iter().peekable();
|
||||
|
||||
let mut cwd = match components.peek().cloned() {
|
||||
Some(c @ Component::Prefix(..)) => {
|
||||
// windows only by definition
|
||||
components.next();
|
||||
if let Some(Component::RootDir) = components.peek().cloned() {
|
||||
components.next();
|
||||
};
|
||||
[c, Component::RootDir].iter().collect()
|
||||
}
|
||||
Some(c @ Component::RootDir) => {
|
||||
components.next();
|
||||
PathBuf::from(c.as_os_str())
|
||||
}
|
||||
Some(Component::Normal(home)) if home.to_string_lossy() == "~" => {
|
||||
components.next();
|
||||
original_cwd = OriginalCwd::Home(home_dir().unwrap_or(cwd_pathbuf.clone()));
|
||||
home_dir().unwrap_or(cwd_pathbuf)
|
||||
}
|
||||
Some(Component::CurDir) => {
|
||||
components.next();
|
||||
original_cwd = match components.peek().cloned() {
|
||||
Some(Component::Normal(_)) | None => OriginalCwd::Local(cwd_pathbuf.clone()),
|
||||
_ => OriginalCwd::Some(cwd_pathbuf.clone()),
|
||||
};
|
||||
cwd_pathbuf
|
||||
}
|
||||
_ => {
|
||||
original_cwd = OriginalCwd::Some(cwd_pathbuf.clone());
|
||||
cwd_pathbuf
|
||||
}
|
||||
};
|
||||
|
||||
let mut partial = vec![];
|
||||
|
||||
for component in components {
|
||||
match component {
|
||||
Component::Prefix(..) => unreachable!(),
|
||||
Component::RootDir => unreachable!(),
|
||||
Component::CurDir => {}
|
||||
Component::ParentDir => {
|
||||
if partial.pop().is_none() {
|
||||
cwd.pop();
|
||||
}
|
||||
}
|
||||
Component::Normal(c) => partial.push(c.to_string_lossy().into_owned()),
|
||||
}
|
||||
}
|
||||
|
||||
complete_rec(partial.as_slice(), &cwd, options, want_directory, isdir)
|
||||
.into_iter()
|
||||
.map(|p| {
|
||||
let path = original_cwd.apply(&p);
|
||||
let style = ls_colors.as_ref().map(|lsc| {
|
||||
lsc.style_for_path_with_metadata(
|
||||
&path,
|
||||
std::fs::symlink_metadata(&path).ok().as_ref(),
|
||||
)
|
||||
.map(lscolors::Style::to_nu_ansi_term_style)
|
||||
.unwrap_or_default()
|
||||
});
|
||||
(span, escape_path(path, want_directory), style)
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
// Fix files or folders with quotes or hashes
|
||||
pub fn escape_path(path: String, dir: bool) -> String {
|
||||
// make glob pattern have the highest priority.
|
||||
let glob_contaminated = path.contains(['[', '*', ']', '?']);
|
||||
if glob_contaminated {
|
||||
return if path.contains('\'') {
|
||||
// decide to use double quote, also need to escape `"` in path
|
||||
// or else users can't do anything with completed path either.
|
||||
format!("\"{}\"", path.replace('"', r#"\""#))
|
||||
} else {
|
||||
format!("'{path}'")
|
||||
};
|
||||
}
|
||||
|
||||
let filename_contaminated = !dir && path.contains(['\'', '"', ' ', '#', '(', ')']);
|
||||
let dirname_contaminated = dir && path.contains(['\'', '"', ' ', '#']);
|
||||
let maybe_flag = path.starts_with('-');
|
||||
let maybe_number = path.parse::<f64>().is_ok();
|
||||
if filename_contaminated || dirname_contaminated || maybe_flag || maybe_number {
|
||||
format!("`{path}`")
|
||||
} else {
|
||||
path
|
||||
}
|
||||
}
|
||||
|
||||
pub struct AdjustView {
|
||||
pub prefix: String,
|
||||
pub span: Span,
|
||||
pub readjusted: bool,
|
||||
}
|
||||
|
||||
pub fn adjust_if_intermediate(
|
||||
prefix: &[u8],
|
||||
working_set: &StateWorkingSet,
|
||||
mut span: nu_protocol::Span,
|
||||
) -> AdjustView {
|
||||
let span_contents = String::from_utf8_lossy(working_set.get_span_contents(span)).to_string();
|
||||
let mut prefix = String::from_utf8_lossy(prefix).to_string();
|
||||
|
||||
// A difference of 1 because of the cursor's unicode code point in between.
|
||||
// Using .chars().count() because unicode and Windows.
|
||||
let readjusted = span_contents.chars().count() - prefix.chars().count() > 1;
|
||||
if readjusted {
|
||||
let remnant: String = span_contents
|
||||
.chars()
|
||||
.skip(prefix.chars().count() + 1)
|
||||
.take_while(|&c| !is_separator(c))
|
||||
.collect();
|
||||
prefix.push_str(&remnant);
|
||||
span = Span::new(span.start, span.start + prefix.chars().count() + 1);
|
||||
}
|
||||
AdjustView {
|
||||
prefix,
|
||||
span,
|
||||
readjusted,
|
||||
}
|
||||
}
|
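completion_common.rs above centralizes path completion, and its escape_path decides when a completed path must be quoted: glob metacharacters force quotes, while embedded quotes, spaces, hashes, a leading dash, or anything that parses as a number gets backticks. A simplified standalone mirror of those rules for illustration, not the crate's public API:

// Simplified mirror of the quoting rules in escape_path above; illustrative only.
fn quote_completed_path(path: &str, is_dir: bool) -> String {
    // Glob metacharacters win: quote so the pattern stays literal.
    if path.contains(['[', '*', ']', '?']) {
        return if path.contains('\'') {
            format!("\"{}\"", path.replace('"', "\\\""))
        } else {
            format!("'{path}'")
        };
    }
    let file_needs_ticks = !is_dir && path.contains(['\'', '"', ' ', '#', '(', ')']);
    let dir_needs_ticks = is_dir && path.contains(['\'', '"', ' ', '#']);
    let looks_like_flag = path.starts_with('-');
    let looks_like_number = path.parse::<f64>().is_ok();
    if file_needs_ticks || dir_needs_ticks || looks_like_flag || looks_like_number {
        format!("`{path}`")
    } else {
        path.to_string()
    }
}

// e.g. quote_completed_path("my file.txt", false) == "`my file.txt`"
//      quote_completed_path("src/", true) == "src/"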
@@ -2,6 +2,7 @@ use std::fmt::Display;

use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher};
use nu_parser::trim_quotes_str;
use nu_protocol::CompletionAlgorithm;

#[derive(Copy, Clone)]
pub enum SortBy {
@@ -55,6 +56,15 @@ impl MatchAlgorithm {
}
}

impl From<CompletionAlgorithm> for MatchAlgorithm {
fn from(value: CompletionAlgorithm) -> Self {
match value {
CompletionAlgorithm::Prefix => MatchAlgorithm::Prefix,
CompletionAlgorithm::Fuzzy => MatchAlgorithm::Fuzzy,
}
}
}

impl TryFrom<String> for MatchAlgorithm {
type Error = InvalidMatchAlgorithm;
|
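With the new From<CompletionAlgorithm> impl above, the completer maps the config enum straight into its matcher instead of string-comparing "fuzzy". A tiny standalone sketch of the same pattern using stand-in enums, since the real types live in nu-protocol and nu-cli:

// Stand-ins for nu_protocol::CompletionAlgorithm and the completer's MatchAlgorithm.
#[derive(Clone, Copy, Debug, PartialEq)]
enum ConfigAlgorithm { Prefix, Fuzzy }

#[derive(Clone, Copy, Debug, PartialEq)]
enum Matcher { Prefix, Fuzzy }

impl From<ConfigAlgorithm> for Matcher {
    fn from(value: ConfigAlgorithm) -> Self {
        match value {
            ConfigAlgorithm::Prefix => Matcher::Prefix,
            ConfigAlgorithm::Fuzzy => Matcher::Fuzzy,
        }
    }
}

fn main() {
    // The completer side becomes a plain .into(), no string matching.
    let configured = ConfigAlgorithm::Fuzzy;
    let matcher: Matcher = configured.into();
    assert_eq!(matcher, Matcher::Fuzzy);
}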
@ -5,6 +5,7 @@ use nu_protocol::{
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
PipelineData, Span, Type, Value,
|
||||
};
|
||||
use nu_utils::IgnoreCaseExt;
|
||||
use reedline::Suggestion;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
@ -79,21 +80,20 @@ impl Completer for CustomCompletion {
|
||||
.map(|pd| {
|
||||
let value = pd.into_value(span);
|
||||
match &value {
|
||||
Value::Record { .. } => {
|
||||
let completions = value
|
||||
.get_data_by_key("completions")
|
||||
Value::Record { val, .. } => {
|
||||
let completions = val
|
||||
.get("completions")
|
||||
.and_then(|val| {
|
||||
val.as_list()
|
||||
.ok()
|
||||
.map(|it| map_value_completions(it.iter(), span, offset))
|
||||
})
|
||||
.unwrap_or_default();
|
||||
let options = value.get_data_by_key("options");
|
||||
let options = val.get("options");
|
||||
|
||||
if let Some(Value::Record { .. }) = &options {
|
||||
let options = options.unwrap_or_default();
|
||||
if let Some(Value::Record { val: options, .. }) = &options {
|
||||
let should_sort = options
|
||||
.get_data_by_key("sort")
|
||||
.get("sort")
|
||||
.and_then(|val| val.as_bool().ok())
|
||||
.unwrap_or(false);
|
||||
|
||||
@ -103,11 +103,11 @@ impl Completer for CustomCompletion {
|
||||
|
||||
custom_completion_options = Some(CompletionOptions {
|
||||
case_sensitive: options
|
||||
.get_data_by_key("case_sensitive")
|
||||
.get("case_sensitive")
|
||||
.and_then(|val| val.as_bool().ok())
|
||||
.unwrap_or(true),
|
||||
positional: options
|
||||
.get_data_by_key("positional")
|
||||
.get("positional")
|
||||
.and_then(|val| val.as_bool().ok())
|
||||
.unwrap_or(true),
|
||||
sort_by: if should_sort {
|
||||
@ -115,11 +115,9 @@ impl Completer for CustomCompletion {
|
||||
} else {
|
||||
SortBy::None
|
||||
},
|
||||
match_algorithm: match options
|
||||
.get_data_by_key("completion_algorithm")
|
||||
{
|
||||
match_algorithm: match options.get("completion_algorithm") {
|
||||
Some(option) => option
|
||||
.as_string()
|
||||
.coerce_string()
|
||||
.ok()
|
||||
.and_then(|option| option.try_into().ok())
|
||||
.unwrap_or(MatchAlgorithm::Prefix),
|
||||
@ -156,8 +154,8 @@ fn filter(prefix: &[u8], items: Vec<Suggestion>, options: &CompletionOptions) ->
|
||||
(true, true) => it.value.as_bytes().starts_with(prefix),
|
||||
(true, false) => it.value.contains(std::str::from_utf8(prefix).unwrap_or("")),
|
||||
(false, positional) => {
|
||||
let value = it.value.to_lowercase();
|
||||
let prefix = std::str::from_utf8(prefix).unwrap_or("").to_lowercase();
|
||||
let value = it.value.to_folded_case();
|
||||
let prefix = std::str::from_utf8(prefix).unwrap_or("").to_folded_case();
|
||||
if positional {
|
||||
value.starts_with(&prefix)
|
||||
} else {
|
||||
|
@ -1,55 +1,65 @@
|
||||
use crate::completions::{matches, Completer, CompletionOptions};
|
||||
use crate::completions::{
|
||||
completion_common::{adjust_if_intermediate, complete_item, AdjustView},
|
||||
Completer, CompletionOptions, SortBy,
|
||||
};
|
||||
use nu_ansi_term::Style;
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, StateWorkingSet},
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
levenshtein_distance, Span,
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
use std::path::{Path, MAIN_SEPARATOR as SEP};
|
||||
use std::sync::Arc;
|
||||
|
||||
use super::{partial_from, prepend_base_dir, SortBy};
|
||||
|
||||
const SEP: char = std::path::MAIN_SEPARATOR;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct DirectoryCompletion {
|
||||
engine_state: Arc<EngineState>,
|
||||
stack: Stack,
|
||||
}
|
||||
|
||||
impl DirectoryCompletion {
|
||||
pub fn new(engine_state: Arc<EngineState>) -> Self {
|
||||
Self { engine_state }
|
||||
pub fn new(engine_state: Arc<EngineState>, stack: Stack) -> Self {
|
||||
Self {
|
||||
engine_state,
|
||||
stack,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Completer for DirectoryCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
_: &StateWorkingSet,
|
||||
working_set: &StateWorkingSet,
|
||||
prefix: Vec<u8>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
_: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<Suggestion> {
|
||||
let cwd = self.engine_state.current_work_dir();
|
||||
let partial = String::from_utf8_lossy(&prefix).to_string();
|
||||
let AdjustView { prefix, span, .. } = adjust_if_intermediate(&prefix, working_set, span);
|
||||
|
||||
// Filter only the folders
|
||||
let output: Vec<_> = directory_completion(span, &partial, &cwd, options)
|
||||
.into_iter()
|
||||
.map(move |x| Suggestion {
|
||||
value: x.1,
|
||||
description: None,
|
||||
extra: None,
|
||||
span: reedline::Span {
|
||||
start: x.0.start - offset,
|
||||
end: x.0.end - offset,
|
||||
},
|
||||
append_whitespace: false,
|
||||
})
|
||||
.collect();
|
||||
let output: Vec<_> = directory_completion(
|
||||
span,
|
||||
&prefix,
|
||||
&self.engine_state.current_work_dir(),
|
||||
options,
|
||||
self.engine_state.as_ref(),
|
||||
&self.stack,
|
||||
)
|
||||
.into_iter()
|
||||
.map(move |x| Suggestion {
|
||||
value: x.1,
|
||||
description: None,
|
||||
style: x.2,
|
||||
extra: None,
|
||||
span: reedline::Span {
|
||||
start: x.0.start - offset,
|
||||
end: x.0.end - offset,
|
||||
},
|
||||
append_whitespace: false,
|
||||
})
|
||||
.collect();
|
||||
|
||||
output
|
||||
}
|
||||
@ -63,7 +73,12 @@ impl Completer for DirectoryCompletion {
|
||||
|
||||
match self.get_sort_by() {
|
||||
SortBy::Ascending => {
|
||||
sorted_items.sort_by(|a, b| a.value.cmp(&b.value));
|
||||
sorted_items.sort_by(|a, b| {
|
||||
// Ignore trailing slashes in folder names when sorting
|
||||
a.value
|
||||
.trim_end_matches(SEP)
|
||||
.cmp(b.value.trim_end_matches(SEP))
|
||||
});
|
||||
}
|
||||
SortBy::LevenshteinDistance => {
|
||||
sorted_items.sort_by(|a, b| {
|
||||
@ -105,61 +120,8 @@ pub fn directory_completion(
|
||||
partial: &str,
|
||||
cwd: &str,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<(nu_protocol::Span, String)> {
|
||||
let original_input = partial;
|
||||
|
||||
let (base_dir_name, partial) = partial_from(partial);
|
||||
|
||||
let base_dir = nu_path::expand_path_with(&base_dir_name, cwd);
|
||||
|
||||
// This check is here as base_dir.read_dir() with base_dir == "" will open the current dir
|
||||
// which we don't want in this case (if we did, base_dir would already be ".")
|
||||
if base_dir == Path::new("") {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
if let Ok(result) = base_dir.read_dir() {
|
||||
return result
|
||||
.filter_map(|entry| {
|
||||
entry.ok().and_then(|entry| {
|
||||
if let Ok(metadata) = fs::metadata(entry.path()) {
|
||||
if metadata.is_dir() {
|
||||
let mut file_name = entry.file_name().to_string_lossy().into_owned();
|
||||
if matches(&partial, &file_name, options) {
|
||||
let mut path = if prepend_base_dir(original_input, &base_dir_name) {
|
||||
format!("{base_dir_name}{file_name}")
|
||||
} else {
|
||||
file_name.to_string()
|
||||
};
|
||||
|
||||
if entry.path().is_dir() {
|
||||
path.push(SEP);
|
||||
file_name.push(SEP);
|
||||
}
|
||||
|
||||
// Fix files or folders with quotes or hash
|
||||
if path.contains('\'')
|
||||
|| path.contains('"')
|
||||
|| path.contains(' ')
|
||||
|| path.contains('#')
|
||||
{
|
||||
path = format!("`{path}`");
|
||||
}
|
||||
|
||||
Some((span, path))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
}
|
||||
|
||||
Vec::new()
|
||||
engine_state: &EngineState,
|
||||
stack: &Stack,
|
||||
) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
|
||||
complete_item(true, span, partial, cwd, options, engine_state, stack)
|
||||
}
|
||||
|
@ -1,22 +1,26 @@
|
||||
use crate::completions::{
|
||||
file_path_completion, partial_from, Completer, CompletionOptions, SortBy,
|
||||
};
|
||||
use crate::completions::{file_path_completion, Completer, CompletionOptions, SortBy};
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, StateWorkingSet},
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
Span,
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
use std::sync::Arc;
|
||||
const SEP: char = std::path::MAIN_SEPARATOR;
|
||||
use std::{
|
||||
path::{is_separator, Path, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR},
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct DotNuCompletion {
|
||||
engine_state: Arc<EngineState>,
|
||||
stack: Stack,
|
||||
}
|
||||
|
||||
impl DotNuCompletion {
|
||||
pub fn new(engine_state: Arc<EngineState>) -> Self {
|
||||
Self { engine_state }
|
||||
pub fn new(engine_state: Arc<EngineState>, stack: Stack) -> Self {
|
||||
Self {
|
||||
engine_state,
|
||||
stack,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -30,9 +34,16 @@ impl Completer for DotNuCompletion {
|
||||
_: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<Suggestion> {
|
||||
let prefix_str = String::from_utf8_lossy(&prefix).to_string();
|
||||
let prefix_str = String::from_utf8_lossy(&prefix).replace('`', "");
|
||||
let mut search_dirs: Vec<String> = vec![];
|
||||
let (base_dir, mut partial) = partial_from(&prefix_str);
|
||||
|
||||
// If prefix_str is only a word we want to search in the current dir
|
||||
let (base, partial) = prefix_str
|
||||
.rsplit_once(is_separator)
|
||||
.unwrap_or((".", &prefix_str));
|
||||
let base_dir = base.replace(is_separator, MAIN_SEPARATOR_STR);
|
||||
let mut partial = partial.to_string();
|
||||
// On windows, this standardizes paths to use \
|
||||
let mut is_current_folder = false;
|
||||
|
||||
// Fetch the lib dirs
|
||||
@ -43,7 +54,7 @@ impl Completer for DotNuCompletion {
|
||||
.into_iter()
|
||||
.flat_map(|it| {
|
||||
it.iter().map(|x| {
|
||||
x.as_path()
|
||||
x.to_path()
|
||||
.expect("internal error: failed to convert lib path")
|
||||
})
|
||||
})
|
||||
@ -58,7 +69,8 @@ impl Completer for DotNuCompletion {
|
||||
};
|
||||
|
||||
// Check if the base_dir is a folder
|
||||
if base_dir != format!(".{SEP}") {
|
||||
// rsplit_once removes the separator
|
||||
if base_dir != "." {
|
||||
// Add the base dir into the directories to be searched
|
||||
search_dirs.push(base_dir.clone());
|
||||
|
||||
@ -83,21 +95,34 @@ impl Completer for DotNuCompletion {
|
||||
// and transform them into suggestions
|
||||
let output: Vec<Suggestion> = search_dirs
|
||||
.into_iter()
|
||||
.flat_map(|it| {
|
||||
file_path_completion(span, &partial, &it, options)
|
||||
.flat_map(|search_dir| {
|
||||
let completions = file_path_completion(
|
||||
span,
|
||||
&partial,
|
||||
&search_dir,
|
||||
options,
|
||||
self.engine_state.as_ref(),
|
||||
&self.stack,
|
||||
);
|
||||
completions
|
||||
.into_iter()
|
||||
.filter(|it| {
|
||||
.filter(move |it| {
|
||||
// Different base dir, so we list the .nu files or folders
|
||||
if !is_current_folder {
|
||||
it.1.ends_with(".nu") || it.1.ends_with(SEP)
|
||||
} else {
|
||||
// Lib dirs, so we filter only the .nu files
|
||||
it.1.ends_with(".nu")
|
||||
// Lib dirs, so we filter only the .nu files or directory modules
|
||||
if it.1.ends_with(SEP) {
|
||||
Path::new(&search_dir).join(&it.1).join("mod.nu").exists()
|
||||
} else {
|
||||
it.1.ends_with(".nu")
|
||||
}
|
||||
}
|
||||
})
|
||||
.map(move |x| Suggestion {
|
||||
value: x.1,
|
||||
description: None,
|
||||
style: x.2,
|
||||
extra: None,
|
||||
span: reedline::Span {
|
||||
start: x.0.start - offset,
|
||||
|
@ -1,52 +1,70 @@
|
||||
use crate::completions::{Completer, CompletionOptions};
|
||||
use crate::completions::{
|
||||
completion_common::{adjust_if_intermediate, complete_item, AdjustView},
|
||||
Completer, CompletionOptions, SortBy,
|
||||
};
|
||||
use nu_ansi_term::Style;
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, StateWorkingSet},
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
levenshtein_distance, Span,
|
||||
};
|
||||
use nu_utils::IgnoreCaseExt;
|
||||
use reedline::Suggestion;
|
||||
use std::path::{is_separator, Path};
|
||||
use std::path::{Path, MAIN_SEPARATOR as SEP};
|
||||
use std::sync::Arc;
|
||||
|
||||
use super::SortBy;
|
||||
|
||||
const SEP: char = std::path::MAIN_SEPARATOR;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct FileCompletion {
|
||||
engine_state: Arc<EngineState>,
|
||||
stack: Stack,
|
||||
}
|
||||
|
||||
impl FileCompletion {
|
||||
pub fn new(engine_state: Arc<EngineState>) -> Self {
|
||||
Self { engine_state }
|
||||
pub fn new(engine_state: Arc<EngineState>, stack: Stack) -> Self {
|
||||
Self {
|
||||
engine_state,
|
||||
stack,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Completer for FileCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
_: &StateWorkingSet,
|
||||
working_set: &StateWorkingSet,
|
||||
prefix: Vec<u8>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
_: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<Suggestion> {
|
||||
let cwd = self.engine_state.current_work_dir();
|
||||
let prefix = String::from_utf8_lossy(&prefix).to_string();
|
||||
let output: Vec<_> = file_path_completion(span, &prefix, &cwd, options)
|
||||
.into_iter()
|
||||
.map(move |x| Suggestion {
|
||||
value: x.1,
|
||||
description: None,
|
||||
extra: None,
|
||||
span: reedline::Span {
|
||||
start: x.0.start - offset,
|
||||
end: x.0.end - offset,
|
||||
},
|
||||
append_whitespace: false,
|
||||
})
|
||||
.collect();
|
||||
let AdjustView {
|
||||
prefix,
|
||||
span,
|
||||
readjusted,
|
||||
} = adjust_if_intermediate(&prefix, working_set, span);
|
||||
|
||||
let output: Vec<_> = complete_item(
|
||||
readjusted,
|
||||
span,
|
||||
&prefix,
|
||||
&self.engine_state.current_work_dir(),
|
||||
options,
|
||||
self.engine_state.as_ref(),
|
||||
&self.stack,
|
||||
)
|
||||
.into_iter()
|
||||
.map(move |x| Suggestion {
|
||||
value: x.1,
|
||||
description: None,
|
||||
style: x.2,
|
||||
extra: None,
|
||||
span: reedline::Span {
|
||||
start: x.0.start - offset,
|
||||
end: x.0.end - offset,
|
||||
},
|
||||
append_whitespace: false,
|
||||
})
|
||||
.collect();
|
||||
|
||||
output
|
||||
}
|
||||
@ -60,7 +78,12 @@ impl Completer for FileCompletion {
|
||||
|
||||
match self.get_sort_by() {
|
||||
SortBy::Ascending => {
|
||||
sorted_items.sort_by(|a, b| a.value.cmp(&b.value));
|
||||
sorted_items.sort_by(|a, b| {
|
||||
// Ignore trailing slashes in folder names when sorting
|
||||
a.value
|
||||
.trim_end_matches(SEP)
|
||||
.cmp(b.value.trim_end_matches(SEP))
|
||||
});
|
||||
}
|
||||
SortBy::LevenshteinDistance => {
|
||||
sorted_items.sort_by(|a, b| {
|
||||
@ -97,84 +120,15 @@ impl Completer for FileCompletion {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn partial_from(input: &str) -> (String, String) {
|
||||
let partial = input.replace('`', "");
|
||||
|
||||
// If partial is only a word we want to search in the current dir
|
||||
let (base, rest) = partial.rsplit_once(is_separator).unwrap_or((".", &partial));
|
||||
// On windows, this standardizes paths to use \
|
||||
let mut base = base.replace(is_separator, &SEP.to_string());
|
||||
|
||||
// rsplit_once removes the separator
|
||||
base.push(SEP);
|
||||
|
||||
(base.to_string(), rest.to_string())
|
||||
}
|
||||
|
||||
pub fn file_path_completion(
|
||||
span: nu_protocol::Span,
|
||||
partial: &str,
|
||||
cwd: &str,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<(nu_protocol::Span, String)> {
|
||||
let original_input = partial;
|
||||
let (base_dir_name, partial) = partial_from(partial);
|
||||
|
||||
let base_dir = nu_path::expand_path_with(&base_dir_name, cwd);
|
||||
// This check is here as base_dir.read_dir() with base_dir == "" will open the current dir
|
||||
// which we don't want in this case (if we did, base_dir would already be ".")
|
||||
if base_dir == Path::new("") {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
if let Ok(result) = base_dir.read_dir() {
|
||||
return result
|
||||
.filter_map(|entry| {
|
||||
entry.ok().and_then(|entry| {
|
||||
let mut file_name = entry.file_name().to_string_lossy().into_owned();
|
||||
if matches(&partial, &file_name, options) {
|
||||
let mut path = if prepend_base_dir(original_input, &base_dir_name) {
|
||||
format!("{base_dir_name}{file_name}")
|
||||
} else {
|
||||
file_name.to_string()
|
||||
};
|
||||
|
||||
if entry.path().is_dir() {
|
||||
path.push(SEP);
|
||||
file_name.push(SEP);
|
||||
}
|
||||
|
||||
// Fix files or folders with quotes or hashes
|
||||
if path.contains('\'')
|
||||
|| path.contains('"')
|
||||
|| path.contains(' ')
|
||||
|| path.contains('#')
|
||||
|| path.contains('(')
|
||||
|| path.contains(')')
|
||||
|| path.starts_with('0')
|
||||
|| path.starts_with('1')
|
||||
|| path.starts_with('2')
|
||||
|| path.starts_with('3')
|
||||
|| path.starts_with('4')
|
||||
|| path.starts_with('5')
|
||||
|| path.starts_with('6')
|
||||
|| path.starts_with('7')
|
||||
|| path.starts_with('8')
|
||||
|| path.starts_with('9')
|
||||
{
|
||||
path = format!("`{path}`");
|
||||
}
|
||||
|
||||
Some((span, path))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
}
|
||||
|
||||
Vec::new()
|
||||
engine_state: &EngineState,
|
||||
stack: &Stack,
|
||||
) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
|
||||
complete_item(false, span, partial, cwd, options, engine_state, stack)
|
||||
}
|
||||
|
||||
pub fn matches(partial: &str, from: &str, options: &CompletionOptions) -> bool {
|
||||
@ -182,28 +136,8 @@ pub fn matches(partial: &str, from: &str, options: &CompletionOptions) -> bool {
|
||||
if !options.case_sensitive {
|
||||
return options
|
||||
.match_algorithm
|
||||
.matches_str(&from.to_ascii_lowercase(), &partial.to_ascii_lowercase());
|
||||
.matches_str(&from.to_folded_case(), &partial.to_folded_case());
|
||||
}
|
||||
|
||||
options.match_algorithm.matches_str(from, partial)
|
||||
}
|
||||
|
||||
/// Returns whether the base_dir should be prepended to the file path
|
||||
pub fn prepend_base_dir(input: &str, base_dir: &str) -> bool {
|
||||
if base_dir == format!(".{SEP}") {
|
||||
// if the current base_dir path is the local folder we only add a "./" prefix if the user
|
||||
// input already includes a local folder prefix.
|
||||
let manually_entered = {
|
||||
let mut chars = input.chars();
|
||||
let first_char = chars.next();
|
||||
let second_char = chars.next();
|
||||
|
||||
first_char == Some('.') && second_char.map(is_separator).unwrap_or(false)
|
||||
};
|
||||
|
||||
manually_entered
|
||||
} else {
|
||||
// always prepend the base dir if it is a subfolder
|
||||
true
|
||||
}
|
||||
}
|
||||
|
@ -46,6 +46,7 @@ impl Completer for FlagCompletion {
|
||||
output.push(Suggestion {
|
||||
value: String::from_utf8_lossy(&named).to_string(),
|
||||
description: Some(flag_desc.to_string()),
|
||||
style: None,
|
||||
extra: None,
|
||||
span: reedline::Span {
|
||||
start: span.start - offset,
|
||||
@ -68,6 +69,7 @@ impl Completer for FlagCompletion {
|
||||
output.push(Suggestion {
|
||||
value: String::from_utf8_lossy(&named).to_string(),
|
||||
description: Some(flag_desc.to_string()),
|
||||
style: None,
|
||||
extra: None,
|
||||
span: reedline::Span {
|
||||
start: span.start - offset,
|
||||
|
@ -1,6 +1,7 @@
|
||||
mod base;
|
||||
mod command_completions;
|
||||
mod completer;
|
||||
mod completion_common;
|
||||
mod completion_options;
|
||||
mod custom_completions;
|
||||
mod directory_completions;
|
||||
@ -16,8 +17,6 @@ pub use completion_options::{CompletionOptions, MatchAlgorithm, SortBy};
|
||||
pub use custom_completions::CustomCompletion;
|
||||
pub use directory_completions::DirectoryCompletion;
|
||||
pub use dotnu_completions::DotNuCompletion;
|
||||
pub use file_completions::{
|
||||
file_path_completion, matches, partial_from, prepend_base_dir, FileCompletion,
|
||||
};
|
||||
pub use file_completions::{file_path_completion, matches, FileCompletion};
|
||||
pub use flag_completions::FlagCompletion;
|
||||
pub use variable_completions::VariableCompletion;
|
||||
|
@ -43,10 +43,8 @@ impl Completer for VariableCompletion {
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<Suggestion> {
|
||||
let mut output = vec![];
|
||||
let builtins = ["$nu", "$in", "$env", "$nothing"];
|
||||
let var_str = std::str::from_utf8(&self.var_context.0)
|
||||
.unwrap_or("")
|
||||
.to_lowercase();
|
||||
let builtins = ["$nu", "$in", "$env"];
|
||||
let var_str = std::str::from_utf8(&self.var_context.0).unwrap_or("");
|
||||
let var_id = working_set.find_variable(&self.var_context.0);
|
||||
let current_span = reedline::Span {
|
||||
start: span.start - offset,
|
||||
@ -57,7 +55,7 @@ impl Completer for VariableCompletion {
|
||||
// Completions for the given variable
|
||||
if !var_str.is_empty() {
|
||||
// Completion for $env.<tab>
|
||||
if var_str.as_str() == "$env" {
|
||||
if var_str == "$env" {
|
||||
let env_vars = self.stack.get_env_vars(&self.engine_state);
|
||||
|
||||
// Return nested values
|
||||
@@ -97,6 +95,7 @@ impl Completer for VariableCompletion {
output.push(Suggestion {
value: env_var.0,
description: None,
style: None,
extra: None,
span: current_span,
append_whitespace: false,
@@ -109,7 +108,7 @@ impl Completer for VariableCompletion {
}

// Completions for $nu.<tab>
if var_str.as_str() == "$nu" {
if var_str == "$nu" {
// Eval nu var
if let Ok(nuval) = eval_variable(
&self.engine_state,
@@ -167,6 +166,7 @@ impl Completer for VariableCompletion {
output.push(Suggestion {
value: builtin.to_string(),
description: None,
style: None,
extra: None,
span: current_span,
append_whitespace: false,
@@ -189,6 +189,7 @@ impl Completer for VariableCompletion {
output.push(Suggestion {
value: String::from_utf8_lossy(v.0).to_string(),
description: None,
style: None,
extra: None,
span: current_span,
append_whitespace: false,
@@ -210,6 +211,7 @@ impl Completer for VariableCompletion {
output.push(Suggestion {
value: String::from_utf8_lossy(v.0).to_string(),
description: None,
style: None,
extra: None,
span: current_span,
append_whitespace: false,
@@ -235,16 +237,13 @@ fn nested_suggestions(
let value = recursive_value(val, sublevels);

match value {
Value::Record {
cols,
vals: _,
span: _,
} => {
Value::Record { val, .. } => {
// Add all the columns as completion
for item in cols {
for (col, _) in val.into_iter() {
output.push(Suggestion {
value: item,
value: col,
description: None,
style: None,
extra: None,
span: current_span,
append_whitespace: false,
@@ -259,6 +258,7 @@ fn nested_suggestions(
output.push(Suggestion {
value: column_name.to_string(),
description: None,
style: None,
extra: None,
span: current_span,
append_whitespace: false,
@@ -267,11 +267,12 @@ fn nested_suggestions(

output
}
Value::List { vals, span: _ } => {
Value::List { vals, .. } => {
for column_name in get_columns(vals.as_slice()) {
output.push(Suggestion {
value: column_name,
description: None,
style: None,
extra: None,
span: current_span,
append_whitespace: false,
@@ -288,13 +289,10 @@ fn nested_suggestions(
fn recursive_value(val: Value, sublevels: Vec<Vec<u8>>) -> Value {
// Go to next sublevel
if let Some(next_sublevel) = sublevels.clone().into_iter().next() {
let span = val.span();
match val {
Value::Record {
cols,
vals,
span: _,
} => {
for item in cols.into_iter().zip(vals) {
Value::Record { val, .. } => {
for item in val {
// Check if index matches with sublevel
if item.0.as_bytes().to_vec() == next_sublevel {
// If matches try to fetch recursively the next
@@ -303,11 +301,9 @@ fn recursive_value(val: Value, sublevels: Vec<Vec<u8>>) -> Value {
}

// Current sublevel value not found
return Value::Nothing {
span: Span::unknown(),
};
return Value::nothing(span);
}
Value::LazyRecord { val, span: _ } => {
Value::LazyRecord { val, .. } => {
for col in val.column_names() {
if col.as_bytes().to_vec() == next_sublevel {
return recursive_value(
@@ -318,15 +314,13 @@ fn recursive_value(val: Value, sublevels: Vec<Vec<u8>>) -> Value {
}

// Current sublevel value not found
return Value::Nothing {
span: Span::unknown(),
};
return Value::nothing(span);
}
Value::List { vals, span } => {
Value::List { vals, .. } => {
for col in get_columns(vals.as_slice()) {
if col.as_bytes().to_vec() == next_sublevel {
return recursive_value(
Value::List { vals, span }
Value::list(vals, span)
.get_data_by_key(&col)
.unwrap_or_default(),
sublevels.into_iter().skip(1).collect(),
@@ -335,9 +329,7 @@ fn recursive_value(val: Value, sublevels: Vec<Vec<u8>>) -> Value {
}

// Current sublevel value not found
return Value::Nothing {
span: Span::unknown(),
};
return Value::nothing(span);
}
_ => return val,
}
@@ -61,17 +61,18 @@ pub fn add_plugin_file(
plugin_file: Option<Spanned<String>>,
storage_path: &str,
) {
if let Some(plugin_file) = plugin_file {
let working_set = StateWorkingSet::new(engine_state);
let cwd = working_set.get_cwd();
let working_set = StateWorkingSet::new(engine_state);
let cwd = working_set.get_cwd();

if let Some(plugin_file) = plugin_file {
if let Ok(path) = canonicalize_with(&plugin_file.item, cwd) {
engine_state.plugin_signatures = Some(path)
} else {
let e = ParseError::FileNotFound(plugin_file.item, plugin_file.span);
report_error(&working_set, &e);
}
} else if let Some(mut plugin_path) = nu_path::config_dir() {
} else if let Some(plugin_path) = nu_path::config_dir() {
let mut plugin_path = canonicalize_with(&plugin_path, cwd).unwrap_or(plugin_path);
// Path to store plugins signatures
plugin_path.push(storage_path);
plugin_path.push(PLUGIN_FILE);
@@ -28,13 +28,17 @@ pub fn evaluate_commands(
let (block, delta) = {
if let Some(ref t_mode) = table_mode {
let mut config = engine_state.get_config().clone();
config.table_mode = t_mode.as_string()?;
engine_state.set_config(&config);
config.table_mode = t_mode.coerce_str()?.parse().unwrap_or_default();
engine_state.set_config(config);
}

let mut working_set = StateWorkingSet::new(engine_state);

let output = parse(&mut working_set, None, commands.item.as_bytes(), false);
if let Some(warning) = working_set.parse_warnings.first() {
report_error(&working_set, warning);
}

if let Some(err) = working_set.parse_errors.first() {
report_error(&working_set, err);

@@ -55,7 +59,7 @@ pub fn evaluate_commands(
Ok(pipeline_data) => {
let mut config = engine_state.get_config().clone();
if let Some(t_mode) = table_mode {
config.table_mode = t_mode.as_string()?;
config.table_mode = t_mode.coerce_str()?.parse().unwrap_or_default();
}
crate::eval_file::print_table_or_error(engine_state, stack, pipeline_data, &mut config)
}
@@ -2,6 +2,7 @@ use crate::util::eval_source;
use log::info;
use log::trace;
use miette::{IntoDiagnostic, Result};
use nu_engine::eval_block;
use nu_engine::{convert_env_values, current_dir};
use nu_parser::parse;
use nu_path::canonicalize_with;
@@ -34,10 +35,10 @@ pub fn evaluate_file(
let working_set = StateWorkingSet::new(engine_state);
report_error(
&working_set,
&ShellError::FileNotFoundCustom(
format!("Could not access file '{}': {:?}", path, e.to_string()),
Span::unknown(),
),
&ShellError::FileNotFoundCustom {
msg: format!("Could not access file '{}': {:?}", path, e.to_string()),
span: Span::unknown(),
},
);
std::process::exit(1);
});
@@ -46,13 +47,13 @@ pub fn evaluate_file(
let working_set = StateWorkingSet::new(engine_state);
report_error(
&working_set,
&ShellError::NonUtf8Custom(
format!(
&ShellError::NonUtf8Custom {
msg: format!(
"Input file name '{}' is not valid UTF8",
file_path.to_string_lossy()
),
Span::unknown(),
),
span: Span::unknown(),
},
);
std::process::exit(1);
});
@@ -63,14 +64,14 @@ pub fn evaluate_file(
let working_set = StateWorkingSet::new(engine_state);
report_error(
&working_set,
&ShellError::FileNotFoundCustom(
format!(
&ShellError::FileNotFoundCustom {
msg: format!(
"Could not read file '{}': {:?}",
file_path_str,
e.to_string()
),
Span::unknown(),
),
span: Span::unknown(),
},
);
std::process::exit(1);
});
@@ -81,10 +82,10 @@ pub fn evaluate_file(
let working_set = StateWorkingSet::new(engine_state);
report_error(
&working_set,
&ShellError::FileNotFoundCustom(
format!("The file path '{file_path_str}' does not have a parent"),
Span::unknown(),
),
&ShellError::FileNotFoundCustom {
msg: format!("The file path '{file_path_str}' does not have a parent"),
span: Span::unknown(),
},
);
std::process::exit(1);
});
@@ -97,24 +98,76 @@ pub fn evaluate_file(
"CURRENT_FILE".to_string(),
Value::string(file_path.to_string_lossy(), Span::unknown()),
);
stack.add_env_var(
"PROCESS_PATH".to_string(),
Value::string(path, Span::unknown()),
);

let source_filename = file_path
.file_name()
.expect("internal error: script missing filename");

let mut working_set = StateWorkingSet::new(engine_state);
trace!("parsing file: {}", file_path_str);
let _ = parse(&mut working_set, Some(file_path_str), &file, false);
let block = parse(&mut working_set, Some(file_path_str), &file, false);

if working_set.find_decl(b"main").is_some() {
if let Some(err) = working_set.parse_errors.first() {
report_error(&working_set, err);
std::process::exit(1);
}

for block in &mut working_set.delta.blocks {
if block.signature.name == "main" {
block.signature.name = source_filename.to_string_lossy().to_string();
} else if block.signature.name.starts_with("main ") {
block.signature.name =
source_filename.to_string_lossy().to_string() + " " + &block.signature.name[5..];
}
}

let _ = engine_state.merge_delta(working_set.delta);

if engine_state.find_decl(b"main", &[]).is_some() {
let args = format!("main {}", args.join(" "));

if !eval_source(
let pipeline_data = eval_block(
engine_state,
stack,
&file,
file_path_str,
&block,
PipelineData::empty(),
true,
) {
std::process::exit(1);
false,
false,
);
let pipeline_data = match pipeline_data {
Err(ShellError::Return { .. }) => {
// allows early exists before `main` is run.
return Ok(());
}

x => x,
}
.unwrap_or_else(|e| {
let working_set = StateWorkingSet::new(engine_state);
report_error(&working_set, &e);
std::process::exit(1);
});

let result = pipeline_data.print(engine_state, stack, true, false);

match result {
Err(err) => {
let working_set = StateWorkingSet::new(engine_state);

report_error(&working_set, &err);
std::process::exit(1);
}
Ok(exit_code) => {
if exit_code != 0 {
std::process::exit(exit_code as i32);
}
}
}

if !eval_source(
engine_state,
stack,
@@ -146,9 +199,9 @@ pub(crate) fn print_table_or_error(
};

// Change the engine_state config to use the passed in configuration
engine_state.set_config(config);
engine_state.set_config(config.clone());

if let PipelineData::Value(Value::Error { error }, ..) = &pipeline_data {
if let PipelineData::Value(Value::Error { error, .. }, ..) = &pipeline_data {
let working_set = StateWorkingSet::new(engine_state);
report_error(&working_set, &**error);
std::process::exit(1);
@@ -195,7 +248,7 @@ pub(crate) fn print_table_or_error(

fn print_or_exit(pipeline_data: PipelineData, engine_state: &mut EngineState, config: &Config) {
for item in pipeline_data {
if let Value::Error { error } = item {
if let Value::Error { error, .. } = item {
let working_set = StateWorkingSet::new(engine_state);

report_error(&working_set, &*error);
@@ -203,7 +256,7 @@ fn print_or_exit(pipeline_data: PipelineData, engine_state: &mut EngineState, co
std::process::exit(1);
}

let out = item.into_string("\n", config) + "\n";
let out = item.to_expanded_string("\n", config) + "\n";
let _ = stdout_write_all_and_flush(out).map_err(|err| eprintln!("{err}"));
}
}
@@ -19,7 +19,7 @@ pub use completions::{FileCompletion, NuCompleter};
pub use config_files::eval_config_contents;
pub use eval_cmds::evaluate_commands;
pub use eval_file::evaluate_file;
pub use menus::{DescriptionMenu, NuHelpCompleter};
pub use menus::NuHelpCompleter;
pub use nu_cmd_base::util::get_init_cwd;
pub use nu_highlight::NuHighlight;
pub use print::Print;
@ -1,727 +0,0 @@
|
||||
use {
|
||||
nu_ansi_term::{ansi::RESET, Style},
|
||||
reedline::{
|
||||
menu_functions::string_difference, Completer, Editor, Menu, MenuEvent, MenuTextStyle,
|
||||
Painter, Suggestion, UndoBehavior,
|
||||
},
|
||||
};
|
||||
|
||||
/// Default values used as reference for the menu. These values are set during
|
||||
/// the initial declaration of the menu and are always kept as reference for the
|
||||
/// changeable [`WorkingDetails`]
|
||||
struct DefaultMenuDetails {
|
||||
/// Number of columns that the menu will have
|
||||
pub columns: u16,
|
||||
/// Column width
|
||||
pub col_width: Option<usize>,
|
||||
/// Column padding
|
||||
pub col_padding: usize,
|
||||
/// Number of rows for commands
|
||||
pub selection_rows: u16,
|
||||
/// Number of rows allowed to display the description
|
||||
pub description_rows: usize,
|
||||
}
|
||||
|
||||
impl Default for DefaultMenuDetails {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
columns: 4,
|
||||
col_width: None,
|
||||
col_padding: 2,
|
||||
selection_rows: 4,
|
||||
description_rows: 10,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Represents the actual column conditions of the menu. These conditions change
|
||||
/// since they need to accommodate possible different line sizes for the column values
|
||||
#[derive(Default)]
|
||||
struct WorkingDetails {
|
||||
/// Number of columns that the menu will have
|
||||
pub columns: u16,
|
||||
/// Column width
|
||||
pub col_width: usize,
|
||||
/// Number of rows for description
|
||||
pub description_rows: usize,
|
||||
}
|
||||
|
||||
/// Completion menu definition
|
||||
pub struct DescriptionMenu {
|
||||
/// Menu name
|
||||
name: String,
|
||||
/// Menu status
|
||||
active: bool,
|
||||
/// Menu coloring
|
||||
color: MenuTextStyle,
|
||||
/// Default column details that are set when creating the menu
|
||||
/// These values are the reference for the working details
|
||||
default_details: DefaultMenuDetails,
|
||||
/// Number of minimum rows that are displayed when
|
||||
/// the required lines is larger than the available lines
|
||||
min_rows: u16,
|
||||
/// Working column details keep changing based on the collected values
|
||||
working_details: WorkingDetails,
|
||||
/// Menu cached values
|
||||
values: Vec<Suggestion>,
|
||||
/// column position of the cursor. Starts from 0
|
||||
col_pos: u16,
|
||||
/// row position in the menu. Starts from 0
|
||||
row_pos: u16,
|
||||
/// Menu marker when active
|
||||
marker: String,
|
||||
/// Event sent to the menu
|
||||
event: Option<MenuEvent>,
|
||||
/// String collected after the menu is activated
|
||||
input: Option<String>,
|
||||
/// Examples to select
|
||||
examples: Vec<String>,
|
||||
/// Example index
|
||||
example_index: Option<usize>,
|
||||
/// Examples may not be shown if there is not enough space in the screen
|
||||
show_examples: bool,
|
||||
/// Skipped description rows
|
||||
skipped_rows: usize,
|
||||
/// Calls the completer using only the line buffer difference difference
|
||||
/// after the menu was activated
|
||||
only_buffer_difference: bool,
|
||||
}
|
||||
|
||||
impl Default for DescriptionMenu {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
name: "description_menu".to_string(),
|
||||
active: false,
|
||||
color: MenuTextStyle::default(),
|
||||
default_details: DefaultMenuDetails::default(),
|
||||
min_rows: 3,
|
||||
working_details: WorkingDetails::default(),
|
||||
values: Vec::new(),
|
||||
col_pos: 0,
|
||||
row_pos: 0,
|
||||
marker: "? ".to_string(),
|
||||
event: None,
|
||||
input: None,
|
||||
examples: Vec::new(),
|
||||
example_index: None,
|
||||
show_examples: true,
|
||||
skipped_rows: 0,
|
||||
only_buffer_difference: true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Menu configuration
|
||||
impl DescriptionMenu {
|
||||
/// Menu builder with new name
|
||||
pub fn with_name(mut self, name: &str) -> Self {
|
||||
self.name = name.into();
|
||||
self
|
||||
}
|
||||
|
||||
/// Menu builder with new value for text style
|
||||
pub fn with_text_style(mut self, text_style: Style) -> Self {
|
||||
self.color.text_style = text_style;
|
||||
self
|
||||
}
|
||||
|
||||
/// Menu builder with new value for text style
|
||||
pub fn with_selected_text_style(mut self, selected_text_style: Style) -> Self {
|
||||
self.color.selected_text_style = selected_text_style;
|
||||
self
|
||||
}
|
||||
|
||||
/// Menu builder with new value for text style
|
||||
pub fn with_description_text_style(mut self, description_text_style: Style) -> Self {
|
||||
self.color.description_style = description_text_style;
|
||||
self
|
||||
}
|
||||
|
||||
/// Menu builder with new columns value
|
||||
pub fn with_columns(mut self, columns: u16) -> Self {
|
||||
self.default_details.columns = columns;
|
||||
self
|
||||
}
|
||||
|
||||
/// Menu builder with new column width value
|
||||
pub fn with_column_width(mut self, col_width: Option<usize>) -> Self {
|
||||
self.default_details.col_width = col_width;
|
||||
self
|
||||
}
|
||||
|
||||
/// Menu builder with new column width value
|
||||
pub fn with_column_padding(mut self, col_padding: usize) -> Self {
|
||||
self.default_details.col_padding = col_padding;
|
||||
self
|
||||
}
|
||||
|
||||
/// Menu builder with new selection rows value
|
||||
pub fn with_selection_rows(mut self, selection_rows: u16) -> Self {
|
||||
self.default_details.selection_rows = selection_rows;
|
||||
self
|
||||
}
|
||||
|
||||
/// Menu builder with new description rows value
|
||||
pub fn with_description_rows(mut self, description_rows: usize) -> Self {
|
||||
self.default_details.description_rows = description_rows;
|
||||
self
|
||||
}
|
||||
|
||||
/// Menu builder with marker
|
||||
pub fn with_marker(mut self, marker: String) -> Self {
|
||||
self.marker = marker;
|
||||
self
|
||||
}
|
||||
|
||||
/// Menu builder with new only buffer difference
|
||||
pub fn with_only_buffer_difference(mut self, only_buffer_difference: bool) -> Self {
|
||||
self.only_buffer_difference = only_buffer_difference;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
// Menu functionality
|
||||
impl DescriptionMenu {
|
||||
/// Move menu cursor to the next element
|
||||
fn move_next(&mut self) {
|
||||
let mut new_col = self.col_pos + 1;
|
||||
let mut new_row = self.row_pos;
|
||||
|
||||
if new_col >= self.get_cols() {
|
||||
new_row += 1;
|
||||
new_col = 0;
|
||||
}
|
||||
|
||||
if new_row >= self.get_rows() {
|
||||
new_row = 0;
|
||||
new_col = 0;
|
||||
}
|
||||
|
||||
let position = new_row * self.get_cols() + new_col;
|
||||
if position >= self.get_values().len() as u16 {
|
||||
self.reset_position();
|
||||
} else {
|
||||
self.col_pos = new_col;
|
||||
self.row_pos = new_row;
|
||||
}
|
||||
}
|
||||
|
||||
/// Move menu cursor to the previous element
|
||||
fn move_previous(&mut self) {
|
||||
let new_col = self.col_pos.checked_sub(1);
|
||||
|
||||
let (new_col, new_row) = match new_col {
|
||||
Some(col) => (col, self.row_pos),
|
||||
None => match self.row_pos.checked_sub(1) {
|
||||
Some(row) => (self.get_cols().saturating_sub(1), row),
|
||||
None => (
|
||||
self.get_cols().saturating_sub(1),
|
||||
self.get_rows().saturating_sub(1),
|
||||
),
|
||||
},
|
||||
};
|
||||
|
||||
let position = new_row * self.get_cols() + new_col;
|
||||
if position >= self.get_values().len() as u16 {
|
||||
self.col_pos = (self.get_values().len() as u16 % self.get_cols()).saturating_sub(1);
|
||||
self.row_pos = self.get_rows().saturating_sub(1);
|
||||
} else {
|
||||
self.col_pos = new_col;
|
||||
self.row_pos = new_row;
|
||||
}
|
||||
}
|
||||
|
||||
/// Menu index based on column and row position
|
||||
fn index(&self) -> usize {
|
||||
let index = self.row_pos * self.get_cols() + self.col_pos;
|
||||
index as usize
|
||||
}
|
||||
|
||||
/// Get selected value from the menu
|
||||
fn get_value(&self) -> Option<Suggestion> {
|
||||
self.get_values().get(self.index()).cloned()
|
||||
}
|
||||
|
||||
/// Calculates how many rows the Menu will use
|
||||
fn get_rows(&self) -> u16 {
|
||||
let values = self.get_values().len() as u16;
|
||||
|
||||
if values == 0 {
|
||||
// When the values are empty the no_records_msg is shown, taking 1 line
|
||||
return 1;
|
||||
}
|
||||
|
||||
let rows = values / self.get_cols();
|
||||
if values % self.get_cols() != 0 {
|
||||
rows + 1
|
||||
} else {
|
||||
rows
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns working details col width
|
||||
fn get_width(&self) -> usize {
|
||||
self.working_details.col_width
|
||||
}
|
||||
|
||||
/// Reset menu position
|
||||
fn reset_position(&mut self) {
|
||||
self.col_pos = 0;
|
||||
self.row_pos = 0;
|
||||
self.skipped_rows = 0;
|
||||
}
|
||||
|
||||
fn no_records_msg(&self, use_ansi_coloring: bool) -> String {
|
||||
let msg = "TYPE TO START SEARCH";
|
||||
if use_ansi_coloring {
|
||||
format!(
|
||||
"{}{}{}",
|
||||
self.color.selected_text_style.prefix(),
|
||||
msg,
|
||||
RESET
|
||||
)
|
||||
} else {
|
||||
msg.to_string()
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns working details columns
|
||||
fn get_cols(&self) -> u16 {
|
||||
self.working_details.columns.max(1)
|
||||
}
|
||||
|
||||
/// End of line for menu
|
||||
fn end_of_line(&self, column: u16, index: usize) -> &str {
|
||||
let is_last = index == self.values.len().saturating_sub(1);
|
||||
if column == self.get_cols().saturating_sub(1) || is_last {
|
||||
"\r\n"
|
||||
} else {
|
||||
""
|
||||
}
|
||||
}
|
||||
|
||||
/// Update list of examples from the actual value
|
||||
fn update_examples(&mut self) {
|
||||
self.examples = self
|
||||
.get_value()
|
||||
.and_then(|suggestion| suggestion.extra)
|
||||
.unwrap_or_default();
|
||||
|
||||
self.example_index = None;
|
||||
}
|
||||
|
||||
/// Creates default string that represents one suggestion from the menu
|
||||
fn create_entry_string(
|
||||
&self,
|
||||
suggestion: &Suggestion,
|
||||
index: usize,
|
||||
column: u16,
|
||||
empty_space: usize,
|
||||
use_ansi_coloring: bool,
|
||||
) -> String {
|
||||
if use_ansi_coloring {
|
||||
if index == self.index() {
|
||||
format!(
|
||||
"{}{}{}{:>empty$}{}",
|
||||
self.color.selected_text_style.prefix(),
|
||||
&suggestion.value,
|
||||
RESET,
|
||||
"",
|
||||
self.end_of_line(column, index),
|
||||
empty = empty_space,
|
||||
)
|
||||
} else {
|
||||
format!(
|
||||
"{}{}{}{:>empty$}{}",
|
||||
self.color.text_style.prefix(),
|
||||
&suggestion.value,
|
||||
RESET,
|
||||
"",
|
||||
self.end_of_line(column, index),
|
||||
empty = empty_space,
|
||||
)
|
||||
}
|
||||
} else {
|
||||
// If no ansi coloring is found, then the selection word is
|
||||
// the line in uppercase
|
||||
let (marker, empty_space) = if index == self.index() {
|
||||
(">", empty_space.saturating_sub(1))
|
||||
} else {
|
||||
("", empty_space)
|
||||
};
|
||||
|
||||
let line = format!(
|
||||
"{}{}{:>empty$}{}",
|
||||
marker,
|
||||
&suggestion.value,
|
||||
"",
|
||||
self.end_of_line(column, index),
|
||||
empty = empty_space,
|
||||
);
|
||||
|
||||
if index == self.index() {
|
||||
line.to_uppercase()
|
||||
} else {
|
||||
line
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Description string with color
|
||||
fn create_description_string(&self, use_ansi_coloring: bool) -> String {
|
||||
let description = self
|
||||
.get_value()
|
||||
.and_then(|suggestion| suggestion.description)
|
||||
.unwrap_or_default()
|
||||
.lines()
|
||||
.skip(self.skipped_rows)
|
||||
.take(self.working_details.description_rows)
|
||||
.collect::<Vec<&str>>()
|
||||
.join("\r\n");
|
||||
|
||||
if use_ansi_coloring && !description.is_empty() {
|
||||
format!(
|
||||
"{}{}{}",
|
||||
self.color.description_style.prefix(),
|
||||
description,
|
||||
RESET,
|
||||
)
|
||||
} else {
|
||||
description
|
||||
}
|
||||
}
|
||||
|
||||
/// Selectable list of examples from the actual value
|
||||
fn create_example_string(&self, use_ansi_coloring: bool) -> String {
|
||||
if !self.show_examples {
|
||||
return "".into();
|
||||
}
|
||||
|
||||
let examples: String = self
|
||||
.examples
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(index, example)| {
|
||||
if let Some(example_index) = self.example_index {
|
||||
if index == example_index {
|
||||
format!(
|
||||
" {}{}{}\r\n",
|
||||
self.color.selected_text_style.prefix(),
|
||||
example,
|
||||
RESET
|
||||
)
|
||||
} else {
|
||||
format!(" {example}\r\n")
|
||||
}
|
||||
} else {
|
||||
format!(" {example}\r\n")
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
if examples.is_empty() {
|
||||
"".into()
|
||||
} else if use_ansi_coloring {
|
||||
format!(
|
||||
"{}\r\n\r\nExamples:\r\n{}{}",
|
||||
self.color.description_style.prefix(),
|
||||
RESET,
|
||||
examples,
|
||||
)
|
||||
} else {
|
||||
format!("\r\n\r\nExamples:\r\n{examples}",)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Menu for DescriptionMenu {
|
||||
/// Menu name
|
||||
fn name(&self) -> &str {
|
||||
self.name.as_str()
|
||||
}
|
||||
|
||||
/// Menu indicator
|
||||
fn indicator(&self) -> &str {
|
||||
self.marker.as_str()
|
||||
}
|
||||
|
||||
/// Deactivates context menu
|
||||
fn is_active(&self) -> bool {
|
||||
self.active
|
||||
}
|
||||
|
||||
/// The menu stays active even with one record
|
||||
fn can_quick_complete(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
/// The menu does not need to partially complete
|
||||
fn can_partially_complete(
|
||||
&mut self,
|
||||
_values_updated: bool,
|
||||
_editor: &mut Editor,
|
||||
_completer: &mut dyn Completer,
|
||||
) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
/// Selects what type of event happened with the menu
|
||||
fn menu_event(&mut self, event: MenuEvent) {
|
||||
match &event {
|
||||
MenuEvent::Activate(_) => self.active = true,
|
||||
MenuEvent::Deactivate => {
|
||||
self.active = false;
|
||||
self.input = None;
|
||||
self.values = Vec::new();
|
||||
}
|
||||
_ => {}
|
||||
};
|
||||
|
||||
self.event = Some(event);
|
||||
}
|
||||
|
||||
/// Updates menu values
|
||||
fn update_values(&mut self, editor: &mut Editor, completer: &mut dyn Completer) {
|
||||
if self.only_buffer_difference {
|
||||
if let Some(old_string) = &self.input {
|
||||
let (start, input) = string_difference(editor.get_buffer(), old_string);
|
||||
if !input.is_empty() {
|
||||
self.reset_position();
|
||||
self.values = completer.complete(input, start);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let trimmed_buffer = editor.get_buffer().replace('\n', " ");
|
||||
self.values = completer.complete(
|
||||
trimmed_buffer.as_str(),
|
||||
editor.line_buffer().insertion_point(),
|
||||
);
|
||||
self.reset_position();
|
||||
}
|
||||
}
|
||||
|
||||
/// The working details for the menu changes based on the size of the lines
|
||||
/// collected from the completer
|
||||
fn update_working_details(
|
||||
&mut self,
|
||||
editor: &mut Editor,
|
||||
completer: &mut dyn Completer,
|
||||
painter: &Painter,
|
||||
) {
|
||||
if let Some(event) = self.event.take() {
|
||||
// Updating all working parameters from the menu before executing any of the
|
||||
// possible event
|
||||
let max_width = self.get_values().iter().fold(0, |acc, suggestion| {
|
||||
let str_len = suggestion.value.len() + self.default_details.col_padding;
|
||||
if str_len > acc {
|
||||
str_len
|
||||
} else {
|
||||
acc
|
||||
}
|
||||
});
|
||||
|
||||
// If no default width is found, then the total screen width is used to estimate
|
||||
// the column width based on the default number of columns
|
||||
let default_width = if let Some(col_width) = self.default_details.col_width {
|
||||
col_width
|
||||
} else {
|
||||
let col_width = painter.screen_width() / self.default_details.columns;
|
||||
col_width as usize
|
||||
};
|
||||
|
||||
// Adjusting the working width of the column based the max line width found
|
||||
// in the menu values
|
||||
if max_width > default_width {
|
||||
self.working_details.col_width = max_width;
|
||||
} else {
|
||||
self.working_details.col_width = default_width;
|
||||
};
|
||||
|
||||
// The working columns is adjusted based on possible number of columns
|
||||
// that could be fitted in the screen with the calculated column width
|
||||
let possible_cols = painter.screen_width() / self.working_details.col_width as u16;
|
||||
if possible_cols > self.default_details.columns {
|
||||
self.working_details.columns = self.default_details.columns.max(1);
|
||||
} else {
|
||||
self.working_details.columns = possible_cols;
|
||||
}
|
||||
|
||||
// Updating the working rows to display the description
|
||||
if self.menu_required_lines(painter.screen_width()) <= painter.remaining_lines() {
|
||||
self.working_details.description_rows = self.default_details.description_rows;
|
||||
self.show_examples = true;
|
||||
} else {
|
||||
self.working_details.description_rows = painter
|
||||
.remaining_lines()
|
||||
.saturating_sub(self.default_details.selection_rows + 1)
|
||||
as usize;
|
||||
|
||||
self.show_examples = false;
|
||||
}
|
||||
|
||||
match event {
|
||||
MenuEvent::Activate(_) => {
|
||||
self.reset_position();
|
||||
self.input = Some(editor.get_buffer().to_string());
|
||||
self.update_values(editor, completer);
|
||||
}
|
||||
MenuEvent::Deactivate => self.active = false,
|
||||
MenuEvent::Edit(_) => {
|
||||
self.reset_position();
|
||||
self.update_values(editor, completer);
|
||||
self.update_examples()
|
||||
}
|
||||
MenuEvent::NextElement => {
|
||||
self.skipped_rows = 0;
|
||||
self.move_next();
|
||||
self.update_examples();
|
||||
}
|
||||
MenuEvent::PreviousElement => {
|
||||
self.skipped_rows = 0;
|
||||
self.move_previous();
|
||||
self.update_examples();
|
||||
}
|
||||
MenuEvent::MoveUp => {
|
||||
if let Some(example_index) = self.example_index {
|
||||
if let Some(index) = example_index.checked_sub(1) {
|
||||
self.example_index = Some(index);
|
||||
} else {
|
||||
self.example_index = Some(self.examples.len().saturating_sub(1));
|
||||
}
|
||||
} else if !self.examples.is_empty() {
|
||||
self.example_index = Some(0);
|
||||
}
|
||||
}
|
||||
MenuEvent::MoveDown => {
|
||||
if let Some(example_index) = self.example_index {
|
||||
let index = example_index + 1;
|
||||
if index < self.examples.len() {
|
||||
self.example_index = Some(index);
|
||||
} else {
|
||||
self.example_index = Some(0);
|
||||
}
|
||||
} else if !self.examples.is_empty() {
|
||||
self.example_index = Some(0);
|
||||
}
|
||||
}
|
||||
MenuEvent::MoveLeft => self.skipped_rows = self.skipped_rows.saturating_sub(1),
|
||||
MenuEvent::MoveRight => {
|
||||
let skipped = self.skipped_rows + 1;
|
||||
let description_rows = self
|
||||
.get_value()
|
||||
.and_then(|suggestion| suggestion.description)
|
||||
.unwrap_or_default()
|
||||
.lines()
|
||||
.count();
|
||||
|
||||
let allowed_skips =
|
||||
description_rows.saturating_sub(self.working_details.description_rows);
|
||||
|
||||
if skipped < allowed_skips {
|
||||
self.skipped_rows = skipped;
|
||||
} else {
|
||||
self.skipped_rows = allowed_skips;
|
||||
}
|
||||
}
|
||||
MenuEvent::PreviousPage | MenuEvent::NextPage => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The buffer gets replaced in the Span location
|
||||
fn replace_in_buffer(&self, editor: &mut Editor) {
|
||||
if let Some(Suggestion { value, span, .. }) = self.get_value() {
|
||||
let start = span.start.min(editor.line_buffer().len());
|
||||
let end = span.end.min(editor.line_buffer().len());
|
||||
|
||||
let replacement = if let Some(example_index) = self.example_index {
|
||||
self.examples
|
||||
.get(example_index)
|
||||
.expect("the example index is always checked")
|
||||
} else {
|
||||
&value
|
||||
};
|
||||
|
||||
editor.edit_buffer(
|
||||
|lb| {
|
||||
lb.replace_range(start..end, replacement);
|
||||
let mut offset = lb.insertion_point();
|
||||
offset += lb.len().saturating_sub(end.saturating_sub(start));
|
||||
lb.set_insertion_point(offset);
|
||||
},
|
||||
UndoBehavior::CreateUndoPoint,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/// Minimum rows that should be displayed by the menu
|
||||
fn min_rows(&self) -> u16 {
|
||||
self.get_rows().min(self.min_rows)
|
||||
}
|
||||
|
||||
/// Gets values from filler that will be displayed in the menu
|
||||
fn get_values(&self) -> &[Suggestion] {
|
||||
&self.values
|
||||
}
|
||||
|
||||
fn menu_required_lines(&self, _terminal_columns: u16) -> u16 {
|
||||
let example_lines = self
|
||||
.examples
|
||||
.iter()
|
||||
.fold(0, |acc, example| example.lines().count() + acc);
|
||||
|
||||
self.default_details.selection_rows
|
||||
+ self.default_details.description_rows as u16
|
||||
+ example_lines as u16
|
||||
+ 3
|
||||
}
|
||||
|
||||
fn menu_string(&self, _available_lines: u16, use_ansi_coloring: bool) -> String {
|
||||
if self.get_values().is_empty() {
|
||||
self.no_records_msg(use_ansi_coloring)
|
||||
} else {
|
||||
// The skip values represent the number of lines that should be skipped
|
||||
// while printing the menu
|
||||
let available_lines = self.default_details.selection_rows;
|
||||
let skip_values = if self.row_pos >= available_lines {
|
||||
let skip_lines = self.row_pos.saturating_sub(available_lines) + 1;
|
||||
(skip_lines * self.get_cols()) as usize
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
// It seems that crossterm prefers to have a complete string ready to be printed
|
||||
// rather than looping through the values and printing multiple things
|
||||
// This reduces the flickering when printing the menu
|
||||
let available_values = (available_lines * self.get_cols()) as usize;
|
||||
let selection_values: String = self
|
||||
.get_values()
|
||||
.iter()
|
||||
.skip(skip_values)
|
||||
.take(available_values)
|
||||
.enumerate()
|
||||
.map(|(index, suggestion)| {
|
||||
// Correcting the enumerate index based on the number of skipped values
|
||||
let index = index + skip_values;
|
||||
let column = index as u16 % self.get_cols();
|
||||
let empty_space = self.get_width().saturating_sub(suggestion.value.len());
|
||||
|
||||
self.create_entry_string(
|
||||
suggestion,
|
||||
index,
|
||||
column,
|
||||
empty_space,
|
||||
use_ansi_coloring,
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
format!(
|
||||
"{}{}{}",
|
||||
selection_values,
|
||||
self.create_description_string(use_ansi_coloring),
|
||||
self.create_example_string(use_ansi_coloring)
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,5 +1,6 @@
use nu_engine::documentation::get_flags_section;
use nu_protocol::{engine::EngineState, levenshtein_distance};
use nu_utils::IgnoreCaseExt;
use reedline::{Completer, Suggestion};
use std::fmt::Write;
use std::sync::Arc;
@@ -13,21 +14,19 @@ impl NuHelpCompleter {

fn completion_helper(&self, line: &str, pos: usize) -> Vec<Suggestion> {
let full_commands = self.0.get_signatures_with_examples(false);
let folded_line = line.to_folded_case();

//Vec<(Signature, Vec<Example>, bool, bool)> {
let mut commands = full_commands
.iter()
.filter(|(sig, _, _, _, _)| {
sig.name.to_lowercase().contains(&line.to_lowercase())
|| sig.usage.to_lowercase().contains(&line.to_lowercase())
sig.name.to_folded_case().contains(&folded_line)
|| sig.usage.to_folded_case().contains(&folded_line)
|| sig
.search_terms
.iter()
.any(|term| term.to_lowercase().contains(&line.to_lowercase()))
|| sig
.extra_usage
.to_lowercase()
.contains(&line.to_lowercase())
.any(|term| term.to_folded_case().contains(&folded_line))
|| sig.extra_usage.to_folded_case().contains(&folded_line)
})
.collect::<Vec<_>>();

@@ -57,8 +56,8 @@ impl NuHelpCompleter {
let _ = write!(long_desc, "Usage:\r\n > {}\r\n", sig.call_signature());

if !sig.named.is_empty() {
long_desc.push_str(&get_flags_section(sig, |v| {
v.into_string_parsable(", ", &self.0.config)
long_desc.push_str(&get_flags_section(Some(&*self.0.clone()), sig, |v| {
v.to_parsable_string(", ", &self.0.config)
}))
}

@@ -74,7 +73,7 @@ impl NuHelpCompleter {
let opt_suffix = if let Some(value) = &positional.default_value {
format!(
" (optional, default: {})",
&value.into_string_parsable(", ", &self.0.config),
&value.to_parsable_string(", ", &self.0.config),
)
} else {
(" (optional)").to_string()
@@ -103,10 +102,11 @@ impl NuHelpCompleter {
Suggestion {
value: sig.name.clone(),
description: Some(long_desc),
style: None,
extra: Some(extra),
span: reedline::Span {
start: pos,
end: pos + line.len(),
start: pos - line.len(),
end: pos,
},
append_whitespace: false,
}
@@ -120,3 +120,42 @@ impl Completer for NuHelpCompleter {
self.completion_helper(line, pos)
}
}

#[cfg(test)]
mod test {
use super::*;
use rstest::rstest;

#[rstest]
#[case("who", 5, 8, &["whoami"])]
#[case("hash", 1, 5, &["hash", "hash md5", "hash sha256"])]
#[case("into f", 0, 6, &["into float", "into filesize"])]
#[case("into nonexistent", 0, 16, &[])]
fn test_help_completer(
#[case] line: &str,
#[case] start: usize,
#[case] end: usize,
#[case] expected: &[&str],
) {
let engine_state =
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context());
let mut completer = NuHelpCompleter::new(engine_state.into());
let suggestions = completer.complete(line, end);

assert_eq!(
expected.len(),
suggestions.len(),
"expected {:?}, got {:?}",
expected,
suggestions
.iter()
.map(|s| s.value.clone())
.collect::<Vec<_>>()
);

for (exp, actual) in expected.iter().zip(suggestions) {
assert_eq!(exp, &actual.value);
assert_eq!(reedline::Span::new(start, end), actual.span);
}
}
}
@@ -80,24 +80,20 @@ fn convert_to_suggestions(
only_buffer_difference: bool,
) -> Vec<Suggestion> {
match value {
Value::Record { .. } => {
let text = value
.get_data_by_key("value")
.and_then(|val| val.as_string().ok())
Value::Record { val, .. } => {
let text = val
.get("value")
.and_then(|val| val.coerce_string().ok())
.unwrap_or_else(|| "No value key".to_string());

let description = value
.get_data_by_key("description")
.and_then(|val| val.as_string().ok());
let description = val
.get("description")
.and_then(|val| val.coerce_string().ok());

let span = match value.get_data_by_key("span") {
Some(span @ Value::Record { .. }) => {
let start = span
.get_data_by_key("start")
.and_then(|val| val.as_int().ok());
let end = span
.get_data_by_key("end")
.and_then(|val| val.as_int().ok());
let span = match val.get("span") {
Some(Value::Record { val: span, .. }) => {
let start = span.get("start").and_then(|val| val.as_int().ok());
let end = span.get("end").and_then(|val| val.as_int().ok());
match (start, end) {
(Some(start), Some(end)) => {
let start = start.min(end);
@@ -107,9 +103,13 @@ fn convert_to_suggestions(
}
}
_ => reedline::Span {
start: if only_buffer_difference { pos } else { 0 },
start: if only_buffer_difference {
pos - line.len()
} else {
0
},
end: if only_buffer_difference {
pos + line.len()
pos
} else {
line.len()
},
@@ -117,21 +117,25 @@ fn convert_to_suggestions(
}
}
_ => reedline::Span {
start: if only_buffer_difference { pos } else { 0 },
start: if only_buffer_difference {
pos - line.len()
} else {
0
},
end: if only_buffer_difference {
pos + line.len()
pos
} else {
line.len()
},
},
};

let extra = match value.get_data_by_key("extra") {
let extra = match val.get("extra") {
Some(Value::List { vals, .. }) => {
let extra: Vec<String> = vals
.into_iter()
.iter()
.filter_map(|extra| match extra {
Value::String { val, .. } => Some(val),
Value::String { val, .. } => Some(val.clone()),
_ => None,
})
.collect();
@@ -144,6 +148,7 @@ fn convert_to_suggestions(
vec![Suggestion {
value: text,
description,
style: None,
extra,
span,
append_whitespace: false,
@@ -156,10 +161,19 @@ fn convert_to_suggestions(
_ => vec![Suggestion {
value: format!("Not a record: {value:?}"),
description: None,
style: None,
extra: None,
span: reedline::Span {
start: 0,
end: line.len(),
start: if only_buffer_difference {
pos - line.len()
} else {
0
},
end: if only_buffer_difference {
pos
} else {
line.len()
},
},
append_whitespace: false,
}],
@@ -1,7 +1,5 @@
mod description_menu;
mod help_completions;
mod menu_completions;

pub use description_menu::DescriptionMenu;
pub use help_completions::NuHelpCompleter;
pub use menu_completions::NuMenuCompleter;
@@ -28,7 +28,7 @@ impl Command for NuHighlight {
fn run(
&self,
engine_state: &EngineState,
_stack: &mut Stack,
stack: &mut Stack,
call: &Call,
input: PipelineData,
) -> Result<PipelineData, ShellError> {
@@ -40,22 +40,17 @@ impl Command for NuHighlight {

let highlighter = crate::NuHighlighter {
engine_state,
stack: std::sync::Arc::new(stack.clone()),
config,
};

input.map(
move |x| match x.as_string() {
move |x| match x.coerce_into_string() {
Ok(line) => {
let highlights = highlighter.highlight(&line, line.len());

Value::String {
val: highlights.render_simple(),
span: head,
}
Value::string(highlights.render_simple(), head)
}
Err(err) => Value::Error {
error: Box::new(err),
},
Err(err) => Value::error(err, head),
},
ctrlc,
)
@@ -54,8 +54,8 @@ Since this command has no output, there is no point in piping it with other comm
input: PipelineData,
) -> Result<PipelineData, ShellError> {
let args: Vec<Value> = call.rest(engine_state, stack, 0)?;
let no_newline = call.has_flag("no-newline");
let to_stderr = call.has_flag("stderr");
let no_newline = call.has_flag(engine_state, stack, "no-newline")?;
let to_stderr = call.has_flag(engine_state, stack, "stderr")?;

// This will allow for easy printing of pipelines as well
if !args.is_empty() {
@@ -1,3 +1,4 @@
use crate::prompt_update::{POST_PROMPT_MARKER, PRE_PROMPT_MARKER};
#[cfg(windows)]
use nu_utils::enable_vt_processing;
use reedline::DefaultPrompt;
@@ -11,6 +12,7 @@ use {
/// Nushell prompt definition
#[derive(Clone)]
pub struct NushellPrompt {
shell_integration: bool,
left_prompt_string: Option<String>,
right_prompt_string: Option<String>,
default_prompt_indicator: Option<String>,
@@ -20,15 +22,10 @@ pub struct NushellPrompt {
render_right_prompt_on_last_line: bool,
}

impl Default for NushellPrompt {
fn default() -> Self {
NushellPrompt::new()
}
}

impl NushellPrompt {
pub fn new() -> NushellPrompt {
pub fn new(shell_integration: bool) -> NushellPrompt {
NushellPrompt {
shell_integration,
left_prompt_string: None,
right_prompt_string: None,
default_prompt_indicator: None,
@@ -109,10 +106,13 @@ impl Prompt for NushellPrompt {
let prompt = default
.render_prompt_left()
.to_string()
.replace('\n', "\r\n")
+ " ";
.replace('\n', "\r\n");

prompt.into()
if self.shell_integration {
format!("{PRE_PROMPT_MARKER}{prompt}{POST_PROMPT_MARKER}").into()
} else {
prompt.into()
}
}
}

@@ -144,11 +144,11 @@ impl Prompt for NushellPrompt {
PromptEditMode::Vi(vi_mode) => match vi_mode {
PromptViMode::Normal => match &self.default_vi_normal_prompt_indicator {
Some(indicator) => indicator,
None => ": ",
None => "> ",
},
PromptViMode::Insert => match &self.default_vi_insert_prompt_indicator {
Some(indicator) => indicator,
None => "> ",
None => ": ",
},
}
.into(),
@@ -15,10 +15,19 @@ pub(crate) const PROMPT_INDICATOR: &str = "PROMPT_INDICATOR";
pub(crate) const PROMPT_INDICATOR_VI_INSERT: &str = "PROMPT_INDICATOR_VI_INSERT";
pub(crate) const PROMPT_INDICATOR_VI_NORMAL: &str = "PROMPT_INDICATOR_VI_NORMAL";
pub(crate) const PROMPT_MULTILINE_INDICATOR: &str = "PROMPT_MULTILINE_INDICATOR";
pub(crate) const TRANSIENT_PROMPT_COMMAND: &str = "TRANSIENT_PROMPT_COMMAND";
pub(crate) const TRANSIENT_PROMPT_COMMAND_RIGHT: &str = "TRANSIENT_PROMPT_COMMAND_RIGHT";
pub(crate) const TRANSIENT_PROMPT_INDICATOR: &str = "TRANSIENT_PROMPT_INDICATOR";
pub(crate) const TRANSIENT_PROMPT_INDICATOR_VI_INSERT: &str =
"TRANSIENT_PROMPT_INDICATOR_VI_INSERT";
pub(crate) const TRANSIENT_PROMPT_INDICATOR_VI_NORMAL: &str =
"TRANSIENT_PROMPT_INDICATOR_VI_NORMAL";
pub(crate) const TRANSIENT_PROMPT_MULTILINE_INDICATOR: &str =
"TRANSIENT_PROMPT_MULTILINE_INDICATOR";
// According to Daniel Imms @Tyriar, we need to do these this way:
// <133 A><prompt><133 B><command><133 C><command output>
const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\";
const POST_PROMPT_MARKER: &str = "\x1b]133;B\x1b\\";
pub(crate) const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\";
pub(crate) const POST_PROMPT_MARKER: &str = "\x1b]133;B\x1b\\";

fn get_prompt_string(
prompt: &str,
@@ -29,13 +38,9 @@ fn get_prompt_string(
stack
.get_env_var(engine_state, prompt)
.and_then(|v| match v {
Value::Closure {
val: block_id,
captures,
..
} => {
let block = engine_state.get_block(block_id);
let mut stack = stack.captures_to_stack(&captures);
Value::Closure { val, .. } => {
let block = engine_state.get_block(val.block_id);
let mut stack = stack.captures_to_stack(val.captures);
// Use eval_subexpression to force a redirection of output, so we can use everything in prompt
let ret_val =
eval_subexpression(engine_state, &mut stack, block, PipelineData::empty());
@@ -91,12 +96,12 @@ fn get_prompt_string(
})
}

pub(crate) fn update_prompt<'prompt>(
pub(crate) fn update_prompt(
config: &Config,
engine_state: &EngineState,
stack: &Stack,
nu_prompt: &'prompt mut NushellPrompt,
) -> &'prompt dyn Prompt {
nu_prompt: &mut NushellPrompt,
) {
let mut stack = stack.clone();

let left_prompt_string = get_prompt_string(PROMPT_COMMAND, config, engine_state, &mut stack);
@@ -139,9 +144,55 @@ pub(crate) fn update_prompt<'prompt>(
(prompt_vi_insert_string, prompt_vi_normal_string),
config.render_right_prompt_on_last_line,
);

let ret_val = nu_prompt as &dyn Prompt;
trace!("update_prompt {}:{}:{}", file!(), line!(), column!());

ret_val
}

/// Construct the transient prompt based on the normal nu_prompt
pub(crate) fn make_transient_prompt(
config: &Config,
engine_state: &EngineState,
stack: &mut Stack,
nu_prompt: &NushellPrompt,
) -> Box<dyn Prompt> {
let mut nu_prompt = nu_prompt.clone();

if let Some(s) = get_prompt_string(TRANSIENT_PROMPT_COMMAND, config, engine_state, stack) {
nu_prompt.update_prompt_left(Some(s))
}

if let Some(s) = get_prompt_string(TRANSIENT_PROMPT_COMMAND_RIGHT, config, engine_state, stack)
{
nu_prompt.update_prompt_right(Some(s), config.render_right_prompt_on_last_line)
}

if let Some(s) = get_prompt_string(TRANSIENT_PROMPT_INDICATOR, config, engine_state, stack) {
nu_prompt.update_prompt_indicator(Some(s))
}
if let Some(s) = get_prompt_string(
TRANSIENT_PROMPT_INDICATOR_VI_INSERT,
config,
engine_state,
stack,
) {
nu_prompt.update_prompt_vi_insert(Some(s))
}
if let Some(s) = get_prompt_string(
TRANSIENT_PROMPT_INDICATOR_VI_NORMAL,
config,
engine_state,
stack,
) {
nu_prompt.update_prompt_vi_normal(Some(s))
}

if let Some(s) = get_prompt_string(
TRANSIENT_PROMPT_MULTILINE_INDICATOR,
config,
engine_state,
stack,
) {
nu_prompt.update_prompt_multiline(Some(s))
}

Box::new(nu_prompt)
}
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,15 +1,17 @@
use log::trace;
use nu_ansi_term::Style;
use nu_color_config::{get_matching_brackets_style, get_shape_color};
use nu_engine::env;
use nu_parser::{flatten_block, parse, FlatShape};
use nu_protocol::ast::{Argument, Block, Expr, Expression, PipelineElement};
use nu_protocol::engine::{EngineState, StateWorkingSet};
use nu_protocol::ast::{Argument, Block, Expr, Expression, PipelineElement, RecordItem};
use nu_protocol::engine::{EngineState, Stack, StateWorkingSet};
use nu_protocol::{Config, Span};
use reedline::{Highlighter, StyledText};
use std::sync::Arc;

pub struct NuHighlighter {
pub engine_state: Arc<EngineState>,
pub stack: Arc<Stack>,
pub config: Config,
}

@@ -17,10 +19,37 @@ impl Highlighter for NuHighlighter {
fn highlight(&self, line: &str, _cursor: usize) -> StyledText {
trace!("highlighting: {}", line);

let highlight_resolved_externals =
self.engine_state.get_config().highlight_resolved_externals;
let mut working_set = StateWorkingSet::new(&self.engine_state);
let block = parse(&mut working_set, None, line.as_bytes(), false);
let (shapes, global_span_offset) = {
let shapes = flatten_block(&working_set, &block);
let mut shapes = flatten_block(&working_set, &block);
// Highlighting externals has a config point because of concerns that using which to resolve
// externals may slow down things too much.
if highlight_resolved_externals {
for (span, shape) in shapes.iter_mut() {
if *shape == FlatShape::External {
let str_contents =
working_set.get_span_contents(Span::new(span.start, span.end));

let str_word = String::from_utf8_lossy(str_contents).to_string();
let paths = env::path_str(&self.engine_state, &self.stack, *span).ok();
let res = if let Ok(cwd) =
env::current_dir_str(&self.engine_state, &self.stack)
{
which::which_in(str_word, paths.as_ref(), cwd).ok()
} else {
which::which_in_global(str_word, paths.as_ref())
.ok()
.and_then(|mut i| i.next())
};
if res.is_some() {
*shape = FlatShape::ExternalResolved;
}
}
}
}
(shapes, self.engine_state.next_span_start())
};

@@ -59,7 +88,7 @@ impl Highlighter for NuHighlighter {
($shape:expr, $span:expr, $text:expr) => {{
let spans = split_span_by_highlight_positions(
line,
&$span,
$span,
&matching_brackets_pos,
global_span_offset,
);
@@ -91,6 +120,7 @@ impl Highlighter for NuHighlighter {
FlatShape::InternalCall(_) => add_colored_token(&shape.1, next_token),
FlatShape::External => add_colored_token(&shape.1, next_token),
FlatShape::ExternalArg => add_colored_token(&shape.1, next_token),
FlatShape::ExternalResolved => add_colored_token(&shape.1, next_token),
FlatShape::Keyword => add_colored_token(&shape.1, next_token),
FlatShape::Literal => add_colored_token(&shape.1, next_token),
FlatShape::Operator => add_colored_token(&shape.1, next_token),
@@ -143,8 +173,8 @@ impl Highlighter for NuHighlighter {

fn split_span_by_highlight_positions(
line: &str,
span: &Span,
highlight_positions: &Vec<usize>,
span: Span,
highlight_positions: &[usize],
global_span_offset: usize,
) -> Vec<(Span, bool)> {
let mut start = span.start;
@@ -234,11 +264,13 @@ fn find_matching_block_end_in_block(
for e in &p.elements {
match e {
PipelineElement::Expression(_, e)
| PipelineElement::Redirection(_, _, e)
| PipelineElement::ErrPipedExpression(_, e)
| PipelineElement::OutErrPipedExpression(_, e)
| PipelineElement::Redirection(_, _, e, _)
| PipelineElement::And(_, e)
| PipelineElement::Or(_, e)
| PipelineElement::SameTargetRedirection { cmd: (_, e), .. }
| PipelineElement::SeparateRedirection { out: (_, e), .. } => {
| PipelineElement::SeparateRedirection { out: (_, e, _), .. } => {
if e.span.contains(global_cursor_offset) {
if let Some(pos) = find_matching_block_end_in_expr(
line,
@@ -303,18 +335,18 @@ fn find_matching_block_end_in_expr(
Expr::Keyword(..) => None,
Expr::ValueWithUnit(..) => None,
Expr::DateTime(_) => None,
Expr::Filepath(_) => None,
Expr::Directory(_) => None,
Expr::GlobPattern(_) => None,
Expr::Filepath(_, _) => None,
Expr::Directory(_, _) => None,
Expr::GlobPattern(_, _) => None,
Expr::String(_) => None,
Expr::CellPath(_) => None,
Expr::ImportPattern(_) => None,
Expr::Overlay(_) => None,
Expr::Signature(_) => None,
Expr::MatchPattern(_) => None,
Expr::MatchBlock(_) => None,
Expr::Nothing => None,
Expr::Garbage => None,
Expr::Spread(_) => None,

Expr::Table(hdr, rows) => {
if expr_last == global_cursor_offset {
@@ -346,9 +378,16 @@ fn find_matching_block_end_in_expr(
Some(expr_last)
} else {
// cursor is inside record
for (k, v) in exprs {
find_in_expr_or_continue!(k);
find_in_expr_or_continue!(v);
for expr in exprs {
match expr {
RecordItem::Pair(k, v) => {
find_in_expr_or_continue!(k);
find_in_expr_or_continue!(v);
}
RecordItem::Spread(_, record) => {
find_in_expr_or_continue!(record);
}
}
}
None
}
@@ -360,6 +399,7 @@ fn find_matching_block_end_in_expr(
Argument::Named((_, _, opt_expr)) => opt_expr.as_ref(),
Argument::Positional(inner_expr) => Some(inner_expr),
Argument::Unknown(inner_expr) => Some(inner_expr),
Argument::Spread(inner_expr) => Some(inner_expr),
};

if let Some(inner_expr) = opt_expr {
@@ -1,4 +1,4 @@
use nu_command::hook::eval_hook;
use nu_cmd_base::hook::eval_hook;
use nu_engine::{eval_block, eval_block_with_early_return};
use nu_parser::{escape_quote_string, lex, parse, unescape_unquote_string, Token, TokenContents};
use nu_protocol::engine::StateWorkingSet;
@@ -43,13 +43,13 @@ fn gather_env_vars(
let working_set = StateWorkingSet::new(engine_state);
report_error(
&working_set,
&ShellError::GenericError(
format!("Environment variable was not captured: {env_str}"),
"".to_string(),
None,
Some(msg.into()),
Vec::new(),
),
&ShellError::GenericError {
error: format!("Environment variable was not captured: {env_str}"),
msg: "".into(),
span: None,
help: Some(msg.into()),
inner: vec![],
},
);
}

@@ -75,15 +75,15 @@ fn gather_env_vars(
let working_set = StateWorkingSet::new(engine_state);
report_error(
&working_set,
&ShellError::GenericError(
"Current directory is not a valid utf-8 path".to_string(),
"".to_string(),
None,
Some(format!(
&ShellError::GenericError {
error: "Current directory is not a valid utf-8 path".into(),
msg: "".into(),
span: None,
help: Some(format!(
"Retrieving current directory failed: {init_cwd:?} not a valid utf-8 path"
)),
Vec::new(),
),
inner: vec![],
},
);
}
}
@@ -105,13 +105,13 @@ fn gather_env_vars(
span: full_span,
} = token
{
let contents = engine_state.get_span_contents(&full_span);
let contents = engine_state.get_span_contents(full_span);
let (parts, _) = lex(contents, full_span.start, &[], &[b'='], true);

let name = if let Some(Token {
contents: TokenContents::Item,
span,
}) = parts.get(0)
}) = parts.first()
{
let mut working_set = StateWorkingSet::new(engine_state);
let bytes = working_set.get_span_contents(*span);
@@ -185,10 +185,7 @@ fn gather_env_vars(
continue;
}

Value::String {
val: bytes,
span: *span,
}
Value::string(bytes, *span)
} else {
report_capture_error(
engine_state,
@@ -223,6 +220,10 @@ pub fn eval_source(
source,
false,
);
if let Some(warning) = working_set.parse_warnings.first() {
report_error(&working_set, warning);
}

if let Some(err) = working_set.parse_errors.first() {
set_last_exit_code(stack, 1);
report_error(&working_set, err);
@@ -257,7 +258,14 @@ pub fn eval_source(
{
result = print_if_stream(stream, stderr_stream, false, exit_code);
} else if let Some(hook) = config.hooks.display_output.clone() {
match eval_hook(engine_state, stack, Some(pipeline_data), vec![], &hook) {
match eval_hook(
engine_state,
stack,
Some(pipeline_data),
vec![],
&hook,
"display_output",
) {
Err(err) => {
result = Err(err);
}
@ -1,11 +1,16 @@
|
||||
pub mod support;
|
||||
|
||||
use std::path::PathBuf;
|
||||
|
||||
use nu_cli::NuCompleter;
|
||||
use nu_parser::parse;
|
||||
use nu_protocol::engine::StateWorkingSet;
|
||||
use reedline::{Completer, Suggestion};
|
||||
use rstest::{fixture, rstest};
|
||||
use support::{completions_helpers::new_quote_engine, file, folder, match_suggestions, new_engine};
|
||||
use support::{
|
||||
completions_helpers::{new_partial_engine, new_quote_engine},
|
||||
file, folder, match_suggestions, new_engine,
|
||||
};
|
||||
|
||||
#[fixture]
|
||||
fn completer() -> NuCompleter {
|
||||
@ -54,6 +59,29 @@ fn extern_completer() -> NuCompleter {
|
||||
NuCompleter::new(std::sync::Arc::new(engine), stack)
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
fn custom_completer() -> NuCompleter {
|
||||
// Create a new engine
|
||||
let (dir, _, mut engine, mut stack) = new_engine();
|
||||
|
||||
// Add record value as example
|
||||
let record = r#"
|
||||
let external_completer = {|spans|
|
||||
$spans
|
||||
}
|
||||
|
||||
$env.config.completions.external = {
|
||||
enable: true
|
||||
max_results: 100
|
||||
completer: $external_completer
|
||||
}
|
||||
"#;
|
||||
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||
|
||||
// Instantiate a new completer
|
||||
NuCompleter::new(std::sync::Arc::new(engine), stack)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn variables_dollar_sign_with_varialblecompletion() {
|
||||
let (_, _, engine, stack) = new_engine();
|
||||
@ -63,7 +91,7 @@ fn variables_dollar_sign_with_varialblecompletion() {
|
||||
let target_dir = "$ ";
|
||||
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||
|
||||
assert_eq!(7, suggestions.len());
|
||||
assert_eq!(8, suggestions.len());
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
@ -116,15 +144,34 @@ fn dotnu_completions() {
|
||||
let completion_str = "source-env ".to_string();
|
||||
let suggestions = completer.complete(&completion_str, completion_str.len());
|
||||
|
||||
assert_eq!(1, suggestions.len());
|
||||
assert_eq!("custom_completion.nu", suggestions.get(0).unwrap().value);
|
||||
assert_eq!(2, suggestions.len());
|
||||
assert_eq!("custom_completion.nu", suggestions.first().unwrap().value);
|
||||
#[cfg(windows)]
|
||||
assert_eq!("directory_completion\\", suggestions.get(1).unwrap().value);
|
||||
#[cfg(not(windows))]
|
||||
assert_eq!("directory_completion/", suggestions.get(1).unwrap().value);
|
||||
|
||||
// Test use completion
|
||||
let completion_str = "use ".to_string();
|
||||
let suggestions = completer.complete(&completion_str, completion_str.len());
|
||||
|
||||
assert_eq!(1, suggestions.len());
|
||||
assert_eq!("custom_completion.nu", suggestions.get(0).unwrap().value);
|
||||
assert_eq!(2, suggestions.len());
|
||||
assert_eq!("custom_completion.nu", suggestions.first().unwrap().value);
|
||||
#[cfg(windows)]
|
||||
assert_eq!("directory_completion\\", suggestions.get(1).unwrap().value);
|
||||
#[cfg(not(windows))]
|
||||
assert_eq!("directory_completion/", suggestions.get(1).unwrap().value);
|
||||
|
||||
// Test overlay use completion
|
||||
let completion_str = "overlay use ".to_string();
|
||||
let suggestions = completer.complete(&completion_str, completion_str.len());
|
||||
|
||||
assert_eq!(2, suggestions.len());
|
||||
assert_eq!("custom_completion.nu", suggestions.first().unwrap().value);
|
||||
#[cfg(windows)]
|
||||
assert_eq!("directory_completion\\", suggestions.get(1).unwrap().value);
|
||||
#[cfg(not(windows))]
|
||||
assert_eq!("directory_completion/", suggestions.get(1).unwrap().value);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -136,7 +183,7 @@ fn external_completer_trailing_space() {
|
||||
|
||||
let suggestions = run_external_completion(block, &input);
|
||||
assert_eq!(3, suggestions.len());
|
||||
assert_eq!("gh", suggestions.get(0).unwrap().value);
|
||||
assert_eq!("gh", suggestions.first().unwrap().value);
|
||||
assert_eq!("alias", suggestions.get(1).unwrap().value);
|
||||
assert_eq!("", suggestions.get(2).unwrap().value);
|
||||
}
|
||||
@ -148,7 +195,7 @@ fn external_completer_no_trailing_space() {
|
||||
|
||||
let suggestions = run_external_completion(block, &input);
|
||||
assert_eq!(2, suggestions.len());
|
||||
assert_eq!("gh", suggestions.get(0).unwrap().value);
|
||||
assert_eq!("gh", suggestions.first().unwrap().value);
|
||||
assert_eq!("alias", suggestions.get(1).unwrap().value);
|
||||
}
|
||||
|
||||
@ -159,7 +206,7 @@ fn external_completer_pass_flags() {
|
||||
|
||||
let suggestions = run_external_completion(block, &input);
|
||||
assert_eq!(3, suggestions.len());
|
||||
assert_eq!("gh", suggestions.get(0).unwrap().value);
|
||||
assert_eq!("gh", suggestions.first().unwrap().value);
|
||||
assert_eq!("api", suggestions.get(1).unwrap().value);
|
||||
assert_eq!("--", suggestions.get(2).unwrap().value);
|
||||
}
|
||||
@ -180,6 +227,7 @@ fn file_completions() {
|
||||
let expected_paths: Vec<String> = vec![
|
||||
folder(dir.join("another")),
|
||||
file(dir.join("custom_completion.nu")),
|
||||
folder(dir.join("directory_completion")),
|
||||
file(dir.join("nushell")),
|
||||
folder(dir.join("test_a")),
|
||||
folder(dir.join("test_b")),
|
||||
@ -199,6 +247,97 @@ fn file_completions() {
|
||||
|
||||
// Match the results
|
||||
match_suggestions(expected_paths, suggestions);
|
||||
|
||||
// Test completions for hidden files
|
||||
let target_dir = format!("ls {}/.", folder(dir.join(".hidden_folder")));
|
||||
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||
|
||||
let expected_paths: Vec<String> =
|
||||
vec![file(dir.join(".hidden_folder").join(".hidden_subfile"))];
|
||||
|
||||
// Match the results
|
||||
match_suggestions(expected_paths, suggestions);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn partial_completions() {
|
||||
// Create a new engine
|
||||
let (dir, _, engine, stack) = new_partial_engine();
|
||||
|
||||
// Instantiate a new completer
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
|
||||
// Test completions for a folder's name
|
||||
let target_dir = format!("cd {}", file(dir.join("pa")));
|
||||
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||
|
||||
// Create the expected values
|
||||
let expected_paths: Vec<String> = vec![
|
||||
folder(dir.join("partial_a")),
|
||||
folder(dir.join("partial_b")),
|
||||
folder(dir.join("partial_c")),
|
||||
];
|
||||
|
||||
// Match the results
|
||||
match_suggestions(expected_paths, suggestions);
|
||||
|
||||
// Test completions for the files whose name begin with "h"
|
||||
// and are present under directories whose names begin with "pa"
|
||||
let dir_str = file(dir.join("pa").join("h"));
|
||||
let target_dir = format!("cp {dir_str}");
|
||||
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||
|
||||
// Create the expected values
|
||||
let expected_paths: Vec<String> = vec![
|
||||
file(dir.join("partial_a").join("hello")),
|
||||
file(dir.join("partial_a").join("hola")),
|
||||
file(dir.join("partial_b").join("hello_b")),
|
||||
file(dir.join("partial_b").join("hi_b")),
|
||||
file(dir.join("partial_c").join("hello_c")),
|
||||
];
|
||||
|
||||
// Match the results
|
||||
match_suggestions(expected_paths, suggestions);
|
||||
|
||||
// Test completion for all files under directories whose names begin with "pa"
|
||||
let dir_str = folder(dir.join("pa"));
|
||||
let target_dir = format!("ls {dir_str}");
|
||||
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||
|
||||
// Create the expected values
|
||||
let expected_paths: Vec<String> = vec![
|
||||
file(dir.join("partial_a").join("anotherfile")),
|
||||
file(dir.join("partial_a").join("hello")),
|
||||
file(dir.join("partial_a").join("hola")),
|
||||
file(dir.join("partial_b").join("hello_b")),
|
||||
file(dir.join("partial_b").join("hi_b")),
|
||||
file(dir.join("partial_c").join("hello_c")),
|
||||
];
|
||||
|
||||
// Match the results
|
||||
match_suggestions(expected_paths, suggestions);
|
||||
|
||||
// Test completion for a single file
|
||||
let dir_str = file(dir.join("fi").join("so"));
|
||||
let target_dir = format!("rm {dir_str}");
|
||||
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||
|
||||
// Create the expected values
|
||||
let expected_paths: Vec<String> = vec![file(dir.join("final_partial").join("somefile"))];
|
||||
|
||||
// Match the results
|
||||
match_suggestions(expected_paths, suggestions);
|
||||
|
||||
// Test completion where there is a sneaky `..` in the path
|
||||
let dir_str = file(dir.join("par").join("..").join("fi").join("so"));
|
||||
let target_dir = format!("rm {dir_str}");
|
||||
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||
|
||||
// Create the expected values
|
||||
let expected_paths: Vec<String> = vec![file(dir.join("final_partial").join("somefile"))];
|
||||
|
||||
// Match the results
|
||||
match_suggestions(expected_paths, suggestions);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -214,6 +353,7 @@ fn command_ls_with_filecompletion() {
|
||||
let expected_paths: Vec<String> = vec![
|
||||
"another\\".to_string(),
|
||||
"custom_completion.nu".to_string(),
|
||||
"directory_completion\\".to_string(),
|
||||
"nushell".to_string(),
|
||||
"test_a\\".to_string(),
|
||||
"test_b\\".to_string(),
|
||||
@ -224,6 +364,7 @@ fn command_ls_with_filecompletion() {
|
||||
let expected_paths: Vec<String> = vec![
|
||||
"another/".to_string(),
|
||||
"custom_completion.nu".to_string(),
|
||||
"directory_completion/".to_string(),
|
||||
"nushell".to_string(),
|
||||
"test_a/".to_string(),
|
||||
"test_b/".to_string(),
|
||||
@ -246,6 +387,7 @@ fn command_open_with_filecompletion() {
|
||||
let expected_paths: Vec<String> = vec![
|
||||
"another\\".to_string(),
|
||||
"custom_completion.nu".to_string(),
|
||||
"directory_completion\\".to_string(),
|
||||
"nushell".to_string(),
|
||||
"test_a\\".to_string(),
|
||||
"test_b\\".to_string(),
|
||||
@ -256,6 +398,7 @@ fn command_open_with_filecompletion() {
|
||||
let expected_paths: Vec<String> = vec![
|
||||
"another/".to_string(),
|
||||
"custom_completion.nu".to_string(),
|
||||
"directory_completion/".to_string(),
|
||||
"nushell".to_string(),
|
||||
"test_a/".to_string(),
|
||||
"test_b/".to_string(),
|
||||
@ -279,6 +422,7 @@ fn command_rm_with_globcompletion() {
|
||||
let expected_paths: Vec<String> = vec![
|
||||
"another\\".to_string(),
|
||||
"custom_completion.nu".to_string(),
|
||||
"directory_completion\\".to_string(),
|
||||
"nushell".to_string(),
|
||||
"test_a\\".to_string(),
|
||||
"test_b\\".to_string(),
|
||||
@ -289,6 +433,7 @@ fn command_rm_with_globcompletion() {
|
||||
let expected_paths: Vec<String> = vec![
|
||||
"another/".to_string(),
|
||||
"custom_completion.nu".to_string(),
|
||||
"directory_completion/".to_string(),
|
||||
"nushell".to_string(),
|
||||
"test_a/".to_string(),
|
||||
"test_b/".to_string(),
|
||||
@ -312,6 +457,7 @@ fn command_cp_with_globcompletion() {
|
||||
let expected_paths: Vec<String> = vec![
|
||||
"another\\".to_string(),
|
||||
"custom_completion.nu".to_string(),
|
||||
"directory_completion\\".to_string(),
|
||||
"nushell".to_string(),
|
||||
"test_a\\".to_string(),
|
||||
"test_b\\".to_string(),
|
||||
@ -322,6 +468,7 @@ fn command_cp_with_globcompletion() {
|
||||
let expected_paths: Vec<String> = vec![
|
||||
"another/".to_string(),
|
||||
"custom_completion.nu".to_string(),
|
||||
"directory_completion/".to_string(),
|
||||
"nushell".to_string(),
|
||||
"test_a/".to_string(),
|
||||
"test_b/".to_string(),
|
||||
@ -345,6 +492,7 @@ fn command_save_with_filecompletion() {
|
||||
let expected_paths: Vec<String> = vec![
|
||||
"another\\".to_string(),
|
||||
"custom_completion.nu".to_string(),
|
||||
"directory_completion\\".to_string(),
|
||||
"nushell".to_string(),
|
||||
"test_a\\".to_string(),
|
||||
"test_b\\".to_string(),
|
||||
@ -355,6 +503,7 @@ fn command_save_with_filecompletion() {
|
||||
let expected_paths: Vec<String> = vec![
|
||||
"another/".to_string(),
|
||||
"custom_completion.nu".to_string(),
|
||||
"directory_completion/".to_string(),
|
||||
"nushell".to_string(),
|
||||
"test_a/".to_string(),
|
||||
"test_b/".to_string(),
|
||||
@ -378,6 +527,7 @@ fn command_touch_with_filecompletion() {
|
||||
let expected_paths: Vec<String> = vec![
|
||||
"another\\".to_string(),
|
||||
"custom_completion.nu".to_string(),
|
||||
"directory_completion\\".to_string(),
|
||||
"nushell".to_string(),
|
||||
"test_a\\".to_string(),
|
||||
"test_b\\".to_string(),
|
||||
@ -388,6 +538,7 @@ fn command_touch_with_filecompletion() {
|
||||
let expected_paths: Vec<String> = vec![
|
||||
"another/".to_string(),
|
||||
"custom_completion.nu".to_string(),
|
||||
"directory_completion/".to_string(),
|
||||
"nushell".to_string(),
|
||||
"test_a/".to_string(),
|
||||
"test_b/".to_string(),
|
||||
@ -411,6 +562,7 @@ fn command_watch_with_filecompletion() {
|
||||
let expected_paths: Vec<String> = vec![
|
||||
"another\\".to_string(),
|
||||
"custom_completion.nu".to_string(),
|
||||
"directory_completion\\".to_string(),
|
||||
"nushell".to_string(),
|
||||
"test_a\\".to_string(),
|
||||
"test_b\\".to_string(),
|
||||
@ -421,6 +573,7 @@ fn command_watch_with_filecompletion() {
|
||||
let expected_paths: Vec<String> = vec![
|
||||
"another/".to_string(),
|
||||
"custom_completion.nu".to_string(),
|
||||
"directory_completion/".to_string(),
|
||||
"nushell".to_string(),
|
||||
"test_a/".to_string(),
|
||||
"test_b/".to_string(),
|
||||
@ -441,10 +594,27 @@ fn file_completion_quoted() {
|
||||
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||
|
||||
let expected_paths: Vec<String> = vec![
|
||||
"\'[a] bc.txt\'".to_string(),
|
||||
"`--help`".to_string(),
|
||||
"`-42`".to_string(),
|
||||
"`-inf`".to_string(),
|
||||
"`4.2`".to_string(),
|
||||
"`te st.txt`".to_string(),
|
||||
"`te#st.txt`".to_string(),
|
||||
"`te'st.txt`".to_string(),
|
||||
"`te(st).txt`".to_string(),
|
||||
format!("`{}`", folder("test dir".into())),
|
||||
];
|
||||
|
||||
match_suggestions(expected_paths, suggestions);
|
||||
|
||||
let dir: PathBuf = "test dir".into();
|
||||
let target_dir = format!("open '{}'", folder(dir.clone()));
|
||||
let suggestions = completer.complete(&target_dir, target_dir.len());
|
||||
|
||||
let expected_paths: Vec<String> = vec![
|
||||
format!("`{}`", file(dir.join("double quote"))),
|
||||
format!("`{}`", file(dir.join("single quote"))),
|
||||
];
|
||||
|
||||
match_suggestions(expected_paths, suggestions)
|
||||
@ -500,6 +670,7 @@ fn folder_with_directorycompletions() {
|
||||
// Create the expected values
|
||||
let expected_paths: Vec<String> = vec![
|
||||
folder(dir.join("another")),
|
||||
folder(dir.join("directory_completion")),
|
||||
folder(dir.join("test_a")),
|
||||
folder(dir.join("test_b")),
|
||||
folder(dir.join(".hidden_folder")),
|
||||
@ -524,13 +695,14 @@ fn variables_completions() {
|
||||
// Test completions for $nu
|
||||
let suggestions = completer.complete("$nu.", 4);
|
||||
|
||||
assert_eq!(14, suggestions.len());
|
||||
assert_eq!(15, suggestions.len());
|
||||
|
||||
let expected: Vec<String> = vec![
|
||||
"config-path".into(),
|
||||
"current-exe".into(),
|
||||
"default-config-dir".into(),
|
||||
"env-path".into(),
|
||||
"history-enabled".into(),
|
||||
"history-path".into(),
|
||||
"home-path".into(),
|
||||
"is-interactive".into(),
|
||||
@ -549,9 +721,13 @@ fn variables_completions() {
|
||||
// Test completions for $nu.h (filter)
|
||||
let suggestions = completer.complete("$nu.h", 5);
|
||||
|
||||
assert_eq!(2, suggestions.len());
|
||||
assert_eq!(3, suggestions.len());
|
||||
|
||||
let expected: Vec<String> = vec!["history-path".into(), "home-path".into()];
|
||||
let expected: Vec<String> = vec![
|
||||
"history-enabled".into(),
|
||||
"history-path".into(),
|
||||
"home-path".into(),
|
||||
];
|
||||
|
||||
// Match results
|
||||
match_suggestions(expected, suggestions);
|
||||
@ -693,7 +869,7 @@ fn run_external_completion(block: &str, input: &str) -> Vec<Suggestion> {
|
||||
// Change config adding the external completer
|
||||
let mut config = engine_state.get_config().clone();
|
||||
config.external_completer = Some(latest_block_id);
|
||||
engine_state.set_config(&config);
|
||||
engine_state.set_config(config);
|
||||
|
||||
// Instantiate a new completer
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine_state), stack);
|
||||
@ -714,6 +890,7 @@ fn unknown_command_completion() {
|
||||
let expected_paths: Vec<String> = vec![
|
||||
"another\\".to_string(),
|
||||
"custom_completion.nu".to_string(),
|
||||
"directory_completion\\".to_string(),
|
||||
"nushell".to_string(),
|
||||
"test_a\\".to_string(),
|
||||
"test_b\\".to_string(),
|
||||
@ -724,6 +901,7 @@ fn unknown_command_completion() {
|
||||
let expected_paths: Vec<String> = vec![
|
||||
"another/".to_string(),
|
||||
"custom_completion.nu".to_string(),
|
||||
"directory_completion/".to_string(),
|
||||
"nushell".to_string(),
|
||||
"test_a/".to_string(),
|
||||
"test_b/".to_string(),
|
||||
@ -774,6 +952,7 @@ fn filecompletions_triggers_after_cursor() {
|
||||
let expected_paths: Vec<String> = vec![
|
||||
"another\\".to_string(),
|
||||
"custom_completion.nu".to_string(),
|
||||
"directory_completion\\".to_string(),
|
||||
"nushell".to_string(),
|
||||
"test_a\\".to_string(),
|
||||
"test_b\\".to_string(),
|
||||
@ -784,6 +963,7 @@ fn filecompletions_triggers_after_cursor() {
|
||||
let expected_paths: Vec<String> = vec![
|
||||
"another/".to_string(),
|
||||
"custom_completion.nu".to_string(),
|
||||
"directory_completion/".to_string(),
|
||||
"nushell".to_string(),
|
||||
"test_a/".to_string(),
|
||||
"test_b/".to_string(),
|
||||
@ -836,6 +1016,34 @@ fn extern_complete_flags(mut extern_completer: NuCompleter) {
|
||||
match_suggestions(expected, suggestions);
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn custom_completer_triggers_cursor_before_word(mut custom_completer: NuCompleter) {
|
||||
let suggestions = custom_completer.complete("cmd foo bar", 8);
|
||||
let expected: Vec<String> = vec!["cmd".into(), "foo".into(), "".into()];
|
||||
match_suggestions(expected, suggestions);
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn custom_completer_triggers_cursor_on_word_left_boundary(mut custom_completer: NuCompleter) {
|
||||
let suggestions = custom_completer.complete("cmd foo bar", 8);
|
||||
let expected: Vec<String> = vec!["cmd".into(), "foo".into(), "".into()];
|
||||
match_suggestions(expected, suggestions);
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn custom_completer_triggers_cursor_next_to_word(mut custom_completer: NuCompleter) {
|
||||
let suggestions = custom_completer.complete("cmd foo bar", 11);
|
||||
let expected: Vec<String> = vec!["cmd".into(), "foo".into(), "bar".into()];
|
||||
match_suggestions(expected, suggestions);
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
fn custom_completer_triggers_cursor_after_word(mut custom_completer: NuCompleter) {
|
||||
let suggestions = custom_completer.complete("cmd foo bar ", 12);
|
||||
let expected: Vec<String> = vec!["cmd".into(), "foo".into(), "bar".into(), "".into()];
|
||||
match_suggestions(expected, suggestions);
|
||||
}
|
||||
|
||||
#[ignore = "was reverted, still needs fixing"]
|
||||
#[rstest]
|
||||
fn alias_offset_bug_7648() {
|
||||
|
@ -4,7 +4,8 @@ use nu_engine::eval_block;
|
||||
use nu_parser::parse;
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
PipelineData, ShellError, Span, Value,
|
||||
eval_const::create_nu_constant,
|
||||
PipelineData, ShellError, Span, Value, NU_VARIABLE_ID,
|
||||
};
|
||||
use nu_test_support::fs;
|
||||
use reedline::Suggestion;
|
||||
@ -28,39 +29,41 @@ pub fn new_engine() -> (PathBuf, String, EngineState, Stack) {
|
||||
// Create a new engine with default context
|
||||
let mut engine_state = create_default_context();
|
||||
|
||||
// Add $nu
|
||||
let nu_const =
|
||||
create_nu_constant(&engine_state, Span::test_data()).expect("Failed creating $nu");
|
||||
engine_state.set_variable_const_val(NU_VARIABLE_ID, nu_const);
|
||||
|
||||
// New stack
|
||||
let mut stack = Stack::new();
|
||||
|
||||
// Add pwd as env var
|
||||
stack.add_env_var(
|
||||
"PWD".to_string(),
|
||||
Value::String {
|
||||
val: dir_str.clone(),
|
||||
span: nu_protocol::Span::new(0, dir_str.len()),
|
||||
},
|
||||
Value::string(dir_str.clone(), nu_protocol::Span::new(0, dir_str.len())),
|
||||
);
|
||||
stack.add_env_var(
|
||||
"TEST".to_string(),
|
||||
Value::String {
|
||||
val: "NUSHELL".to_string(),
|
||||
span: nu_protocol::Span::new(0, dir_str.len()),
|
||||
},
|
||||
Value::string(
|
||||
"NUSHELL".to_string(),
|
||||
nu_protocol::Span::new(0, dir_str.len()),
|
||||
),
|
||||
);
|
||||
#[cfg(windows)]
|
||||
stack.add_env_var(
|
||||
"Path".to_string(),
|
||||
Value::String {
|
||||
val: "c:\\some\\path;c:\\some\\other\\path".to_string(),
|
||||
span: nu_protocol::Span::new(0, dir_str.len()),
|
||||
},
|
||||
Value::string(
|
||||
"c:\\some\\path;c:\\some\\other\\path".to_string(),
|
||||
nu_protocol::Span::new(0, dir_str.len()),
|
||||
),
|
||||
);
|
||||
#[cfg(not(windows))]
|
||||
stack.add_env_var(
|
||||
"PATH".to_string(),
|
||||
Value::String {
|
||||
val: "/some/path:/some/other/path".to_string(),
|
||||
span: nu_protocol::Span::new(0, dir_str.len()),
|
||||
},
|
||||
Value::string(
|
||||
"/some/path:/some/other/path".to_string(),
|
||||
nu_protocol::Span::new(0, dir_str.len()),
|
||||
),
|
||||
);
|
||||
|
||||
// Merge environment into the permanent state
|
||||
@ -89,17 +92,50 @@ pub fn new_quote_engine() -> (PathBuf, String, EngineState, Stack) {
|
||||
// Add pwd as env var
|
||||
stack.add_env_var(
|
||||
"PWD".to_string(),
|
||||
Value::String {
|
||||
val: dir_str.clone(),
|
||||
span: nu_protocol::Span::new(0, dir_str.len()),
|
||||
},
|
||||
Value::string(dir_str.clone(), nu_protocol::Span::new(0, dir_str.len())),
|
||||
);
|
||||
stack.add_env_var(
|
||||
"TEST".to_string(),
|
||||
Value::String {
|
||||
val: "NUSHELL".to_string(),
|
||||
span: nu_protocol::Span::new(0, dir_str.len()),
|
||||
},
|
||||
Value::string(
|
||||
"NUSHELL".to_string(),
|
||||
nu_protocol::Span::new(0, dir_str.len()),
|
||||
),
|
||||
);
|
||||
|
||||
// Merge environment into the permanent state
|
||||
let merge_result = engine_state.merge_env(&mut stack, &dir);
|
||||
assert!(merge_result.is_ok());
|
||||
|
||||
(dir, dir_str, engine_state, stack)
|
||||
}
|
||||
|
||||
pub fn new_partial_engine() -> (PathBuf, String, EngineState, Stack) {
|
||||
// Target folder inside assets
|
||||
let dir = fs::fixtures().join("partial_completions");
|
||||
let mut dir_str = dir
|
||||
.clone()
|
||||
.into_os_string()
|
||||
.into_string()
|
||||
.unwrap_or_default();
|
||||
dir_str.push(SEP);
|
||||
|
||||
// Create a new engine with default context
|
||||
let mut engine_state = create_default_context();
|
||||
|
||||
// New stack
|
||||
let mut stack = Stack::new();
|
||||
|
||||
// Add pwd as env var
|
||||
stack.add_env_var(
|
||||
"PWD".to_string(),
|
||||
Value::string(dir_str.clone(), nu_protocol::Span::new(0, dir_str.len())),
|
||||
);
|
||||
stack.add_env_var(
|
||||
"TEST".to_string(),
|
||||
Value::string(
|
||||
"NUSHELL".to_string(),
|
||||
nu_protocol::Span::new(0, dir_str.len()),
|
||||
),
|
||||
);
|
||||
|
||||
// Merge environment into the permanent state
|
||||
@ -162,12 +198,7 @@ pub fn merge_input(
|
||||
engine_state,
|
||||
stack,
|
||||
&block,
|
||||
PipelineData::Value(
|
||||
Value::Nothing {
|
||||
span: Span::unknown(),
|
||||
},
|
||||
None
|
||||
),
|
||||
PipelineData::Value(Value::nothing(Span::unknown(),), None),
|
||||
false,
|
||||
false
|
||||
)
|
||||
|
@ -5,12 +5,17 @@ edition = "2021"
|
||||
license = "MIT"
|
||||
name = "nu-cmd-base"
|
||||
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-base"
|
||||
version = "0.83.1"
|
||||
version = "0.91.0"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
nu-engine = { path = "../nu-engine", version = "0.83.1" }
|
||||
nu-path = { path = "../nu-path", version = "0.83.1" }
|
||||
nu-protocol = { version = "0.83.1", path = "../nu-protocol" }
|
||||
indexmap = { version = "2.0" }
|
||||
nu-engine = { path = "../nu-engine", version = "0.91.0" }
|
||||
nu-parser = { path = "../nu-parser", version = "0.91.0" }
|
||||
nu-path = { path = "../nu-path", version = "0.91.0" }
|
||||
nu-protocol = { path = "../nu-protocol", version = "0.91.0" }
|
||||
|
||||
indexmap = "2.2"
|
||||
miette = "7.1.0"
|
||||
|
||||
[dev-dependencies]
|
||||
|
21
crates/nu-cmd-base/LICENSE
Normal file
21
crates/nu-cmd-base/LICENSE
Normal file
@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2019 - 2023 The Nushell Project Developers
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
@ -6,7 +6,7 @@ pub fn merge_descriptors(values: &[Value]) -> Vec<String> {
|
||||
let mut seen: IndexSet<String> = indexset! {};
|
||||
for value in values {
|
||||
let data_descriptors = match value {
|
||||
Value::Record { cols, .. } => cols.to_owned(),
|
||||
Value::Record { val, .. } => val.columns().cloned().collect(),
|
||||
_ => vec!["".to_string()],
|
||||
};
|
||||
for desc in data_descriptors {
|
||||
|
362
crates/nu-cmd-base/src/hook.rs
Normal file
362
crates/nu-cmd-base/src/hook.rs
Normal file
@ -0,0 +1,362 @@
|
||||
use crate::util::get_guaranteed_cwd;
|
||||
use miette::Result;
|
||||
use nu_engine::{eval_block, eval_block_with_early_return};
|
||||
use nu_parser::parse;
|
||||
use nu_protocol::cli_error::{report_error, report_error_new};
|
||||
use nu_protocol::engine::{EngineState, Stack, StateWorkingSet};
|
||||
use nu_protocol::{BlockId, PipelineData, PositionalArg, ShellError, Span, Type, Value, VarId};
|
||||
|
||||
pub fn eval_env_change_hook(
|
||||
env_change_hook: Option<Value>,
|
||||
engine_state: &mut EngineState,
|
||||
stack: &mut Stack,
|
||||
) -> Result<(), ShellError> {
|
||||
if let Some(hook) = env_change_hook {
|
||||
match hook {
|
||||
Value::Record { val, .. } => {
|
||||
for (env_name, hook_value) in &val {
|
||||
let before = engine_state
|
||||
.previous_env_vars
|
||||
.get(env_name)
|
||||
.cloned()
|
||||
.unwrap_or_default();
|
||||
|
||||
let after = stack
|
||||
.get_env_var(engine_state, env_name)
|
||||
.unwrap_or_default();
|
||||
|
||||
if before != after {
|
||||
eval_hook(
|
||||
engine_state,
|
||||
stack,
|
||||
None,
|
||||
vec![("$before".into(), before), ("$after".into(), after.clone())],
|
||||
hook_value,
|
||||
"env_change",
|
||||
)?;
|
||||
|
||||
engine_state
|
||||
.previous_env_vars
|
||||
.insert(env_name.to_string(), after);
|
||||
}
|
||||
}
|
||||
}
|
||||
x => {
|
||||
return Err(ShellError::TypeMismatch {
|
||||
err_message: "record for the 'env_change' hook".to_string(),
|
||||
span: x.span(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn eval_hook(
|
||||
engine_state: &mut EngineState,
|
||||
stack: &mut Stack,
|
||||
input: Option<PipelineData>,
|
||||
arguments: Vec<(String, Value)>,
|
||||
value: &Value,
|
||||
hook_name: &str,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let mut output = PipelineData::empty();
|
||||
|
||||
let span = value.span();
|
||||
match value {
|
||||
Value::String { val, .. } => {
|
||||
let (block, delta, vars) = {
|
||||
let mut working_set = StateWorkingSet::new(engine_state);
|
||||
|
||||
let mut vars: Vec<(VarId, Value)> = vec![];
|
||||
|
||||
for (name, val) in arguments {
|
||||
let var_id = working_set.add_variable(
|
||||
name.as_bytes().to_vec(),
|
||||
val.span(),
|
||||
Type::Any,
|
||||
false,
|
||||
);
|
||||
vars.push((var_id, val));
|
||||
}
|
||||
|
||||
let output = parse(
|
||||
&mut working_set,
|
||||
Some(&format!("{hook_name} hook")),
|
||||
val.as_bytes(),
|
||||
false,
|
||||
);
|
||||
if let Some(err) = working_set.parse_errors.first() {
|
||||
report_error(&working_set, err);
|
||||
|
||||
return Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "valid source code".into(),
|
||||
value: "source code with syntax errors".into(),
|
||||
span,
|
||||
});
|
||||
}
|
||||
|
||||
(output, working_set.render(), vars)
|
||||
};
|
||||
|
||||
engine_state.merge_delta(delta)?;
|
||||
let input = if let Some(input) = input {
|
||||
input
|
||||
} else {
|
||||
PipelineData::empty()
|
||||
};
|
||||
|
||||
let var_ids: Vec<VarId> = vars
|
||||
.into_iter()
|
||||
.map(|(var_id, val)| {
|
||||
stack.add_var(var_id, val);
|
||||
var_id
|
||||
})
|
||||
.collect();
|
||||
|
||||
match eval_block(engine_state, stack, &block, input, false, false) {
|
||||
Ok(pipeline_data) => {
|
||||
output = pipeline_data;
|
||||
}
|
||||
Err(err) => {
|
||||
report_error_new(engine_state, &err);
|
||||
}
|
||||
}
|
||||
|
||||
for var_id in var_ids.iter() {
|
||||
stack.remove_var(*var_id);
|
||||
}
|
||||
}
|
||||
Value::List { vals, .. } => {
|
||||
for val in vals {
|
||||
eval_hook(
|
||||
engine_state,
|
||||
stack,
|
||||
None,
|
||||
arguments.clone(),
|
||||
val,
|
||||
&format!("{hook_name} list, recursive"),
|
||||
)?;
|
||||
}
|
||||
}
|
||||
Value::Record { val, .. } => {
|
||||
// Hooks can optionally be a record in this form:
|
||||
// {
|
||||
// condition: {|before, after| ... } # block that evaluates to true/false
|
||||
// code: # block or a string
|
||||
// }
|
||||
// The condition block will be run to check whether the main hook (in `code`) should be run.
|
||||
// If it returns true (the default if a condition block is not specified), the hook should be run.
|
||||
let do_run_hook = if let Some(condition) = val.get("condition") {
|
||||
let other_span = condition.span();
|
||||
if let Ok(block_id) = condition.coerce_block() {
|
||||
match run_hook_block(
|
||||
engine_state,
|
||||
stack,
|
||||
block_id,
|
||||
None,
|
||||
arguments.clone(),
|
||||
other_span,
|
||||
) {
|
||||
Ok(pipeline_data) => {
|
||||
if let PipelineData::Value(Value::Bool { val, .. }, ..) = pipeline_data
|
||||
{
|
||||
val
|
||||
} else {
|
||||
return Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "boolean output".to_string(),
|
||||
value: "other PipelineData variant".to_string(),
|
||||
span: other_span,
|
||||
});
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
return Err(err);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
return Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "block".to_string(),
|
||||
value: format!("{}", condition.get_type()),
|
||||
span: other_span,
|
||||
});
|
||||
}
|
||||
} else {
|
||||
// always run the hook
|
||||
true
|
||||
};
|
||||
|
||||
if do_run_hook {
|
||||
let Some(follow) = val.get("code") else {
|
||||
return Err(ShellError::CantFindColumn {
|
||||
col_name: "code".into(),
|
||||
span,
|
||||
src_span: span,
|
||||
});
|
||||
};
|
||||
let source_span = follow.span();
|
||||
match follow {
|
||||
Value::String { val, .. } => {
|
||||
let (block, delta, vars) = {
|
||||
let mut working_set = StateWorkingSet::new(engine_state);
|
||||
|
||||
let mut vars: Vec<(VarId, Value)> = vec![];
|
||||
|
||||
for (name, val) in arguments {
|
||||
let var_id = working_set.add_variable(
|
||||
name.as_bytes().to_vec(),
|
||||
val.span(),
|
||||
Type::Any,
|
||||
false,
|
||||
);
|
||||
vars.push((var_id, val));
|
||||
}
|
||||
|
||||
let output = parse(
|
||||
&mut working_set,
|
||||
Some(&format!("{hook_name} hook")),
|
||||
val.as_bytes(),
|
||||
false,
|
||||
);
|
||||
if let Some(err) = working_set.parse_errors.first() {
|
||||
report_error(&working_set, err);
|
||||
|
||||
return Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "valid source code".into(),
|
||||
value: "source code with syntax errors".into(),
|
||||
span: source_span,
|
||||
});
|
||||
}
|
||||
|
||||
(output, working_set.render(), vars)
|
||||
};
|
||||
|
||||
engine_state.merge_delta(delta)?;
|
||||
let input = PipelineData::empty();
|
||||
|
||||
let var_ids: Vec<VarId> = vars
|
||||
.into_iter()
|
||||
.map(|(var_id, val)| {
|
||||
stack.add_var(var_id, val);
|
||||
var_id
|
||||
})
|
||||
.collect();
|
||||
|
||||
match eval_block(engine_state, stack, &block, input, false, false) {
|
||||
Ok(pipeline_data) => {
|
||||
output = pipeline_data;
|
||||
}
|
||||
Err(err) => {
|
||||
report_error_new(engine_state, &err);
|
||||
}
|
||||
}
|
||||
|
||||
for var_id in var_ids.iter() {
|
||||
stack.remove_var(*var_id);
|
||||
}
|
||||
}
|
||||
Value::Block { val: block_id, .. } => {
|
||||
run_hook_block(
|
||||
engine_state,
|
||||
stack,
|
||||
*block_id,
|
||||
input,
|
||||
arguments,
|
||||
source_span,
|
||||
)?;
|
||||
}
|
||||
Value::Closure { val, .. } => {
|
||||
run_hook_block(
|
||||
engine_state,
|
||||
stack,
|
||||
val.block_id,
|
||||
input,
|
||||
arguments,
|
||||
source_span,
|
||||
)?;
|
||||
}
|
||||
other => {
|
||||
return Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "block or string".to_string(),
|
||||
value: format!("{}", other.get_type()),
|
||||
span: source_span,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Value::Block { val: block_id, .. } => {
|
||||
output = run_hook_block(engine_state, stack, *block_id, input, arguments, span)?;
|
||||
}
|
||||
Value::Closure { val, .. } => {
|
||||
output = run_hook_block(engine_state, stack, val.block_id, input, arguments, span)?;
|
||||
}
|
||||
other => {
|
||||
return Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "string, block, record, or list of commands".into(),
|
||||
value: format!("{}", other.get_type()),
|
||||
span: other.span(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
let cwd = get_guaranteed_cwd(engine_state, stack);
|
||||
engine_state.merge_env(stack, cwd)?;
|
||||
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
fn run_hook_block(
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
block_id: BlockId,
|
||||
optional_input: Option<PipelineData>,
|
||||
arguments: Vec<(String, Value)>,
|
||||
span: Span,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let block = engine_state.get_block(block_id);
|
||||
|
||||
let input = optional_input.unwrap_or_else(PipelineData::empty);
|
||||
|
||||
let mut callee_stack = stack.gather_captures(engine_state, &block.captures);
|
||||
|
||||
for (idx, PositionalArg { var_id, .. }) in
|
||||
block.signature.required_positional.iter().enumerate()
|
||||
{
|
||||
if let Some(var_id) = var_id {
|
||||
if let Some(arg) = arguments.get(idx) {
|
||||
callee_stack.add_var(*var_id, arg.1.clone())
|
||||
} else {
|
||||
return Err(ShellError::IncompatibleParametersSingle {
|
||||
msg: "This hook block has too many parameters".into(),
|
||||
span,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let pipeline_data =
|
||||
eval_block_with_early_return(engine_state, &mut callee_stack, block, input, false, false)?;
|
||||
|
||||
if let PipelineData::Value(Value::Error { error, .. }, _) = pipeline_data {
|
||||
return Err(*error);
|
||||
}
|
||||
|
||||
// If all went fine, preserve the environment of the called block
|
||||
let caller_env_vars = stack.get_env_var_names(engine_state);
|
||||
|
||||
// remove env vars that are present in the caller but not in the callee
|
||||
// (the callee hid them)
|
||||
for var in caller_env_vars.iter() {
|
||||
if !callee_stack.has_env_var(engine_state, var) {
|
||||
stack.remove_env_var(engine_state, var);
|
||||
}
|
||||
}
|
||||
|
||||
// add new env vars from callee to caller
|
||||
for (var, value) in callee_stack.get_stack_env_vars() {
|
||||
stack.add_env_var(var, value);
|
||||
}
|
||||
Ok(pipeline_data)
|
||||
}
|
@ -76,9 +76,7 @@ where
|
||||
}),
|
||||
);
|
||||
if let Err(error) = r {
|
||||
return Value::Error {
|
||||
error: Box::new(error),
|
||||
};
|
||||
return Value::error(error, span);
|
||||
}
|
||||
}
|
||||
v
|
||||
|
@ -1,3 +1,4 @@
|
||||
pub mod formats;
|
||||
pub mod hook;
|
||||
pub mod input_handler;
|
||||
pub mod util;
|
||||
|
@ -55,3 +55,73 @@ pub fn process_range(range: &Range) -> Result<(isize, isize), MakeRangeError> {
|
||||
|
||||
Ok((start, end))
|
||||
}
|
||||
|
||||
const HELP_MSG: &str = "Nushell's config file can be found with the command: $nu.config-path. \
|
||||
For more help: (https://nushell.sh/book/configuration.html#configurations-with-built-in-commands)";
|
||||
|
||||
fn get_editor_commandline(
|
||||
value: &Value,
|
||||
var_name: &str,
|
||||
) -> Result<(String, Vec<String>), ShellError> {
|
||||
match value {
|
||||
Value::String { val, .. } if !val.is_empty() => Ok((val.to_string(), Vec::new())),
|
||||
Value::List { vals, .. } if !vals.is_empty() => {
|
||||
let mut editor_cmd = vals.iter().map(|l| l.coerce_string());
|
||||
match editor_cmd.next().transpose()? {
|
||||
Some(editor) if !editor.is_empty() => {
|
||||
let params = editor_cmd.collect::<Result<_, ShellError>>()?;
|
||||
Ok((editor, params))
|
||||
}
|
||||
_ => Err(ShellError::GenericError {
|
||||
error: "Editor executable is missing".into(),
|
||||
msg: "Set the first element to an executable".into(),
|
||||
span: Some(value.span()),
|
||||
help: Some(HELP_MSG.into()),
|
||||
inner: vec![],
|
||||
}),
|
||||
}
|
||||
}
|
||||
Value::String { .. } | Value::List { .. } => Err(ShellError::GenericError {
|
||||
error: format!("{var_name} should be a non-empty string or list<String>"),
|
||||
msg: "Specify an executable here".into(),
|
||||
span: Some(value.span()),
|
||||
help: Some(HELP_MSG.into()),
|
||||
inner: vec![],
|
||||
}),
|
||||
x => Err(ShellError::CantConvert {
|
||||
to_type: "string or list<string>".into(),
|
||||
from_type: x.get_type().to_string(),
|
||||
span: value.span(),
|
||||
help: None,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_editor(
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
span: Span,
|
||||
) -> Result<(String, Vec<String>), ShellError> {
|
||||
let config = engine_state.get_config();
|
||||
let env_vars = stack.get_env_vars(engine_state);
|
||||
|
||||
if let Ok(buff_editor) =
|
||||
get_editor_commandline(&config.buffer_editor, "$env.config.buffer_editor")
|
||||
{
|
||||
Ok(buff_editor)
|
||||
} else if let Some(value) = env_vars.get("EDITOR") {
|
||||
get_editor_commandline(value, "$env.EDITOR")
|
||||
} else if let Some(value) = env_vars.get("VISUAL") {
|
||||
get_editor_commandline(value, "$env.VISUAL")
|
||||
} else {
|
||||
Err(ShellError::GenericError {
|
||||
error: "No editor configured".into(),
|
||||
msg:
|
||||
"Please specify one via `$env.config.buffer_editor` or `$env.EDITOR`/`$env.VISUAL`"
|
||||
.into(),
|
||||
span: Some(span),
|
||||
help: Some(HELP_MSG.into()),
|
||||
inner: vec![],
|
||||
})
|
||||
}
|
||||
}
|
||||
|
@ -5,7 +5,7 @@ edition = "2021"
|
||||
license = "MIT"
|
||||
name = "nu-cmd-dataframe"
|
||||
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-dataframe"
|
||||
version = "0.83.1"
|
||||
version = "0.91.0"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
@ -13,17 +13,23 @@ version = "0.83.1"
|
||||
bench = false
|
||||
|
||||
[dependencies]
|
||||
nu-engine = { path = "../nu-engine", version = "0.83.1" }
|
||||
nu-parser = { path = "../nu-parser", version = "0.83.1" }
|
||||
nu-protocol = { path = "../nu-protocol", version = "0.83.1" }
|
||||
nu-engine = { path = "../nu-engine", version = "0.91.0" }
|
||||
nu-parser = { path = "../nu-parser", version = "0.91.0" }
|
||||
nu-protocol = { path = "../nu-protocol", version = "0.91.0" }
|
||||
|
||||
# Potential dependencies for extras
|
||||
chrono = { version = "0.4", features = ["std", "unstable-locales"], default-features = false }
|
||||
fancy-regex = "0.11"
|
||||
indexmap = { version = "2.0" }
|
||||
chrono-tz = "0.8"
|
||||
fancy-regex = "0.13"
|
||||
indexmap = { version = "2.2" }
|
||||
num = { version = "0.4", optional = true }
|
||||
serde = { version = "1.0", features = ["derive"] }
|
||||
sqlparser = { version = "0.34", features = ["serde"], optional = true }
|
||||
sqlparser = { version = "0.43", optional = true }
|
||||
polars-io = { version = "0.37", features = ["avro"], optional = true }
|
||||
polars-arrow = { version = "0.37", optional = true }
|
||||
polars-ops = { version = "0.37", optional = true }
|
||||
polars-plan = { version = "0.37", features = ["regex"], optional = true }
|
||||
polars-utils = { version = "0.37", optional = true }
|
||||
|
||||
[dependencies.polars]
|
||||
features = [
|
||||
@ -37,7 +43,11 @@ features = [
|
||||
"dtype-categorical",
|
||||
"dtype-datetime",
|
||||
"dtype-struct",
|
||||
"dynamic_groupby",
|
||||
"dtype-i8",
|
||||
"dtype-i16",
|
||||
"dtype-u8",
|
||||
"dtype-u16",
|
||||
"dynamic_group_by",
|
||||
"ipc",
|
||||
"is_in",
|
||||
"json",
|
||||
@ -53,12 +63,11 @@ features = [
|
||||
"to_dummies",
|
||||
]
|
||||
optional = true
|
||||
version = "0.30.0"
|
||||
version = "0.37"
|
||||
|
||||
[features]
|
||||
dataframe = ["default"]
|
||||
default = ["num", "polars", "sqlparser"]
|
||||
dataframe = ["num", "polars", "polars-io", "polars-arrow", "polars-ops", "polars-plan", "polars-utils", "sqlparser"]
|
||||
default = []
|
||||
|
||||
[dev-dependencies]
|
||||
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.83.1" }
|
||||
nu-test-support = { path = "../nu-test-support", version = "0.83.1" }
|
||||
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.91.0" }
|
||||
|
@ -37,24 +37,27 @@ impl Command for AppendDF {
|
||||
example: r#"let a = ([[a b]; [1 2] [3 4]] | dfr into-df);
|
||||
$a | dfr append $a"#,
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new(
|
||||
"a".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(3)],
|
||||
),
|
||||
Column::new(
|
||||
"b".to_string(),
|
||||
vec![Value::test_int(2), Value::test_int(4)],
|
||||
),
|
||||
Column::new(
|
||||
"a_x".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(3)],
|
||||
),
|
||||
Column::new(
|
||||
"b_x".to_string(),
|
||||
vec![Value::test_int(2), Value::test_int(4)],
|
||||
),
|
||||
])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![
|
||||
Column::new(
|
||||
"a".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(3)],
|
||||
),
|
||||
Column::new(
|
||||
"b".to_string(),
|
||||
vec![Value::test_int(2), Value::test_int(4)],
|
||||
),
|
||||
Column::new(
|
||||
"a_x".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(3)],
|
||||
),
|
||||
Column::new(
|
||||
"b_x".to_string(),
|
||||
vec![Value::test_int(2), Value::test_int(4)],
|
||||
),
|
||||
],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -64,26 +67,29 @@ impl Command for AppendDF {
|
||||
example: r#"let a = ([[a b]; [1 2] [3 4]] | dfr into-df);
|
||||
$a | dfr append $a --col"#,
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new(
|
||||
"a".to_string(),
|
||||
vec![
|
||||
Value::test_int(1),
|
||||
Value::test_int(3),
|
||||
Value::test_int(1),
|
||||
Value::test_int(3),
|
||||
],
|
||||
),
|
||||
Column::new(
|
||||
"b".to_string(),
|
||||
vec![
|
||||
Value::test_int(2),
|
||||
Value::test_int(4),
|
||||
Value::test_int(2),
|
||||
Value::test_int(4),
|
||||
],
|
||||
),
|
||||
])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![
|
||||
Column::new(
|
||||
"a".to_string(),
|
||||
vec![
|
||||
Value::test_int(1),
|
||||
Value::test_int(3),
|
||||
Value::test_int(1),
|
||||
Value::test_int(3),
|
||||
],
|
||||
),
|
||||
Column::new(
|
||||
"b".to_string(),
|
||||
vec![
|
||||
Value::test_int(2),
|
||||
Value::test_int(4),
|
||||
Value::test_int(2),
|
||||
Value::test_int(4),
|
||||
],
|
||||
),
|
||||
],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -110,7 +116,7 @@ fn command(
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let other: Value = call.req(engine_state, stack, 0)?;
|
||||
|
||||
let axis = if call.has_flag("col") {
|
||||
let axis = if call.has_flag(engine_state, stack, "col")? {
|
||||
Axis::Column
|
||||
} else {
|
||||
Axis::Row
|
||||
|
201
crates/nu-cmd-dataframe/src/dataframe/eager/cast.rs
Normal file
201
crates/nu-cmd-dataframe/src/dataframe/eager/cast.rs
Normal file
@ -0,0 +1,201 @@
|
||||
use crate::dataframe::values::{str_to_dtype, NuExpression, NuLazyFrame};
|
||||
|
||||
use super::super::values::NuDataFrame;
|
||||
use nu_engine::CallExt;
|
||||
use nu_protocol::{
|
||||
ast::Call,
|
||||
engine::{Command, EngineState, Stack},
|
||||
record, Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
|
||||
};
|
||||
use polars::prelude::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct CastDF;
|
||||
|
||||
impl Command for CastDF {
|
||||
fn name(&self) -> &str {
|
||||
"dfr cast"
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Cast a column to a different dtype."
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(self.name())
|
||||
.input_output_types(vec![
|
||||
(
|
||||
Type::Custom("expression".into()),
|
||||
Type::Custom("expression".into()),
|
||||
),
|
||||
(
|
||||
Type::Custom("dataframe".into()),
|
||||
Type::Custom("dataframe".into()),
|
||||
),
|
||||
])
|
||||
.required(
|
||||
"dtype",
|
||||
SyntaxShape::String,
|
||||
"The dtype to cast the column to",
|
||||
)
|
||||
.optional(
|
||||
"column",
|
||||
SyntaxShape::String,
|
||||
"The column to cast. Required when used with a dataframe.",
|
||||
)
|
||||
.category(Category::Custom("dataframe".into()))
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![
|
||||
Example {
|
||||
description: "Cast a column in a dataframe to a different dtype",
|
||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr cast u8 a | dfr schema",
|
||||
result: Some(Value::record(
|
||||
record! {
|
||||
"a" => Value::string("u8", Span::test_data()),
|
||||
"b" => Value::string("i64", Span::test_data()),
|
||||
},
|
||||
Span::test_data(),
|
||||
)),
|
||||
},
|
||||
Example {
|
||||
description: "Cast a column in a lazy dataframe to a different dtype",
|
||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr into-lazy | dfr cast u8 a | dfr schema",
|
||||
result: Some(Value::record(
|
||||
record! {
|
||||
"a" => Value::string("u8", Span::test_data()),
|
||||
"b" => Value::string("i64", Span::test_data()),
|
||||
},
|
||||
Span::test_data(),
|
||||
)),
|
||||
},
|
||||
Example {
|
||||
description: "Cast a column in a expression to a different dtype",
|
||||
example: r#"[[a b]; [1 2] [1 4]] | dfr into-df | dfr group-by a | dfr agg [ (dfr col b | dfr cast u8 | dfr min | dfr as "b_min") ] | dfr schema"#,
|
||||
result: None
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
if NuLazyFrame::can_downcast(&value) {
|
||||
let (dtype, column_nm) = df_args(engine_state, stack, call)?;
|
||||
let df = NuLazyFrame::try_from_value(value)?;
|
||||
command_lazy(call, column_nm, dtype, df)
|
||||
} else if NuDataFrame::can_downcast(&value) {
|
||||
let (dtype, column_nm) = df_args(engine_state, stack, call)?;
|
||||
let df = NuDataFrame::try_from_value(value)?;
|
||||
command_eager(call, column_nm, dtype, df)
|
||||
} else {
|
||||
let dtype: String = call.req(engine_state, stack, 0)?;
|
||||
let dtype = str_to_dtype(&dtype, call.head)?;
|
||||
|
||||
let expr = NuExpression::try_from_value(value)?;
|
||||
let expr: NuExpression = expr.into_polars().cast(dtype).into();
|
||||
|
||||
Ok(PipelineData::Value(
|
||||
NuExpression::into_value(expr, call.head),
|
||||
None,
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn df_args(
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
) -> Result<(DataType, String), ShellError> {
|
||||
let dtype = dtype_arg(engine_state, stack, call)?;
|
||||
let column_nm: String =
|
||||
call.opt(engine_state, stack, 1)?
|
||||
.ok_or(ShellError::MissingParameter {
|
||||
param_name: "column_name".into(),
|
||||
span: call.head,
|
||||
})?;
|
||||
Ok((dtype, column_nm))
|
||||
}
|
||||
|
||||
fn dtype_arg(
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
) -> Result<DataType, ShellError> {
|
||||
let dtype: String = call.req(engine_state, stack, 0)?;
|
||||
str_to_dtype(&dtype, call.head)
|
||||
}
|
||||
|
||||
fn command_lazy(
|
||||
call: &Call,
|
||||
column_nm: String,
|
||||
dtype: DataType,
|
||||
lazy: NuLazyFrame,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let column = col(&column_nm).cast(dtype);
|
||||
let lazy = lazy.into_polars().with_columns(&[column]);
|
||||
let lazy = NuLazyFrame::new(false, lazy);
|
||||
|
||||
Ok(PipelineData::Value(
|
||||
NuLazyFrame::into_value(lazy, call.head)?,
|
||||
None,
|
||||
))
|
||||
}
|
||||
|
||||
fn command_eager(
|
||||
call: &Call,
|
||||
column_nm: String,
|
||||
dtype: DataType,
|
||||
nu_df: NuDataFrame,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let mut df = nu_df.df;
|
||||
let column = df
|
||||
.column(&column_nm)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: format!("{e}"),
|
||||
msg: "".into(),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
|
||||
let casted = column.cast(&dtype).map_err(|e| ShellError::GenericError {
|
||||
error: format!("{e}"),
|
||||
msg: "".into(),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
|
||||
let _ = df
|
||||
.with_column(casted)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: format!("{e}"),
|
||||
msg: "".into(),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
|
||||
let df = NuDataFrame::new(false, df);
|
||||
Ok(PipelineData::Value(df.into_value(call.head), None))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
|
||||
use super::super::super::test_dataframe::test_dataframe;
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_examples() {
|
||||
test_dataframe(vec![Box::new(CastDF {})])
|
||||
}
|
||||
}
|
@ -27,10 +27,10 @@ impl Command for ColumnsDF {
|
||||
vec![Example {
|
||||
description: "Dataframe columns",
|
||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr columns",
|
||||
result: Some(Value::List {
|
||||
vals: vec![Value::test_string("a"), Value::test_string("b")],
|
||||
span: Span::test_data(),
|
||||
}),
|
||||
result: Some(Value::list(
|
||||
vec![Value::test_string("a"), Value::test_string("b")],
|
||||
Span::test_data(),
|
||||
)),
|
||||
}]
|
||||
}
|
||||
|
||||
@ -60,10 +60,7 @@ fn command(
|
||||
.map(|v| Value::string(*v, call.head))
|
||||
.collect();
|
||||
|
||||
let names = Value::List {
|
||||
vals: names,
|
||||
span: call.head,
|
||||
};
|
||||
let names = Value::list(names, call.head);
|
||||
|
||||
Ok(PipelineData::Value(names, None))
|
||||
}
|
||||
|
@ -35,10 +35,13 @@ impl Command for DropDF {
|
||||
description: "drop column a",
|
||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr drop a",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![Column::new(
|
||||
"b".to_string(),
|
||||
vec![Value::test_int(2), Value::test_int(4)],
|
||||
)])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![Column::new(
|
||||
"b".to_string(),
|
||||
vec![Value::test_int(2), Value::test_int(4)],
|
||||
)],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -68,26 +71,24 @@ fn command(
|
||||
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
||||
|
||||
let new_df = col_string
|
||||
.get(0)
|
||||
.ok_or_else(|| {
|
||||
ShellError::GenericError(
|
||||
"Empty names list".into(),
|
||||
"No column names were found".into(),
|
||||
Some(col_span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
.first()
|
||||
.ok_or_else(|| ShellError::GenericError {
|
||||
error: "Empty names list".into(),
|
||||
msg: "No column names were found".into(),
|
||||
span: Some(col_span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})
|
||||
.and_then(|col| {
|
||||
df.as_ref().drop(&col.item).map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error dropping column".into(),
|
||||
e.to_string(),
|
||||
Some(col.span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
})
|
||||
df.as_ref()
|
||||
.drop(&col.item)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error dropping column".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(col.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})
|
||||
})?;
|
||||
|
||||
// If there are more columns in the drop selection list, these
|
||||
@ -96,15 +97,15 @@ fn command(
|
||||
.iter()
|
||||
.skip(1)
|
||||
.try_fold(new_df, |new_df, col| {
|
||||
new_df.drop(&col.item).map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error dropping column".into(),
|
||||
e.to_string(),
|
||||
Some(col.span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
})
|
||||
new_df
|
||||
.drop(&col.item)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error dropping column".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(col.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})
|
||||
})
|
||||
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
|
||||
}
|
||||
|
@ -46,16 +46,19 @@ impl Command for DropDuplicates {
|
||||
description: "drop duplicates",
|
||||
example: "[[a b]; [1 2] [3 4] [1 2]] | dfr into-df | dfr drop-duplicates",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new(
|
||||
"a".to_string(),
|
||||
vec![Value::test_int(3), Value::test_int(1)],
|
||||
),
|
||||
Column::new(
|
||||
"b".to_string(),
|
||||
vec![Value::test_int(4), Value::test_int(2)],
|
||||
),
|
||||
])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![
|
||||
Column::new(
|
||||
"a".to_string(),
|
||||
vec![Value::test_int(3), Value::test_int(1)],
|
||||
),
|
||||
Column::new(
|
||||
"b".to_string(),
|
||||
vec![Value::test_int(4), Value::test_int(2)],
|
||||
),
|
||||
],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -92,7 +95,7 @@ fn command(
|
||||
|
||||
let subset_slice = subset.as_ref().map(|cols| &cols[..]);
|
||||
|
||||
let keep_strategy = if call.has_flag("last") {
|
||||
let keep_strategy = if call.has_flag(engine_state, stack, "last")? {
|
||||
UniqueKeepStrategy::Last
|
||||
} else {
|
||||
UniqueKeepStrategy::First
|
||||
@ -100,14 +103,12 @@ fn command(
|
||||
|
||||
df.as_ref()
|
||||
.unique(subset_slice, keep_strategy, None)
|
||||
.map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error dropping duplicates".into(),
|
||||
e.to_string(),
|
||||
Some(col_span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error dropping duplicates".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(col_span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})
|
||||
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
|
||||
}
|
||||
|
@ -43,20 +43,23 @@ impl Command for DropNulls {
|
||||
let a = ($df | dfr with-column $res --name res);
|
||||
$a | dfr drop-nulls"#,
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new(
|
||||
"a".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(1)],
|
||||
),
|
||||
Column::new(
|
||||
"b".to_string(),
|
||||
vec![Value::test_int(2), Value::test_int(2)],
|
||||
),
|
||||
Column::new(
|
||||
"res".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(1)],
|
||||
),
|
||||
])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![
|
||||
Column::new(
|
||||
"a".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(1)],
|
||||
),
|
||||
Column::new(
|
||||
"b".to_string(),
|
||||
vec![Value::test_int(2), Value::test_int(2)],
|
||||
),
|
||||
Column::new(
|
||||
"res".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(1)],
|
||||
),
|
||||
],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -66,15 +69,18 @@ impl Command for DropNulls {
|
||||
example: r#"let s = ([1 2 0 0 3 4] | dfr into-df);
|
||||
($s / $s) | dfr drop-nulls"#,
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![Column::new(
|
||||
"div_0_0".to_string(),
|
||||
vec![
|
||||
Value::test_int(1),
|
||||
Value::test_int(1),
|
||||
Value::test_int(1),
|
||||
Value::test_int(1),
|
||||
],
|
||||
)])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![Column::new(
|
||||
"div_0_0".to_string(),
|
||||
vec![
|
||||
Value::test_int(1),
|
||||
Value::test_int(1),
|
||||
Value::test_int(1),
|
||||
Value::test_int(1),
|
||||
],
|
||||
)],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -115,14 +121,12 @@ fn command(
|
||||
|
||||
df.as_ref()
|
||||
.drop_nulls(subset_slice)
|
||||
.map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error dropping nulls".into(),
|
||||
e.to_string(),
|
||||
Some(col_span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error dropping nulls".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(col_span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})
|
||||
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
|
||||
}
|
||||
|
@@ -31,16 +31,19 @@ impl Command for DataTypes
            description: "Dataframe dtypes",
            example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr dtypes",
            result: Some(
                NuDataFrame::try_from_columns(vec![
                    Column::new(
                        "column".to_string(),
                        vec![Value::test_string("a"), Value::test_string("b")],
                    ),
                    Column::new(
                        "dtype".to_string(),
                        vec![Value::test_string("i64"), Value::test_string("i64")],
                    ),
                ])
                NuDataFrame::try_from_columns(
                    vec![
                        Column::new(
                            "column".to_string(),
                            vec![Value::test_string("a"), Value::test_string("b")],
                        ),
                        Column::new(
                            "dtype".to_string(),
                            vec![Value::test_string("i64"), Value::test_string("i64")],
                        ),
                    ],
                    None,
                )
                .expect("simple df for test should not fail")
                .into_value(Span::test_data()),
            ),
@@ -79,10 +82,8 @@ fn command(
                .dtype();

            let dtype_str = dtype.to_string();
            dtypes.push(Value::String {
                val: dtype_str,
                span: call.head,
            });

            dtypes.push(Value::string(dtype_str, call.head));

            Value::string(*v, call.head)
        })
@@ -91,7 +92,7 @@ fn command(
    let names_col = Column::new("column".to_string(), names);
    let dtypes_col = Column::new("dtype".to_string(), dtypes);

    NuDataFrame::try_from_columns(vec![names_col, dtypes_col])
    NuDataFrame::try_from_columns(vec![names_col, dtypes_col], None)
        .map(|df| PipelineData::Value(df.into_value(call.head), None))
}
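Note: the dtypes hunk swaps the Value::String struct literal for the Value::string constructor helper. A small sketch of that constructor style, assuming nu-protocol as a dependency; the function and its inputs are made up for illustration:

use nu_protocol::{Span, Value};

// Sketch: build a single string cell the way the updated command does.
fn dtype_cell(dtype: &str) -> Value {
    Value::string(dtype.to_string(), Span::test_data())
}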
|
||||
|
@ -1,10 +1,11 @@
|
||||
use super::super::values::{Column, NuDataFrame};
|
||||
use super::super::values::NuDataFrame;
|
||||
use nu_engine::CallExt;
|
||||
use nu_protocol::{
|
||||
ast::Call,
|
||||
engine::{Command, EngineState, Stack},
|
||||
Category, Example, PipelineData, ShellError, Signature, Span, Type, Value,
|
||||
Category, Example, PipelineData, ShellError, Signature, Span, Type,
|
||||
};
|
||||
use polars::prelude::DataFrameOps;
|
||||
use polars::{prelude::*, series::Series};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Dummies;
|
||||
@ -20,6 +21,7 @@ impl Command for Dummies {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(self.name())
|
||||
.switch("drop-first", "Drop first row", Some('d'))
|
||||
.input_output_type(
|
||||
Type::Custom("dataframe".into()),
|
||||
Type::Custom("dataframe".into()),
|
||||
@ -33,24 +35,15 @@ impl Command for Dummies {
|
||||
description: "Create new dataframe with dummy variables from a dataframe",
|
||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr dummies",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new(
|
||||
"a_1".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(0)],
|
||||
),
|
||||
Column::new(
|
||||
"a_3".to_string(),
|
||||
vec![Value::test_int(0), Value::test_int(1)],
|
||||
),
|
||||
Column::new(
|
||||
"b_2".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(0)],
|
||||
),
|
||||
Column::new(
|
||||
"b_4".to_string(),
|
||||
vec![Value::test_int(0), Value::test_int(1)],
|
||||
),
|
||||
])
|
||||
NuDataFrame::try_from_series(
|
||||
vec![
|
||||
Series::new("a_1", &[1_u8, 0]),
|
||||
Series::new("a_3", &[0_u8, 1]),
|
||||
Series::new("b_2", &[1_u8, 0]),
|
||||
Series::new("b_4", &[0_u8, 1]),
|
||||
],
|
||||
Span::test_data(),
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -59,38 +52,14 @@ impl Command for Dummies {
|
||||
description: "Create new dataframe with dummy variables from a series",
|
||||
example: "[1 2 2 3 3] | dfr into-df | dfr dummies",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new(
|
||||
"0_1".to_string(),
|
||||
vec![
|
||||
Value::test_int(1),
|
||||
Value::test_int(0),
|
||||
Value::test_int(0),
|
||||
Value::test_int(0),
|
||||
Value::test_int(0),
|
||||
],
|
||||
),
|
||||
Column::new(
|
||||
"0_2".to_string(),
|
||||
vec![
|
||||
Value::test_int(0),
|
||||
Value::test_int(1),
|
||||
Value::test_int(1),
|
||||
Value::test_int(0),
|
||||
Value::test_int(0),
|
||||
],
|
||||
),
|
||||
Column::new(
|
||||
"0_3".to_string(),
|
||||
vec![
|
||||
Value::test_int(0),
|
||||
Value::test_int(0),
|
||||
Value::test_int(0),
|
||||
Value::test_int(1),
|
||||
Value::test_int(1),
|
||||
],
|
||||
),
|
||||
])
|
||||
NuDataFrame::try_from_series(
|
||||
vec![
|
||||
Series::new("0_1", &[1_u8, 0, 0, 0, 0]),
|
||||
Series::new("0_2", &[0_u8, 1, 1, 0, 0]),
|
||||
Series::new("0_3", &[0_u8, 0, 0, 1, 1]),
|
||||
],
|
||||
Span::test_data(),
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -110,23 +79,22 @@ impl Command for Dummies {
|
||||
}
|
||||
|
||||
fn command(
|
||||
_engine_state: &EngineState,
|
||||
_stack: &mut Stack,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let drop_first: bool = call.has_flag(engine_state, stack, "drop-first")?;
|
||||
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
||||
|
||||
df.as_ref()
|
||||
.to_dummies(None)
|
||||
.map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error calculating dummies".into(),
|
||||
e.to_string(),
|
||||
Some(call.head),
|
||||
Some("The only allowed column types for dummies are String or Int".into()),
|
||||
Vec::new(),
|
||||
)
|
||||
.to_dummies(None, drop_first)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error calculating dummies".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(call.head),
|
||||
help: Some("The only allowed column types for dummies are String or Int".into()),
|
||||
inner: vec![],
|
||||
})
|
||||
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
|
||||
}
|
||||
|
@ -43,10 +43,13 @@ impl Command for FilterWith {
|
||||
example: r#"let mask = ([true false] | dfr into-df);
|
||||
[[a b]; [1 2] [3 4]] | dfr into-df | dfr filter-with $mask"#,
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new("a".to_string(), vec![Value::test_int(1)]),
|
||||
Column::new("b".to_string(), vec![Value::test_int(2)]),
|
||||
])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![
|
||||
Column::new("a".to_string(), vec![Value::test_int(1)]),
|
||||
Column::new("b".to_string(), vec![Value::test_int(2)]),
|
||||
],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -55,10 +58,13 @@ impl Command for FilterWith {
|
||||
description: "Filter dataframe using an expression",
|
||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr filter-with ((dfr col a) > 1)",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new("a".to_string(), vec![Value::test_int(3)]),
|
||||
Column::new("b".to_string(), vec![Value::test_int(4)]),
|
||||
])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![
|
||||
Column::new("a".to_string(), vec![Value::test_int(3)]),
|
||||
Column::new("b".to_string(), vec![Value::test_int(4)]),
|
||||
],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -92,7 +98,7 @@ fn command_eager(
|
||||
df: NuDataFrame,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let mask_value: Value = call.req(engine_state, stack, 0)?;
|
||||
let mask_span = mask_value.span()?;
|
||||
let mask_span = mask_value.span();
|
||||
|
||||
if NuExpression::can_downcast(&mask_value) {
|
||||
let expression = NuExpression::try_from_value(mask_value)?;
|
||||
@ -105,26 +111,22 @@ fn command_eager(
|
||||
))
|
||||
} else {
|
||||
let mask = NuDataFrame::try_from_value(mask_value)?.as_series(mask_span)?;
|
||||
let mask = mask.bool().map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error casting to bool".into(),
|
||||
e.to_string(),
|
||||
Some(mask_span),
|
||||
Some("Perhaps you want to use a series with booleans as mask".into()),
|
||||
Vec::new(),
|
||||
)
|
||||
let mask = mask.bool().map_err(|e| ShellError::GenericError {
|
||||
error: "Error casting to bool".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(mask_span),
|
||||
help: Some("Perhaps you want to use a series with booleans as mask".into()),
|
||||
inner: vec![],
|
||||
})?;
|
||||
|
||||
df.as_ref()
|
||||
.filter(mask)
|
||||
.map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error filtering dataframe".into(),
|
||||
e.to_string(),
|
||||
Some(call.head),
|
||||
Some("The only allowed column types for dummies are String or Int".into()),
|
||||
Vec::new(),
|
||||
)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error filtering dataframe".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(call.head),
|
||||
help: Some("The only allowed column types for dummies are String or Int".into()),
|
||||
inner: vec![],
|
||||
})
|
||||
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
use super::super::values::{Column, NuDataFrame};
|
||||
use super::super::values::{Column, NuDataFrame, NuExpression};
|
||||
use nu_engine::CallExt;
|
||||
use nu_protocol::{
|
||||
ast::Call,
|
||||
@ -15,7 +15,7 @@ impl Command for FirstDF {
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Show only the first number of rows."
|
||||
"Show only the first number of rows or create a first expression"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
@ -25,10 +25,16 @@ impl Command for FirstDF {
|
||||
SyntaxShape::Int,
|
||||
"starting from the front, the number of rows to return",
|
||||
)
|
||||
.input_output_type(
|
||||
Type::Custom("dataframe".into()),
|
||||
Type::Custom("dataframe".into()),
|
||||
)
|
||||
.input_output_types(vec![
|
||||
(
|
||||
Type::Custom("expression".into()),
|
||||
Type::Custom("expression".into()),
|
||||
),
|
||||
(
|
||||
Type::Custom("dataframe".into()),
|
||||
Type::Custom("dataframe".into()),
|
||||
),
|
||||
])
|
||||
.category(Category::Custom("dataframe".into()))
|
||||
}
|
||||
|
||||
@ -38,10 +44,13 @@ impl Command for FirstDF {
|
||||
description: "Return the first row of a dataframe",
|
||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr first",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new("a".to_string(), vec![Value::test_int(1)]),
|
||||
Column::new("b".to_string(), vec![Value::test_int(2)]),
|
||||
])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![
|
||||
Column::new("a".to_string(), vec![Value::test_int(1)]),
|
||||
Column::new("b".to_string(), vec![Value::test_int(2)]),
|
||||
],
|
||||
None,
|
||||
)
|
||||
.expect("should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -50,20 +59,28 @@ impl Command for FirstDF {
|
||||
description: "Return the first two rows of a dataframe",
|
||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr first 2",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new(
|
||||
"a".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(3)],
|
||||
),
|
||||
Column::new(
|
||||
"b".to_string(),
|
||||
vec![Value::test_int(2), Value::test_int(4)],
|
||||
),
|
||||
])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![
|
||||
Column::new(
|
||||
"a".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(3)],
|
||||
),
|
||||
Column::new(
|
||||
"b".to_string(),
|
||||
vec![Value::test_int(2), Value::test_int(4)],
|
||||
),
|
||||
],
|
||||
None,
|
||||
)
|
||||
.expect("should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
},
|
||||
Example {
|
||||
description: "Creates a first expression from a column",
|
||||
example: "dfr col a | dfr first",
|
||||
result: None,
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
@ -74,8 +91,19 @@ impl Command for FirstDF {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
||||
command(engine_state, stack, call, df)
|
||||
let value = input.into_value(call.head);
|
||||
if NuDataFrame::can_downcast(&value) {
|
||||
let df = NuDataFrame::try_from_value(value)?;
|
||||
command(engine_state, stack, call, df)
|
||||
} else {
|
||||
let expr = NuExpression::try_from_value(value)?;
|
||||
let expr: NuExpression = expr.into_polars().first().into();
|
||||
|
||||
Ok(PipelineData::Value(
|
||||
NuExpression::into_value(expr, call.head),
|
||||
None,
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -97,11 +125,25 @@ fn command(
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::super::super::test_dataframe::test_dataframe;
|
||||
use super::super::super::test_dataframe::{build_test_engine_state, test_dataframe_example};
|
||||
use super::*;
|
||||
use crate::dataframe::lazy::aggregate::LazyAggregate;
|
||||
use crate::dataframe::lazy::groupby::ToLazyGroupBy;
|
||||
|
||||
#[test]
|
||||
fn test_examples() {
|
||||
test_dataframe(vec![Box::new(FirstDF {})])
|
||||
fn test_examples_dataframe() {
|
||||
let mut engine_state = build_test_engine_state(vec![Box::new(FirstDF {})]);
|
||||
test_dataframe_example(&mut engine_state, &FirstDF.examples()[0]);
|
||||
test_dataframe_example(&mut engine_state, &FirstDF.examples()[1]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_examples_expression() {
|
||||
let mut engine_state = build_test_engine_state(vec![
|
||||
Box::new(FirstDF {}),
|
||||
Box::new(LazyAggregate {}),
|
||||
Box::new(ToLazyGroupBy {}),
|
||||
]);
|
||||
test_dataframe_example(&mut engine_state, &FirstDF.examples()[2]);
|
||||
}
|
||||
}
|
||||
|
@@ -36,10 +36,13 @@ impl Command for GetDF
            description: "Returns the selected column",
            example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr get a",
            result: Some(
                NuDataFrame::try_from_columns(vec![Column::new(
                    "a".to_string(),
                    vec![Value::test_int(1), Value::test_int(3)],
                )])
                NuDataFrame::try_from_columns(
                    vec![Column::new(
                        "a".to_string(),
                        vec![Value::test_int(1), Value::test_int(3)],
                    )],
                    None,
                )
                .expect("simple df for test should not fail")
                .into_value(Span::test_data()),
            ),
@@ -70,14 +73,12 @@ fn command(

    df.as_ref()
        .select(col_string)
        .map_err(|e| {
            ShellError::GenericError(
                "Error selecting columns".into(),
                e.to_string(),
                Some(col_span),
                None,
                Vec::new(),
            )
        .map_err(|e| ShellError::GenericError {
            error: "Error selecting columns".into(),
            msg: e.to_string(),
            span: Some(col_span),
            help: None,
            inner: vec![],
        })
        .map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
}
|
@ -1,4 +1,4 @@
|
||||
use super::super::values::{utils::DEFAULT_ROWS, Column, NuDataFrame};
|
||||
use super::super::values::{utils::DEFAULT_ROWS, Column, NuDataFrame, NuExpression};
|
||||
use nu_engine::CallExt;
|
||||
use nu_protocol::{
|
||||
ast::Call,
|
||||
@ -21,26 +21,42 @@ impl Command for LastDF {
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(self.name())
|
||||
.optional("rows", SyntaxShape::Int, "Number of rows for tail")
|
||||
.input_output_type(
|
||||
Type::Custom("dataframe".into()),
|
||||
Type::Custom("dataframe".into()),
|
||||
)
|
||||
.input_output_types(vec![
|
||||
(
|
||||
Type::Custom("expression".into()),
|
||||
Type::Custom("expression".into()),
|
||||
),
|
||||
(
|
||||
Type::Custom("dataframe".into()),
|
||||
Type::Custom("dataframe".into()),
|
||||
),
|
||||
])
|
||||
.category(Category::Custom("dataframe".into()))
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![Example {
|
||||
description: "Create new dataframe with last rows",
|
||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr last 1",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new("a".to_string(), vec![Value::test_int(3)]),
|
||||
Column::new("b".to_string(), vec![Value::test_int(4)]),
|
||||
])
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
}]
|
||||
vec![
|
||||
Example {
|
||||
description: "Create new dataframe with last rows",
|
||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr last 1",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![
|
||||
Column::new("a".to_string(), vec![Value::test_int(3)]),
|
||||
Column::new("b".to_string(), vec![Value::test_int(4)]),
|
||||
],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
},
|
||||
Example {
|
||||
description: "Creates a last expression from a column",
|
||||
example: "dfr col a | dfr last",
|
||||
result: None,
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
fn run(
|
||||
@ -50,8 +66,19 @@ impl Command for LastDF {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
||||
command(engine_state, stack, call, df)
|
||||
let value = input.into_value(call.head);
|
||||
if NuDataFrame::can_downcast(&value) {
|
||||
let df = NuDataFrame::try_from_value(value)?;
|
||||
command(engine_state, stack, call, df)
|
||||
} else {
|
||||
let expr = NuExpression::try_from_value(value)?;
|
||||
let expr: NuExpression = expr.into_polars().last().into();
|
||||
|
||||
Ok(PipelineData::Value(
|
||||
NuExpression::into_value(expr, call.head),
|
||||
None,
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -73,11 +100,24 @@ fn command(
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::super::super::test_dataframe::test_dataframe;
|
||||
use super::super::super::test_dataframe::{build_test_engine_state, test_dataframe_example};
|
||||
use super::*;
|
||||
use crate::dataframe::lazy::aggregate::LazyAggregate;
|
||||
use crate::dataframe::lazy::groupby::ToLazyGroupBy;
|
||||
|
||||
#[test]
|
||||
fn test_examples() {
|
||||
test_dataframe(vec![Box::new(LastDF {})])
|
||||
fn test_examples_dataframe() {
|
||||
let mut engine_state = build_test_engine_state(vec![Box::new(LastDF {})]);
|
||||
test_dataframe_example(&mut engine_state, &LastDF.examples()[0]);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_examples_expression() {
|
||||
let mut engine_state = build_test_engine_state(vec![
|
||||
Box::new(LastDF {}),
|
||||
Box::new(LazyAggregate {}),
|
||||
Box::new(ToLazyGroupBy {}),
|
||||
]);
|
||||
test_dataframe_example(&mut engine_state, &LastDF.examples()[1]);
|
||||
}
|
||||
}
|
||||
|
@@ -1,7 +1,7 @@
use nu_protocol::{
    ast::Call,
    engine::{Command, EngineState, Stack},
    Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Value,
    record, Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Value,
};

use crate::dataframe::values::NuDataFrame;
@@ -55,34 +55,18 @@ impl Command for ListDF
                NuDataFrame::try_from_value(value).ok().map(|df| (name, df))
            })
            .map(|(name, df)| {
                let name = Value::String {
                    val: name,
                    span: call.head,
                };

                let columns = Value::int(df.as_ref().width() as i64, call.head);

                let rows = Value::int(df.as_ref().height() as i64, call.head);

                let cols = vec![
                    "name".to_string(),
                    "columns".to_string(),
                    "rows".to_string(),
                ];
                let vals = vec![name, columns, rows];

                Value::Record {
                    cols,
                    vals,
                    span: call.head,
                }
                Value::record(
                    record! {
                        "name" => Value::string(name, call.head),
                        "columns" => Value::int(df.as_ref().width() as i64, call.head),
                        "rows" => Value::int(df.as_ref().height() as i64, call.head),
                    },
                    call.head,
                )
            })
            .collect::<Vec<Value>>();

        let list = Value::List {
            vals,
            span: call.head,
        };
        let list = Value::list(vals, call.head);

        Ok(list.into_pipeline_data())
    }
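Note: the ListDF rewrite replaces hand-assembled Value::Record and Value::List literals with the record! macro plus the Value::record and Value::list constructors. A sketch of that shape, with made-up field values, assuming the same nu-protocol API the diff uses:

use nu_protocol::{record, Span, Value};

// One record per registered dataframe, mirroring the fields built in the diff.
fn df_summary_row(name: &str, columns: i64, rows: i64, span: Span) -> Value {
    Value::record(
        record! {
            "name" => Value::string(name, span),
            "columns" => Value::int(columns, span),
            "rows" => Value::int(rows, span),
        },
        span,
    )
}

// Collect the rows into a list value, as the command does before returning.
fn df_summary_list(rows: Vec<Value>, span: Span) -> Value {
    Value::list(rows, span)
}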
|
@ -106,7 +106,7 @@ impl Command for MeltDF {
|
||||
Value::test_string("c"),
|
||||
],
|
||||
),
|
||||
])
|
||||
], None)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -152,38 +152,34 @@ fn command(
|
||||
let mut res = df
|
||||
.as_ref()
|
||||
.melt(&id_col_string, &val_col_string)
|
||||
.map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error calculating melt".into(),
|
||||
e.to_string(),
|
||||
Some(call.head),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error calculating melt".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
|
||||
if let Some(name) = &variable_name {
|
||||
res.rename("variable", &name.item).map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error renaming column".into(),
|
||||
e.to_string(),
|
||||
Some(name.span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
})?;
|
||||
res.rename("variable", &name.item)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error renaming column".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(name.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
}
|
||||
|
||||
if let Some(name) = &value_name {
|
||||
res.rename("value", &name.item).map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error renaming column".into(),
|
||||
e.to_string(),
|
||||
Some(name.span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
})?;
|
||||
res.rename("value", &name.item)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error renaming column".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(name.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
}
|
||||
|
||||
Ok(PipelineData::Value(
|
||||
@ -198,50 +194,50 @@ fn check_column_datatypes<T: AsRef<str>>(
|
||||
col_span: Span,
|
||||
) -> Result<(), ShellError> {
|
||||
if cols.is_empty() {
|
||||
return Err(ShellError::GenericError(
|
||||
"Merge error".into(),
|
||||
"empty column list".into(),
|
||||
Some(col_span),
|
||||
None,
|
||||
Vec::new(),
|
||||
));
|
||||
return Err(ShellError::GenericError {
|
||||
error: "Merge error".into(),
|
||||
msg: "empty column list".into(),
|
||||
span: Some(col_span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
});
|
||||
}
|
||||
|
||||
// Checking if they are same type
|
||||
if cols.len() > 1 {
|
||||
for w in cols.windows(2) {
|
||||
let l_series = df.column(w[0].as_ref()).map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error selecting columns".into(),
|
||||
e.to_string(),
|
||||
Some(col_span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
})?;
|
||||
let l_series = df
|
||||
.column(w[0].as_ref())
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error selecting columns".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(col_span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
|
||||
let r_series = df.column(w[1].as_ref()).map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error selecting columns".into(),
|
||||
e.to_string(),
|
||||
Some(col_span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
})?;
|
||||
let r_series = df
|
||||
.column(w[1].as_ref())
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error selecting columns".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(col_span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
|
||||
if l_series.dtype() != r_series.dtype() {
|
||||
return Err(ShellError::GenericError(
|
||||
"Merge error".into(),
|
||||
"found different column types in list".into(),
|
||||
Some(col_span),
|
||||
Some(format!(
|
||||
return Err(ShellError::GenericError {
|
||||
error: "Merge error".into(),
|
||||
msg: "found different column types in list".into(),
|
||||
span: Some(col_span),
|
||||
help: Some(format!(
|
||||
"datatypes {} and {} are incompatible",
|
||||
l_series.dtype(),
|
||||
r_series.dtype()
|
||||
)),
|
||||
Vec::new(),
|
||||
));
|
||||
inner: vec![],
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,4 +1,5 @@
|
||||
mod append;
|
||||
mod cast;
|
||||
mod columns;
|
||||
mod drop;
|
||||
mod drop_duplicates;
|
||||
@ -15,6 +16,7 @@ mod open;
|
||||
mod query_df;
|
||||
mod rename;
|
||||
mod sample;
|
||||
mod schema;
|
||||
mod shape;
|
||||
mod slice;
|
||||
mod sql_context;
|
||||
@ -22,6 +24,7 @@ mod sql_expr;
|
||||
mod summary;
|
||||
mod take;
|
||||
mod to_arrow;
|
||||
mod to_avro;
|
||||
mod to_csv;
|
||||
mod to_df;
|
||||
mod to_json_lines;
|
||||
@ -33,6 +36,7 @@ use nu_protocol::engine::StateWorkingSet;
|
||||
|
||||
pub use self::open::OpenDataFrame;
|
||||
pub use append::AppendDF;
|
||||
pub use cast::CastDF;
|
||||
pub use columns::ColumnsDF;
|
||||
pub use drop::DropDF;
|
||||
pub use drop_duplicates::DropDuplicates;
|
||||
@ -48,13 +52,14 @@ pub use melt::MeltDF;
|
||||
pub use query_df::QueryDf;
|
||||
pub use rename::RenameDF;
|
||||
pub use sample::SampleDF;
|
||||
pub use schema::SchemaDF;
|
||||
pub use shape::ShapeDF;
|
||||
pub use slice::SliceDF;
|
||||
pub use sql_context::SQLContext;
|
||||
pub use sql_expr::parse_sql_expr;
|
||||
pub use summary::Summary;
|
||||
pub use take::TakeDF;
|
||||
pub use to_arrow::ToArrow;
|
||||
pub use to_avro::ToAvro;
|
||||
pub use to_csv::ToCSV;
|
||||
pub use to_df::ToDataFrame;
|
||||
pub use to_json_lines::ToJsonLines;
|
||||
@ -75,6 +80,7 @@ pub fn add_eager_decls(working_set: &mut StateWorkingSet) {
|
||||
// Dataframe commands
|
||||
bind_command!(
|
||||
AppendDF,
|
||||
CastDF,
|
||||
ColumnsDF,
|
||||
DataTypes,
|
||||
Summary,
|
||||
@ -92,10 +98,12 @@ pub fn add_eager_decls(working_set: &mut StateWorkingSet) {
|
||||
QueryDf,
|
||||
RenameDF,
|
||||
SampleDF,
|
||||
SchemaDF,
|
||||
ShapeDF,
|
||||
SliceDF,
|
||||
TakeDF,
|
||||
ToArrow,
|
||||
ToAvro,
|
||||
ToCSV,
|
||||
ToDataFrame,
|
||||
ToNu,
|
||||
|
@ -1,3 +1,5 @@
|
||||
use crate::dataframe::values::NuSchema;
|
||||
|
||||
use super::super::values::{NuDataFrame, NuLazyFrame};
|
||||
use nu_engine::CallExt;
|
||||
use nu_protocol::{
|
||||
@ -13,6 +15,8 @@ use polars::prelude::{
|
||||
LazyFrame, ParallelStrategy, ParquetReader, ScanArgsIpc, ScanArgsParquet, SerReader,
|
||||
};
|
||||
|
||||
use polars_io::avro::AvroReader;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct OpenDataFrame;
|
||||
|
||||
@ -22,7 +26,7 @@ impl Command for OpenDataFrame {
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Opens CSV, JSON, JSON lines, arrow, or parquet file to create dataframe."
|
||||
"Opens CSV, JSON, JSON lines, arrow, avro, or parquet file to create dataframe."
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
@ -36,7 +40,7 @@ impl Command for OpenDataFrame {
|
||||
.named(
|
||||
"type",
|
||||
SyntaxShape::String,
|
||||
"File type: csv, tsv, json, parquet, arrow. If omitted, derive from file extension",
|
||||
"File type: csv, tsv, json, parquet, arrow, avro. If omitted, derive from file extension",
|
||||
Some('t'),
|
||||
)
|
||||
.named(
|
||||
@ -68,6 +72,12 @@ impl Command for OpenDataFrame {
|
||||
"Columns to be selected from csv file. CSV and Parquet file",
|
||||
None,
|
||||
)
|
||||
.named(
|
||||
"schema",
|
||||
SyntaxShape::Record(vec![]),
|
||||
r#"Polars Schema in format [{name: str}]. CSV, JSON, and JSONL files"#,
|
||||
Some('s')
|
||||
)
|
||||
.input_output_type(Type::Any, Type::Custom("dataframe".into()))
|
||||
.category(Category::Custom("dataframe".into()))
|
||||
}
|
||||
@ -114,19 +124,20 @@ fn command(
|
||||
match type_id {
|
||||
Some((e, msg, blamed)) => match e.as_str() {
|
||||
"csv" | "tsv" => from_csv(engine_state, stack, call),
|
||||
"parquet" => from_parquet(engine_state, stack, call),
|
||||
"parquet" | "parq" => from_parquet(engine_state, stack, call),
|
||||
"ipc" | "arrow" => from_ipc(engine_state, stack, call),
|
||||
"json" => from_json(engine_state, stack, call),
|
||||
"jsonl" => from_jsonl(engine_state, stack, call),
|
||||
_ => Err(ShellError::FileNotFoundCustom(
|
||||
format!("{msg}. Supported values: csv, tsv, parquet, ipc, arrow, json"),
|
||||
blamed,
|
||||
)),
|
||||
"avro" => from_avro(engine_state, stack, call),
|
||||
_ => Err(ShellError::FileNotFoundCustom {
|
||||
msg: format!("{msg}. Supported values: csv, tsv, parquet, ipc, arrow, json"),
|
||||
span: blamed,
|
||||
}),
|
||||
},
|
||||
None => Err(ShellError::FileNotFoundCustom(
|
||||
"File without extension".into(),
|
||||
file.span,
|
||||
)),
|
||||
None => Err(ShellError::FileNotFoundCustom {
|
||||
msg: "File without extension".into(),
|
||||
span: file.span,
|
||||
}),
|
||||
}
|
||||
.map(|value| PipelineData::Value(value, None))
|
||||
}
|
||||
@ -136,28 +147,27 @@ fn from_parquet(
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
) -> Result<Value, ShellError> {
|
||||
if call.has_flag("lazy") {
|
||||
if call.has_flag(engine_state, stack, "lazy")? {
|
||||
let file: String = call.req(engine_state, stack, 0)?;
|
||||
let args = ScanArgsParquet {
|
||||
n_rows: None,
|
||||
cache: true,
|
||||
parallel: ParallelStrategy::Auto,
|
||||
rechunk: false,
|
||||
row_count: None,
|
||||
row_index: None,
|
||||
low_memory: false,
|
||||
cloud_options: None,
|
||||
use_statistics: false,
|
||||
hive_partitioning: false,
|
||||
};
|
||||
|
||||
let df: NuLazyFrame = LazyFrame::scan_parquet(file, args)
|
||||
.map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Parquet reader error".into(),
|
||||
format!("{e:?}"),
|
||||
Some(call.head),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Parquet reader error".into(),
|
||||
msg: format!("{e:?}"),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?
|
||||
.into();
|
||||
|
||||
@ -166,14 +176,12 @@ fn from_parquet(
|
||||
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
|
||||
let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?;
|
||||
|
||||
let r = File::open(&file.item).map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error opening file".into(),
|
||||
e.to_string(),
|
||||
Some(file.span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
let r = File::open(&file.item).map_err(|e| ShellError::GenericError {
|
||||
error: "Error opening file".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(file.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
let reader = ParquetReader::new(r);
|
||||
|
||||
@ -184,14 +192,12 @@ fn from_parquet(
|
||||
|
||||
let df: NuDataFrame = reader
|
||||
.finish()
|
||||
.map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Parquet reader error".into(),
|
||||
format!("{e:?}"),
|
||||
Some(call.head),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Parquet reader error".into(),
|
||||
msg: format!("{e:?}"),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?
|
||||
.into();
|
||||
|
||||
@ -199,30 +205,64 @@ fn from_parquet(
|
||||
}
|
||||
}
|
||||
|
||||
fn from_avro(
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
) -> Result<Value, ShellError> {
|
||||
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
|
||||
let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?;
|
||||
|
||||
let r = File::open(&file.item).map_err(|e| ShellError::GenericError {
|
||||
error: "Error opening file".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(file.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
let reader = AvroReader::new(r);
|
||||
|
||||
let reader = match columns {
|
||||
None => reader,
|
||||
Some(columns) => reader.with_columns(Some(columns)),
|
||||
};
|
||||
|
||||
let df: NuDataFrame = reader
|
||||
.finish()
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Avro reader error".into(),
|
||||
msg: format!("{e:?}"),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?
|
||||
.into();
|
||||
|
||||
Ok(df.into_value(call.head))
|
||||
}
|
||||
|
||||
fn from_ipc(
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
) -> Result<Value, ShellError> {
|
||||
if call.has_flag("lazy") {
|
||||
if call.has_flag(engine_state, stack, "lazy")? {
|
||||
let file: String = call.req(engine_state, stack, 0)?;
|
||||
let args = ScanArgsIpc {
|
||||
n_rows: None,
|
||||
cache: true,
|
||||
rechunk: false,
|
||||
row_count: None,
|
||||
row_index: None,
|
||||
memmap: true,
|
||||
};
|
||||
|
||||
let df: NuLazyFrame = LazyFrame::scan_ipc(file, args)
|
||||
.map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"IPC reader error".into(),
|
||||
format!("{e:?}"),
|
||||
Some(call.head),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "IPC reader error".into(),
|
||||
msg: format!("{e:?}"),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?
|
||||
.into();
|
||||
|
||||
@ -231,14 +271,12 @@ fn from_ipc(
|
||||
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
|
||||
let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?;
|
||||
|
||||
let r = File::open(&file.item).map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error opening file".into(),
|
||||
e.to_string(),
|
||||
Some(file.span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
let r = File::open(&file.item).map_err(|e| ShellError::GenericError {
|
||||
error: "Error opening file".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(file.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
let reader = IpcReader::new(r);
|
||||
|
||||
@ -249,14 +287,12 @@ fn from_ipc(
|
||||
|
||||
let df: NuDataFrame = reader
|
||||
.finish()
|
||||
.map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"IPC reader error".into(),
|
||||
format!("{e:?}"),
|
||||
Some(call.head),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "IPC reader error".into(),
|
||||
msg: format!("{e:?}"),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?
|
||||
.into();
|
||||
|
||||
@ -270,29 +306,34 @@ fn from_json(
|
||||
call: &Call,
|
||||
) -> Result<Value, ShellError> {
|
||||
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
|
||||
let file = File::open(&file.item).map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error opening file".into(),
|
||||
e.to_string(),
|
||||
Some(file.span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
let file = File::open(&file.item).map_err(|e| ShellError::GenericError {
|
||||
error: "Error opening file".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(file.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
let maybe_schema = call
|
||||
.get_flag(engine_state, stack, "schema")?
|
||||
.map(|schema| NuSchema::try_from(&schema))
|
||||
.transpose()?;
|
||||
|
||||
let buf_reader = BufReader::new(file);
|
||||
let reader = JsonReader::new(buf_reader);
|
||||
|
||||
let reader = match maybe_schema {
|
||||
Some(schema) => reader.with_schema(schema.into()),
|
||||
None => reader,
|
||||
};
|
||||
|
||||
let df: NuDataFrame = reader
|
||||
.finish()
|
||||
.map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Json reader error".into(),
|
||||
format!("{e:?}"),
|
||||
Some(call.head),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Json reader error".into(),
|
||||
msg: format!("{e:?}"),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?
|
||||
.into();
|
||||
|
||||
@ -305,15 +346,17 @@ fn from_jsonl(
|
||||
call: &Call,
|
||||
) -> Result<Value, ShellError> {
|
||||
let infer_schema: Option<usize> = call.get_flag(engine_state, stack, "infer-schema")?;
|
||||
let maybe_schema = call
|
||||
.get_flag(engine_state, stack, "schema")?
|
||||
.map(|schema| NuSchema::try_from(&schema))
|
||||
.transpose()?;
|
||||
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
|
||||
let file = File::open(&file.item).map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error opening file".into(),
|
||||
e.to_string(),
|
||||
Some(file.span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
let file = File::open(&file.item).map_err(|e| ShellError::GenericError {
|
||||
error: "Error opening file".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(file.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
|
||||
let buf_reader = BufReader::new(file);
|
||||
@ -321,16 +364,19 @@ fn from_jsonl(
|
||||
.with_json_format(JsonFormat::JsonLines)
|
||||
.infer_schema_len(infer_schema);
|
||||
|
||||
let reader = match maybe_schema {
|
||||
Some(schema) => reader.with_schema(schema.into()),
|
||||
None => reader,
|
||||
};
|
||||
|
||||
let df: NuDataFrame = reader
|
||||
.finish()
|
||||
.map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Json lines reader error".into(),
|
||||
format!("{e:?}"),
|
||||
Some(call.head),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Json lines reader error".into(),
|
||||
msg: format!("{e:?}"),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?
|
||||
.into();
|
||||
|
||||
@ -343,12 +389,17 @@ fn from_csv(
|
||||
call: &Call,
|
||||
) -> Result<Value, ShellError> {
|
||||
let delimiter: Option<Spanned<String>> = call.get_flag(engine_state, stack, "delimiter")?;
|
||||
let no_header: bool = call.has_flag("no-header");
|
||||
let no_header: bool = call.has_flag(engine_state, stack, "no-header")?;
|
||||
let infer_schema: Option<usize> = call.get_flag(engine_state, stack, "infer-schema")?;
|
||||
let skip_rows: Option<usize> = call.get_flag(engine_state, stack, "skip-rows")?;
|
||||
let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?;
|
||||
|
||||
if call.has_flag("lazy") {
|
||||
let maybe_schema = call
|
||||
.get_flag(engine_state, stack, "schema")?
|
||||
.map(|schema| NuSchema::try_from(&schema))
|
||||
.transpose()?;
|
||||
|
||||
if call.has_flag(engine_state, stack, "lazy")? {
|
||||
let file: String = call.req(engine_state, stack, 0)?;
|
||||
let csv_reader = LazyCsvReader::new(file);
|
||||
|
||||
@ -356,25 +407,30 @@ fn from_csv(
|
||||
None => csv_reader,
|
||||
Some(d) => {
|
||||
if d.item.len() != 1 {
|
||||
return Err(ShellError::GenericError(
|
||||
"Incorrect delimiter".into(),
|
||||
"Delimiter has to be one character".into(),
|
||||
Some(d.span),
|
||||
None,
|
||||
Vec::new(),
|
||||
));
|
||||
return Err(ShellError::GenericError {
|
||||
error: "Incorrect delimiter".into(),
|
||||
msg: "Delimiter has to be one character".into(),
|
||||
span: Some(d.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
});
|
||||
} else {
|
||||
let delimiter = match d.item.chars().next() {
|
||||
Some(d) => d as u8,
|
||||
None => unreachable!(),
|
||||
};
|
||||
csv_reader.with_delimiter(delimiter)
|
||||
csv_reader.with_separator(delimiter)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let csv_reader = csv_reader.has_header(!no_header);
|
||||
|
||||
let csv_reader = match maybe_schema {
|
||||
Some(schema) => csv_reader.with_schema(Some(schema.into())),
|
||||
None => csv_reader,
|
||||
};
|
||||
|
||||
let csv_reader = match infer_schema {
|
||||
None => csv_reader,
|
||||
Some(r) => csv_reader.with_infer_schema_length(Some(r)),
|
||||
@ -387,14 +443,12 @@ fn from_csv(
|
||||
|
||||
let df: NuLazyFrame = csv_reader
|
||||
.finish()
|
||||
.map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Parquet reader error".into(),
|
||||
format!("{e:?}"),
|
||||
Some(call.head),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Parquet reader error".into(),
|
||||
msg: format!("{e:?}"),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?
|
||||
.into();
|
||||
|
||||
@ -402,14 +456,12 @@ fn from_csv(
|
||||
} else {
|
||||
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
|
||||
let csv_reader = CsvReader::from_path(&file.item)
|
||||
.map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error creating CSV reader".into(),
|
||||
e.to_string(),
|
||||
Some(file.span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error creating CSV reader".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(file.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?
|
||||
.with_encoding(CsvEncoding::LossyUtf8);
|
||||
|
||||
@ -417,25 +469,30 @@ fn from_csv(
|
||||
None => csv_reader,
|
||||
Some(d) => {
|
||||
if d.item.len() != 1 {
|
||||
return Err(ShellError::GenericError(
|
||||
"Incorrect delimiter".into(),
|
||||
"Delimiter has to be one character".into(),
|
||||
Some(d.span),
|
||||
None,
|
||||
Vec::new(),
|
||||
));
|
||||
return Err(ShellError::GenericError {
|
||||
error: "Incorrect delimiter".into(),
|
||||
msg: "Delimiter has to be one character".into(),
|
||||
span: Some(d.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
});
|
||||
} else {
|
||||
let delimiter = match d.item.chars().next() {
|
||||
Some(d) => d as u8,
|
||||
None => unreachable!(),
|
||||
};
|
||||
csv_reader.with_delimiter(delimiter)
|
||||
csv_reader.with_separator(delimiter)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let csv_reader = csv_reader.has_header(!no_header);
|
||||
|
||||
let csv_reader = match maybe_schema {
|
||||
Some(schema) => csv_reader.with_schema(Some(schema.into())),
|
||||
None => csv_reader,
|
||||
};
|
||||
|
||||
let csv_reader = match infer_schema {
|
||||
None => csv_reader,
|
||||
Some(r) => csv_reader.infer_schema(Some(r)),
|
||||
@ -453,14 +510,12 @@ fn from_csv(
|
||||
|
||||
let df: NuDataFrame = csv_reader
|
||||
.finish()
|
||||
.map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Parquet reader error".into(),
|
||||
format!("{e:?}"),
|
||||
Some(call.head),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Parquet reader error".into(),
|
||||
msg: format!("{e:?}"),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?
|
||||
.into();
|
||||
|
||||
|
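Note: a recurring change in the reader functions above is that Call::has_flag now takes the engine state and stack and returns a Result, so flag lookups are propagated with the ? operator. A hedged sketch of the new call-site shape (the wrapper function and flag name are only examples):

use nu_protocol::{
    ast::Call,
    engine::{EngineState, Stack},
    ShellError,
};

// Sketch: the updated has_flag signature used throughout this diff.
fn wants_lazy(
    engine_state: &EngineState,
    stack: &mut Stack,
    call: &Call,
) -> Result<bool, ShellError> {
    call.has_flag(engine_state, stack, "lazy")
}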
@@ -44,10 +44,13 @@ impl Command for QueryDf
            description: "Query dataframe using SQL",
            example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr query 'select a from df'",
            result: Some(
                NuDataFrame::try_from_columns(vec![Column::new(
                    "a".to_string(),
                    vec![Value::test_int(1), Value::test_int(3)],
                )])
                NuDataFrame::try_from_columns(
                    vec![Column::new(
                        "a".to_string(),
                        vec![Value::test_int(1), Value::test_int(3)],
                    )],
                    None,
                )
                .expect("simple df for test should not fail")
                .into_value(Span::test_data()),
            ),
@@ -76,22 +79,19 @@ fn command(

    let mut ctx = SQLContext::new();
    ctx.register("df", &df.df);
    let df_sql = ctx.execute(&sql_query).map_err(|e| {
        ShellError::GenericError(
            "Dataframe Error".into(),
            e.to_string(),
            Some(call.head),
            None,
            Vec::new(),
        )
    })?;
    let df_sql = ctx
        .execute(&sql_query)
        .map_err(|e| ShellError::GenericError {
            error: "Dataframe Error".into(),
            msg: e.to_string(),
            span: Some(call.head),
            help: None,
            inner: vec![],
        })?;
    let lazy = NuLazyFrame::new(false, df_sql);

    let eager = lazy.collect(call.head)?;
    let value = Value::CustomValue {
        val: Box::new(eager),
        span: call.head,
    };
    let value = Value::custom_value(Box::new(eager), call.head);

    Ok(PipelineData::Value(value, None))
}
@ -46,15 +46,18 @@ impl Command for RenameDF {
|
||||
description: "Renames a series",
|
||||
example: "[5 6 7 8] | dfr into-df | dfr rename '0' new_name",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![Column::new(
|
||||
"new_name".to_string(),
|
||||
vec![
|
||||
Value::test_int(5),
|
||||
Value::test_int(6),
|
||||
Value::test_int(7),
|
||||
Value::test_int(8),
|
||||
],
|
||||
)])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![Column::new(
|
||||
"new_name".to_string(),
|
||||
vec![
|
||||
Value::test_int(5),
|
||||
Value::test_int(6),
|
||||
Value::test_int(7),
|
||||
Value::test_int(8),
|
||||
],
|
||||
)],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -63,16 +66,19 @@ impl Command for RenameDF {
|
||||
description: "Renames a dataframe column",
|
||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr rename a a_new",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new(
|
||||
"a_new".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(3)],
|
||||
),
|
||||
Column::new(
|
||||
"b".to_string(),
|
||||
vec![Value::test_int(2), Value::test_int(4)],
|
||||
),
|
||||
])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![
|
||||
Column::new(
|
||||
"a_new".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(3)],
|
||||
),
|
||||
Column::new(
|
||||
"b".to_string(),
|
||||
vec![Value::test_int(2), Value::test_int(4)],
|
||||
),
|
||||
],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -81,16 +87,19 @@ impl Command for RenameDF {
|
||||
description: "Renames two dataframe columns",
|
||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr rename [a b] [a_new b_new]",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new(
|
||||
"a_new".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(3)],
|
||||
),
|
||||
Column::new(
|
||||
"b_new".to_string(),
|
||||
vec![Value::test_int(2), Value::test_int(4)],
|
||||
),
|
||||
])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![
|
||||
Column::new(
|
||||
"a_new".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(3)],
|
||||
),
|
||||
Column::new(
|
||||
"b_new".to_string(),
|
||||
vec![Value::test_int(2), Value::test_int(4)],
|
||||
),
|
||||
],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -130,15 +139,15 @@ fn command_eager(
|
||||
let new_names = extract_strings(new_names)?;
|
||||
|
||||
for (from, to) in columns.iter().zip(new_names.iter()) {
|
||||
df.as_mut().rename(from, to).map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error renaming".into(),
|
||||
e.to_string(),
|
||||
Some(call.head),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
})?;
|
||||
df.as_mut()
|
||||
.rename(from, to)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error renaming".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
}
|
||||
|
||||
Ok(PipelineData::Value(df.into_value(call.head), None))
|
||||
@ -160,7 +169,7 @@ fn command_lazy(
|
||||
let value: Value = call.req(engine_state, stack, 1)?;
|
||||
return Err(ShellError::IncompatibleParametersSingle {
|
||||
msg: "New name list has different size to column list".into(),
|
||||
span: value.span()?,
|
||||
span: value.span(),
|
||||
});
|
||||
}
|
||||
|
||||
|
@ -4,6 +4,8 @@ use nu_protocol::{
|
||||
engine::{Command, EngineState, Stack},
|
||||
Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Type,
|
||||
};
|
||||
use polars::prelude::NamedFrom;
|
||||
use polars::series::Series;
|
||||
|
||||
use super::super::values::NuDataFrame;
|
||||
|
||||
@ -52,12 +54,13 @@ impl Command for SampleDF {
|
||||
vec![
|
||||
Example {
|
||||
description: "Sample rows from dataframe",
|
||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr sample -n 1",
|
||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr sample --n-rows 1",
|
||||
result: None, // No expected value because sampling is random
|
||||
},
|
||||
Example {
|
||||
description: "Shows sample row using fraction and replace",
|
||||
example: "[[a b]; [1 2] [3 4] [5 6]] | dfr into-df | dfr sample -f 0.5 -e",
|
||||
example:
|
||||
"[[a b]; [1 2] [3 4] [5 6]] | dfr into-df | dfr sample --fraction 0.5 --replace",
|
||||
result: None, // No expected value because sampling is random
|
||||
},
|
||||
]
|
||||
@ -80,55 +83,51 @@ fn command(
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let rows: Option<Spanned<usize>> = call.get_flag(engine_state, stack, "n-rows")?;
|
||||
let rows: Option<Spanned<i64>> = call.get_flag(engine_state, stack, "n-rows")?;
|
||||
let fraction: Option<Spanned<f64>> = call.get_flag(engine_state, stack, "fraction")?;
|
||||
let seed: Option<u64> = call
|
||||
.get_flag::<i64>(engine_state, stack, "seed")?
|
||||
.map(|val| val as u64);
|
||||
let replace: bool = call.has_flag("replace");
|
||||
let shuffle: bool = call.has_flag("shuffle");
|
||||
let replace: bool = call.has_flag(engine_state, stack, "replace")?;
|
||||
let shuffle: bool = call.has_flag(engine_state, stack, "shuffle")?;
|
||||
|
||||
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
||||
|
||||
match (rows, fraction) {
|
||||
(Some(rows), None) => df
|
||||
.as_ref()
|
||||
.sample_n(rows.item, replace, shuffle, seed)
|
||||
.map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error creating sample".into(),
|
||||
e.to_string(),
|
||||
Some(rows.span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
.sample_n(&Series::new("s", &[rows.item]), replace, shuffle, seed)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error creating sample".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(rows.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
}),
|
||||
(None, Some(frac)) => df
|
||||
.as_ref()
|
||||
.sample_frac(frac.item, replace, shuffle, seed)
|
||||
.map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error creating sample".into(),
|
||||
e.to_string(),
|
||||
Some(frac.span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
.sample_frac(&Series::new("frac", &[frac.item]), replace, shuffle, seed)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error creating sample".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(frac.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
}),
|
||||
(Some(_), Some(_)) => Err(ShellError::GenericError(
|
||||
"Incompatible flags".into(),
|
||||
"Only one selection criterion allowed".into(),
|
||||
Some(call.head),
|
||||
None,
|
||||
Vec::new(),
|
||||
)),
|
||||
(None, None) => Err(ShellError::GenericError(
|
||||
"No selection".into(),
|
||||
"No selection criterion was found".into(),
|
||||
Some(call.head),
|
||||
Some("Perhaps you want to use the flag -n or -f".into()),
|
||||
Vec::new(),
|
||||
)),
|
||||
(Some(_), Some(_)) => Err(ShellError::GenericError {
|
||||
error: "Incompatible flags".into(),
|
||||
msg: "Only one selection criterion allowed".into(),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
}),
|
||||
(None, None) => Err(ShellError::GenericError {
|
||||
error: "No selection".into(),
|
||||
msg: "No selection criterion was found".into(),
|
||||
span: Some(call.head),
|
||||
help: Some("Perhaps you want to use the flag -n or -f".into()),
|
||||
inner: vec![],
|
||||
}),
|
||||
}
|
||||
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
|
||||
}
|
||||
|
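Note: the sample hunks above track a polars API update: DataFrame::sample_n and sample_frac now take their count or fraction wrapped in a Series rather than as a plain number. A sketch against polars directly, assuming the same polars version this diff targets:

use polars::prelude::*;

// Sketch: sample `n` rows without replacement or shuffling, matching the call shape in the diff.
fn sample_rows(df: &DataFrame, n: i64) -> PolarsResult<DataFrame> {
    df.sample_n(&Series::new("n", &[n]), false, false, None)
}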
crates/nu-cmd-dataframe/src/dataframe/eager/schema.rs (new file, 117 lines)
@@ -0,0 +1,117 @@
|
||||
use super::super::values::NuDataFrame;
|
||||
use nu_engine::CallExt;
|
||||
use nu_protocol::{
|
||||
ast::Call,
|
||||
engine::{Command, EngineState, Stack},
|
||||
record, Category, Example, PipelineData, ShellError, Signature, Span, Type, Value,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SchemaDF;
|
||||
|
||||
impl Command for SchemaDF {
|
||||
fn name(&self) -> &str {
|
||||
"dfr schema"
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Show schema for a dataframe."
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(self.name())
|
||||
.switch("datatype-list", "creates a lazy dataframe", Some('l'))
|
||||
.input_output_type(
|
||||
Type::Custom("dataframe".into()),
|
||||
Type::Custom("dataframe".into()),
|
||||
)
|
||||
.category(Category::Custom("dataframe".into()))
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![Example {
|
||||
description: "Dataframe schema",
|
||||
example: r#"[[a b]; [1 "foo"] [3 "bar"]] | dfr into-df | dfr schema"#,
|
||||
result: Some(Value::record(
|
||||
record! {
|
||||
"a" => Value::string("i64", Span::test_data()),
|
||||
"b" => Value::string("str", Span::test_data()),
|
||||
},
|
||||
Span::test_data(),
|
||||
)),
|
||||
}]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
if call.has_flag(engine_state, stack, "datatype-list")? {
|
||||
Ok(PipelineData::Value(datatype_list(Span::unknown()), None))
|
||||
} else {
|
||||
command(engine_state, stack, call, input)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn command(
|
||||
_engine_state: &EngineState,
|
||||
_stack: &mut Stack,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
||||
let schema = df.schema();
|
||||
let value: Value = schema.into();
|
||||
Ok(PipelineData::Value(value, None))
|
||||
}
|
||||
|
||||
fn datatype_list(span: Span) -> Value {
|
||||
let types: Vec<Value> = [
|
||||
("null", ""),
|
||||
("bool", ""),
|
||||
("u8", ""),
|
||||
("u16", ""),
|
||||
("u32", ""),
|
||||
("u64", ""),
|
||||
("i8", ""),
|
||||
("i16", ""),
|
||||
("i32", ""),
|
||||
("i64", ""),
|
||||
("f32", ""),
|
||||
("f64", ""),
|
||||
("str", ""),
|
||||
("binary", ""),
|
||||
("date", ""),
|
||||
("datetime<time_unit: (ms, us, ns) timezone (optional)>", "Time Unit can be: milliseconds: ms, microseconds: us, nanoseconds: ns. Timezone wildcard is *. Other Timezone examples: UTC, America/Los_Angeles."),
|
||||
("duration<time_unit: (ms, us, ns)>", "Time Unit can be: milliseconds: ms, microseconds: us, nanoseconds: ns."),
|
||||
("time", ""),
|
||||
("object", ""),
|
||||
("unknown", ""),
|
||||
("list<dtype>", ""),
|
||||
]
|
||||
.iter()
|
||||
.map(|(dtype, note)| {
|
||||
Value::record(record! {
|
||||
"dtype" => Value::string(*dtype, span),
|
||||
"note" => Value::string(*note, span),
|
||||
},
|
||||
span)
|
||||
})
|
||||
.collect();
|
||||
Value::list(types, span)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::super::super::test_dataframe::test_dataframe;
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_examples() {
|
||||
test_dataframe(vec![Box::new(SchemaDF {})])
|
||||
}
|
||||
}
|
@ -34,10 +34,13 @@ impl Command for ShapeDF {
|
||||
description: "Shows row and column shape",
|
||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr shape",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new("rows".to_string(), vec![Value::test_int(2)]),
|
||||
Column::new("columns".to_string(), vec![Value::test_int(2)]),
|
||||
])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![
|
||||
Column::new("rows".to_string(), vec![Value::test_int(2)]),
|
||||
Column::new("columns".to_string(), vec![Value::test_int(2)]),
|
||||
],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -70,7 +73,7 @@ fn command(
|
||||
let rows_col = Column::new("rows".to_string(), vec![rows]);
|
||||
let cols_col = Column::new("columns".to_string(), vec![cols]);
|
||||
|
||||
NuDataFrame::try_from_columns(vec![rows_col, cols_col])
|
||||
NuDataFrame::try_from_columns(vec![rows_col, cols_col], None)
|
||||
.map(|df| PipelineData::Value(df.into_value(call.head), None))
|
||||
}
|
||||
|
||||
|
@ -37,10 +37,13 @@ impl Command for SliceDF {
|
||||
description: "Create new dataframe from a slice of the rows",
|
||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr slice 0 1",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new("a".to_string(), vec![Value::test_int(1)]),
|
||||
Column::new("b".to_string(), vec![Value::test_int(2)]),
|
||||
])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![
|
||||
Column::new("a".to_string(), vec![Value::test_int(1)]),
|
||||
Column::new("b".to_string(), vec![Value::test_int(2)]),
|
||||
],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
|
@ -2,7 +2,8 @@ use crate::dataframe::eager::sql_expr::parse_sql_expr;
|
||||
use polars::error::{ErrString, PolarsError};
|
||||
use polars::prelude::{col, DataFrame, DataType, IntoLazy, LazyFrame};
|
||||
use sqlparser::ast::{
|
||||
Expr as SqlExpr, Select, SelectItem, SetExpr, Statement, TableFactor, Value as SQLValue,
|
||||
Expr as SqlExpr, GroupByExpr, Select, SelectItem, SetExpr, Statement, TableFactor,
|
||||
Value as SQLValue,
|
||||
};
|
||||
use sqlparser::dialect::GenericDialect;
|
||||
use sqlparser::parser::Parser;
|
||||
@ -29,7 +30,7 @@ impl SQLContext {
|
||||
fn execute_select(&self, select_stmt: &Select) -> Result<LazyFrame, PolarsError> {
|
||||
// Determine involved dataframe
|
||||
// Implicit join require some more work in query parsers, Explicit join are preferred for now.
|
||||
let tbl = select_stmt.from.get(0).ok_or_else(|| {
|
||||
let tbl = select_stmt.from.first().ok_or_else(|| {
|
||||
PolarsError::ComputeError(ErrString::from("No table found in select statement"))
|
||||
})?;
|
||||
let mut alias_map = HashMap::new();
|
||||
@ -37,7 +38,7 @@ impl SQLContext {
|
||||
TableFactor::Table { name, alias, .. } => {
|
||||
let tbl_name = name
|
||||
.0
|
||||
.get(0)
|
||||
.first()
|
||||
.ok_or_else(|| {
|
||||
PolarsError::ComputeError(ErrString::from(
|
||||
"No table found in select statement",
|
||||
@ -96,8 +97,13 @@ impl SQLContext {
|
||||
.collect::<Result<Vec<_>, PolarsError>>()?;
|
||||
// Check for group by
|
||||
// After projection since there might be number.
|
||||
let group_by = select_stmt
|
||||
.group_by
|
||||
let group_by = match &select_stmt.group_by {
|
||||
GroupByExpr::All =>
|
||||
Err(
|
||||
PolarsError::ComputeError("Group-By Error: Only positive number or expression are supported, not all".into())
|
||||
)?,
|
||||
GroupByExpr::Expressions(expressions) => expressions
|
||||
}
|
||||
.iter()
|
||||
.map(
|
||||
|e|match e {
|
||||
@ -147,7 +153,7 @@ impl SQLContext {
|
||||
.enumerate()
|
||||
.map(|(agg_pj, (proj_p, expr))| (expr.clone(), (proj_p, agg_pj + group_by.len())))
|
||||
.unzip();
|
||||
let agg_df = df.groupby(group_by).agg(agg_projection);
|
||||
let agg_df = df.group_by(group_by).agg(agg_projection);
|
||||
let mut final_proj_pos = groupby_pos
|
||||
.into_iter()
|
||||
.chain(agg_proj_pos)
|
||||
@ -182,7 +188,7 @@ impl SQLContext {
|
||||
))
|
||||
} else {
|
||||
let ast = ast
|
||||
.get(0)
|
||||
.first()
|
||||
.ok_or_else(|| PolarsError::ComputeError(ErrString::from("No statement found")))?;
|
||||
Ok(match ast {
|
||||
Statement::Query(query) => {
|
||||
|
@ -2,8 +2,8 @@ use polars::error::PolarsError;
|
||||
use polars::prelude::{col, lit, DataType, Expr, LiteralValue, PolarsResult as Result, TimeUnit};
|
||||
|
||||
use sqlparser::ast::{
|
||||
BinaryOperator as SQLBinaryOperator, DataType as SQLDataType, Expr as SqlExpr,
|
||||
Function as SQLFunction, Value as SqlValue, WindowType,
|
||||
ArrayElemTypeDef, BinaryOperator as SQLBinaryOperator, DataType as SQLDataType,
|
||||
Expr as SqlExpr, Function as SQLFunction, Value as SqlValue, WindowType,
|
||||
};
|
||||
|
||||
fn map_sql_polars_datatype(data_type: &SQLDataType) -> Result<DataType> {
|
||||
@ -13,7 +13,7 @@ fn map_sql_polars_datatype(data_type: &SQLDataType) -> Result<DataType> {
|
||||
| SQLDataType::Uuid
|
||||
| SQLDataType::Clob(_)
|
||||
| SQLDataType::Text
|
||||
| SQLDataType::String => DataType::Utf8,
|
||||
| SQLDataType::String(_) => DataType::String,
|
||||
SQLDataType::Float(_) => DataType::Float32,
|
||||
SQLDataType::Real => DataType::Float32,
|
||||
SQLDataType::Double => DataType::Float64,
|
||||
@ -31,9 +31,12 @@ fn map_sql_polars_datatype(data_type: &SQLDataType) -> Result<DataType> {
|
||||
SQLDataType::Time(_, _) => DataType::Time,
|
||||
SQLDataType::Timestamp(_, _) => DataType::Datetime(TimeUnit::Microseconds, None),
|
||||
SQLDataType::Interval => DataType::Duration(TimeUnit::Microseconds),
|
||||
SQLDataType::Array(inner_type) => match inner_type {
|
||||
Some(inner_type) => DataType::List(Box::new(map_sql_polars_datatype(inner_type)?)),
|
||||
None => {
|
||||
SQLDataType::Array(array_type_def) => match array_type_def {
|
||||
ArrayElemTypeDef::AngleBracket(inner_type)
|
||||
| ArrayElemTypeDef::SquareBracket(inner_type) => {
|
||||
DataType::List(Box::new(map_sql_polars_datatype(inner_type)?))
|
||||
}
|
||||
_ => {
|
||||
return Err(PolarsError::ComputeError(
|
||||
"SQL Datatype Array(None) was not supported in polars-sql yet!".into(),
|
||||
))
|
||||
@ -59,7 +62,9 @@ fn binary_op_(left: Expr, right: Expr, op: &SQLBinaryOperator) -> Result<Expr> {
|
||||
SQLBinaryOperator::Multiply => left * right,
|
||||
SQLBinaryOperator::Divide => left / right,
|
||||
SQLBinaryOperator::Modulo => left % right,
|
||||
SQLBinaryOperator::StringConcat => left.cast(DataType::Utf8) + right.cast(DataType::Utf8),
|
||||
SQLBinaryOperator::StringConcat => {
|
||||
left.cast(DataType::String) + right.cast(DataType::String)
|
||||
}
|
||||
SQLBinaryOperator::Gt => left.gt(right),
|
||||
SQLBinaryOperator::Lt => left.lt(right),
|
||||
SQLBinaryOperator::GtEq => left.gt_eq(right),
|
||||
@ -114,7 +119,11 @@ pub fn parse_sql_expr(expr: &SqlExpr) -> Result<Expr> {
|
||||
binary_op_(left, right, op)?
|
||||
}
|
||||
SqlExpr::Function(sql_function) => parse_sql_function(sql_function)?,
|
||||
SqlExpr::Cast { expr, data_type } => cast_(parse_sql_expr(expr)?, data_type)?,
|
||||
SqlExpr::Cast {
|
||||
expr,
|
||||
data_type,
|
||||
format: _,
|
||||
} => cast_(parse_sql_expr(expr)?, data_type)?,
|
||||
SqlExpr::Nested(expr) => parse_sql_expr(expr)?,
|
||||
SqlExpr::Value(value) => literal_expr(value)?,
|
||||
_ => {
|
||||
@ -125,7 +134,7 @@ pub fn parse_sql_expr(expr: &SqlExpr) -> Result<Expr> {
|
||||
})
|
||||
}
|
||||
|
||||
fn apply_window_spec(expr: Expr, window_type: &Option<WindowType>) -> Result<Expr> {
|
||||
fn apply_window_spec(expr: Expr, window_type: Option<&WindowType>) -> Result<Expr> {
|
||||
Ok(match &window_type {
|
||||
Some(wtype) => match wtype {
|
||||
WindowType::WindowSpec(window_spec) => {
|
||||
@ -152,7 +161,7 @@ fn apply_window_spec(expr: Expr, window_type: &Option<WindowType>) -> Result<Exp
|
||||
fn parse_sql_function(sql_function: &SQLFunction) -> Result<Expr> {
|
||||
use sqlparser::ast::{FunctionArg, FunctionArgExpr};
|
||||
// Function name mostly do not have name space, so it mostly take the first args
|
||||
let function_name = sql_function.name.0[0].value.to_lowercase();
|
||||
let function_name = sql_function.name.0[0].value.to_ascii_lowercase();
|
||||
let args = sql_function
|
||||
.args
|
||||
.iter()
|
||||
@ -168,13 +177,13 @@ fn parse_sql_function(sql_function: &SQLFunction) -> Result<Expr> {
|
||||
sql_function.distinct,
|
||||
) {
|
||||
("sum", [FunctionArgExpr::Expr(expr)], false) => {
|
||||
apply_window_spec(parse_sql_expr(expr)?, &sql_function.over)?.sum()
|
||||
apply_window_spec(parse_sql_expr(expr)?, sql_function.over.as_ref())?.sum()
|
||||
}
|
||||
("count", [FunctionArgExpr::Expr(expr)], false) => {
|
||||
apply_window_spec(parse_sql_expr(expr)?, &sql_function.over)?.count()
|
||||
apply_window_spec(parse_sql_expr(expr)?, sql_function.over.as_ref())?.count()
|
||||
}
|
||||
("count", [FunctionArgExpr::Expr(expr)], true) => {
|
||||
apply_window_spec(parse_sql_expr(expr)?, &sql_function.over)?.n_unique()
|
||||
apply_window_spec(parse_sql_expr(expr)?, sql_function.over.as_ref())?.n_unique()
|
||||
}
|
||||
// Special case for wildcard args to count function.
|
||||
("count", [FunctionArgExpr::Wildcard], false) => lit(1i32).count(),
|
||||
|
@ -10,7 +10,7 @@ use polars::{
|
||||
chunked_array::ChunkedArray,
|
||||
prelude::{
|
||||
AnyValue, DataFrame, DataType, Float64Type, IntoSeries, NewChunkedArray,
|
||||
QuantileInterpolOptions, Series, Utf8Type,
|
||||
QuantileInterpolOptions, Series, StringType,
|
||||
},
|
||||
};
|
||||
|
||||
@ -46,53 +46,56 @@ impl Command for Summary {
|
||||
description: "list dataframe descriptives",
|
||||
example: "[[a b]; [1 1] [1 1]] | dfr into-df | dfr summary",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new(
|
||||
"descriptor".to_string(),
|
||||
vec![
|
||||
Value::test_string("count"),
|
||||
Value::test_string("sum"),
|
||||
Value::test_string("mean"),
|
||||
Value::test_string("median"),
|
||||
Value::test_string("std"),
|
||||
Value::test_string("min"),
|
||||
Value::test_string("25%"),
|
||||
Value::test_string("50%"),
|
||||
Value::test_string("75%"),
|
||||
Value::test_string("max"),
|
||||
],
|
||||
),
|
||||
Column::new(
|
||||
"a (i64)".to_string(),
|
||||
vec![
|
||||
Value::test_float(2.0),
|
||||
Value::test_float(2.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(0.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(1.0),
|
||||
],
|
||||
),
|
||||
Column::new(
|
||||
"b (i64)".to_string(),
|
||||
vec![
|
||||
Value::test_float(2.0),
|
||||
Value::test_float(2.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(0.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(1.0),
|
||||
],
|
||||
),
|
||||
])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![
|
||||
Column::new(
|
||||
"descriptor".to_string(),
|
||||
vec![
|
||||
Value::test_string("count"),
|
||||
Value::test_string("sum"),
|
||||
Value::test_string("mean"),
|
||||
Value::test_string("median"),
|
||||
Value::test_string("std"),
|
||||
Value::test_string("min"),
|
||||
Value::test_string("25%"),
|
||||
Value::test_string("50%"),
|
||||
Value::test_string("75%"),
|
||||
Value::test_string("max"),
|
||||
],
|
||||
),
|
||||
Column::new(
|
||||
"a (i64)".to_string(),
|
||||
vec![
|
||||
Value::test_float(2.0),
|
||||
Value::test_float(2.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(0.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(1.0),
|
||||
],
|
||||
),
|
||||
Column::new(
|
||||
"b (i64)".to_string(),
|
||||
vec![
|
||||
Value::test_float(2.0),
|
||||
Value::test_float(2.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(0.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(1.0),
|
||||
Value::test_float(1.0),
|
||||
],
|
||||
),
|
||||
],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -120,30 +123,31 @@ fn command(
|
||||
let quantiles = quantiles.map(|values| {
|
||||
values
|
||||
.iter()
|
||||
.map(|value| match value {
|
||||
Value::Float { val, span } => {
|
||||
if (&0.0..=&1.0).contains(&val) {
|
||||
Ok(*val)
|
||||
} else {
|
||||
Err(ShellError::GenericError(
|
||||
"Incorrect value for quantile".to_string(),
|
||||
"value should be between 0 and 1".to_string(),
|
||||
Some(*span),
|
||||
None,
|
||||
Vec::new(),
|
||||
))
|
||||
.map(|value| {
|
||||
let span = value.span();
|
||||
match value {
|
||||
Value::Float { val, .. } => {
|
||||
if (&0.0..=&1.0).contains(&val) {
|
||||
Ok(*val)
|
||||
} else {
|
||||
Err(ShellError::GenericError {
|
||||
error: "Incorrect value for quantile".into(),
|
||||
msg: "value should be between 0 and 1".into(),
|
||||
span: Some(span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})
|
||||
}
|
||||
}
|
||||
Value::Error { error, .. } => Err(*error.clone()),
|
||||
_ => Err(ShellError::GenericError {
|
||||
error: "Incorrect value for quantile".into(),
|
||||
msg: "value should be a float".into(),
|
||||
span: Some(span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
}),
|
||||
}
|
||||
_ => match value.span() {
|
||||
Ok(span) => Err(ShellError::GenericError(
|
||||
"Incorrect value for quantile".to_string(),
|
||||
"value should be a float".to_string(),
|
||||
Some(span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)),
|
||||
Err(e) => Err(e),
|
||||
},
|
||||
})
|
||||
.collect::<Result<Vec<f64>, ShellError>>()
|
||||
});
|
||||
@ -170,7 +174,7 @@ fn command(
|
||||
|
||||
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
||||
|
||||
let names = ChunkedArray::<Utf8Type>::from_slice_options("descriptor", &labels).into_series();
|
||||
let names = ChunkedArray::<StringType>::from_slice_options("descriptor", &labels).into_series();
|
||||
|
||||
let head = std::iter::once(names);
|
||||
|
||||
@ -178,42 +182,50 @@ fn command(
|
||||
.as_ref()
|
||||
.get_columns()
|
||||
.iter()
|
||||
.filter(|col| col.dtype() != &DataType::Object("object"))
|
||||
.filter(|col| !matches!(col.dtype(), &DataType::Object("object", _)))
|
||||
.map(|col| {
|
||||
let count = col.len() as f64;
|
||||
|
||||
let sum = col
|
||||
.sum_as_series()
|
||||
.cast(&DataType::Float64)
|
||||
.ok()
|
||||
.and_then(|ca| match ca.get(0) {
|
||||
Ok(AnyValue::Float64(v)) => Some(v),
|
||||
_ => None,
|
||||
});
|
||||
let sum = col.sum_as_series().ok().and_then(|series| {
|
||||
series
|
||||
.cast(&DataType::Float64)
|
||||
.ok()
|
||||
.and_then(|ca| match ca.get(0) {
|
||||
Ok(AnyValue::Float64(v)) => Some(v),
|
||||
_ => None,
|
||||
})
|
||||
});
|
||||
|
||||
let mean = match col.mean_as_series().get(0) {
|
||||
Ok(AnyValue::Float64(v)) => Some(v),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
let median = match col.median_as_series().get(0) {
|
||||
Ok(AnyValue::Float64(v)) => Some(v),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
let std = match col.std_as_series(0).get(0) {
|
||||
Ok(AnyValue::Float64(v)) => Some(v),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
let min = col
|
||||
.min_as_series()
|
||||
.cast(&DataType::Float64)
|
||||
.ok()
|
||||
.and_then(|ca| match ca.get(0) {
|
||||
let median = match col.median_as_series() {
|
||||
Ok(v) => match v.get(0) {
|
||||
Ok(AnyValue::Float64(v)) => Some(v),
|
||||
_ => None,
|
||||
});
|
||||
},
|
||||
_ => None,
|
||||
};
|
||||
|
||||
let std = match col.std_as_series(0) {
|
||||
Ok(v) => match v.get(0) {
|
||||
Ok(AnyValue::Float64(v)) => Some(v),
|
||||
_ => None,
|
||||
},
|
||||
_ => None,
|
||||
};
|
||||
|
||||
let min = col.min_as_series().ok().and_then(|series| {
|
||||
series
|
||||
.cast(&DataType::Float64)
|
||||
.ok()
|
||||
.and_then(|ca| match ca.get(0) {
|
||||
Ok(AnyValue::Float64(v)) => Some(v),
|
||||
_ => None,
|
||||
})
|
||||
});
|
||||
|
||||
let mut quantiles = quantiles
|
||||
.clone()
|
||||
@ -229,14 +241,15 @@ fn command(
|
||||
})
|
||||
.collect::<Vec<Option<f64>>>();
|
||||
|
||||
let max = col
|
||||
.max_as_series()
|
||||
.cast(&DataType::Float64)
|
||||
.ok()
|
||||
.and_then(|ca| match ca.get(0) {
|
||||
Ok(AnyValue::Float64(v)) => Some(v),
|
||||
_ => None,
|
||||
});
|
||||
let max = col.max_as_series().ok().and_then(|series| {
|
||||
series
|
||||
.cast(&DataType::Float64)
|
||||
.ok()
|
||||
.and_then(|ca| match ca.get(0) {
|
||||
Ok(AnyValue::Float64(v)) => Some(v),
|
||||
_ => None,
|
||||
})
|
||||
});
|
||||
|
||||
let mut descriptors = vec![Some(count), sum, mean, median, std, min];
|
||||
descriptors.append(&mut quantiles);
|
||||
@ -249,14 +262,12 @@ fn command(
|
||||
let res = head.chain(tail).collect::<Vec<Series>>();
|
||||
|
||||
DataFrame::new(res)
|
||||
.map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Dataframe Error".into(),
|
||||
e.to_string(),
|
||||
Some(call.head),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Dataframe Error".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})
|
||||
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
|
||||
}
|
||||
|
@ -44,16 +44,19 @@ impl Command for TakeDF {
|
||||
let indices = ([0 2] | dfr into-df);
|
||||
$df | dfr take $indices"#,
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new(
|
||||
"a".to_string(),
|
||||
vec![Value::test_int(4), Value::test_int(4)],
|
||||
),
|
||||
Column::new(
|
||||
"b".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(3)],
|
||||
),
|
||||
])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![
|
||||
Column::new(
|
||||
"a".to_string(),
|
||||
vec![Value::test_int(4), Value::test_int(4)],
|
||||
),
|
||||
Column::new(
|
||||
"b".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(3)],
|
||||
),
|
||||
],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -64,10 +67,13 @@ impl Command for TakeDF {
|
||||
let indices = ([0 2] | dfr into-df);
|
||||
$series | dfr take $indices"#,
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![Column::new(
|
||||
"0".to_string(),
|
||||
vec![Value::test_int(4), Value::test_int(5)],
|
||||
)])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![Column::new(
|
||||
"0".to_string(),
|
||||
vec![Value::test_int(4), Value::test_int(5)],
|
||||
)],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -93,51 +99,45 @@ fn command(
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let index_value: Value = call.req(engine_state, stack, 0)?;
|
||||
let index_span = index_value.span()?;
|
||||
let index_span = index_value.span();
|
||||
let index = NuDataFrame::try_from_value(index_value)?.as_series(index_span)?;
|
||||
|
||||
let casted = match index.dtype() {
|
||||
DataType::UInt32 | DataType::UInt64 | DataType::Int32 | DataType::Int64 => {
|
||||
index.cast(&DataType::UInt32).map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error casting index list".into(),
|
||||
e.to_string(),
|
||||
Some(index_span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
})
|
||||
}
|
||||
_ => Err(ShellError::GenericError(
|
||||
"Incorrect type".into(),
|
||||
"Series with incorrect type".into(),
|
||||
Some(call.head),
|
||||
Some("Consider using a Series with type int type".into()),
|
||||
Vec::new(),
|
||||
)),
|
||||
DataType::UInt32 | DataType::UInt64 | DataType::Int32 | DataType::Int64 => index
|
||||
.cast(&DataType::UInt32)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error casting index list".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(index_span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
}),
|
||||
_ => Err(ShellError::GenericError {
|
||||
error: "Incorrect type".into(),
|
||||
msg: "Series with incorrect type".into(),
|
||||
span: Some(call.head),
|
||||
help: Some("Consider using a Series with type int type".into()),
|
||||
inner: vec![],
|
||||
}),
|
||||
}?;
|
||||
|
||||
let indices = casted.u32().map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error casting index list".into(),
|
||||
e.to_string(),
|
||||
Some(index_span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
let indices = casted.u32().map_err(|e| ShellError::GenericError {
|
||||
error: "Error casting index list".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(index_span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
|
||||
NuDataFrame::try_from_pipeline(input, call.head).and_then(|df| {
|
||||
df.as_ref()
|
||||
.take(indices)
|
||||
.map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error taking values".into(),
|
||||
e.to_string(),
|
||||
Some(call.head),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error taking values".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(call.head),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})
|
||||
.map(|df| PipelineData::Value(NuDataFrame::dataframe_into_value(df, call.head), None))
|
||||
})
|
||||
|
@ -58,36 +58,28 @@ fn command(
|
||||
|
||||
let mut df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
||||
|
||||
let mut file = File::create(&file_name.item).map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error with file name".into(),
|
||||
e.to_string(),
|
||||
Some(file_name.span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
let mut file = File::create(&file_name.item).map_err(|e| ShellError::GenericError {
|
||||
error: "Error with file name".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(file_name.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
|
||||
IpcWriter::new(&mut file).finish(df.as_mut()).map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error saving file".into(),
|
||||
e.to_string(),
|
||||
Some(file_name.span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
})?;
|
||||
IpcWriter::new(&mut file)
|
||||
.finish(df.as_mut())
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error saving file".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(file_name.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
|
||||
let file_value = Value::String {
|
||||
val: format!("saved {:?}", &file_name.item),
|
||||
span: file_name.span,
|
||||
};
|
||||
let file_value = Value::string(format!("saved {:?}", &file_name.item), file_name.span);
|
||||
|
||||
Ok(PipelineData::Value(
|
||||
Value::List {
|
||||
vals: vec![file_value],
|
||||
span: call.head,
|
||||
},
|
||||
Value::list(vec![file_value], call.head),
|
||||
None,
|
||||
))
|
||||
}
|
||||
|
113
crates/nu-cmd-dataframe/src/dataframe/eager/to_avro.rs
Normal file
113
crates/nu-cmd-dataframe/src/dataframe/eager/to_avro.rs
Normal file
@ -0,0 +1,113 @@
|
||||
use std::{fs::File, path::PathBuf};
|
||||
|
||||
use nu_engine::CallExt;
|
||||
use nu_protocol::{
|
||||
ast::Call,
|
||||
engine::{Command, EngineState, Stack},
|
||||
Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Type, Value,
|
||||
};
|
||||
use polars_io::avro::{AvroCompression, AvroWriter};
|
||||
use polars_io::SerWriter;
|
||||
|
||||
use super::super::values::NuDataFrame;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct ToAvro;
|
||||
|
||||
impl Command for ToAvro {
|
||||
fn name(&self) -> &str {
|
||||
"dfr to-avro"
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Saves dataframe to avro file."
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(self.name())
|
||||
.named(
|
||||
"compression",
|
||||
SyntaxShape::String,
|
||||
"use compression, supports deflate or snappy",
|
||||
Some('c'),
|
||||
)
|
||||
.required("file", SyntaxShape::Filepath, "file path to save dataframe")
|
||||
.input_output_type(Type::Custom("dataframe".into()), Type::Any)
|
||||
.category(Category::Custom("dataframe".into()))
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![Example {
|
||||
description: "Saves dataframe to avro file",
|
||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr to-avro test.avro",
|
||||
result: None,
|
||||
}]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
command(engine_state, stack, call, input)
|
||||
}
|
||||
}
|
||||
|
||||
fn get_compression(call: &Call) -> Result<Option<AvroCompression>, ShellError> {
|
||||
if let Some((compression, span)) = call
|
||||
.get_flag_expr("compression")
|
||||
.and_then(|e| e.as_string().map(|s| (s, e.span)))
|
||||
{
|
||||
match compression.as_ref() {
|
||||
"snappy" => Ok(Some(AvroCompression::Snappy)),
|
||||
"deflate" => Ok(Some(AvroCompression::Deflate)),
|
||||
_ => Err(ShellError::IncorrectValue {
|
||||
msg: "compression must be one of deflate or snappy".to_string(),
|
||||
val_span: span,
|
||||
call_span: span,
|
||||
}),
|
||||
}
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
fn command(
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let file_name: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
|
||||
let compression = get_compression(call)?;
|
||||
|
||||
let mut df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
||||
|
||||
let file = File::create(&file_name.item).map_err(|e| ShellError::GenericError {
|
||||
error: "Error with file name".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(file_name.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
|
||||
AvroWriter::new(file)
|
||||
.with_compression(compression)
|
||||
.finish(df.as_mut())
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error saving file".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(file_name.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
|
||||
let file_value = Value::string(format!("saved {:?}", &file_name.item), file_name.span);
|
||||
|
||||
Ok(PipelineData::Value(
|
||||
Value::list(vec![file_value], call.head),
|
||||
None,
|
||||
))
|
||||
}
|
@ -45,7 +45,7 @@ impl Command for ToCSV {
|
||||
},
|
||||
Example {
|
||||
description: "Saves dataframe to CSV file using other delimiter",
|
||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr to-csv test.csv -d '|'",
|
||||
example: "[[a b]; [1 2] [3 4]] | dfr into-df | dfr to-csv test.csv --delimiter '|'",
|
||||
result: None,
|
||||
},
|
||||
]
|
||||
@ -70,70 +70,62 @@ fn command(
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let file_name: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
|
||||
let delimiter: Option<Spanned<String>> = call.get_flag(engine_state, stack, "delimiter")?;
|
||||
let no_header: bool = call.has_flag("no-header");
|
||||
let no_header: bool = call.has_flag(engine_state, stack, "no-header")?;
|
||||
|
||||
let mut df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
||||
|
||||
let mut file = File::create(&file_name.item).map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error with file name".into(),
|
||||
e.to_string(),
|
||||
Some(file_name.span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
let mut file = File::create(&file_name.item).map_err(|e| ShellError::GenericError {
|
||||
error: "Error with file name".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(file_name.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
|
||||
let writer = CsvWriter::new(&mut file);
|
||||
|
||||
let writer = if no_header {
|
||||
writer.has_header(false)
|
||||
writer.include_header(false)
|
||||
} else {
|
||||
writer.has_header(true)
|
||||
writer.include_header(true)
|
||||
};
|
||||
|
||||
let mut writer = match delimiter {
|
||||
None => writer,
|
||||
Some(d) => {
|
||||
if d.item.len() != 1 {
|
||||
return Err(ShellError::GenericError(
|
||||
"Incorrect delimiter".into(),
|
||||
"Delimiter has to be one char".into(),
|
||||
Some(d.span),
|
||||
None,
|
||||
Vec::new(),
|
||||
));
|
||||
return Err(ShellError::GenericError {
|
||||
error: "Incorrect delimiter".into(),
|
||||
msg: "Delimiter has to be one char".into(),
|
||||
span: Some(d.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
});
|
||||
} else {
|
||||
let delimiter = match d.item.chars().next() {
|
||||
Some(d) => d as u8,
|
||||
None => unreachable!(),
|
||||
};
|
||||
|
||||
writer.with_delimiter(delimiter)
|
||||
writer.with_separator(delimiter)
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
writer.finish(df.as_mut()).map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error writing to file".into(),
|
||||
e.to_string(),
|
||||
Some(file_name.span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
})?;
|
||||
writer
|
||||
.finish(df.as_mut())
|
||||
.map_err(|e| ShellError::GenericError {
|
||||
error: "Error writing to file".into(),
|
||||
msg: e.to_string(),
|
||||
span: Some(file_name.span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})?;
|
||||
|
||||
let file_value = Value::String {
|
||||
val: format!("saved {:?}", &file_name.item),
|
||||
span: file_name.span,
|
||||
};
|
||||
let file_value = Value::string(format!("saved {:?}", &file_name.item), file_name.span);
|
||||
|
||||
Ok(PipelineData::Value(
|
||||
Value::List {
|
||||
vals: vec![file_value],
|
||||
span: call.head,
|
||||
},
|
||||
Value::list(vec![file_value], call.head),
|
||||
None,
|
||||
))
|
||||
}
|
||||
|
@ -1,10 +1,14 @@
|
||||
use crate::dataframe::values::NuSchema;
|
||||
|
||||
use super::super::values::{Column, NuDataFrame};
|
||||
|
||||
use nu_engine::CallExt;
|
||||
use nu_protocol::{
|
||||
ast::Call,
|
||||
engine::{Command, EngineState, Stack},
|
||||
Category, Example, PipelineData, ShellError, Signature, Span, Type, Value,
|
||||
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
|
||||
};
|
||||
use polars::prelude::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct ToDataFrame;
|
||||
@ -20,6 +24,12 @@ impl Command for ToDataFrame {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(self.name())
|
||||
.named(
|
||||
"schema",
|
||||
SyntaxShape::Record(vec![]),
|
||||
r#"Polars Schema in format [{name: str}]. CSV, JSON, and JSONL files"#,
|
||||
Some('s'),
|
||||
)
|
||||
.input_output_type(Type::Any, Type::Custom("dataframe".into()))
|
||||
.category(Category::Custom("dataframe".into()))
|
||||
}
|
||||
@ -30,16 +40,19 @@ impl Command for ToDataFrame {
|
||||
description: "Takes a dictionary and creates a dataframe",
|
||||
example: "[[a b];[1 2] [3 4]] | dfr into-df",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new(
|
||||
"a".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(3)],
|
||||
),
|
||||
Column::new(
|
||||
"b".to_string(),
|
||||
vec![Value::test_int(2), Value::test_int(4)],
|
||||
),
|
||||
])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![
|
||||
Column::new(
|
||||
"a".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(3)],
|
||||
),
|
||||
Column::new(
|
||||
"b".to_string(),
|
||||
vec![Value::test_int(2), Value::test_int(4)],
|
||||
),
|
||||
],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -48,24 +61,27 @@ impl Command for ToDataFrame {
|
||||
description: "Takes a list of tables and creates a dataframe",
|
||||
example: "[[1 2 a] [3 4 b] [5 6 c]] | dfr into-df",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new(
|
||||
"0".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(3), Value::test_int(5)],
|
||||
),
|
||||
Column::new(
|
||||
"1".to_string(),
|
||||
vec![Value::test_int(2), Value::test_int(4), Value::test_int(6)],
|
||||
),
|
||||
Column::new(
|
||||
"2".to_string(),
|
||||
vec![
|
||||
Value::test_string("a"),
|
||||
Value::test_string("b"),
|
||||
Value::test_string("c"),
|
||||
],
|
||||
),
|
||||
])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![
|
||||
Column::new(
|
||||
"0".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(3), Value::test_int(5)],
|
||||
),
|
||||
Column::new(
|
||||
"1".to_string(),
|
||||
vec![Value::test_int(2), Value::test_int(4), Value::test_int(6)],
|
||||
),
|
||||
Column::new(
|
||||
"2".to_string(),
|
||||
vec![
|
||||
Value::test_string("a"),
|
||||
Value::test_string("b"),
|
||||
Value::test_string("c"),
|
||||
],
|
||||
),
|
||||
],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -74,14 +90,17 @@ impl Command for ToDataFrame {
|
||||
description: "Takes a list and creates a dataframe",
|
||||
example: "[a b c] | dfr into-df",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![Column::new(
|
||||
"0".to_string(),
|
||||
vec![
|
||||
Value::test_string("a"),
|
||||
Value::test_string("b"),
|
||||
Value::test_string("c"),
|
||||
],
|
||||
)])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![Column::new(
|
||||
"0".to_string(),
|
||||
vec![
|
||||
Value::test_string("a"),
|
||||
Value::test_string("b"),
|
||||
Value::test_string("c"),
|
||||
],
|
||||
)],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
@ -90,30 +109,78 @@ impl Command for ToDataFrame {
|
||||
description: "Takes a list of booleans and creates a dataframe",
|
||||
example: "[true true false] | dfr into-df",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![Column::new(
|
||||
"0".to_string(),
|
||||
vec![
|
||||
Value::test_bool(true),
|
||||
Value::test_bool(true),
|
||||
Value::test_bool(false),
|
||||
],
|
||||
)])
|
||||
NuDataFrame::try_from_columns(
|
||||
vec![Column::new(
|
||||
"0".to_string(),
|
||||
vec![
|
||||
Value::test_bool(true),
|
||||
Value::test_bool(true),
|
||||
Value::test_bool(false),
|
||||
],
|
||||
)],
|
||||
None,
|
||||
)
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
},
|
||||
Example {
|
||||
description: "Convert to a dataframe and provide a schema",
|
||||
example: "{a: 1, b: {a: [1 2 3]}, c: [a b c]}| dfr into-df -s {a: u8, b: {a: list<u64>}, c: list<str>}",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_series(vec![
|
||||
Series::new("a", &[1u8]),
|
||||
{
|
||||
let dtype = DataType::Struct(vec![Field::new("a", DataType::List(Box::new(DataType::UInt64)))]);
|
||||
let vals = vec![AnyValue::StructOwned(
|
||||
Box::new((vec![AnyValue::List(Series::new("a", &[1u64, 2, 3]))], vec![Field::new("a", DataType::String)]))); 1];
|
||||
Series::from_any_values_and_dtype("b", &vals, &dtype, false)
|
||||
.expect("Struct series should not fail")
|
||||
},
|
||||
{
|
||||
let dtype = DataType::List(Box::new(DataType::String));
|
||||
let vals = vec![AnyValue::List(Series::new("c", &["a", "b", "c"]))];
|
||||
Series::from_any_values_and_dtype("c", &vals, &dtype, false)
|
||||
.expect("List series should not fail")
|
||||
}
|
||||
], Span::test_data())
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
},
|
||||
Example {
|
||||
description: "Convert to a dataframe and provide a schema that adds a new column",
|
||||
example: r#"[[a b]; [1 "foo"] [2 "bar"]] | dfr into-df -s {a: u8, b:str, c:i64} | dfr fill-null 3"#,
|
||||
result: Some(NuDataFrame::try_from_series(vec![
|
||||
Series::new("a", [1u8, 2]),
|
||||
Series::new("b", ["foo", "bar"]),
|
||||
Series::new("c", [3i64, 3]),
|
||||
], Span::test_data())
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
_engine_state: &EngineState,
|
||||
_stack: &mut Stack,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
NuDataFrame::try_from_iter(input.into_iter())
|
||||
.map(|df| PipelineData::Value(NuDataFrame::into_value(df, call.head), None))
|
||||
let maybe_schema = call
|
||||
.get_flag(engine_state, stack, "schema")?
|
||||
.map(|schema| NuSchema::try_from(&schema))
|
||||
.transpose()?;
|
||||
|
||||
let df = NuDataFrame::try_from_iter(input.into_iter(), maybe_schema.clone())?;
|
||||
|
||||
Ok(PipelineData::Value(
|
||||
NuDataFrame::into_value(df, call.head),
|
||||
None,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user