Mirror of https://github.com/nushell/nushell.git (synced 2025-07-01 07:00:37 +02:00)
Compare commits
976 Commits
SHA1 | Author | Date | |
---|---|---|---|
ea4f8ff400 | |||
ebcdf5a8b1 | |||
440b9c8e1f | |||
96a886eb84 | |||
61d59f13fa | |||
5da7dcdbdb | |||
f92f11c0cf | |||
3bf96523a4 | |||
8d46398e13 | |||
461d558983 | |||
65c9160170 | |||
e3124d3561 | |||
b886fd364c | |||
21d949207f | |||
4a9e2ac37b | |||
9cc74e7a9f | |||
4adcf079e2 | |||
81cec2e50f | |||
ed7b2615c1 | |||
74e0e4f092 | |||
42fc9f52a1 | |||
c563e0cfb0 | |||
8671a3dbbd | |||
fc813af4c8 | |||
b83aa17c96 | |||
c7e10c3c57 | |||
e7d2717424 | |||
222c307648 | |||
eb9eb09ac5 | |||
6eacbabe17 | |||
33303f083c | |||
483974311d | |||
179ea5ae87 | |||
bdc7cdbcc4 | |||
2b524cd861 | |||
ad9f051d61 | |||
cfbe835910 | |||
8896ba80a4 | |||
803c24f9ce | |||
2f74574e35 | |||
8b9f02246f | |||
d9ecb7da93 | |||
18ce5de500 | |||
fbde02370a | |||
13452a7aa2 | |||
a8c49857d9 | |||
90afb65329 | |||
ff4907ed3b | |||
cbd7608898 | |||
adc9bbdc18 | |||
37bc922a67 | |||
ae51f6d722 | |||
1b2079ffdb | |||
9a968c4bdd | |||
89df01f829 | |||
dbb30cc9e0 | |||
02d63705cc | |||
ea97229688 | |||
6bf955a5a5 | |||
f90035e084 | |||
cc8b623ff8 | |||
60cb13c493 | |||
c10e483683 | |||
2d0c7b2214 | |||
88d421dcb6 | |||
7c50f7c714 | |||
bc043dcaeb | |||
10be753ab7 | |||
6906a0ca50 | |||
833471241a | |||
c4dcfdb77b | |||
1e8876b076 | |||
5483519c7d | |||
457f162fd9 | |||
58a8f30a25 | |||
70ba5d9d68 | |||
7b88bda9a1 | |||
bb37306d07 | |||
505cc014ac | |||
ff79959fdf | |||
8c2b1a22d4 | |||
3d62753e80 | |||
36c30ade3a | |||
e0eb29f161 | |||
c2ac8f730e | |||
1a0986903f | |||
0f25641722 | |||
7d6d48f3f7 | |||
6a8c183c1a | |||
0beb28e827 | |||
8352a09117 | |||
a9252c5075 | |||
73fbe26ef9 | |||
52fa9a978b | |||
d4357ad981 | |||
a0d7c1a4fd | |||
a340511e95 | |||
426e64501d | |||
b0d68c31e8 | |||
583cb96cff | |||
ff8831318d | |||
ce308ee461 | |||
21388175b8 | |||
520f11fb8f | |||
39b95fc59e | |||
63e68934f6 | |||
acc152564c | |||
8f63db4c95 | |||
cb133ed387 | |||
a7547a54bc | |||
d1969a3c9a | |||
ce582cdafb | |||
55de232a1c | |||
deca337a56 | |||
60e9f469af | |||
b500ac57c2 | |||
eadb8da9f7 | |||
cda15d91dd | |||
651a8716fb | |||
a1b7574306 | |||
09f12b9c4a | |||
9ae74e3941 | |||
d8bec8668f | |||
12ccaf5e33 | |||
5fecf59f54 | |||
a3aae2d26c | |||
d1d6518ece | |||
2d868323b6 | |||
0389815137 | |||
11cdb94699 | |||
0ca5c2f135 | |||
715b0d90a9 | |||
05c36d1bc7 | |||
208ebeefab | |||
b33f4b7f55 | |||
f41b1460aa | |||
220858d641 | |||
db261e3ed9 | |||
82eb1c5584 | |||
6be291b00a | |||
7add38fe32 | |||
78903724f5 | |||
cb57f0a539 | |||
717081bd2f | |||
e1ffaf2548 | |||
1db4be12d1 | |||
6193679dfc | |||
a9657e17ad | |||
03d455a688 | |||
bae04352ca | |||
a1497716f1 | |||
b5b63d2bf9 | |||
5c59611083 | |||
1503ee09ba | |||
24dba9dc53 | |||
a2dc3e3b33 | |||
95998bdd53 | |||
bd5de023a1 | |||
38e761493d | |||
7fcebf37ec | |||
0e9927ea4d | |||
d273ce89df | |||
2dc5c19b71 | |||
669b44ad7d | |||
eff063822a | |||
9a5c4d36be | |||
cd4560e97a | |||
24cc2f9d87 | |||
8f81812ef9 | |||
2229370b13 | |||
a33650a69e | |||
56d7e4bb89 | |||
e5f589ccdd | |||
8c4d3eaa7e | |||
89322f59f2 | |||
4e307480e4 | |||
d601abaee0 | |||
ceaa0f9375 | |||
d31b7024d8 | |||
9dd30d7756 | |||
eff9305eb3 | |||
885b87a842 | |||
017daeed18 | |||
c8c018452f | |||
1a0778d77e | |||
d75aa7ed1b | |||
39edd7e080 | |||
61dbcf3de6 | |||
f8ed4b45fd | |||
7b57f132bb | |||
dfca117551 | |||
29eb109b1e | |||
70d8163181 | |||
e4cef8a154 | |||
15146e68ad | |||
b0f9cda9b5 | |||
173162df2e | |||
c0b944edb6 | |||
26699d96eb | |||
08940ba4f8 | |||
ecb9799b6a | |||
a886e30e04 | |||
147009a161 | |||
12a1eefe73 | |||
0f8f3bcf9a | |||
639f4bd499 | |||
e82df7c1c9 | |||
41f4d0dcbc | |||
eb2a91ea7c | |||
b81d46574c | |||
1c6c85d35d | |||
67ea25afca | |||
f25525be6c | |||
a72f94f452 | |||
210c6f1c43 | |||
0cd90e2388 | |||
7ca2a6f8ac | |||
237a685605 | |||
2bf0397d80 | |||
5ec823996a | |||
67b6188b19 | |||
df74a0c961 | |||
af6c4bdc9c | |||
d7f26b177a | |||
470d130289 | |||
a23e96c945 | |||
9ba16dbdaf | |||
43f9ec295f | |||
f39e5b3f37 | |||
6c0b65b570 | |||
1dcaffb792 | |||
ca4222277e | |||
5c2bcd068b | |||
9aba96604b | |||
7be90c2644 | |||
7e9e93cf82 | |||
6d1f7cb3e3 | |||
334cf1862a | |||
49d86855ce | |||
5fe97b8d59 | |||
2bad1371f0 | |||
3030608de0 | |||
5d32cd2c40 | |||
07be33c119 | |||
eaf522b41f | |||
e76586ede4 | |||
1979b61a92 | |||
02fcc485fb | |||
55e05be0d8 | |||
e10ac2ede6 | |||
bf1f2d5ebd | |||
6aed1b42ae | |||
f33a26123c | |||
7c160725ed | |||
5832823dff | |||
3fe355c4a6 | |||
dd56c813f9 | |||
7a6cfa24fc | |||
2ea2a904e8 | |||
dfba62da00 | |||
b241e9edd5 | |||
946cef77f1 | |||
c99c8119fe | |||
2b4914608e | |||
8b80ceac32 | |||
e89bb2ee96 | |||
862d53bb6e | |||
820d0c0959 | |||
968eb45fb2 | |||
2c1d261cca | |||
69d1c8e948 | |||
2c7ab6e898 | |||
c986426478 | |||
09674a0026 | |||
9cca4ec18b | |||
90c86e6cbf | |||
4cb195a998 | |||
f7f09292d6 | |||
2c35e07c2d | |||
c949d2e893 | |||
83de8560ee | |||
00e5e6d719 | |||
1dd861b10f | |||
42aa2ff5ba | |||
74f62305b2 | |||
8f634f4140 | |||
33001d1992 | |||
f4b7333dc8 | |||
3dde851381 | |||
029f3843d3 | |||
0f6996b70d | |||
9160f36ea5 | |||
7f346dbf4c | |||
03888b9d81 | |||
966cebec34 | |||
44b7cfd696 | |||
a17ffdfe56 | |||
430b2746b8 | |||
1e566adcfc | |||
789781665d | |||
e926919582 | |||
8d5d01bbc9 | |||
58f7cfd099 | |||
b432866dc9 | |||
81e496673e | |||
2dab65f852 | |||
95dcb2fd6c | |||
d97b2e3c60 | |||
4fe7865ad0 | |||
d122bc3d89 | |||
7d17c2eb5e | |||
0e6e9abc12 | |||
f3982278e8 | |||
b1e591f84c | |||
122bcff356 | |||
087fe484f6 | |||
551fecd10d | |||
88bbe4abaa | |||
49f92e9090 | |||
4779d69de6 | |||
de7b000505 | |||
9eaa8908d2 | |||
fc72aa6abe | |||
8e1385417e | |||
95f89a093a | |||
e9b677a9e9 | |||
7555743ccc | |||
93612974e0 | |||
52a35827c7 | |||
c5a14bb8ff | |||
48bdcc71f4 | |||
78c93e5ae0 | |||
96af27fb4c | |||
12b8b4580c | |||
1616acd124 | |||
0cb4281fdb | |||
6f6ad23072 | |||
1ab09256d7 | |||
ee14811912 | |||
7939fb05ea | |||
53d30ee7ea | |||
058ce0ed2d | |||
9bb7f0c7dc | |||
9521b209d1 | |||
f51a79181a | |||
938fa6ee55 | |||
1d0d91d5e5 | |||
252155bdb9 | |||
be508cbd7f | |||
fcd1d59abd | |||
083c534948 | |||
bda3245725 | |||
1d44843970 | |||
d16946c6e8 | |||
2f6b4c5e9b | |||
4a967d19a9 | |||
3d58c3f70e | |||
c504c93a1d | |||
8b46ba8b6b | |||
f8ac9db15b | |||
7636963732 | |||
5d1e2b1df1 | |||
273226d666 | |||
2b8fb4fe00 | |||
2cb059146b | |||
fb7b0a8c11 | |||
d4aeadbb44 | |||
2a8f92b709 | |||
453e294883 | |||
e1c5ae3cd5 | |||
a8a0c78a32 | |||
879258039c | |||
4ac4f71a37 | |||
62e56d3581 | |||
2e1b6acc0e | |||
3eae657121 | |||
e74ce72f09 | |||
d577074da9 | |||
f7d5162582 | |||
0430167f1c | |||
1128fa137f | |||
81243c48f0 | |||
442df9e39c | |||
a58d9b0b3a | |||
2a3d5a9d42 | |||
a5d7d6dd46 | |||
18e3a5d40b | |||
553c951a60 | |||
781c4bd1d7 | |||
a2e335dcd7 | |||
c6fc6bd5a7 | |||
a7830ac1fd | |||
00713c9339 | |||
7d7dbd8b2c | |||
d4675d9138 | |||
6e88b3f8d6 | |||
720813339f | |||
5b4dd775d4 | |||
bfe398ca36 | |||
31e1f49cb6 | |||
26897b287c | |||
5a7707cb52 | |||
4b0b4ddce1 | |||
9fa2f43d06 | |||
2891867de9 | |||
55c7246830 | |||
17246db38b | |||
e60dac8957 | |||
d007b10fbf | |||
942030199d | |||
fb8ac4198b | |||
2ce5de58e6 | |||
2f18b9c856 | |||
bdc767bf23 | |||
3770a5eed1 | |||
0705fb9cd1 | |||
1a1a960836 | |||
5be818b5ee | |||
c7d3014849 | |||
164a089656 | |||
0b2d1327d2 | |||
5f6f18076c | |||
81de8ecd70 | |||
30ed63667b | |||
a56906ca6d | |||
0f0e1e2068 | |||
192ee59c75 | |||
803a348f41 | |||
1aa2ed1947 | |||
f04db2a7a3 | |||
30b3c42b37 | |||
4424481487 | |||
8b431e3a2e | |||
c58b432c21 | |||
34c09d8b35 | |||
13d5a15f75 | |||
339c5b7c83 | |||
5291f978c2 | |||
63fa6a6df7 | |||
4540f3829e | |||
1349187e17 | |||
b55ed69c92 | |||
948965d42f | |||
f4205132c7 | |||
5eae08ac76 | |||
3836da0cf1 | |||
6be42d94d9 | |||
945e9511ce | |||
cce12efe48 | |||
aa62de78e6 | |||
08b5d5cce5 | |||
03bb144150 | |||
5fa79e6e5f | |||
080b501ba8 | |||
66bc0542e0 | |||
ec1f7deb23 | |||
45f9d03025 | |||
4bc28f1752 | |||
a2705f9eb5 | |||
c0b4d19761 | |||
b53271b86a | |||
5ca4e903c8 | |||
0ad5f4389c | |||
7ea4895513 | |||
f46f8b286b | |||
f88ed6ecd5 | |||
a011791631 | |||
c783b07d58 | |||
e3e2554b3d | |||
926b0407c5 | |||
22a01d7e76 | |||
299453ecb7 | |||
fd684a204c | |||
cdbb3ee7b9 | |||
f0f6b3a3e5 | |||
93e121782c | |||
befeddad59 | |||
73c08fcb2b | |||
9a0ae7c4c0 | |||
28ca0e7116 | |||
84c720daf5 | |||
cdb082e92d | |||
0666b3784f | |||
379d89369c | |||
2bd345c367 | |||
0e418688d4 | |||
b97d89adb6 | |||
ee84435a0e | |||
500cd35ad2 | |||
3f5ebd75b6 | |||
75105033b2 | |||
8759936636 | |||
4dcaf2a201 | |||
089c5221cc | |||
0587308684 | |||
6eff420e17 | |||
d66f8cca40 | |||
06938659d2 | |||
46566296c0 | |||
4e1b06cb51 | |||
b99a8c9d80 | |||
b34547334a | |||
d9bfcb4c09 | |||
8ce14a7c86 | |||
301d1370c4 | |||
306e305b65 | |||
e117706518 | |||
737ea3940e | |||
e5337b50a9 | |||
23dc1b600a | |||
f05162811c | |||
0b71eb201c | |||
707ab1df6a | |||
c811d86dbd | |||
902e6d7a27 | |||
827e31191d | |||
b9b3101bd9 | |||
8e8a60a432 | |||
72d50cf8b7 | |||
3a1601de8e | |||
3f8dd1b705 | |||
f360489f1e | |||
79f19f2fc7 | |||
5cf6dea997 | |||
214714e0ab | |||
d894c8befe | |||
cc4d4acc6b | |||
dc52a6fec5 | |||
16e174be7e | |||
8e41a308cd | |||
787f292ca7 | |||
dad956b2ee | |||
1f477c8eb1 | |||
6260fa9f07 | |||
88f44701a9 | |||
9ed944312f | |||
6eb14522b6 | |||
ac12b02437 | |||
9ed2ca792f | |||
ebabca575c | |||
b60f91f722 | |||
2b4c54d383 | |||
ed1381adc4 | |||
1b7fabd1fd | |||
87a562e24b | |||
b5ff46db6a | |||
8b086d3613 | |||
d702c4605a | |||
6325bc5e54 | |||
25d90fa603 | |||
86f7f53f85 | |||
461eb43d9d | |||
df3892f323 | |||
0d3f76ddef | |||
816b9a6953 | |||
80788636ee | |||
c46ca36bcd | |||
62bd6fe08b | |||
f69b22f00b | |||
c6523eb8d9 | |||
76afa74320 | |||
a0d4ae18ee | |||
4884894ddb | |||
e7877db078 | |||
1181349c22 | |||
378395c22c | |||
2bcf2389aa | |||
a65e5ab01d | |||
4ff4e3f93d | |||
d36514a323 | |||
4401924128 | |||
5314b31b12 | |||
b2b5b89a92 | |||
76bbd41e43 | |||
5f3c8d45d8 | |||
38694a9850 | |||
0a0475ebad | |||
38ffcaad7b | |||
1b01598840 | |||
45ff964cbd | |||
81baf53814 | |||
6ebc0fc3ff | |||
b1da50774a | |||
469e23cae4 | |||
23ba613b00 | |||
f2dcae570c | |||
f1ce0c98fd | |||
35d2750757 | |||
4b1f4e63c3 | |||
c29bcc94e7 | |||
d3cbcf401f | |||
fb26109049 | |||
d99905b604 | |||
a8890d5cca | |||
5139054325 | |||
039d0a685a | |||
e0685315b4 | |||
02fc844e40 | |||
b48f50f018 | |||
dc0ac8e917 | |||
f2e8c391a2 | |||
7029d24f42 | |||
4e8289d7bb | |||
bf8763fc11 | |||
11375c19d2 | |||
8f4feeb119 | |||
e26364f885 | |||
fff0c6e2cb | |||
68c2729991 | |||
8127b5dd24 | |||
a9caa61ef9 | |||
99fe866d12 | |||
c0ad659985 | |||
f41c53fef1 | |||
981a000ee8 | |||
cc4da104e0 | |||
c266e6adaf | |||
d94b344342 | |||
6367fb6e9e | |||
5615d21ce9 | |||
e2c4ff8180 | |||
39770d4197 | |||
cfdb4bbf25 | |||
3760910f0b | |||
3c632e96f9 | |||
baf86dfb0e | |||
219b44a04f | |||
05ee7ea9c7 | |||
cc0616b753 | |||
cbf5fa6684 | |||
a7fa6d00c1 | |||
49f377688a | |||
0b96962157 | |||
ebce62629e | |||
7aacad3270 | |||
035b882db1 | |||
0872e9c3ae | |||
1a573d17c0 | |||
4f20c370f9 | |||
e4bb248142 | |||
dff6268d66 | |||
8f9aa1a250 | |||
7d2e8875e0 | |||
3515e3ee28 | |||
cf82814606 | |||
fc29d82614 | |||
75ced3e945 | |||
685dc78739 | |||
9daa5f9177 | |||
69fbfb939f | |||
f0ecaabd7d | |||
c16f49cf19 | |||
9411458689 | |||
8771872d86 | |||
cda9ae1e42 | |||
81d68cd478 | |||
4c9078cccc | |||
f51828d049 | |||
d97562f6e8 | |||
234484b6f8 | |||
3bd45c005b | |||
05b7c1fffa | |||
a332712275 | |||
b2d8bd08f8 | |||
217be24963 | |||
bf457cd4fc | |||
88a8e986eb | |||
5f0567f8df | |||
a980b9d0a6 | |||
08504f6e06 | |||
da66484578 | |||
424efdaafe | |||
a65a7df209 | |||
c63bb81c3e | |||
a70e77ba48 | |||
c8b5909ee8 | |||
3b0ba923e4 | |||
1940b36e07 | |||
dfec687a46 | |||
bcd85b6f3e | |||
c4b919b24c | |||
c560bac13f | |||
88d27fd607 | |||
3d5f853b03 | |||
07a37f9b47 | |||
0172ad8461 | |||
e1f74a6d57 | |||
e17f6d654c | |||
817830940b | |||
dc9e8161d9 | |||
7f61cbbfd6 | |||
acca56f77c | |||
6bc695f251 | |||
91bb566ee6 | |||
5f04bbbb8b | |||
49fb5cb1a8 | |||
6e036ca09a | |||
8d1e36fa3c | |||
bccff3b237 | |||
a13a024ac8 | |||
5e7263cd1a | |||
0aafc29fb5 | |||
bd37473515 | |||
1c18e37a7c | |||
547c436281 | |||
e0c0d39ede | |||
4edce44689 | |||
186c08467f | |||
367fb9b504 | |||
ac75562296 | |||
7a9b14b49d | |||
32196cfe78 | |||
4d3283e235 | |||
dd3a3a2717 | |||
83d8e936ad | |||
58576630db | |||
7c84634e3f | |||
671640b0a9 | |||
5f7082f053 | |||
2a90cb7355 | |||
e63976df7e | |||
d8c2493658 | |||
4ed25b63a6 | |||
b318d588fe | |||
42d2adc3e0 | |||
5d1eb031eb | |||
1e7840c376 | |||
a6e3470c6f | |||
582b5f45e8 | |||
eb0b6c87d6 | |||
b6ce907928 | |||
9cffbdb42a | |||
d69e131450 | |||
6e84ba182e | |||
6773dfce8d | |||
13ce9e4f64 | |||
f63f8cb154 | |||
6e1118681d | |||
e5cec8f4eb | |||
6c36bd822c | |||
029c586717 | |||
ea6493c041 | |||
455d32d9e5 | |||
7bd801a167 | |||
b6e84879b6 | |||
f7832c0e82 | |||
8c1ab7e0a3 | |||
9d0f69ac50 | |||
215ca6c5ca | |||
a04c90e22d | |||
a84d410f11 | |||
636bae2466 | |||
739a7ea730 | |||
3893fbb0b1 | |||
948205c8e6 | |||
6278afde8d | |||
f0cb2dafbb | |||
a3c145432e | |||
e6f55da080 | |||
30f98f7e64 | |||
c9409a2edb | |||
b857064d65 | |||
a541382776 | |||
07ad24ab97 | |||
55db643048 | |||
8f9b198d48 | |||
6c7129cc0c | |||
919d55f3fc | |||
bdf63420d1 | |||
b7af715f6b | |||
b6eda33438 | |||
ab641d9f18 | |||
c7e128eed1 | |||
cc0259bbed | |||
23fba6d2ea | |||
3182adb6a0 | |||
d52ec65f18 | |||
b968376be9 | |||
90bd8c82b7 | |||
0955e8c5b6 | |||
ef55367224 | |||
a60f454154 | |||
7a7df3e635 | |||
62198a29c2 | |||
e87a35104a | |||
1e051e573d | |||
e172a621f3 | |||
9f09930834 | |||
20c2de9eed | |||
22ca5a6b8d | |||
8b19399b13 | |||
d289c773d0 | |||
a935e0720f | |||
1c3ff179bc | |||
ccab3d6b6e | |||
3e39fae6e1 | |||
d575fd1c3a | |||
0a2fb137af | |||
4907575d3d | |||
4200df21d3 | |||
e0bb5a2bd2 | |||
a6c2c685bc | |||
1e2fa68db0 | |||
599f16f15c | |||
91da168251 | |||
e104bccfb9 | |||
74bd0e32cc | |||
03015ed33f | |||
79ea70d4ec | |||
3ec76af96e | |||
b8efd2a347 | |||
9083157baa | |||
6cdc9e3b77 | |||
f8d4adfb7a | |||
719d9aa83c | |||
9ebaa737aa | |||
88b0982dac | |||
8c2e12ad79 | |||
2c31b3db07 | |||
eedf833b6f | |||
69d81cc065 | |||
af9c31152a | |||
abb6fca5e3 | |||
3ec1c40320 | |||
619211c1bf | |||
3a685049da | |||
ae54d05930 | |||
e7c4597ad0 | |||
09c9495015 | |||
e05f387632 | |||
9870c7c9a6 | |||
3f75b6b371 | |||
04fed82e5e | |||
f3a1dfef95 | |||
f738932bbd | |||
4968b6b9d0 | |||
ee97c00818 | |||
1dbd431117 | |||
09ab583f64 | |||
9ad6d13982 | |||
8d4426f2f8 | |||
8c8f795e9e | |||
7f2f67238f | |||
740fe942c1 | |||
7c5dcbb3fc | |||
7e055810b1 | |||
5758993e9f | |||
d7014e671d | |||
b0427ca9ff | |||
3af575cce7 | |||
f787d272e6 | |||
f061c9a30e | |||
8812072f06 | |||
e911ff4d67 | |||
28b6db115a | |||
e735bd475f | |||
299d199150 | |||
5e784d38eb | |||
868029f655 | |||
043d1ed9fb | |||
6230a62e9e | |||
71b49c3374 | |||
2eef42c6b9 | |||
0209992f6c | |||
c9d54f821b | |||
59d6dee3b3 | |||
9d25b2f29a | |||
91ff57faa7 | |||
b99affba4b | |||
639bd4fc2e | |||
a0f38f8845 | |||
a11c9e9d70 | |||
bdbcf82967 | |||
1f47d72e86 | |||
d83781ddec | |||
e32e55938b | |||
de08b68ba8 | |||
0e3a8c552c | |||
389e7d2502 | |||
fce6146576 | |||
02313e6819 | |||
df0a174802 | |||
bcb7ef48b6 | |||
9f714e62cb | |||
a95c2198a6 | |||
2df91e7f92 | |||
44be445b57 | |||
e43632fd95 | |||
69e4abad0f | |||
3bedbd0669 | |||
1d15bbc95b | |||
5002d87af4 | |||
2a3805c164 | |||
52f646d8db | |||
36c1073441 | |||
2979595cc5 | |||
d67120be19 | |||
ad31f1cf26 | |||
99798ace7d | |||
ba4becc61c | |||
397499b106 | |||
55c3fc9141 | |||
2830ec008c | |||
4c8b09eb97 | |||
98e0864be8 | |||
6dc71f5ad0 | |||
d6f4e4c4fe | |||
33ae71f300 | |||
abcca0897e | |||
b1379b2b14 | |||
27ebccce80 | |||
6964968f14 | |||
68377c176d | |||
baadaee016 | |||
199aa2ad3a | |||
29b176b719 | |||
6ce20675eb | |||
1e9967c3bf | |||
e0bc85d0dd | |||
e3fd4d3f81 | |||
00709fc5bd | |||
157494e803 | |||
f03ba6793e | |||
475aa4f1dd | |||
1d6ac16530 | |||
573a7e2c7b | |||
cebbc82322 | |||
cf5b2aeb88 | |||
52eb9c2ef3 | |||
702dcd8581 | |||
9e6ada6411 | |||
a38663ec90 | |||
02804ab537 | |||
b2d0d9cf13 | |||
46589faaca | |||
166d5fa4ff | |||
4bd38847c2 | |||
30a4187be4 | |||
f0c83a4459 | |||
fc61416c79 | |||
8200831b07 | |||
497954d84c | |||
bcaef8959c | |||
5bef81a059 | |||
d68c3ec89a | |||
0c72f881a6 | |||
8195e2d638 | |||
e8c20390e0 | |||
13df0af514 | |||
54e9aa92bc | |||
1afff777a6 | |||
071faae772 | |||
08a241f763 | |||
63f9e273b3 | |||
71d604067a | |||
66d0e18674 | |||
a940a8aa80 | |||
0d30550950 | |||
65bb0ff167 | |||
151767a5e3 | |||
a948ec6c2c | |||
28a7461057 | |||
6f47990a63 | |||
183c2221bb | |||
03ee54a4df | |||
2541a712e4 | |||
ee877607fb | |||
93351b889a | |||
5fa9d76500 | |||
cd0d0364ec | |||
cf5fec63c0 | |||
5c5cf418fb | |||
fb14008f50 | |||
18c8c16c5e | |||
299a218de7 | |||
1a081c09de |
@@ -30,4 +30,10 @@ rustflags = ["-C", "link-args=-stack:10000000", "-C", "target-feature=+crt-static"]
[target.aarch64-apple-darwin]
# We can guarantee that this target will always run on a CPU with _at least_
# these capabilities, so let's optimize for them
rustflags = ["-Ctarget-cpu=apple-m1"]

# This is required for uutils/coreutils version 0.1.0 and later
# It looks like this is what they use to name their executable
# https://github.com/uutils/coreutils/blob/61bd11a55118458704c4cbbf4e628cd657238d3e/src/uucore/src/lib/lib.rs#L201-L218
[env]
PROJECT_NAME_FOR_VERSION_STRING = "nushell"
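Per the comment above, uutils/coreutils reads PROJECT_NAME_FOR_VERSION_STRING to name its executable, and the new [env] table makes Cargo set that variable during compilation. A quick Nu check of the value the build will see (assumes you run it from the repo root):

open .cargo/config.toml | get env.PROJECT_NAME_FOR_VERSION_STRING   # => nushell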
.github/labeler.yml (vendored, new file, 40 lines)
@@ -0,0 +1,40 @@
# A bot for automatically labelling pull requests
# See https://github.com/actions/labeler

dataframe:
- changed-files:
  - any-glob-to-any-file:
    - crates/nu_plugin_polars/**

std-library:
- changed-files:
  - any-glob-to-any-file:
    - crates/nu-std/**

ci:
- changed-files:
  - any-glob-to-any-file:
    - .github/workflows/**


LSP:
- changed-files:
  - any-glob-to-any-file:
    - crates/nu-lsp/**

parser:
- changed-files:
  - any-glob-to-any-file:
    - crates/nu-parser/**

pr:plugins:
- changed-files:
  - any-glob-to-any-file:
      # plugins API
    - crates/nu-plugin/**
    - crates/nu-plugin-core/**
    - crates/nu-plugin-engine/**
    - crates/nu-plugin-protocol/**
    - crates/nu-plugin-test-support/**
      # specific plugins (like polars)
    - crates/nu_plugin_*/**
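Each top-level key in labeler.yml is a label, applied when any changed file matches one of its globs. A rough local approximation of that matching in Nu (the diff against main and the prefix-based match are illustrative simplifications, not how actions/labeler itself evaluates globs):

let changed = (git diff --name-only main | lines)
{ parser: 'crates/nu-parser', LSP: 'crates/nu-lsp', std-library: 'crates/nu-std' }
| items {|label, prefix| { label: $label, applies: ($changed | any {|f| $f starts-with $prefix }) } }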
.github/workflows/audit.yml (vendored, 2 lines changed)
@@ -20,6 +20,6 @@ jobs:
    continue-on-error: true
    steps:
      - uses: actions/checkout@v4.1.7
      - uses: rustsec/audit-check@v1.4.1
      - uses: rustsec/audit-check@v2.0.0
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/beta-test.yml (vendored, new file, 52 lines)
@@ -0,0 +1,52 @@
name: Test on Beta Toolchain
# This workflow is made to run our tests on the beta toolchain to validate that
# the beta toolchain works.
# We do not intend to test here that we are working correctly but rather that
# the beta toolchain works correctly.
# The ci.yml handles our actual testing with our guarantees.

on:
  schedule:
    # If this workflow fails, GitHub notifications will go to the last person
    # who edited this line.
    # See: https://docs.github.com/en/actions/monitoring-and-troubleshooting-workflows/monitoring-workflows/notifications-for-workflow-runs
    - cron: '0 0 * * *' # Runs daily at midnight UTC

env:
  NUSHELL_CARGO_PROFILE: ci
  NU_LOG_LEVEL: DEBUG

concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref && github.ref || github.run_id }}
  cancel-in-progress: true

jobs:
  build-and-test:
    # this job is more for testing the beta toolchain and not our tests, so if
    # this fails but the tests of the regular ci pass, then this is fine
    continue-on-error: true

    strategy:
      fail-fast: true
      matrix:
        platform: [windows-latest, macos-latest, ubuntu-22.04]

    runs-on: ${{ matrix.platform }}

    steps:
      - uses: actions/checkout@v4

      - run: rustup update beta

      - name: Tests
        run: cargo +beta test --workspace --profile ci --exclude nu_plugin_*
      - name: Check for clean repo
        shell: bash
        run: |
          if [ -n "$(git status --porcelain)" ]; then
            echo "there are changes";
            git status --porcelain
            exit 1
          else
            echo "no changes in working directory";
          fi
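The final step fails the beta run whenever the build or tests leave the working tree dirty. The same check written in Nu, for running locally (illustrative only; the workflow uses the bash version above):

if (git status --porcelain | str trim | is-empty) {
    print 'no changes in working directory'
} else {
    print 'there are changes'
    git status --porcelain
    exit 1
}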
.github/workflows/ci.yml (vendored, 70 lines changed)
@@ -3,6 +3,7 @@ on:
  push:
    branches:
      - main
      - 'patch-release-*'

name: continuous-integration

@@ -21,14 +22,14 @@ jobs:
    strategy:
      fail-fast: true
      matrix:
        # Pinning to Ubuntu 20.04 because building on newer Ubuntu versions causes linux-gnu
        # Pinning to Ubuntu 22.04 because building on newer Ubuntu versions causes linux-gnu
        # builds to link against a too-new-for-many-Linux-installs glibc version. Consider
        # revisiting this when 20.04 is closer to EOL (April 2025)
        # revisiting this when 22.04 is closer to EOL (June 2027)
        #
        # Using macOS 13 runner because 14 is based on the M1 and has half as much RAM (7 GB,
        # instead of 14 GB) which is too little for us right now. Revisit when `dfr` commands are
        # removed and we're only building the `polars` plugin instead
        platform: [windows-latest, macos-13, ubuntu-20.04]
        platform: [windows-latest, macos-13, ubuntu-22.04]

    runs-on: ${{ matrix.platform }}

@@ -36,7 +37,7 @@ jobs:
      - uses: actions/checkout@v4.1.7

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
        uses: actions-rust-lang/setup-rust-toolchain@v1.12.0

      - name: cargo fmt
        run: cargo fmt --all -- --check
@@ -56,7 +57,7 @@ jobs:
    strategy:
      fail-fast: true
      matrix:
        platform: [windows-latest, macos-latest, ubuntu-20.04]
        platform: [windows-latest, macos-latest, ubuntu-22.04]

    runs-on: ${{ matrix.platform }}

@@ -64,7 +65,7 @@ jobs:
      - uses: actions/checkout@v4.1.7

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
        uses: actions-rust-lang/setup-rust-toolchain@v1.12.0

      - name: Tests
        run: cargo test --workspace --profile ci --exclude nu_plugin_*
@@ -83,7 +84,7 @@ jobs:
    strategy:
      fail-fast: true
      matrix:
        platform: [ubuntu-20.04, macos-latest, windows-latest]
        platform: [ubuntu-22.04, macos-latest, windows-latest]
        py:
          - py

@@ -93,10 +94,10 @@ jobs:
      - uses: actions/checkout@v4.1.7

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
        uses: actions-rust-lang/setup-rust-toolchain@v1.12.0

      - name: Install Nushell
        run: cargo install --path . --locked --no-default-features
        run: cargo install --path . --locked --force

      - name: Standard library tests
        run: nu -c 'use crates/nu-std/testing.nu; testing run-tests --path crates/nu-std'
@@ -136,7 +137,7 @@ jobs:
        # instead of 14 GB) which is too little for us right now.
        #
        # Failure occurring with clippy for rust 1.77.2
        platform: [windows-latest, macos-13, ubuntu-20.04]
        platform: [windows-latest, macos-13, ubuntu-22.04]

    runs-on: ${{ matrix.platform }}

@@ -144,7 +145,7 @@ jobs:
      - uses: actions/checkout@v4.1.7

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
        uses: actions-rust-lang/setup-rust-toolchain@v1.12.0

      - name: Clippy
        run: cargo clippy --package nu_plugin_* -- $CLIPPY_OPTIONS
@@ -162,3 +163,50 @@ jobs:
          else
            echo "no changes in working directory";
          fi

  wasm:
    env:
      WASM_OPTIONS: --no-default-features --target wasm32-unknown-unknown
      CLIPPY_CONF_DIR: ${{ github.workspace }}/clippy/wasm/

    strategy:
      matrix:
        job:
          - name: Build WASM
            command: cargo build
            args:
          - name: Clippy WASM
            command: cargo clippy
            args: -- $CLIPPY_OPTIONS

    name: ${{ matrix.job.name }}

    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4.1.7

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.12.0

      - name: Add wasm32-unknown-unknown target
        run: rustup target add wasm32-unknown-unknown

      - run: ${{ matrix.job.command }} -p nu-cmd-base $WASM_OPTIONS ${{ matrix.job.args }}
      - run: ${{ matrix.job.command }} -p nu-cmd-extra $WASM_OPTIONS ${{ matrix.job.args }}
      - run: ${{ matrix.job.command }} -p nu-cmd-lang $WASM_OPTIONS ${{ matrix.job.args }}
      - run: ${{ matrix.job.command }} -p nu-color-config $WASM_OPTIONS ${{ matrix.job.args }}
      - run: ${{ matrix.job.command }} -p nu-command $WASM_OPTIONS ${{ matrix.job.args }}
      - run: ${{ matrix.job.command }} -p nu-derive-value $WASM_OPTIONS ${{ matrix.job.args }}
      - run: ${{ matrix.job.command }} -p nu-engine $WASM_OPTIONS ${{ matrix.job.args }}
      - run: ${{ matrix.job.command }} -p nu-glob $WASM_OPTIONS ${{ matrix.job.args }}
      - run: ${{ matrix.job.command }} -p nu-json $WASM_OPTIONS ${{ matrix.job.args }}
      - run: ${{ matrix.job.command }} -p nu-parser $WASM_OPTIONS ${{ matrix.job.args }}
      - run: ${{ matrix.job.command }} -p nu-path $WASM_OPTIONS ${{ matrix.job.args }}
      - run: ${{ matrix.job.command }} -p nu-pretty-hex $WASM_OPTIONS ${{ matrix.job.args }}
      - run: ${{ matrix.job.command }} -p nu-protocol $WASM_OPTIONS ${{ matrix.job.args }}
      - run: ${{ matrix.job.command }} -p nu-std $WASM_OPTIONS ${{ matrix.job.args }}
      - run: ${{ matrix.job.command }} -p nu-system $WASM_OPTIONS ${{ matrix.job.args }}
      - run: ${{ matrix.job.command }} -p nu-table $WASM_OPTIONS ${{ matrix.job.args }}
      - run: ${{ matrix.job.command }} -p nu-term-grid $WASM_OPTIONS ${{ matrix.job.args }}
      - run: ${{ matrix.job.command }} -p nu-utils $WASM_OPTIONS ${{ matrix.job.args }}
      - run: ${{ matrix.job.command }} -p nuon $WASM_OPTIONS ${{ matrix.job.args }}
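The wasm job repeats one cargo invocation per crate so a single failing crate is easy to spot. The same loop from a local Nu shell, with the crate list shortened (the full list is in the job above):

let wasm_options = ['--no-default-features' '--target' 'wasm32-unknown-unknown']
[nu-cmd-base nu-json nu-protocol nuon] | each {|crate|
    cargo build -p $crate ...$wasm_options
}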
.github/workflows/labels.yml (vendored, new file, 19 lines)
@@ -0,0 +1,19 @@
# Automatically labels PRs based on the configuration file
# you are probably looking for 👉 `.github/labeler.yml`
name: Label PRs

on:
- pull_request_target

jobs:
  triage:
    permissions:
      contents: read
      pull-requests: write
    runs-on: ubuntu-latest
    if: github.repository_owner == 'nushell'
    steps:
      - uses: actions/labeler@v5
        with:
          repo-token: "${{ secrets.GITHUB_TOKEN }}"
          sync-labels: true
.github/workflows/milestone.yml (vendored, new file, 30 lines)
@@ -0,0 +1,30 @@
# Description:
# - Add milestone to a merged PR automatically
# - Add milestone to a closed issue that has a merged PR fix (if any)

name: Milestone Action
on:
  issues:
    types: [closed]
  pull_request_target:
    types: [closed]

jobs:
  update-milestone:
    runs-on: ubuntu-latest
    name: Milestone Update
    steps:
      - name: Set Milestone for PR
        uses: hustcer/milestone-action@main
        if: github.event.pull_request.merged == true
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      # Bind milestone to closed issue that has a merged PR fix
      - name: Set Milestone for Issue
        uses: hustcer/milestone-action@v2
        if: github.event.issue.state == 'closed'
        with:
          action: bind-issue
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/nightly-build.yml (vendored, 174 lines changed)
@@ -4,6 +4,7 @@
# 2. https://github.com/JasonEtco/create-an-issue
# 3. https://docs.github.com/en/actions/learn-github-actions/variables
# 4. https://github.com/actions/github-script
# 5. https://docs.github.com/en/actions/writing-workflows/workflow-syntax-for-github-actions#jobsjob_idneeds
#
name: Nightly Build

@@ -14,6 +15,7 @@ on:
  # This schedule will run only from the default branch
  schedule:
    - cron: '15 0 * * *' # run at 00:15 AM UTC
  workflow_dispatch:

defaults:
  run:
@@ -25,9 +27,14 @@ jobs:
    runs-on: ubuntu-latest
    # This job is required by the release job, so we should make it run both from Nushell repo and nightly repo
    # if: github.repository == 'nushell/nightly'
    # Map a step output to a job output
    outputs:
      skip: ${{ steps.vars.outputs.skip }}
      build_date: ${{ steps.vars.outputs.build_date }}
      nightly_tag: ${{ steps.vars.outputs.nightly_tag }}
    steps:
      - name: Checkout
        uses: actions/checkout@v4.1.7
        uses: actions/checkout@v4
        if: github.repository == 'nushell/nightly'
        with:
          ref: main
@@ -36,10 +43,10 @@ jobs:
          token: ${{ secrets.WORKFLOW_TOKEN }}

      - name: Setup Nushell
        uses: hustcer/setup-nu@v3.13
        uses: hustcer/setup-nu@v3
        if: github.repository == 'nushell/nightly'
        with:
          version: 0.97.1
          version: 0.103.0

      # Synchronize the main branch of nightly repo with the main branch of Nushell official repo
      - name: Prepare for Nightly Release
@@ -57,16 +64,53 @@ jobs:
          # All the changes will be overwritten by the upstream main branch
          git reset --hard src/main
          git push origin main -f
          let sha_short = (git rev-parse --short origin/main | str trim | str substring 0..7)
          let tag_name = $'nightly-($sha_short)'
          if (git ls-remote --tags origin $tag_name | is-empty) {
            git tag -a $tag_name -m $'Nightly build from ($sha_short)'

      - name: Create Tag and Output Tag Name
        if: github.repository == 'nushell/nightly'
        id: vars
        shell: nu {0}
        run: |
          let date = date now | format date %m%d
          let version = open Cargo.toml | get package.version
          let sha_short = (git rev-parse --short origin/main | str trim | str substring 0..6)
          let latest_meta = http get https://api.github.com/repos/nushell/nightly/releases
            | sort-by -r created_at
            | where tag_name =~ nightly
            | get tag_name?.0? | default ''
            | parse '{version}-nightly.{build}+{hash}'
          if ($latest_meta.0?.hash? | default '') == $sha_short {
            print $'(ansi g)Latest nightly build is up-to-date, skip rebuilding.(ansi reset)'
            $'skip=true(char nl)' o>> $env.GITHUB_OUTPUT
            exit 0
          }
          let prev_ver = $latest_meta.0?.version? | default '0.0.0'
          let build = if ($latest_meta | is-empty) or ($version != $prev_ver) { 1 } else {
            ($latest_meta | get build?.0? | default 0 | into int) + 1
          }
          let nightly_tag = $'($version)-nightly.($build)+($sha_short)'
          $'build_date=($date)(char nl)' o>> $env.GITHUB_OUTPUT
          $'nightly_tag=($nightly_tag)(char nl)' o>> $env.GITHUB_OUTPUT
          if (git ls-remote --tags origin $nightly_tag | is-empty) {
            ls **/Cargo.toml | each {|file|
              open --raw $file.name
                | str replace --all $'version = "($version)"' $'version = "($version)-nightly.($build)"'
                | save --force $file.name
            }
            # Disable the following two workflows for the automatic committed changes
            rm .github/workflows/ci.yml
            rm .github/workflows/audit.yml

            git add .
            git commit -m $'Update version to ($version)-nightly.($build)'
            git tag -a $nightly_tag -m $'Nightly build from ($sha_short)'
            git push origin --tags
            git push origin main -f
          }

  standard:
    name: Std
  release:
    name: Nu
    needs: prepare
    if: needs.prepare.outputs.skip != 'true'
    strategy:
      fail-fast: false
      matrix:
@@ -82,24 +126,16 @@ jobs:
        - armv7-unknown-linux-gnueabihf
        - armv7-unknown-linux-musleabihf
        - riscv64gc-unknown-linux-gnu
        extra: ['bin']
        - loongarch64-unknown-linux-gnu
        include:
        - target: aarch64-apple-darwin
          os: macos-latest
        - target: x86_64-apple-darwin
          os: macos-latest
        - target: x86_64-pc-windows-msvc
          extra: 'bin'
          os: windows-latest
        - target: x86_64-pc-windows-msvc
          extra: msi
          os: windows-latest
        - target: aarch64-pc-windows-msvc
          extra: 'bin'
          os: windows-latest
        - target: aarch64-pc-windows-msvc
          extra: msi
          os: windows-latest
          os: windows-11-arm
        - target: x86_64-unknown-linux-gnu
          os: ubuntu-22.04
        - target: x86_64-unknown-linux-musl
@@ -113,43 +149,69 @@ jobs:
        - target: armv7-unknown-linux-musleabihf
          os: ubuntu-22.04
        - target: riscv64gc-unknown-linux-gnu
          os: ubuntu-latest
          os: ubuntu-22.04
        - target: loongarch64-unknown-linux-gnu
          os: ubuntu-22.04

    runs-on: ${{matrix.os}}

    steps:
      - uses: actions/checkout@v4.1.7
      - uses: actions/checkout@v4
        with:
          ref: main
          fetch-depth: 0

      - name: Install Wix Toolset 6 for Windows
        shell: pwsh
        if: ${{ startsWith(matrix.os, 'windows') }}
        run: |
          dotnet tool install --global wix --version 6.0.0
          dotnet workload install wix
          $wixPath = "$env:USERPROFILE\.dotnet\tools"
          echo "$wixPath" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
          $env:PATH = "$wixPath;$env:PATH"
          wix --version

      - name: Update Rust Toolchain Target
        run: |
          echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml

      - name: Setup Rust toolchain and cache
        uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
        uses: actions-rust-lang/setup-rust-toolchain@v1
        # WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
        with:
          rustflags: ''

      - name: Setup Nushell
        uses: hustcer/setup-nu@v3.13
        uses: hustcer/setup-nu@v3
        if: ${{ matrix.os != 'windows-11-arm' }}
        with:
          version: 0.97.1
          version: 0.103.0

      - name: Release Nu Binary
        id: nu
        if: ${{ matrix.os != 'windows-11-arm' }}
        run: nu .github/workflows/release-pkg.nu
        env:
          OS: ${{ matrix.os }}
          REF: ${{ github.ref }}
          TARGET: ${{ matrix.target }}
          _EXTRA_: ${{ matrix.extra }}

      - name: Build Nu for Windows ARM64
        id: nu0
        shell: pwsh
        if: ${{ matrix.os == 'windows-11-arm' }}
        run: |
          $env:OS = 'windows'
          $env:REF = '${{ github.ref }}'
          $env:TARGET = '${{ matrix.target }}'
          cargo build --release --all --target aarch64-pc-windows-msvc
          cp ./target/${{ matrix.target }}/release/nu.exe .
          ./nu.exe -c 'version'
          ./nu.exe ${{github.workspace}}/.github/workflows/release-pkg.nu

      - name: Create an Issue for Release Failure
        if: ${{ failure() }}
        uses: JasonEtco/create-an-issue@v2.9.2
        uses: JasonEtco/create-an-issue@v2
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
@@ -157,23 +219,46 @@ jobs:
          search_existing: open
          filename: .github/AUTO_ISSUE_TEMPLATE/nightly-build-fail.md

      - name: Set Outputs of Short SHA
        id: vars
        run: |
          echo "date=$(date -u +'%Y-%m-%d')" >> $GITHUB_OUTPUT
          sha_short=$(git rev-parse --short HEAD)
          echo "sha_short=${sha_short:0:7}" >> $GITHUB_OUTPUT

      # REF: https://github.com/marketplace/actions/gh-release
      # Create a release only in nushell/nightly repo
      - name: Publish Archive
        uses: softprops/action-gh-release@v2.0.8
        uses: softprops/action-gh-release@v2.0.9
        if: ${{ startsWith(github.repository, 'nushell/nightly') }}
        with:
          prerelease: true
          files: ${{ steps.nu.outputs.archive }}
          tag_name: nightly-${{ steps.vars.outputs.sha_short }}
          name: Nu-nightly-${{ steps.vars.outputs.date }}-${{ steps.vars.outputs.sha_short }}
          files: |
            ${{ steps.nu.outputs.msi }}
            ${{ steps.nu0.outputs.msi }}
            ${{ steps.nu.outputs.archive }}
            ${{ steps.nu0.outputs.archive }}
          tag_name: ${{ needs.prepare.outputs.nightly_tag }}
          name: ${{ needs.prepare.outputs.build_date }}-${{ needs.prepare.outputs.nightly_tag }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  sha256sum:
    needs: [prepare, release]
    name: Create Sha256sum
    runs-on: ubuntu-latest
    if: github.repository == 'nushell/nightly'
    steps:
      - name: Download Release Archives
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: >-
          gh release download ${{ needs.prepare.outputs.nightly_tag }}
          --repo ${{ github.repository }}
          --pattern '*'
          --dir release
      - name: Create Checksums
        run: cd release && shasum -a 256 * > ../SHA256SUMS
      - name: Publish Checksums
        uses: softprops/action-gh-release@v2.0.9
        with:
          draft: false
          prerelease: true
          files: SHA256SUMS
          tag_name: ${{ needs.prepare.outputs.nightly_tag }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

@@ -181,27 +266,24 @@ jobs:
    name: Cleanup
    # Should only run in nushell/nightly repo
    if: github.repository == 'nushell/nightly'
    needs: [release, sha256sum]
    runs-on: ubuntu-latest
    steps:
      # Sleep for 30 minutes, waiting for the release to be published
      - name: Waiting for Release
        run: sleep 1800

      - uses: actions/checkout@v4.1.7
      - uses: actions/checkout@v4
        with:
          ref: main

      - name: Setup Nushell
        uses: hustcer/setup-nu@v3.13
        uses: hustcer/setup-nu@v3
        with:
          version: 0.97.1
          version: 0.103.0

      # Keep the last a few releases
      - name: Delete Older Releases
        shell: nu {0}
        run: |
          let KEEP_COUNT = 10
          let deprecated = (http get https://api.github.com/repos/nushell/nightly/releases | sort-by -r created_at | select tag_name id | range $KEEP_COUNT..)
          let deprecated = (http get https://api.github.com/repos/nushell/nightly/releases | sort-by -r created_at | select tag_name id | slice $KEEP_COUNT..)
          for release in $deprecated {
            print $'Deleting tag ($release.tag_name)'
            git push origin --delete $release.tag_name
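The prepare job decides whether to rebuild by parsing the latest nightly release name into its version, build number, and short hash. The same parse in isolation (the tag value here is a made-up example):

'0.104.0-nightly.2+a1b2c3d' | parse '{version}-nightly.{build}+{hash}'
# => one row: version 0.104.0, build 2, hash a1b2c3d; the job bumps build when the version is unchanged, otherwise restarts it at 1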
.github/workflows/release-msi.nu (vendored, new executable file, 62 lines)
@@ -0,0 +1,62 @@
#!/usr/bin/env nu

# Created: 2025/05/21 19:05:20
# Description:
# A script to build Windows MSI packages for NuShell. Need wix 6.0 to be installed.
# The script will download the specified NuShell release, extract it, and create an MSI package.
# Can be run locally or in GitHub Actions.
# To run this script locally:
# load-env { TARGET: 'x86_64-pc-windows-msvc' REF: '0.103.0' GITHUB_REPOSITORY: 'nushell/nushell' }
# nu .github/workflows/release-msi.nu

def build-msi [] {
    let target = $env.TARGET
    # We should read the version from the environment variable first
    # As we may build the MSI package for a specific version not the latest one
    let version = $env.MSI_VERSION? | default (open Cargo.toml | get package.version)
    let arch = if $nu.os-info.arch =~ 'x86_64' { 'x64' } else { 'arm64' }

    print $'Building msi package for (ansi g)($target)(ansi reset) with version (ansi g)($version)(ansi reset) from tag (ansi g)($env.REF)(ansi reset)...'
    fetch-nu-pkg
    # Create extra Windows msi release package if dotnet and wix are available
    let installed = [dotnet wix] | all { (which $in | length) > 0 }
    if $installed and (wix --version | split row . | first | into int) >= 6 {

        print $'(char nl)Start creating Windows msi package with the following contents...'
        cd wix; hr-line
        cp nu/README.txt .
        ls -f nu/* | print
        ./nu/nu.exe -c $'NU_RELEASE_VERSION=($version) dotnet build -c Release -p:Platform=($arch)'
        glob **/*.msi | print
        # Workaround for https://github.com/softprops/action-gh-release/issues/280
        let wixRelease = (glob **/*.msi | where $it =~ bin | get 0 | str replace --all '\' '/')
        let msi = $'($wixRelease | path dirname)/nu-($version)-($target).msi'
        mv $wixRelease $msi
        print $'MSI archive: ---> ($msi)';
        # Run only in GitHub Actions
        if ($env.GITHUB_ACTIONS? | default false | into bool) {
            echo $"msi=($msi)(char nl)" o>> $env.GITHUB_OUTPUT
        }
    }
}

def fetch-nu-pkg [] {
    mkdir wix/nu
    # See: https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/store-information-in-variables#default-environment-variables
    gh release download $env.REF --repo $env.GITHUB_REPOSITORY --pattern $'*-($env.TARGET).zip' --dir wix/nu
    cd wix/nu
    let pkg = ls *.zip | get name.0
    unzip $pkg
    rm $pkg
    ls | print
}

# Print a horizontal line marker
def 'hr-line' [
    --blank-line(-b)
] {
    print $'(ansi g)---------------------------------------------------------------------------->(ansi reset)'
    if $blank_line { char nl }
}

alias main = build-msi
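The `[dotnet wix] | all { (which $in | length) > 0 }` guard is a general "only proceed when every external tool is on PATH" pattern. A small helper built around it (the helper name is illustrative, not part of the script):

# returns true only when every listed command can be found on PATH
def tools-installed [tools: list<string>] {
    $tools | all { (which $in | length) > 0 }
}
tools-installed [git cargo]   # e.g. true on a machine with both installed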
.github/workflows/release-msi.yml (vendored, new file, 103 lines)
@@ -0,0 +1,103 @@
#
# REF:
# 1. https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstrategymatrixinclude
#
name: Build Windows MSI

on:
  workflow_dispatch:
    inputs:
      tag:
        required: true
        description: 'Tag to Rebuild MSI'
      version:
        description: 'Version of Rebuild MSI'

permissions:
  contents: write
  packages: write

defaults:
  run:
    shell: bash

jobs:
  release:
    name: Nu

    strategy:
      fail-fast: false
      matrix:
        target:
        - x86_64-pc-windows-msvc
        - aarch64-pc-windows-msvc
        extra: ['bin']

        include:
        - target: x86_64-pc-windows-msvc
          os: windows-latest
        - target: aarch64-pc-windows-msvc
          os: windows-11-arm

    runs-on: ${{ matrix.os }}

    steps:
      - uses: actions/checkout@v4

      - name: Install Wix Toolset 6 for Windows
        shell: pwsh
        if: ${{ startsWith(matrix.os, 'windows') }}
        run: |
          dotnet tool install --global wix --version 6.0.0
          dotnet workload install wix
          $wixPath = "$env:USERPROFILE\.dotnet\tools"
          echo "$wixPath" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
          $env:PATH = "$wixPath;$env:PATH"
          wix --version

      - name: Setup Nushell
        uses: hustcer/setup-nu@v3
        with:
          version: nightly

      - name: Release MSI Packages
        id: nu
        run: nu .github/workflows/release-msi.nu
        env:
          OS: ${{ matrix.os }}
          REF: ${{ inputs.tag }}
          TARGET: ${{ matrix.target }}
          MSI_VERSION: ${{ inputs.version }}
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      # REF: https://github.com/marketplace/actions/gh-release
      - name: Publish Archive
        uses: softprops/action-gh-release@v2.0.5
        with:
          tag_name: ${{ inputs.tag }}
          files: ${{ steps.nu.outputs.msi }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  sha256sum:
    needs: release
    name: Create Sha256sum
    runs-on: ubuntu-latest
    steps:
      - name: Download Release Archives
        env:
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: >-
          gh release download ${{ inputs.tag }}
          --repo ${{ github.repository }}
          --pattern '*'
          --dir release
      - name: Create Checksums
        run: cd release && rm -f SHA256SUMS && shasum -a 256 * > ../SHA256SUMS
      - name: Publish Checksums
        uses: softprops/action-gh-release@v2.0.5
        with:
          files: SHA256SUMS
          tag_name: ${{ inputs.tag }}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
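The sha256sum job shells out to shasum; when checking a downloaded asset locally, Nu can compute the same digests natively (assumes the assets were downloaded into a release directory as in the job above):

ls release | each {|f| { file: ($f.name | path basename), sha256: (open --raw $f.name | hash sha256) } }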
.github/workflows/release-pkg.nu (vendored, 118 lines changed)
@@ -8,10 +8,10 @@

# Instructions for manually creating an MSI for Winget Releases when they fail
# Added 2022-11-29 when Windows packaging wouldn't work
# Updated again on 2023-02-23 because msis are still failing validation
# Updated again on 2023-02-23 because MSIs are still failing validation
# To run this manual for windows here are the steps I take
# checkout the release you want to publish
# 1. git checkout 0.86.0
# 1. git checkout 0.103.0
# unset CARGO_TARGET_DIR if set (I have to do this in the parent shell to get it to work)
# 2. $env:CARGO_TARGET_DIR = ""
# 2. hide-env CARGO_TARGET_DIR
@@ -23,19 +23,13 @@
# 7. $env.Path = ($env.Path | append 'c:\apps\7-zip')
# make sure aria2c.exe is in your path https://github.com/aria2/aria2
# 8. $env.Path = ($env.Path | append 'c:\path\to\aria2c')
# make sure you have the wixtools installed https://wixtoolset.org/
# 9. $env.Path = ($env.Path | append 'C:\Users\dschroeder\AppData\Local\tauri\WixTools')
# You need to run the release-pkg twice. The first pass, with _EXTRA_ as 'bin', makes the output
# folder and builds everything. The second pass, that generates the msi file, with _EXTRA_ as 'msi'
# 10. $env._EXTRA_ = 'bin'
# 11. source .github\workflows\release-pkg.nu
# 12. cd ..
# 13. $env._EXTRA_ = 'msi'
# 14. source .github\workflows\release-pkg.nu
# make sure you have the wix 6.0 installed: dotnet tool install --global wix --version 6.0.0
# then build nu*.exe and the MSI installer by running:
# 9. source .github\workflows\release-pkg.nu
# After msi is generated, you have to update winget-pkgs repo, you'll need to patch the release
# by deleting the existing msi and uploading this new msi. Then you'll need to update the hash
# on the winget-pkgs PR. To generate the hash, run this command
# 15. open target\wix\nu-0.74.0-x86_64-pc-windows-msvc.msi | hash sha256
# 10. open wix\bin\x64\Release\nu-0.103.0-x86_64-pc-windows-msvc.msi | hash sha256
# Then, just take the output and put it in the winget-pkgs PR for the hash on the msi


@@ -85,19 +79,26 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
            cargo-build-nu
        }
        'aarch64-unknown-linux-musl' => {
            aria2c https://musl.cc/aarch64-linux-musl-cross.tgz
            aria2c https://github.com/nushell/integrations/releases/download/build-tools/aarch64-linux-musl-cross.tgz
            tar -xf aarch64-linux-musl-cross.tgz -C $env.HOME
            $env.PATH = ($env.PATH | split row (char esep) | prepend $'($env.HOME)/aarch64-linux-musl-cross/bin')
            $env.CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER = 'aarch64-linux-musl-gcc'
            cargo-build-nu
        }
        'armv7-unknown-linux-musleabihf' => {
            aria2c https://musl.cc/armv7r-linux-musleabihf-cross.tgz
            aria2c https://github.com/nushell/integrations/releases/download/build-tools/armv7r-linux-musleabihf-cross.tgz
            tar -xf armv7r-linux-musleabihf-cross.tgz -C $env.HOME
            $env.PATH = ($env.PATH | split row (char esep) | prepend $'($env.HOME)/armv7r-linux-musleabihf-cross/bin')
            $env.CARGO_TARGET_ARMV7_UNKNOWN_LINUX_MUSLEABIHF_LINKER = 'armv7r-linux-musleabihf-gcc'
            cargo-build-nu
        }
        'loongarch64-unknown-linux-gnu' => {
            aria2c https://github.com/loongson/build-tools/releases/download/2024.08.08/x86_64-cross-tools-loongarch64-binutils_2.43-gcc_14.2.0-glibc_2.40.tar.xz
            tar xf x86_64-cross-tools-loongarch64-*.tar.xz
            $env.PATH = ($env.PATH | split row (char esep) | prepend $'($env.PWD)/cross-tools/bin')
            $env.CARGO_TARGET_LOONGARCH64_UNKNOWN_LINUX_GNU_LINKER = 'loongarch64-unknown-linux-gnu-gcc'
            cargo-build-nu
        }
        _ => {
            # musl-tools to fix 'Failed to find tool. Is `musl-gcc` installed?'
            # Actually just for x86_64-unknown-linux-musl target
@@ -110,14 +111,14 @@
# ----------------------------------------------------------------------------
# Build for Windows without static-link-openssl feature
# ----------------------------------------------------------------------------
if $os in ['windows-latest'] {
if $os =~ 'windows' {
    cargo-build-nu
}

# ----------------------------------------------------------------------------
# Prepare for the release archive
# ----------------------------------------------------------------------------
let suffix = if $os == 'windows-latest' { '.exe' }
let suffix = if $os =~ 'windows' { '.exe' }
# nu, nu_plugin_* were all included
let executable = $'target/($target)/release/($bin)*($suffix)'
print $'Current executable file: ($executable)'
@@ -141,10 +142,10 @@ For more information, refer to https://www.nushell.sh/book/plugins.html
[LICENSE ...(glob $executable)] | each {|it| cp -rv $it $dist } | flatten

print $'(char nl)Check binary release version detail:'; hr-line
let ver = if $os == 'windows-latest' {
    (do -i { .\output\nu.exe -c 'version' }) | str join
let ver = if $os =~ 'windows' {
    (do -i { .\output\nu.exe -c 'version' }) | default '' | str join
} else {
    (do -i { ./output/nu -c 'version' }) | str join
    (do -i { ./output/nu -c 'version' }) | default '' | str join
}
if ($ver | str trim | is-empty) {
    print $'(ansi r)Incompatible Nu binary: The binary cross compiled is not runnable on current arch...(ansi reset)'
@@ -168,53 +169,60 @@ if $os in ['macos-latest'] or $USE_UBUNTU {
    tar -czf $archive $dest
    print $'archive: ---> ($archive)'; ls $archive
    # REF: https://github.blog/changelog/2022-10-11-github-actions-deprecating-save-state-and-set-output-commands/
    echo $"archive=($archive)" | save --append $env.GITHUB_OUTPUT
    echo $"archive=($archive)(char nl)" o>> $env.GITHUB_OUTPUT

} else if $os == 'windows-latest' {
} else if $os =~ 'windows' {

    let releaseStem = $'($bin)-($version)-($target)'
    let arch = if $nu.os-info.arch =~ 'x86_64' { 'x64' } else { 'arm64' }
    fetch-less $arch

    print $'(char nl)Download less related stuffs...'; hr-line
    # todo: less-v661 is out but is released as a zip file. maybe we should switch to that and extract it?
    aria2c https://github.com/jftuga/less-Windows/releases/download/less-v608/less.exe -o less.exe
    # the below was renamed because it was failing to download for darren. it should work but it wasn't
    # todo: maybe we should get rid of this aria2c dependency and just use http get?
    #aria2c https://raw.githubusercontent.com/jftuga/less-Windows/master/LICENSE -o LICENSE-for-less.txt
    aria2c https://github.com/jftuga/less-Windows/blob/master/LICENSE -o LICENSE-for-less.txt

    # Create Windows msi release package
    if (get-env _EXTRA_) == 'msi' {

        let wixRelease = $'($src)/target/wix/($releaseStem).msi'
        print $'(char nl)Start creating Windows msi package with the following contents...'
        cd $src; hr-line
        # Wix need the binaries be stored in target/release/
        cp -r ($'($dist)/*' | into glob) target/release/
        ls target/release/* | print
        cargo install cargo-wix --version 0.3.8
        cargo wix --no-build --nocapture --package nu --output $wixRelease
    print $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls | print
    let archive = $'($dist)/($releaseStem).zip'
    7z a $archive ...(glob *)
    let pkg = (ls -f $archive | get name)
    if not ($pkg | is-empty) {
        # Workaround for https://github.com/softprops/action-gh-release/issues/280
        let archive = ($wixRelease | str replace --all '\' '/')
        print $'archive: ---> ($archive)';
        echo $"archive=($archive)" | save --append $env.GITHUB_OUTPUT
        let archive = ($pkg | get 0 | str replace --all '\' '/')
        print $'archive: ---> ($archive)'
        echo $"archive=($archive)(char nl)" o>> $env.GITHUB_OUTPUT
    }

    } else {
    # Create extra Windows msi release package if dotnet and wix are available
    let installed = [dotnet wix] | all { (which $in | length) > 0 }
    if $installed and (wix --version | split row . | first | into int) >= 6 {

        print $'(char nl)(ansi g)Archive contents:(ansi reset)'; hr-line; ls | print
        let archive = $'($dist)/($releaseStem).zip'
        7z a $archive ...(glob *)
        let pkg = (ls -f $archive | get name)
        if not ($pkg | is-empty) {
            # Workaround for https://github.com/softprops/action-gh-release/issues/280
            let archive = ($pkg | get 0 | str replace --all '\' '/')
            print $'archive: ---> ($archive)'
            echo $"archive=($archive)" | save --append $env.GITHUB_OUTPUT
        }
        print $'(char nl)Start creating Windows msi package with the following contents...'
        cd $src; cd wix; hr-line; mkdir nu
        # Wix need the binaries be stored in nu folder
        cp -r ($'($dist)/*' | into glob) nu/
        cp $'($dist)/README.txt' .
        ls -f nu/* | print
        ./nu/nu.exe -c $'NU_RELEASE_VERSION=($version) dotnet build -c Release -p:Platform=($arch)'
        glob **/*.msi | print
        # Workaround for https://github.com/softprops/action-gh-release/issues/280
        let wixRelease = (glob **/*.msi | where $it =~ bin | get 0 | str replace --all '\' '/')
        let msi = $'($wixRelease | path dirname)/nu-($version)-($target).msi'
        mv $wixRelease $msi
        print $'MSI archive: ---> ($msi)';
        echo $"msi=($msi)(char nl)" o>> $env.GITHUB_OUTPUT
    }
}

def fetch-less [
    arch: string = 'x64'  # The architecture to fetch
] {
    let less_zip = $'less-($arch).zip'
    print $'Fetching less archive: (ansi g)($less_zip)(ansi reset)'
    let url = $'https://github.com/jftuga/less-Windows/releases/download/less-v668/($less_zip)'
    http get https://github.com/jftuga/less-Windows/blob/master/LICENSE | save -rf LICENSE-for-less.txt
    http get $url | save -rf $less_zip
    unzip $less_zip
    rm $less_zip lesskey.exe
}

def 'cargo-build-nu' [] {
    if $os == 'windows-latest' {
    if $os =~ 'windows' {
        cargo build --release --all --target $target
    } else {
        cargo build --release --all --target $target --features=static-link-openssl
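Several steps in these scripts hand values back to later workflow steps by appending key=value lines to $env.GITHUB_OUTPUT with the o>> redirection. A minimal sketch of the same pattern outside CI (mktemp stands in for the GITHUB_OUTPUT file that Actions normally provides, and the archive path is just an example value):

$env.GITHUB_OUTPUT = (mktemp)
$"archive=output/nu-0.103.0.zip(char nl)" o>> $env.GITHUB_OUTPUT
open $env.GITHUB_OUTPUT   # => archive=output/nu-0.103.0.zip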
.github/workflows/release.yml (93 changes, vendored)

@@ -7,15 +7,17 @@ name: Create Release Draft
on:
  workflow_dispatch:
  push:
    tags: ["[0-9]+.[0-9]+.[0-9]+*"]
    tags:
      - '[0-9]+.[0-9]+.[0-9]+*'
      - '!*nightly*' # Don't trigger release for nightly tags

defaults:
  run:
    shell: bash

jobs:
  standard:
    name: Std
  release:
    name: Nu

    strategy:
      fail-fast: false
@@ -32,24 +34,16 @@ jobs:
        - armv7-unknown-linux-gnueabihf
        - armv7-unknown-linux-musleabihf
        - riscv64gc-unknown-linux-gnu
        extra: ['bin']
        - loongarch64-unknown-linux-gnu
        include:
        - target: aarch64-apple-darwin
          os: macos-latest
        - target: x86_64-apple-darwin
          os: macos-latest
        - target: x86_64-pc-windows-msvc
          extra: 'bin'
          os: windows-latest
        - target: x86_64-pc-windows-msvc
          extra: msi
          os: windows-latest
        - target: aarch64-pc-windows-msvc
          extra: 'bin'
          os: windows-latest
        - target: aarch64-pc-windows-msvc
          extra: msi
          os: windows-latest
          os: windows-11-arm
        - target: x86_64-unknown-linux-gnu
          os: ubuntu-22.04
        - target: x86_64-unknown-linux-musl
@@ -63,44 +57,99 @@ jobs:
        - target: armv7-unknown-linux-musleabihf
          os: ubuntu-22.04
        - target: riscv64gc-unknown-linux-gnu
          os: ubuntu-latest
          os: ubuntu-22.04
        - target: loongarch64-unknown-linux-gnu
          os: ubuntu-22.04

    runs-on: ${{matrix.os}}

    steps:
    - uses: actions/checkout@v4.1.7
    - uses: actions/checkout@v4

    - name: Install Wix Toolset 6 for Windows
      shell: pwsh
      if: ${{ startsWith(matrix.os, 'windows') }}
      run: |
        dotnet tool install --global wix --version 6.0.0
        dotnet workload install wix
        $wixPath = "$env:USERPROFILE\.dotnet\tools"
        echo "$wixPath" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append
        $env:PATH = "$wixPath;$env:PATH"
        wix --version

    - name: Update Rust Toolchain Target
      run: |
        echo "targets = ['${{matrix.target}}']" >> rust-toolchain.toml

    - name: Setup Rust toolchain
      uses: actions-rust-lang/setup-rust-toolchain@v1.9.0
      uses: actions-rust-lang/setup-rust-toolchain@v1.12.0
      # WARN: Keep the rustflags to prevent from the winget submission error: `CAQuietExec: Error 0xc0000135`
      with:
        cache: false
        rustflags: ''

    - name: Setup Nushell
      uses: hustcer/setup-nu@v3.13
      uses: hustcer/setup-nu@v3
      if: ${{ matrix.os != 'windows-11-arm' }}
      with:
        version: 0.97.1
        version: 0.103.0

    - name: Release Nu Binary
      id: nu
      if: ${{ matrix.os != 'windows-11-arm' }}
      run: nu .github/workflows/release-pkg.nu
      env:
        OS: ${{ matrix.os }}
        REF: ${{ github.ref }}
        TARGET: ${{ matrix.target }}
        _EXTRA_: ${{ matrix.extra }}

    # REF: https://github.com/marketplace/actions/gh-release
    - name: Build Nu for Windows ARM64
      id: nu0
      shell: pwsh
      if: ${{ matrix.os == 'windows-11-arm' }}
      run: |
        $env:OS = 'windows'
        $env:REF = '${{ github.ref }}'
        $env:TARGET = '${{ matrix.target }}'
        cargo build --release --all --target aarch64-pc-windows-msvc
        cp ./target/${{ matrix.target }}/release/nu.exe .
        ./nu.exe -c 'version'
        ./nu.exe ${{github.workspace}}/.github/workflows/release-pkg.nu

    # WARN: Don't upgrade this action due to the release per asset issue.
    # See: https://github.com/softprops/action-gh-release/issues/445
    - name: Publish Archive
      uses: softprops/action-gh-release@v2.0.8
      uses: softprops/action-gh-release@v2.0.5
      if: ${{ startsWith(github.ref, 'refs/tags/') }}
      with:
        draft: true
        files: ${{ steps.nu.outputs.archive }}
        files: |
          ${{ steps.nu.outputs.msi }}
          ${{ steps.nu0.outputs.msi }}
          ${{ steps.nu.outputs.archive }}
          ${{ steps.nu0.outputs.archive }}
      env:
        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

  sha256sum:
    needs: release
    name: Create Sha256sum
    runs-on: ubuntu-latest
    steps:
    - name: Download Release Archives
      env:
        GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      run: >-
        gh release download ${{ github.ref_name }}
        --repo ${{ github.repository }}
        --pattern '*'
        --dir release
    - name: Create Checksums
      run: cd release && shasum -a 256 * > ../SHA256SUMS
    - name: Publish Checksums
      uses: softprops/action-gh-release@v2.0.5
      with:
        draft: true
        files: SHA256SUMS
      env:
        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
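The sha256sum job above publishes a SHA256SUMS file next to the release assets. As a hedged illustration of what that enables on the consumer side (the archive name below is a placeholder, not a real asset name from this workflow), a downloaded artifact can be checked roughly like this:

```nu
# Sketch: verify a downloaded release archive against the published SHA256SUMS.
let file = 'nu-x86_64-unknown-linux-gnu.tar.gz'   # placeholder asset name
let expected = (open SHA256SUMS | lines | where $it =~ $file | first | split row ' ' | first)
let actual = (open --raw $file | hash sha256)
if $actual == $expected { print 'checksum OK' } else { print 'checksum mismatch' }
```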
.github/workflows/typos.yml (2 changes, vendored)

@@ -10,4 +10,4 @@ jobs:
      uses: actions/checkout@v4.1.7

    - name: Check spelling
      uses: crate-ci/typos@v1.24.5
      uses: crate-ci/typos@v1.33.1
.github/workflows/winget-submission.yml (4 changes, vendored)

@@ -25,5 +25,5 @@ jobs:
          installers-regex: 'msvc\.msi$'
          version: ${{ inputs.tag_name || github.event.release.tag_name }}
          release-tag: ${{ inputs.tag_name || github.event.release.tag_name }}
          token: ${{ secrets.NUSHELL_PAT }}
          fork-user: fdncred
          token: ${{ secrets.GITHUB_TOKEN }}
          fork-user: nushell
@@ -31,7 +31,7 @@ The review process can be summarized as follows:
1. You want to make some change to Nushell that is more involved than simple bug-fixing.
2. Go to [Discord](https://discordapp.com/invite/NtAbbGn) or a [GitHub issue](https://github.com/nushell/nushell/issues/new/choose) and chat with some core team members and/or other contributors about it.
3. After getting a green light from the core team, implement the feature, open a pull request (PR) and write a concise but comprehensive description of the change.
4. If your PR includes any use-facing features (such as adding a flag to a command), clearly list them in the PR description.
4. If your PR includes any user-facing features (such as adding a flag to a command), clearly list them in the PR description.
5. Then, core team members and other regular contributors will review the PR and suggest changes.
6. When we all agree, the PR will be merged.
7. If your PR includes any user-facing features, make sure the changes are also reflected in [the documentation](https://github.com/nushell/nushell.github.io) after the PR is merged.
Cargo.lock (4602 changes, generated)
File diff suppressed because it is too large.
Cargo.toml (183 changes)

@@ -4,14 +4,14 @@ build = "scripts/build.rs"
default-run = "nu"
description = "A new type of shell"
documentation = "https://www.nushell.sh/book/"
edition = "2021"
edition = "2024"
exclude = ["images"]
homepage = "https://www.nushell.sh"
license = "MIT"
name = "nu"
repository = "https://github.com/nushell/nushell"
rust-version = "1.79.0"
version = "0.98.0"
rust-version = "1.85.1"
version = "0.105.0"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@@ -66,119 +66,130 @@ alphanumeric-sort = "1.5"
ansi-str = "0.8"
anyhow = "1.0.82"
base64 = "0.22.1"
bracoxide = "0.1.2"
brotli = "5.0"
bracoxide = "0.1.6"
brotli = "7.0"
byteorder = "1.5"
bytes = "1"
bytesize = "1.3"
calamine = "0.24.0"
bytesize = "1.3.3"
calamine = "0.26"
chardetng = "0.1.17"
chrono = { default-features = false, version = "0.4.34" }
chrono-humanize = "0.2.3"
chrono-tz = "0.8"
chrono-tz = "0.10"
crossbeam-channel = "0.5.8"
crossterm = "0.28.1"
csv = "1.3"
ctrlc = "3.4"
devicons = "0.6.12"
dialoguer = { default-features = false, version = "0.11" }
digest = { default-features = false, version = "0.10" }
dirs = "5.0"
dirs-sys = "0.4"
dtparse = "2.0"
encoding_rs = "0.8"
fancy-regex = "0.13"
fancy-regex = "0.14"
filesize = "0.2"
filetime = "0.2"
fuzzy-matcher = "0.3"
heck = "0.5.0"
human-date-parser = "0.1.1"
indexmap = "2.5"
human-date-parser = "0.3.0"
indexmap = "2.9"
indicatif = "0.17"
interprocess = "2.2.0"
is_executable = "1.0"
itertools = "0.13"
itertools = "0.14"
libc = "0.2"
libproc = "0.14"
log = "0.4"
lru = "0.12"
lscolors = { version = "0.17", default-features = false }
lsp-server = "0.7.5"
lsp-types = "0.95.0"
lscolors = { version = "0.20", default-features = false }
lsp-server = "0.7.8"
lsp-types = { version = "0.97.0", features = ["proposed"] }
lsp-textdocument = "0.4.2"
mach2 = "0.4"
md5 = { version = "0.10", package = "md-5" }
miette = "7.2"
miette = "7.6"
mime = "0.3.17"
mime_guess = "2.0"
mockito = { version = "1.5", default-features = false }
multipart-rs = "0.1.11"
mockito = { version = "1.7", default-features = false }
multipart-rs = "0.1.13"
native-tls = "0.2"
nix = { version = "0.29", default-features = false }
notify-debouncer-full = { version = "0.3", default-features = false }
nu-ansi-term = "0.50.1"
nucleo-matcher = "0.3"
num-format = "0.4"
num-traits = "0.2"
oem_cp = "2.0.0"
omnipath = "0.1"
once_cell = "1.18"
open = "5.3"
os_pipe = { version = "1.2", features = ["io_safety"] }
pathdiff = "0.2"
percent-encoding = "2"
pretty_assertions = "1.4"
print-positions = "0.6"
proc-macro-error = { version = "1.0", default-features = false }
proc-macro-error2 = "2.0"
proc-macro2 = "1.0"
procfs = "0.16.0"
procfs = "0.17.0"
pwd = "1.3"
quick-xml = "0.32.0"
quick-xml = "0.37.0"
quickcheck = "1.0"
quickcheck_macros = "1.0"
quickcheck_macros = "1.1"
quote = "1.0"
rand = "0.8"
rand_chacha = "0.3.1"
ratatui = "0.26"
rand = "0.9"
getrandom = "0.2" # pick same version that rand requires
rand_chacha = "0.9"
ratatui = "0.29"
rayon = "1.10"
reedline = "0.35.0"
regex = "1.9.5"
reedline = "0.40.0"
rmp = "0.8"
rmp-serde = "1.3"
ropey = "1.6.1"
roxmltree = "0.19"
rstest = { version = "0.18", default-features = false }
roxmltree = "0.20"
rstest = { version = "0.23", default-features = false }
rstest_reuse = "0.7"
rusqlite = "0.31"
rust-embed = "8.5.0"
rust-embed = "8.7.0"
rustls = { version = "0.23", default-features = false, features = ["std", "tls12"] }
rustls-native-certs = "0.8"
scopeguard = { version = "1.2.0" }
serde = { version = "1.0" }
serde_json = "1.0"
serde_json = "1.0.97"
serde_urlencoded = "0.7.1"
serde_yaml = "0.9"
serde_yaml = "0.9.33"
sha2 = "0.10"
strip-ansi-escapes = "0.2.0"
strum = "0.26"
strum_macros = "0.26"
syn = "2.0"
sysinfo = "0.30"
tabled = { version = "0.16.0", default-features = false }
tempfile = "3.10"
terminal_size = "0.3"
titlecase = "2.0"
sysinfo = "0.33"
tabled = { version = "0.20", default-features = false }
tempfile = "3.20"
titlecase = "3.5"
toml = "0.8"
trash = "3.3"
trash = "5.2"
update-informer = { version = "1.2.0", default-features = false, features = ["github", "ureq"] }
umask = "2.1"
unicode-segmentation = "1.11"
unicode-width = "0.1"
ureq = { version = "2.10", default-features = false }
unicode-segmentation = "1.12"
unicode-width = "0.2"
ureq = { version = "2.12", default-features = false, features = ["socks-proxy"] }
url = "2.2"
uu_cp = "0.0.27"
uu_mkdir = "0.0.27"
uu_mktemp = "0.0.27"
uu_mv = "0.0.27"
uu_whoami = "0.0.27"
uu_uname = "0.0.27"
uucore = "0.0.27"
uuid = "1.10.0"
uu_cp = "0.1.0"
uu_mkdir = "0.1.0"
uu_mktemp = "0.1.0"
uu_mv = "0.1.0"
uu_touch = "0.1.0"
uu_whoami = "0.1.0"
uu_uname = "0.1.0"
uucore = "0.1.0"
uuid = "1.16.0"
v_htmlescape = "0.15.0"
wax = "0.6"
which = "6.0.0"
windows = "0.54"
web-time = "1.1.0"
which = "7.0.0"
windows = "0.56"
windows-sys = "0.48"
winreg = "0.52"
memchr = "2.7.4"
webpki-roots = "1.0"

[workspace.lints.clippy]
# Warning: workspace lints affect library code as well as tests, so don't enable lints that would be too noisy in tests like that.
@@ -189,22 +200,22 @@ unchecked_duration_subtraction = "warn"
workspace = true

[dependencies]
nu-cli = { path = "./crates/nu-cli", version = "0.98.0" }
nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.98.0" }
nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.98.0" }
nu-cmd-plugin = { path = "./crates/nu-cmd-plugin", version = "0.98.0", optional = true }
nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.98.0" }
nu-command = { path = "./crates/nu-command", version = "0.98.0" }
nu-engine = { path = "./crates/nu-engine", version = "0.98.0" }
nu-explore = { path = "./crates/nu-explore", version = "0.98.0" }
nu-lsp = { path = "./crates/nu-lsp/", version = "0.98.0" }
nu-parser = { path = "./crates/nu-parser", version = "0.98.0" }
nu-path = { path = "./crates/nu-path", version = "0.98.0" }
nu-plugin-engine = { path = "./crates/nu-plugin-engine", optional = true, version = "0.98.0" }
nu-protocol = { path = "./crates/nu-protocol", version = "0.98.0" }
nu-std = { path = "./crates/nu-std", version = "0.98.0" }
nu-system = { path = "./crates/nu-system", version = "0.98.0" }
nu-utils = { path = "./crates/nu-utils", version = "0.98.0" }
nu-cli = { path = "./crates/nu-cli", version = "0.105.0" }
nu-cmd-base = { path = "./crates/nu-cmd-base", version = "0.105.0" }
nu-cmd-lang = { path = "./crates/nu-cmd-lang", version = "0.105.0" }
nu-cmd-plugin = { path = "./crates/nu-cmd-plugin", version = "0.105.0", optional = true }
nu-cmd-extra = { path = "./crates/nu-cmd-extra", version = "0.105.0" }
nu-command = { path = "./crates/nu-command", version = "0.105.0", default-features = false, features = ["os"] }
nu-engine = { path = "./crates/nu-engine", version = "0.105.0" }
nu-explore = { path = "./crates/nu-explore", version = "0.105.0" }
nu-lsp = { path = "./crates/nu-lsp/", version = "0.105.0" }
nu-parser = { path = "./crates/nu-parser", version = "0.105.0" }
nu-path = { path = "./crates/nu-path", version = "0.105.0" }
nu-plugin-engine = { path = "./crates/nu-plugin-engine", optional = true, version = "0.105.0" }
nu-protocol = { path = "./crates/nu-protocol", version = "0.105.0" }
nu-std = { path = "./crates/nu-std", version = "0.105.0" }
nu-system = { path = "./crates/nu-system", version = "0.105.0" }
nu-utils = { path = "./crates/nu-utils", version = "0.105.0" }
reedline = { workspace = true, features = ["bashisms", "sqlite"] }

crossterm = { workspace = true }
@@ -212,7 +223,6 @@ ctrlc = { workspace = true }
dirs = { workspace = true }
log = { workspace = true }
miette = { workspace = true, features = ["fancy-no-backtrace", "fancy"] }
mimalloc = { version = "0.1.42", default-features = false, optional = true }
multipart-rs = { workspace = true }
serde_json = { workspace = true }
simplelog = "0.12"
@@ -234,34 +244,42 @@ nix = { workspace = true, default-features = false, features = [
] }

[dev-dependencies]
nu-test-support = { path = "./crates/nu-test-support", version = "0.98.0" }
nu-plugin-protocol = { path = "./crates/nu-plugin-protocol", version = "0.98.0" }
nu-plugin-core = { path = "./crates/nu-plugin-core", version = "0.98.0" }
nu-test-support = { path = "./crates/nu-test-support", version = "0.105.0" }
nu-plugin-protocol = { path = "./crates/nu-plugin-protocol", version = "0.105.0" }
nu-plugin-core = { path = "./crates/nu-plugin-core", version = "0.105.0" }
assert_cmd = "2.0"
dirs = { workspace = true }
tango-bench = "0.5"
tango-bench = "0.6"
pretty_assertions = { workspace = true }
regex = { workspace = true }
fancy-regex = { workspace = true }
rstest = { workspace = true, default-features = false }
serial_test = "3.1"
serial_test = "3.2"
tempfile = { workspace = true }

[features]
plugin = [
  "nu-plugin-engine",
  # crates
  "nu-cmd-plugin",
  "nu-plugin-engine",

  # features
  "nu-cli/plugin",
  "nu-parser/plugin",
  "nu-cmd-lang/plugin",
  "nu-command/plugin",
  "nu-protocol/plugin",
  "nu-engine/plugin",
  "nu-engine/plugin",
  "nu-parser/plugin",
  "nu-protocol/plugin",
]

native-tls = ["nu-command/native-tls"]
rustls-tls = ["nu-command/rustls-tls"]

default = [
  "plugin",
  "trash-support",
  "sqlite",
  "mimalloc",
  "rustls-tls"
]
stable = ["default"]
# NOTE: individual features are also passed to `nu-cmd-lang` that uses them to generate the feature matrix in the `version` command
@@ -270,7 +288,6 @@ stable = ["default"]
# otherwise the system version will be used. Not enabled by default because it takes a while to build
static-link-openssl = ["dep:openssl", "nu-cmd-lang/static-link-openssl"]

mimalloc = ["nu-cmd-lang/mimalloc", "dep:mimalloc"]
# Optional system clipboard support in `reedline`, this behavior has problematic compatibility with some systems.
# Missing X server/ Wayland can cause issues
system-clipboard = [
@@ -283,7 +300,7 @@ system-clipboard = [
trash-support = ["nu-command/trash-support", "nu-cmd-lang/trash-support"]

# SQLite commands for nushell
sqlite = ["nu-command/sqlite", "nu-cmd-lang/sqlite"]
sqlite = ["nu-command/sqlite", "nu-cmd-lang/sqlite", "nu-std/sqlite"]

[profile.release]
opt-level = "s" # Optimize for size
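As a hedged example of how the feature flags described above are typically combined on the command line (the exact invocations used by CI are not shown in this diff):

```nu
# Default build: plugin, trash-support, sqlite, mimalloc and rustls-tls are on by default.
cargo build --release

# Sketch: additionally statically link OpenSSL and enable system clipboard support.
cargo build --release --features static-link-openssl,system-clipboard

# Sketch: swap rustls for native-tls by re-selecting the remaining default features.
cargo build --release --no-default-features --features plugin,sqlite,trash-support,mimalloc,native-tls
```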
LICENSE (2 changes)

@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2019 - 2023 The Nushell Project Developers
Copyright (c) 2019 - 2025 The Nushell Project Developers

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
README.md (120 changes)
@ -4,7 +4,6 @@
|
||||
[](https://github.com/nushell/nushell/actions/workflows/nightly-build.yml)
|
||||
[](https://discord.gg/NtAbbGn)
|
||||
[](https://changelog.com/podcast/363)
|
||||
[](https://twitter.com/nu_shell)
|
||||
[](https://github.com/nushell/nushell/graphs/commit-activity)
|
||||
[](https://github.com/nushell/nushell/graphs/contributors)
|
||||
|
||||
@ -35,7 +34,7 @@ This project has reached a minimum-viable-product level of quality. Many people
|
||||
|
||||
The [Nushell book](https://www.nushell.sh/book/) is the primary source of Nushell documentation. You can find [a full list of Nu commands in the book](https://www.nushell.sh/commands/), and we have many examples of using Nu in our [cookbook](https://www.nushell.sh/cookbook/).
|
||||
|
||||
We're also active on [Discord](https://discord.gg/NtAbbGn) and [Twitter](https://twitter.com/nu_shell); come and chat with us!
|
||||
We're also active on [Discord](https://discord.gg/NtAbbGn); come and chat with us!
|
||||
|
||||
## Installation
|
||||
|
||||
@ -58,7 +57,7 @@ For details about which platforms the Nushell team actively supports, see [our p
|
||||
|
||||
## Configuration
|
||||
|
||||
The default configurations can be found at [sample_config](crates/nu-utils/src/sample_config)
|
||||
The default configurations can be found at [sample_config](crates/nu-utils/src/default_files)
|
||||
which are the configuration files one gets when they startup Nushell for the first time.
|
||||
|
||||
It sets all of the default configuration to run Nushell. From here one can
|
||||
@ -95,44 +94,44 @@ Commands that work in the pipeline fit into one of three categories:
|
||||
Commands are separated by the pipe symbol (`|`) to denote a pipeline flowing left to right.
|
||||
|
||||
```shell
|
||||
> ls | where type == "dir" | table
|
||||
╭────┬──────────┬──────┬─────────┬───────────────╮
|
||||
│ # │ name │ type │ size │ modified │
|
||||
├────┼──────────┼──────┼─────────┼───────────────┤
|
||||
│ 0 │ .cargo │ dir │ 0 B │ 9 minutes ago │
|
||||
│ 1 │ assets │ dir │ 0 B │ 2 weeks ago │
|
||||
│ 2 │ crates │ dir │ 4.0 KiB │ 2 weeks ago │
|
||||
│ 3 │ docker │ dir │ 0 B │ 2 weeks ago │
|
||||
│ 4 │ docs │ dir │ 0 B │ 2 weeks ago │
|
||||
│ 5 │ images │ dir │ 0 B │ 2 weeks ago │
|
||||
│ 6 │ pkg_mgrs │ dir │ 0 B │ 2 weeks ago │
|
||||
│ 7 │ samples │ dir │ 0 B │ 2 weeks ago │
|
||||
│ 8 │ src │ dir │ 4.0 KiB │ 2 weeks ago │
|
||||
│ 9 │ target │ dir │ 0 B │ a day ago │
|
||||
│ 10 │ tests │ dir │ 4.0 KiB │ 2 weeks ago │
|
||||
│ 11 │ wix │ dir │ 0 B │ 2 weeks ago │
|
||||
╰────┴──────────┴──────┴─────────┴───────────────╯
|
||||
ls | where type == "dir" | table
|
||||
# => ╭────┬──────────┬──────┬─────────┬───────────────╮
|
||||
# => │ # │ name │ type │ size │ modified │
|
||||
# => ├────┼──────────┼──────┼─────────┼───────────────┤
|
||||
# => │ 0 │ .cargo │ dir │ 0 B │ 9 minutes ago │
|
||||
# => │ 1 │ assets │ dir │ 0 B │ 2 weeks ago │
|
||||
# => │ 2 │ crates │ dir │ 4.0 KiB │ 2 weeks ago │
|
||||
# => │ 3 │ docker │ dir │ 0 B │ 2 weeks ago │
|
||||
# => │ 4 │ docs │ dir │ 0 B │ 2 weeks ago │
|
||||
# => │ 5 │ images │ dir │ 0 B │ 2 weeks ago │
|
||||
# => │ 6 │ pkg_mgrs │ dir │ 0 B │ 2 weeks ago │
|
||||
# => │ 7 │ samples │ dir │ 0 B │ 2 weeks ago │
|
||||
# => │ 8 │ src │ dir │ 4.0 KiB │ 2 weeks ago │
|
||||
# => │ 9 │ target │ dir │ 0 B │ a day ago │
|
||||
# => │ 10 │ tests │ dir │ 4.0 KiB │ 2 weeks ago │
|
||||
# => │ 11 │ wix │ dir │ 0 B │ 2 weeks ago │
|
||||
# => ╰────┴──────────┴──────┴─────────┴───────────────╯
|
||||
```
|
||||
|
||||
Because most of the time you'll want to see the output of a pipeline, `table` is assumed.
|
||||
We could have also written the above:
|
||||
|
||||
```shell
|
||||
> ls | where type == "dir"
|
||||
ls | where type == "dir"
|
||||
```
|
||||
|
||||
Being able to use the same commands and compose them differently is an important philosophy in Nu.
|
||||
For example, we could use the built-in `ps` command to get a list of the running processes, using the same `where` as above.
|
||||
|
||||
```shell
|
||||
> ps | where cpu > 0
|
||||
╭───┬───────┬───────────┬───────┬───────────┬───────────╮
|
||||
│ # │ pid │ name │ cpu │ mem │ virtual │
|
||||
├───┼───────┼───────────┼───────┼───────────┼───────────┤
|
||||
│ 0 │ 2240 │ Slack.exe │ 16.40 │ 178.3 MiB │ 232.6 MiB │
|
||||
│ 1 │ 16948 │ Slack.exe │ 16.32 │ 205.0 MiB │ 197.9 MiB │
|
||||
│ 2 │ 17700 │ nu.exe │ 3.77 │ 26.1 MiB │ 8.8 MiB │
|
||||
╰───┴───────┴───────────┴───────┴───────────┴───────────╯
|
||||
ps | where cpu > 0
|
||||
# => ╭───┬───────┬───────────┬───────┬───────────┬───────────╮
|
||||
# => │ # │ pid │ name │ cpu │ mem │ virtual │
|
||||
# => ├───┼───────┼───────────┼───────┼───────────┼───────────┤
|
||||
# => │ 0 │ 2240 │ Slack.exe │ 16.40 │ 178.3 MiB │ 232.6 MiB │
|
||||
# => │ 1 │ 16948 │ Slack.exe │ 16.32 │ 205.0 MiB │ 197.9 MiB │
|
||||
# => │ 2 │ 17700 │ nu.exe │ 3.77 │ 26.1 MiB │ 8.8 MiB │
|
||||
# => ╰───┴───────┴───────────┴───────┴───────────┴───────────╯
|
||||
```
|
||||
|
||||
### Opening files
|
||||
@ -141,46 +140,46 @@ Nu can load file and URL contents as raw text or structured data (if it recogniz
|
||||
For example, you can load a .toml file as structured data and explore it:
|
||||
|
||||
```shell
|
||||
> open Cargo.toml
|
||||
╭──────────────────┬────────────────────╮
|
||||
│ bin │ [table 1 row] │
|
||||
│ dependencies │ {record 25 fields} │
|
||||
│ dev-dependencies │ {record 8 fields} │
|
||||
│ features │ {record 10 fields} │
|
||||
│ package │ {record 13 fields} │
|
||||
│ patch │ {record 1 field} │
|
||||
│ profile │ {record 3 fields} │
|
||||
│ target │ {record 3 fields} │
|
||||
│ workspace │ {record 1 field} │
|
||||
╰──────────────────┴────────────────────╯
|
||||
open Cargo.toml
|
||||
# => ╭──────────────────┬────────────────────╮
|
||||
# => │ bin │ [table 1 row] │
|
||||
# => │ dependencies │ {record 25 fields} │
|
||||
# => │ dev-dependencies │ {record 8 fields} │
|
||||
# => │ features │ {record 10 fields} │
|
||||
# => │ package │ {record 13 fields} │
|
||||
# => │ patch │ {record 1 field} │
|
||||
# => │ profile │ {record 3 fields} │
|
||||
# => │ target │ {record 3 fields} │
|
||||
# => │ workspace │ {record 1 field} │
|
||||
# => ╰──────────────────┴────────────────────╯
|
||||
```
|
||||
|
||||
We can pipe this into a command that gets the contents of one of the columns:
|
||||
|
||||
```shell
|
||||
> open Cargo.toml | get package
|
||||
╭───────────────┬────────────────────────────────────╮
|
||||
│ authors │ [list 1 item] │
|
||||
│ default-run │ nu │
|
||||
│ description │ A new type of shell │
|
||||
│ documentation │ https://www.nushell.sh/book/ │
|
||||
│ edition │ 2018 │
|
||||
│ exclude │ [list 1 item] │
|
||||
│ homepage │ https://www.nushell.sh │
|
||||
│ license │ MIT │
|
||||
│ metadata │ {record 1 field} │
|
||||
│ name │ nu │
|
||||
│ repository │ https://github.com/nushell/nushell │
|
||||
│ rust-version │ 1.60 │
|
||||
│ version │ 0.72.0 │
|
||||
╰───────────────┴────────────────────────────────────╯
|
||||
open Cargo.toml | get package
|
||||
# => ╭───────────────┬────────────────────────────────────╮
|
||||
# => │ authors │ [list 1 item] │
|
||||
# => │ default-run │ nu │
|
||||
# => │ description │ A new type of shell │
|
||||
# => │ documentation │ https://www.nushell.sh/book/ │
|
||||
# => │ edition │ 2018 │
|
||||
# => │ exclude │ [list 1 item] │
|
||||
# => │ homepage │ https://www.nushell.sh │
|
||||
# => │ license │ MIT │
|
||||
# => │ metadata │ {record 1 field} │
|
||||
# => │ name │ nu │
|
||||
# => │ repository │ https://github.com/nushell/nushell │
|
||||
# => │ rust-version │ 1.60 │
|
||||
# => │ version │ 0.72.0 │
|
||||
# => ╰───────────────┴────────────────────────────────────╯
|
||||
```
|
||||
|
||||
And if needed we can drill down further:
|
||||
|
||||
```shell
|
||||
> open Cargo.toml | get package.version
|
||||
0.72.0
|
||||
open Cargo.toml | get package.version
|
||||
# => 0.72.0
|
||||
```
|
||||
|
||||
### Plugins
|
||||
@ -223,13 +222,14 @@ Please submit an issue or PR to be added to this list.
|
||||
- [Dorothy](http://github.com/bevry/dorothy)
|
||||
- [Direnv](https://github.com/direnv/direnv/blob/master/docs/hook.md#nushell)
|
||||
- [x-cmd](https://x-cmd.com/mod/nu)
|
||||
- [vfox](https://github.com/version-fox/vfox)
|
||||
|
||||
## Contributing
|
||||
|
||||
See [Contributing](CONTRIBUTING.md) for details. Thanks to all the people who already contributed!
|
||||
|
||||
<a href="https://github.com/nushell/nushell/graphs/contributors">
|
||||
<img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=750" />
|
||||
<img src="https://contributors-img.web.app/image?repo=nushell/nushell&max=750&columns=20" />
|
||||
</a>
|
||||
|
||||
## License
|
||||
|
@ -1,21 +1,19 @@
|
||||
use nu_cli::{eval_source, evaluate_commands};
|
||||
use nu_plugin_core::{Encoder, EncodingType};
|
||||
use nu_plugin_protocol::{PluginCallResponse, PluginOutput};
|
||||
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, Stack},
|
||||
PipelineData, Signals, Span, Spanned, Value,
|
||||
engine::{EngineState, Stack},
|
||||
};
|
||||
use nu_std::load_standard_library;
|
||||
use nu_utils::{get_default_config, get_default_env};
|
||||
use std::{
|
||||
fmt::Write,
|
||||
hint::black_box,
|
||||
rc::Rc,
|
||||
sync::{atomic::AtomicBool, Arc},
|
||||
sync::{Arc, atomic::AtomicBool},
|
||||
};
|
||||
|
||||
use std::hint::black_box;
|
||||
|
||||
use tango_bench::{benchmark_fn, tango_benchmarks, tango_main, IntoBenchmarks};
|
||||
use tango_bench::{IntoBenchmarks, benchmark_fn, tango_benchmarks, tango_main};
|
||||
|
||||
fn load_bench_commands() -> EngineState {
|
||||
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
|
||||
@ -46,9 +44,6 @@ fn setup_stack_and_engine_from_command(command: &str) -> (Stack, EngineState) {
|
||||
|
||||
let mut stack = Stack::new();
|
||||
|
||||
// Support running benchmarks without IR mode
|
||||
stack.use_ir = std::env::var_os("NU_DISABLE_IR").is_none();
|
||||
|
||||
evaluate_commands(
|
||||
&commands,
|
||||
&mut engine,
|
||||
@ -73,14 +68,14 @@ fn encoding_test_data(row_cnt: usize, col_cnt: usize) -> Value {
|
||||
}
|
||||
|
||||
fn bench_command(
|
||||
name: &str,
|
||||
command: &str,
|
||||
name: impl Into<String>,
|
||||
command: impl Into<String> + Clone,
|
||||
stack: Stack,
|
||||
engine: EngineState,
|
||||
) -> impl IntoBenchmarks {
|
||||
let commands = Spanned {
|
||||
span: Span::unknown(),
|
||||
item: command.to_string(),
|
||||
item: command.into(),
|
||||
};
|
||||
[benchmark_fn(name, move |b| {
|
||||
let commands = commands.clone();
|
||||
@ -144,19 +139,16 @@ fn bench_load_standard_lib() -> impl IntoBenchmarks {
|
||||
})]
|
||||
}
|
||||
|
||||
fn create_flat_record_string(n: i32) -> String {
|
||||
let mut s = String::from("let record = {");
|
||||
fn create_flat_record_string(n: usize) -> String {
|
||||
let mut s = String::from("let record = { ");
|
||||
for i in 0..n {
|
||||
s.push_str(&format!("col_{}: {}", i, i));
|
||||
if i < n - 1 {
|
||||
s.push_str(", ");
|
||||
}
|
||||
write!(s, "col_{i}: {i}, ").unwrap();
|
||||
}
|
||||
s.push('}');
|
||||
s
|
||||
}
|
||||
|
||||
fn create_nested_record_string(depth: i32) -> String {
|
||||
fn create_nested_record_string(depth: usize) -> String {
|
||||
let mut s = String::from("let record = {");
|
||||
for _ in 0..depth {
|
||||
s.push_str("col: {");
|
||||
@ -169,7 +161,7 @@ fn create_nested_record_string(depth: i32) -> String {
|
||||
s
|
||||
}
|
||||
|
||||
fn create_example_table_nrows(n: i32) -> String {
|
||||
fn create_example_table_nrows(n: usize) -> String {
|
||||
let mut s = String::from("let table = [[foo bar baz]; ");
|
||||
for i in 0..n {
|
||||
s.push_str(&format!("[0, 1, {i}]"));
|
||||
@ -181,120 +173,153 @@ fn create_example_table_nrows(n: i32) -> String {
|
||||
s
|
||||
}
|
||||
|
||||
fn bench_record_create(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_record_create(n: usize) -> impl IntoBenchmarks {
|
||||
bench_command(
|
||||
&format!("record_create_{n}"),
|
||||
&create_flat_record_string(n),
|
||||
format!("record_create_{n}"),
|
||||
create_flat_record_string(n),
|
||||
Stack::new(),
|
||||
setup_engine(),
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_record_flat_access(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_record_flat_access(n: usize) -> impl IntoBenchmarks {
|
||||
let setup_command = create_flat_record_string(n);
|
||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
||||
bench_command(
|
||||
&format!("record_flat_access_{n}"),
|
||||
format!("record_flat_access_{n}"),
|
||||
"$record.col_0 | ignore",
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_record_nested_access(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_record_nested_access(n: usize) -> impl IntoBenchmarks {
|
||||
let setup_command = create_nested_record_string(n);
|
||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
||||
let nested_access = ".col".repeat(n as usize);
|
||||
let nested_access = ".col".repeat(n);
|
||||
bench_command(
|
||||
&format!("record_nested_access_{n}"),
|
||||
&format!("$record{} | ignore", nested_access),
|
||||
format!("record_nested_access_{n}"),
|
||||
format!("$record{} | ignore", nested_access),
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_table_create(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_record_insert(n: usize, m: usize) -> impl IntoBenchmarks {
|
||||
let setup_command = create_flat_record_string(n);
|
||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
||||
let mut insert = String::from("$record");
|
||||
for i in n..(n + m) {
|
||||
write!(insert, " | insert col_{i} {i}").unwrap();
|
||||
}
|
||||
insert.push_str(" | ignore");
|
||||
bench_command(format!("record_insert_{n}_{m}"), insert, stack, engine)
|
||||
}
|
||||
|
||||
fn bench_table_create(n: usize) -> impl IntoBenchmarks {
|
||||
bench_command(
|
||||
&format!("table_create_{n}"),
|
||||
&create_example_table_nrows(n),
|
||||
format!("table_create_{n}"),
|
||||
create_example_table_nrows(n),
|
||||
Stack::new(),
|
||||
setup_engine(),
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_table_get(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_table_get(n: usize) -> impl IntoBenchmarks {
|
||||
let setup_command = create_example_table_nrows(n);
|
||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
||||
bench_command(
|
||||
&format!("table_get_{n}"),
|
||||
format!("table_get_{n}"),
|
||||
"$table | get bar | math sum | ignore",
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_table_select(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_table_select(n: usize) -> impl IntoBenchmarks {
|
||||
let setup_command = create_example_table_nrows(n);
|
||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
||||
bench_command(
|
||||
&format!("table_select_{n}"),
|
||||
format!("table_select_{n}"),
|
||||
"$table | select foo baz | ignore",
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_eval_interleave(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_table_insert_row(n: usize, m: usize) -> impl IntoBenchmarks {
|
||||
let setup_command = create_example_table_nrows(n);
|
||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
||||
let mut insert = String::from("$table");
|
||||
for i in n..(n + m) {
|
||||
write!(insert, " | insert {i} {{ foo: 0, bar: 1, baz: {i} }}").unwrap();
|
||||
}
|
||||
insert.push_str(" | ignore");
|
||||
bench_command(format!("table_insert_row_{n}_{m}"), insert, stack, engine)
|
||||
}
|
||||
|
||||
fn bench_table_insert_col(n: usize, m: usize) -> impl IntoBenchmarks {
|
||||
let setup_command = create_example_table_nrows(n);
|
||||
let (stack, engine) = setup_stack_and_engine_from_command(&setup_command);
|
||||
let mut insert = String::from("$table");
|
||||
for i in 0..m {
|
||||
write!(insert, " | insert col_{i} {i}").unwrap();
|
||||
}
|
||||
insert.push_str(" | ignore");
|
||||
bench_command(format!("table_insert_col_{n}_{m}"), insert, stack, engine)
|
||||
}
|
||||
|
||||
fn bench_eval_interleave(n: usize) -> impl IntoBenchmarks {
|
||||
let engine = setup_engine();
|
||||
let stack = Stack::new();
|
||||
bench_command(
|
||||
&format!("eval_interleave_{n}"),
|
||||
&format!("seq 1 {n} | wrap a | interleave {{ seq 1 {n} | wrap b }} | ignore"),
|
||||
format!("eval_interleave_{n}"),
|
||||
format!("seq 1 {n} | wrap a | interleave {{ seq 1 {n} | wrap b }} | ignore"),
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_eval_interleave_with_interrupt(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_eval_interleave_with_interrupt(n: usize) -> impl IntoBenchmarks {
|
||||
let mut engine = setup_engine();
|
||||
engine.set_signals(Signals::new(Arc::new(AtomicBool::new(false))));
|
||||
let stack = Stack::new();
|
||||
bench_command(
|
||||
&format!("eval_interleave_with_interrupt_{n}"),
|
||||
&format!("seq 1 {n} | wrap a | interleave {{ seq 1 {n} | wrap b }} | ignore"),
|
||||
format!("eval_interleave_with_interrupt_{n}"),
|
||||
format!("seq 1 {n} | wrap a | interleave {{ seq 1 {n} | wrap b }} | ignore"),
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_eval_for(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_eval_for(n: usize) -> impl IntoBenchmarks {
|
||||
let engine = setup_engine();
|
||||
let stack = Stack::new();
|
||||
bench_command(
|
||||
&format!("eval_for_{n}"),
|
||||
&format!("(for $x in (1..{n}) {{ 1 }}) | ignore"),
|
||||
format!("eval_for_{n}"),
|
||||
format!("(for $x in (1..{n}) {{ 1 }}) | ignore"),
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_eval_each(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_eval_each(n: usize) -> impl IntoBenchmarks {
|
||||
let engine = setup_engine();
|
||||
let stack = Stack::new();
|
||||
bench_command(
|
||||
&format!("eval_each_{n}"),
|
||||
&format!("(1..{n}) | each {{|_| 1 }} | ignore"),
|
||||
format!("eval_each_{n}"),
|
||||
format!("(1..{n}) | each {{|_| 1 }} | ignore"),
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
}
|
||||
|
||||
fn bench_eval_par_each(n: i32) -> impl IntoBenchmarks {
|
||||
fn bench_eval_par_each(n: usize) -> impl IntoBenchmarks {
|
||||
let engine = setup_engine();
|
||||
let stack = Stack::new();
|
||||
bench_command(
|
||||
&format!("eval_par_each_{n}"),
|
||||
&format!("(1..{}) | par-each -t 2 {{|_| 1 }} | ignore", n),
|
||||
format!("eval_par_each_{n}"),
|
||||
format!("(1..{}) | par-each -t 2 {{|_| 1 }} | ignore", n),
|
||||
stack,
|
||||
engine,
|
||||
)
|
||||
@ -430,6 +455,14 @@ tango_benchmarks!(
|
||||
bench_record_nested_access(32),
|
||||
bench_record_nested_access(64),
|
||||
bench_record_nested_access(128),
|
||||
bench_record_insert(1, 1),
|
||||
bench_record_insert(10, 1),
|
||||
bench_record_insert(100, 1),
|
||||
bench_record_insert(1000, 1),
|
||||
bench_record_insert(1, 10),
|
||||
bench_record_insert(10, 10),
|
||||
bench_record_insert(100, 10),
|
||||
bench_record_insert(1000, 10),
|
||||
// Table
|
||||
bench_table_create(1),
|
||||
bench_table_create(10),
|
||||
@ -443,6 +476,22 @@ tango_benchmarks!(
|
||||
bench_table_select(10),
|
||||
bench_table_select(100),
|
||||
bench_table_select(1_000),
|
||||
bench_table_insert_row(1, 1),
|
||||
bench_table_insert_row(10, 1),
|
||||
bench_table_insert_row(100, 1),
|
||||
bench_table_insert_row(1000, 1),
|
||||
bench_table_insert_row(1, 10),
|
||||
bench_table_insert_row(10, 10),
|
||||
bench_table_insert_row(100, 10),
|
||||
bench_table_insert_row(1000, 10),
|
||||
bench_table_insert_col(1, 1),
|
||||
bench_table_insert_col(10, 1),
|
||||
bench_table_insert_col(100, 1),
|
||||
bench_table_insert_col(1000, 1),
|
||||
bench_table_insert_col(1, 10),
|
||||
bench_table_insert_col(10, 10),
|
||||
bench_table_insert_col(100, 10),
|
||||
bench_table_insert_col(1000, 10),
|
||||
// Eval
|
||||
// Interleave
|
||||
bench_eval_interleave(100),
|
||||
|
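The registrations above belong to the repository's tango-bench benchmark suite. A hedged sketch of running it locally; the bench target name is an assumption, not confirmed by this diff:

```nu
# Assumed invocation: run the tango benchmarks for this crate.
cargo bench --bench benchmarks
```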
clippy/wasm/clippy.toml (new file, 3 lines)

[[disallowed-types]]
path = "std::time::Instant"
reason = "WASM panics if used, use `web_time::Instant` instead"
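How this extra clippy.toml is wired up is not shown in this diff; one plausible way (an assumption, not confirmed by the workflow files here) is pointing clippy at the directory via CLIPPY_CONF_DIR when linting a wasm target:

```nu
# Assumed usage: lint with the wasm-specific clippy config so std::time::Instant is disallowed.
with-env { CLIPPY_CONF_DIR: 'clippy/wasm' } {
    cargo clippy --target wasm32-unknown-unknown
}
```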
crates/nu-cli/Cargo.toml

@@ -2,44 +2,46 @@
authors = ["The Nushell Project Developers"]
description = "CLI-related functionality for Nushell"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cli"
edition = "2021"
edition = "2024"
license = "MIT"
name = "nu-cli"
version = "0.98.0"
version = "0.105.0"

[lib]
bench = false

[dev-dependencies]
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.98.0" }
nu-command = { path = "../nu-command", version = "0.98.0" }
nu-test-support = { path = "../nu-test-support", version = "0.98.0" }
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.105.0" }
nu-command = { path = "../nu-command", version = "0.105.0" }
nu-std = { path = "../nu-std", version = "0.105.0" }
nu-test-support = { path = "../nu-test-support", version = "0.105.0" }
rstest = { workspace = true, default-features = false }
tempfile = { workspace = true }

[dependencies]
nu-cmd-base = { path = "../nu-cmd-base", version = "0.98.0" }
nu-engine = { path = "../nu-engine", version = "0.98.0" }
nu-path = { path = "../nu-path", version = "0.98.0" }
nu-parser = { path = "../nu-parser", version = "0.98.0" }
nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.98.0", optional = true }
nu-protocol = { path = "../nu-protocol", version = "0.98.0" }
nu-utils = { path = "../nu-utils", version = "0.98.0" }
nu-color-config = { path = "../nu-color-config", version = "0.98.0" }
nu-cmd-base = { path = "../nu-cmd-base", version = "0.105.0" }
nu-engine = { path = "../nu-engine", version = "0.105.0", features = ["os"] }
nu-glob = { path = "../nu-glob", version = "0.105.0" }
nu-path = { path = "../nu-path", version = "0.105.0" }
nu-parser = { path = "../nu-parser", version = "0.105.0" }
nu-plugin-engine = { path = "../nu-plugin-engine", version = "0.105.0", optional = true }
nu-protocol = { path = "../nu-protocol", version = "0.105.0", features = ["os"] }
nu-utils = { path = "../nu-utils", version = "0.105.0" }
nu-color-config = { path = "../nu-color-config", version = "0.105.0" }
nu-ansi-term = { workspace = true }
reedline = { workspace = true, features = ["bashisms", "sqlite"] }

chrono = { default-features = false, features = ["std"], workspace = true }
crossterm = { workspace = true }
fancy-regex = { workspace = true }
fuzzy-matcher = { workspace = true }
is_executable = { workspace = true }
log = { workspace = true }
miette = { workspace = true, features = ["fancy-no-backtrace"] }
lscolors = { workspace = true, default-features = false, features = ["nu-ansi-term"] }
once_cell = { workspace = true }
miette = { workspace = true, features = ["fancy-no-backtrace"] }
nucleo-matcher = { workspace = true }
percent-encoding = { workspace = true }
sysinfo = { workspace = true }
strum = { workspace = true }
unicode-segmentation = { workspace = true }
uuid = { workspace = true, features = ["v4"] }
which = { workspace = true }
@@ -49,4 +51,4 @@ plugin = ["nu-plugin-engine"]
system-clipboard = ["reedline/system_clipboard"]

[lints]
workspace = true
workspace = true
@@ -1,9 +1,9 @@
use nu_engine::command_prelude::*;

#[derive(Clone)]
pub struct SubCommand;
pub struct CommandlineEdit;

impl Command for SubCommand {
impl Command for CommandlineEdit {
    fn name(&self) -> &str {
        "commandline edit"
    }
@@ -29,7 +29,7 @@ impl Command for SubCommand {
            .required(
                "str",
                SyntaxShape::String,
                "the string to perform the operation with",
                "The string to perform the operation with.",
            )
            .category(Category::Core)
    }

@@ -2,9 +2,9 @@ use nu_engine::command_prelude::*;
use unicode_segmentation::UnicodeSegmentation;

#[derive(Clone)]
pub struct SubCommand;
pub struct CommandlineGetCursor;

impl Command for SubCommand {
impl Command for CommandlineGetCursor {
    fn name(&self) -> &str {
        "commandline get-cursor"
    }

@@ -4,6 +4,6 @@ mod get_cursor;
mod set_cursor;

pub use commandline_::Commandline;
pub use edit::SubCommand as CommandlineEdit;
pub use get_cursor::SubCommand as CommandlineGetCursor;
pub use set_cursor::SubCommand as CommandlineSetCursor;
pub use edit::CommandlineEdit;
pub use get_cursor::CommandlineGetCursor;
pub use set_cursor::CommandlineSetCursor;

@@ -3,9 +3,9 @@ use nu_engine::command_prelude::*;
use unicode_segmentation::UnicodeSegmentation;

#[derive(Clone)]
pub struct SubCommand;
pub struct CommandlineSetCursor;

impl Command for SubCommand {
impl Command for CommandlineSetCursor {
    fn name(&self) -> &str {
        "commandline set-cursor"
    }
@@ -18,7 +18,7 @@ impl Command for SubCommand {
                "set the current cursor position to the end of the buffer",
                Some('e'),
            )
            .optional("pos", SyntaxShape::Int, "Cursor position to be set")
            .optional("pos", SyntaxShape::Int, "Cursor position to be set.")
            .category(Category::Core)
    }

@@ -17,6 +17,7 @@ pub fn add_cli_context(mut engine_state: EngineState) -> EngineState {
        CommandlineGetCursor,
        CommandlineSetCursor,
        History,
        HistoryImport,
        HistorySession,
        Keybindings,
        KeybindingsDefault,
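The renamed commands above keep their shell-level behavior; a brief hedged usage sketch, based only on the signatures shown (these are typically used from keybindings or hooks rather than typed interactively):

```nu
# Sketch: put text into the edit buffer, move the cursor to its end, then read the position back.
commandline edit 'git status'
commandline set-cursor -e
commandline get-cursor   # returns the cursor position within the buffer
```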
crates/nu-cli/src/commands/history/fields.rs (new file, 9 lines)

// Each const is named after a HistoryItem field, and the value is the field name to be displayed to
// the user (or accept during import).
pub const COMMAND_LINE: &str = "command";
pub const START_TIMESTAMP: &str = "start_timestamp";
pub const HOSTNAME: &str = "hostname";
pub const CWD: &str = "cwd";
pub const EXIT_STATUS: &str = "exit_status";
pub const DURATION: &str = "duration";
pub const SESSION_ID: &str = "session_id";
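These field names are what `history import` (introduced further below) accepts as record columns. A small hedged example with made-up values:

```nu
# Sketch: import one record into the command history using the field names above.
[[command cwd exit_status]; ["cargo test" "/home/user/nushell" 0]] | history import
```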
@ -1,10 +1,15 @@
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::HistoryFileFormat;
|
||||
use nu_protocol::{
|
||||
HistoryFileFormat,
|
||||
shell_error::{self, io::IoError},
|
||||
};
|
||||
use reedline::{
|
||||
FileBackedHistory, History as ReedlineHistory, HistoryItem, SearchDirection, SearchQuery,
|
||||
SqliteBackedHistory,
|
||||
};
|
||||
|
||||
use super::fields;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct History;
|
||||
|
||||
@ -42,91 +47,77 @@ impl Command for History {
|
||||
let Some(history) = engine_state.history_config() else {
|
||||
return Ok(PipelineData::empty());
|
||||
};
|
||||
|
||||
// todo for sqlite history this command should be an alias to `open ~/.config/nushell/history.sqlite3 | get history`
|
||||
if let Some(config_path) = nu_path::config_dir() {
|
||||
let clear = call.has_flag(engine_state, stack, "clear")?;
|
||||
let long = call.has_flag(engine_state, stack, "long")?;
|
||||
let signals = engine_state.signals().clone();
|
||||
let Some(history_path) = history.file_path() else {
|
||||
return Err(ShellError::ConfigDirNotFound { span: Some(head) });
|
||||
};
|
||||
|
||||
let mut history_path = config_path;
|
||||
history_path.push("nushell");
|
||||
match history.file_format {
|
||||
HistoryFileFormat::Sqlite => {
|
||||
history_path.push("history.sqlite3");
|
||||
}
|
||||
HistoryFileFormat::Plaintext => {
|
||||
history_path.push("history.txt");
|
||||
}
|
||||
}
|
||||
if call.has_flag(engine_state, stack, "clear")? {
|
||||
let _ = std::fs::remove_file(history_path);
|
||||
// TODO: FIXME also clear the auxiliary files when using sqlite
|
||||
return Ok(PipelineData::empty());
|
||||
}
|
||||
|
||||
if clear {
|
||||
let _ = std::fs::remove_file(history_path);
|
||||
// TODO: FIXME also clear the auxiliary files when using sqlite
|
||||
Ok(PipelineData::empty())
|
||||
} else {
|
||||
let history_reader: Option<Box<dyn ReedlineHistory>> = match history.file_format {
|
||||
HistoryFileFormat::Sqlite => {
|
||||
SqliteBackedHistory::with_file(history_path.clone().into(), None, None)
|
||||
.map(|inner| {
|
||||
let boxed: Box<dyn ReedlineHistory> = Box::new(inner);
|
||||
boxed
|
||||
})
|
||||
.ok()
|
||||
}
|
||||
|
||||
HistoryFileFormat::Plaintext => FileBackedHistory::with_file(
|
||||
history.max_size as usize,
|
||||
history_path.clone().into(),
|
||||
)
|
||||
let long = call.has_flag(engine_state, stack, "long")?;
|
||||
let signals = engine_state.signals().clone();
|
||||
let history_reader: Option<Box<dyn ReedlineHistory>> = match history.file_format {
|
||||
HistoryFileFormat::Sqlite => {
|
||||
SqliteBackedHistory::with_file(history_path.clone(), None, None)
|
||||
.map(|inner| {
|
||||
let boxed: Box<dyn ReedlineHistory> = Box::new(inner);
|
||||
boxed
|
||||
})
|
||||
.ok(),
|
||||
};
|
||||
|
||||
match history.file_format {
|
||||
HistoryFileFormat::Plaintext => Ok(history_reader
|
||||
.and_then(|h| {
|
||||
h.search(SearchQuery::everything(SearchDirection::Forward, None))
|
||||
.ok()
|
||||
})
|
||||
.map(move |entries| {
|
||||
entries.into_iter().enumerate().map(move |(idx, entry)| {
|
||||
Value::record(
|
||||
record! {
|
||||
"command" => Value::string(entry.command_line, head),
|
||||
"index" => Value::int(idx as i64, head),
|
||||
},
|
||||
head,
|
||||
)
|
||||
})
|
||||
})
|
||||
.ok_or(ShellError::FileNotFound {
|
||||
file: history_path.display().to_string(),
|
||||
span: head,
|
||||
})?
|
||||
.into_pipeline_data(head, signals)),
|
||||
HistoryFileFormat::Sqlite => Ok(history_reader
|
||||
.and_then(|h| {
|
||||
h.search(SearchQuery::everything(SearchDirection::Forward, None))
|
||||
.ok()
|
||||
})
|
||||
.map(move |entries| {
|
||||
entries.into_iter().enumerate().map(move |(idx, entry)| {
|
||||
create_history_record(idx, entry, long, head)
|
||||
})
|
||||
})
|
||||
.ok_or(ShellError::FileNotFound {
|
||||
file: history_path.display().to_string(),
|
||||
span: head,
|
||||
})?
|
||||
.into_pipeline_data(head, signals)),
|
||||
}
|
||||
.ok()
|
||||
}
|
||||
} else {
|
||||
Err(ShellError::ConfigDirNotFound { span: Some(head) })
|
||||
HistoryFileFormat::Plaintext => {
|
||||
FileBackedHistory::with_file(history.max_size as usize, history_path.clone())
|
||||
.map(|inner| {
|
||||
let boxed: Box<dyn ReedlineHistory> = Box::new(inner);
|
||||
boxed
|
||||
})
|
||||
.ok()
|
||||
}
|
||||
};
|
||||
match history.file_format {
|
||||
HistoryFileFormat::Plaintext => Ok(history_reader
|
||||
.and_then(|h| {
|
||||
h.search(SearchQuery::everything(SearchDirection::Forward, None))
|
||||
.ok()
|
||||
})
|
||||
.map(move |entries| {
|
||||
entries.into_iter().enumerate().map(move |(idx, entry)| {
|
||||
Value::record(
|
||||
record! {
|
||||
fields::COMMAND_LINE => Value::string(entry.command_line, head),
|
||||
// TODO: This name is inconsistent with create_history_record.
|
||||
"index" => Value::int(idx as i64, head),
|
||||
},
|
||||
head,
|
||||
)
|
||||
})
|
||||
})
|
||||
.ok_or(IoError::new(
|
||||
shell_error::io::ErrorKind::FileNotFound,
|
||||
head,
|
||||
history_path,
|
||||
))?
|
||||
.into_pipeline_data(head, signals)),
|
||||
HistoryFileFormat::Sqlite => Ok(history_reader
|
||||
.and_then(|h| {
|
||||
h.search(SearchQuery::everything(SearchDirection::Forward, None))
|
||||
.ok()
|
||||
})
|
||||
.map(move |entries| {
|
||||
entries.into_iter().enumerate().map(move |(idx, entry)| {
|
||||
create_sqlite_history_record(idx, entry, long, head)
|
||||
})
|
||||
})
|
||||
.ok_or(IoError::new(
|
||||
shell_error::io::ErrorKind::FileNotFound,
|
||||
head,
|
||||
history_path,
|
||||
))?
|
||||
.into_pipeline_data(head, signals)),
|
||||
}
|
||||
}
|
||||
|
||||
@ -151,7 +142,7 @@ impl Command for History {
|
||||
}
|
||||
}
|
||||
|
||||
fn create_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span) -> Value {
|
||||
fn create_sqlite_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span) -> Value {
|
||||
//1. Format all the values
|
||||
//2. Create a record of either short or long columns and values
|
||||
|
||||
@ -162,11 +153,8 @@ fn create_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span)
|
||||
.unwrap_or_default(),
|
||||
head,
|
||||
);
|
||||
let start_timestamp_value = Value::string(
|
||||
entry
|
||||
.start_timestamp
|
||||
.map(|time| time.to_string())
|
||||
.unwrap_or_default(),
|
||||
let start_timestamp_value = Value::date(
|
||||
entry.start_timestamp.unwrap_or_default().fixed_offset(),
|
||||
head,
|
||||
);
|
||||
let command_value = Value::string(entry.command_line, head);
|
||||
@ -192,13 +180,13 @@ fn create_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span)
|
||||
Value::record(
|
||||
record! {
|
||||
"item_id" => item_id_value,
|
||||
"start_timestamp" => start_timestamp_value,
|
||||
"command" => command_value,
|
||||
"session_id" => session_id_value,
|
||||
"hostname" => hostname_value,
|
||||
"cwd" => cwd_value,
|
||||
"duration" => duration_value,
|
||||
"exit_status" => exit_status_value,
|
||||
fields::START_TIMESTAMP => start_timestamp_value,
|
||||
fields::COMMAND_LINE => command_value,
|
||||
fields::SESSION_ID => session_id_value,
|
||||
fields::HOSTNAME => hostname_value,
|
||||
fields::CWD => cwd_value,
|
||||
fields::DURATION => duration_value,
|
||||
fields::EXIT_STATUS => exit_status_value,
|
||||
"idx" => index_value,
|
||||
},
|
||||
head,
|
||||
@ -206,11 +194,11 @@ fn create_history_record(idx: usize, entry: HistoryItem, long: bool, head: Span)
|
||||
} else {
|
||||
Value::record(
|
||||
record! {
|
||||
"start_timestamp" => start_timestamp_value,
|
||||
"command" => command_value,
|
||||
"cwd" => cwd_value,
|
||||
"duration" => duration_value,
|
||||
"exit_status" => exit_status_value,
|
||||
fields::START_TIMESTAMP => start_timestamp_value,
|
||||
fields::COMMAND_LINE => command_value,
|
||||
fields::CWD => cwd_value,
|
||||
fields::DURATION => duration_value,
|
||||
fields::EXIT_STATUS => exit_status_value,
|
||||
},
|
||||
head,
|
||||
)
|
||||
|
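To connect the flags handled above to everyday use, a short example (both flags appear in the command's signature handling):

```nu
# Show history with the extra columns; only the sqlite backend records the long-form fields.
history --long

# Remove the history file; auxiliary sqlite files are not yet cleared (see the TODO above).
history --clear
```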
crates/nu-cli/src/commands/history/history_import.rs (new file, 440 lines)
@ -0,0 +1,440 @@
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_protocol::{
|
||||
HistoryFileFormat,
|
||||
shell_error::{self, io::IoError},
|
||||
};
|
||||
|
||||
use reedline::{
|
||||
FileBackedHistory, History, HistoryItem, ReedlineError, SearchQuery, SqliteBackedHistory,
|
||||
};
|
||||
|
||||
use super::fields;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct HistoryImport;
|
||||
|
||||
impl Command for HistoryImport {
|
||||
fn name(&self) -> &str {
|
||||
"history import"
|
||||
}
|
||||
|
||||
fn description(&self) -> &str {
|
||||
"Import command line history."
|
||||
}
|
||||
|
||||
fn extra_description(&self) -> &str {
|
||||
r#"Can import history from input, either successive command lines or more detailed records. If providing records, available fields are:
|
||||
command, start_timestamp, hostname, cwd, duration, exit_status.
|
||||
|
||||
If no input is provided, will import all history items from existing history in the other format: if current history is stored in sqlite, it will store it in plain text and vice versa.
|
||||
|
||||
Note that history item IDs are ignored when importing from file."#
|
||||
}
|
||||
|
||||
fn signature(&self) -> nu_protocol::Signature {
|
||||
Signature::build("history import")
|
||||
.category(Category::History)
|
||||
.input_output_types(vec![
|
||||
(Type::Nothing, Type::Nothing),
|
||||
(Type::String, Type::Nothing),
|
||||
(Type::List(Box::new(Type::String)), Type::Nothing),
|
||||
(Type::table(), Type::Nothing),
|
||||
])
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![
|
||||
Example {
|
||||
example: "history import",
|
||||
description: "Append all items from history in the other format to the current history",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
example: "echo foo | history import",
|
||||
description: "Append `foo` to the current history",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
example: "[[ command_line cwd ]; [ foo /home ]] | history import",
|
||||
description: "Append `foo` ran from `/home` to the current history",
|
||||
result: None,
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
_stack: &mut Stack,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let span = call.head;
|
||||
let ok = Ok(Value::nothing(call.head).into_pipeline_data());
|
||||
|
||||
let Some(history) = engine_state.history_config() else {
|
||||
return ok;
|
||||
};
|
||||
let Some(current_history_path) = history.file_path() else {
|
||||
return Err(ShellError::ConfigDirNotFound { span: span.into() });
|
||||
};
|
||||
if let Some(bak_path) = backup(¤t_history_path, span)? {
|
||||
println!("Backed history to {}", bak_path.display());
|
||||
}
|
||||
match input {
|
||||
PipelineData::Empty => {
|
||||
let other_format = match history.file_format {
|
||||
HistoryFileFormat::Sqlite => HistoryFileFormat::Plaintext,
|
||||
HistoryFileFormat::Plaintext => HistoryFileFormat::Sqlite,
|
||||
};
|
||||
let src = new_backend(other_format, None)?;
|
||||
let mut dst = new_backend(history.file_format, Some(current_history_path))?;
|
||||
let items = src
|
||||
.search(SearchQuery::everything(
|
||||
reedline::SearchDirection::Forward,
|
||||
None,
|
||||
))
|
||||
.map_err(error_from_reedline)?
|
||||
.into_iter()
|
||||
.map(Ok);
|
||||
import(dst.as_mut(), items)
|
||||
}
|
||||
_ => {
|
||||
let input = input.into_iter().map(item_from_value);
|
||||
import(
|
||||
new_backend(history.file_format, Some(current_history_path))?.as_mut(),
|
||||
input,
|
||||
)
|
||||
}
|
||||
}?;
|
||||
|
||||
ok
|
||||
}
|
||||
}
|
||||
|
||||
fn new_backend(
|
||||
format: HistoryFileFormat,
|
||||
path: Option<PathBuf>,
|
||||
) -> Result<Box<dyn History>, ShellError> {
|
||||
let path = match path {
|
||||
Some(path) => path,
|
||||
None => {
|
||||
let Some(mut path) = nu_path::nu_config_dir() else {
|
||||
return Err(ShellError::ConfigDirNotFound { span: None });
|
||||
};
|
||||
path.push(format.default_file_name());
|
||||
path.into_std_path_buf()
|
||||
}
|
||||
};
|
||||
|
||||
fn map(
|
||||
result: Result<impl History + 'static, ReedlineError>,
|
||||
) -> Result<Box<dyn History>, ShellError> {
|
||||
result
|
||||
.map(|x| Box::new(x) as Box<dyn History>)
|
||||
.map_err(error_from_reedline)
|
||||
}
|
||||
match format {
|
||||
// Use a reasonably large value for maximum capacity.
|
||||
HistoryFileFormat::Plaintext => map(FileBackedHistory::with_file(0xfffffff, path)),
|
||||
HistoryFileFormat::Sqlite => map(SqliteBackedHistory::with_file(path, None, None)),
|
||||
}
|
||||
}
|
||||
|
||||
fn import(
|
||||
dst: &mut dyn History,
|
||||
src: impl Iterator<Item = Result<HistoryItem, ShellError>>,
|
||||
) -> Result<(), ShellError> {
|
||||
for item in src {
|
||||
let mut item = item?;
|
||||
item.id = None;
|
||||
dst.save(item).map_err(error_from_reedline)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn error_from_reedline(e: ReedlineError) -> ShellError {
|
||||
// TODO: Should we add a new ShellError variant?
|
||||
ShellError::GenericError {
|
||||
error: "Reedline error".to_owned(),
|
||||
msg: format!("{e}"),
|
||||
span: None,
|
||||
help: None,
|
||||
inner: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
fn item_from_value(v: Value) -> Result<HistoryItem, ShellError> {
|
||||
let span = v.span();
|
||||
match v {
|
||||
Value::Record { val, .. } => item_from_record(val.into_owned(), span),
|
||||
Value::String { val, .. } => Ok(HistoryItem {
|
||||
command_line: val,
|
||||
id: None,
|
||||
start_timestamp: None,
|
||||
session_id: None,
|
||||
hostname: None,
|
||||
cwd: None,
|
||||
duration: None,
|
||||
exit_status: None,
|
||||
more_info: None,
|
||||
}),
|
||||
_ => Err(ShellError::UnsupportedInput {
|
||||
msg: "Only list and record inputs are supported".to_owned(),
|
||||
input: v.get_type().to_string(),
|
||||
msg_span: span,
|
||||
input_span: span,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn item_from_record(mut rec: Record, span: Span) -> Result<HistoryItem, ShellError> {
|
||||
let cmd = match rec.remove(fields::COMMAND_LINE) {
|
||||
Some(v) => v.as_str()?.to_owned(),
|
||||
None => {
|
||||
return Err(ShellError::TypeMismatch {
|
||||
err_message: format!("missing column: {}", fields::COMMAND_LINE),
|
||||
span,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
fn get<T>(
|
||||
rec: &mut Record,
|
||||
field: &'static str,
|
||||
f: impl FnOnce(Value) -> Result<T, ShellError>,
|
||||
) -> Result<Option<T>, ShellError> {
|
||||
rec.remove(field).map(f).transpose()
|
||||
}
|
||||
|
||||
let rec = &mut rec;
|
||||
let item = HistoryItem {
|
||||
command_line: cmd,
|
||||
id: None,
|
||||
start_timestamp: get(rec, fields::START_TIMESTAMP, |v| Ok(v.as_date()?.to_utc()))?,
|
||||
hostname: get(rec, fields::HOSTNAME, |v| Ok(v.as_str()?.to_owned()))?,
|
||||
cwd: get(rec, fields::CWD, |v| Ok(v.as_str()?.to_owned()))?,
|
||||
exit_status: get(rec, fields::EXIT_STATUS, |v| v.as_int())?,
|
||||
duration: get(rec, fields::DURATION, |v| duration_from_value(v, span))?,
|
||||
more_info: None,
|
||||
// TODO: Currently reedline doesn't let you create session IDs.
|
||||
session_id: None,
|
||||
};
|
||||
|
||||
if !rec.is_empty() {
|
||||
let cols = rec.columns().map(|s| s.as_str()).collect::<Vec<_>>();
|
||||
return Err(ShellError::TypeMismatch {
|
||||
err_message: format!("unsupported column names: {}", cols.join(", ")),
|
||||
span,
|
||||
});
|
||||
}
|
||||
Ok(item)
|
||||
}
|
||||
|
||||
fn duration_from_value(v: Value, span: Span) -> Result<std::time::Duration, ShellError> {
|
||||
chrono::Duration::nanoseconds(v.as_duration()?)
|
||||
.to_std()
|
||||
.map_err(|_| ShellError::NeedsPositiveValue { span })
|
||||
}
|
||||
|
||||
fn find_backup_path(path: &Path, span: Span) -> Result<PathBuf, ShellError> {
|
||||
let Ok(mut bak_path) = path.to_path_buf().into_os_string().into_string() else {
|
||||
// This isn't fundamentally problem, but trying to work with OsString is a nightmare.
|
||||
return Err(ShellError::GenericError {
|
||||
error: "History path not UTF-8".to_string(),
|
||||
msg: "History path must be representable as UTF-8".to_string(),
|
||||
span: Some(span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
});
|
||||
};
|
||||
bak_path.push_str(".bak");
|
||||
if !Path::new(&bak_path).exists() {
|
||||
return Ok(bak_path.into());
|
||||
}
|
||||
let base_len = bak_path.len();
|
||||
for i in 1..100 {
|
||||
use std::fmt::Write;
|
||||
bak_path.truncate(base_len);
|
||||
write!(&mut bak_path, ".{i}").unwrap();
|
||||
if !Path::new(&bak_path).exists() {
|
||||
return Ok(PathBuf::from(bak_path));
|
||||
}
|
||||
}
|
||||
Err(ShellError::GenericError {
|
||||
error: "Too many backup files".to_string(),
|
||||
msg: "Found too many existing backup files".to_string(),
|
||||
span: Some(span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})
|
||||
}
|
||||
|
||||
fn backup(path: &Path, span: Span) -> Result<Option<PathBuf>, ShellError> {
|
||||
match path.metadata() {
|
||||
Ok(md) if md.is_file() => (),
|
||||
Ok(_) => {
|
||||
return Err(IoError::new_with_additional_context(
|
||||
shell_error::io::ErrorKind::NotAFile,
|
||||
span,
|
||||
PathBuf::from(path),
|
||||
"history path exists but is not a file",
|
||||
)
|
||||
.into());
|
||||
}
|
||||
Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(None),
|
||||
Err(e) => {
|
||||
return Err(IoError::new_internal(
|
||||
e,
|
||||
"Could not get metadata",
|
||||
nu_protocol::location!(),
|
||||
)
|
||||
.into());
|
||||
}
|
||||
}
|
||||
let bak_path = find_backup_path(path, span)?;
|
||||
std::fs::copy(path, &bak_path).map_err(|err| {
|
||||
IoError::new_internal(
|
||||
err.not_found_as(NotFound::File),
|
||||
"Could not copy backup",
|
||||
nu_protocol::location!(),
|
||||
)
|
||||
})?;
|
||||
Ok(Some(bak_path))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use chrono::DateTime;
|
||||
use rstest::rstest;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_item_from_value_string() -> Result<(), ShellError> {
|
||||
let item = item_from_value(Value::string("foo", Span::unknown()))?;
|
||||
assert_eq!(
|
||||
item,
|
||||
HistoryItem {
|
||||
command_line: "foo".to_string(),
|
||||
id: None,
|
||||
start_timestamp: None,
|
||||
session_id: None,
|
||||
hostname: None,
|
||||
cwd: None,
|
||||
duration: None,
|
||||
exit_status: None,
|
||||
more_info: None
|
||||
}
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_item_from_value_record() {
|
||||
let span = Span::unknown();
|
||||
let rec = new_record(&[
|
||||
("command", Value::string("foo", span)),
|
||||
(
|
||||
"start_timestamp",
|
||||
Value::date(
|
||||
DateTime::parse_from_rfc3339("1996-12-19T16:39:57-08:00").unwrap(),
|
||||
span,
|
||||
),
|
||||
),
|
||||
("hostname", Value::string("localhost", span)),
|
||||
("cwd", Value::string("/home/test", span)),
|
||||
("duration", Value::duration(100_000_000, span)),
|
||||
("exit_status", Value::int(42, span)),
|
||||
]);
|
||||
let item = item_from_value(rec).unwrap();
|
||||
assert_eq!(
|
||||
item,
|
||||
HistoryItem {
|
||||
command_line: "foo".to_string(),
|
||||
id: None,
|
||||
start_timestamp: Some(
|
||||
DateTime::parse_from_rfc3339("1996-12-19T16:39:57-08:00")
|
||||
.unwrap()
|
||||
.to_utc()
|
||||
),
|
||||
hostname: Some("localhost".to_string()),
|
||||
cwd: Some("/home/test".to_string()),
|
||||
duration: Some(std::time::Duration::from_nanos(100_000_000)),
|
||||
exit_status: Some(42),
|
||||
|
||||
session_id: None,
|
||||
more_info: None
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_item_from_value_record_extra_field() {
|
||||
let span = Span::unknown();
|
||||
let rec = new_record(&[
|
||||
("command_line", Value::string("foo", span)),
|
||||
("id_nonexistent", Value::int(1, span)),
|
||||
]);
|
||||
assert!(item_from_value(rec).is_err());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_item_from_value_record_bad_type() {
|
||||
let span = Span::unknown();
|
||||
let rec = new_record(&[
|
||||
("command_line", Value::string("foo", span)),
|
||||
("id", Value::string("one".to_string(), span)),
|
||||
]);
|
||||
assert!(item_from_value(rec).is_err());
|
||||
}
|
||||
|
||||
fn new_record(rec: &[(&'static str, Value)]) -> Value {
|
||||
let span = Span::unknown();
|
||||
let rec = Record::from_raw_cols_vals(
|
||||
rec.iter().map(|(col, _)| col.to_string()).collect(),
|
||||
rec.iter().map(|(_, val)| val.clone()).collect(),
|
||||
span,
|
||||
span,
|
||||
)
|
||||
.unwrap();
|
||||
Value::record(rec, span)
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[case::no_backup(&["history.dat"], "history.dat.bak")]
|
||||
#[case::backup_exists(&["history.dat", "history.dat.bak"], "history.dat.bak.1")]
|
||||
#[case::multiple_backups_exists( &["history.dat", "history.dat.bak", "history.dat.bak.1"], "history.dat.bak.2")]
|
||||
fn test_find_backup_path(#[case] existing: &[&str], #[case] want: &str) {
|
||||
let dir = tempfile::tempdir().unwrap();
|
||||
for name in existing {
|
||||
std::fs::File::create_new(dir.path().join(name)).unwrap();
|
||||
}
|
||||
let got = find_backup_path(&dir.path().join("history.dat"), Span::test_data()).unwrap();
|
||||
assert_eq!(got, dir.path().join(want))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_backup() {
|
||||
let dir = tempfile::tempdir().unwrap();
|
||||
let mut history = std::fs::File::create_new(dir.path().join("history.dat")).unwrap();
|
||||
use std::io::Write;
|
||||
write!(&mut history, "123").unwrap();
|
||||
let want_bak_path = dir.path().join("history.dat.bak");
|
||||
assert_eq!(
|
||||
backup(&dir.path().join("history.dat"), Span::test_data()),
|
||||
Ok(Some(want_bak_path.clone()))
|
||||
);
|
||||
let got_data = String::from_utf8(std::fs::read(want_bak_path).unwrap()).unwrap();
|
||||
assert_eq!(got_data, "123");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_backup_no_file() {
|
||||
let dir = tempfile::tempdir().unwrap();
|
||||
let bak_path = backup(&dir.path().join("history.dat"), Span::test_data()).unwrap();
|
||||
assert!(bak_path.is_none());
|
||||
}
|
||||
}
|
@ -1,5 +1,8 @@
|
||||
mod fields;
|
||||
mod history_;
|
||||
mod history_import;
|
||||
mod history_session;
|
||||
|
||||
pub use history_::History;
|
||||
pub use history_import::HistoryImport;
|
||||
pub use history_session::HistorySession;
|
||||
|
@ -1,8 +1,9 @@
|
||||
use crossterm::{
|
||||
event::Event, event::KeyCode, event::KeyEvent, execute, terminal, QueueableCommand,
|
||||
QueueableCommand, event::Event, event::KeyCode, event::KeyEvent, execute, terminal,
|
||||
};
|
||||
use nu_engine::command_prelude::*;
|
||||
use std::io::{stdout, Write};
|
||||
use nu_protocol::shell_error::io::IoError;
|
||||
use std::io::{Write, stdout};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct KeybindingsListen;
|
||||
@ -39,7 +40,13 @@ impl Command for KeybindingsListen {
|
||||
match print_events(engine_state) {
|
||||
Ok(v) => Ok(v.into_pipeline_data()),
|
||||
Err(e) => {
|
||||
terminal::disable_raw_mode()?;
|
||||
terminal::disable_raw_mode().map_err(|err| {
|
||||
IoError::new_internal(
|
||||
err,
|
||||
"Could not disable raw mode",
|
||||
nu_protocol::location!(),
|
||||
)
|
||||
})?;
|
||||
Err(ShellError::GenericError {
|
||||
error: "Error with input".into(),
|
||||
msg: "".into(),
|
||||
@ -63,8 +70,12 @@ impl Command for KeybindingsListen {
|
||||
pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
|
||||
let config = engine_state.get_config();
|
||||
|
||||
stdout().flush()?;
|
||||
terminal::enable_raw_mode()?;
|
||||
stdout().flush().map_err(|err| {
|
||||
IoError::new_internal(err, "Could not flush stdout", nu_protocol::location!())
|
||||
})?;
|
||||
terminal::enable_raw_mode().map_err(|err| {
|
||||
IoError::new_internal(err, "Could not enable raw mode", nu_protocol::location!())
|
||||
})?;
|
||||
|
||||
if config.use_kitty_protocol {
|
||||
if let Ok(false) = crossterm::terminal::supports_keyboard_enhancement() {
|
||||
@ -94,7 +105,9 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
|
||||
let mut stdout = std::io::BufWriter::new(std::io::stderr());
|
||||
|
||||
loop {
|
||||
let event = crossterm::event::read()?;
|
||||
let event = crossterm::event::read().map_err(|err| {
|
||||
IoError::new_internal(err, "Could not read event", nu_protocol::location!())
|
||||
})?;
|
||||
if event == Event::Key(KeyCode::Esc.into()) {
|
||||
break;
|
||||
}
|
||||
@ -113,9 +126,21 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
|
||||
|
||||
_ => "".to_string(),
|
||||
};
|
||||
stdout.queue(crossterm::style::Print(o))?;
|
||||
stdout.queue(crossterm::style::Print("\r\n"))?;
|
||||
stdout.flush()?;
|
||||
stdout.queue(crossterm::style::Print(o)).map_err(|err| {
|
||||
IoError::new_internal(
|
||||
err,
|
||||
"Could not print output record",
|
||||
nu_protocol::location!(),
|
||||
)
|
||||
})?;
|
||||
stdout
|
||||
.queue(crossterm::style::Print("\r\n"))
|
||||
.map_err(|err| {
|
||||
IoError::new_internal(err, "Could not print linebreak", nu_protocol::location!())
|
||||
})?;
|
||||
stdout.flush().map_err(|err| {
|
||||
IoError::new_internal(err, "Could not flush", nu_protocol::location!())
|
||||
})?;
|
||||
}
|
||||
|
||||
if config.use_kitty_protocol {
|
||||
@ -125,7 +150,9 @@ pub fn print_events(engine_state: &EngineState) -> Result<Value, ShellError> {
|
||||
);
|
||||
}
|
||||
|
||||
terminal::disable_raw_mode()?;
|
||||
terminal::disable_raw_mode().map_err(|err| {
|
||||
IoError::new_internal(err, "Could not disable raw mode", nu_protocol::location!())
|
||||
})?;
|
||||
|
||||
Ok(Value::nothing(Span::unknown()))
|
||||
}
|
||||
|
@ -7,7 +7,7 @@ mod keybindings_list;
|
||||
mod keybindings_listen;
|
||||
|
||||
pub use commandline::{Commandline, CommandlineEdit, CommandlineGetCursor, CommandlineSetCursor};
|
||||
pub use history::{History, HistorySession};
|
||||
pub use history::{History, HistoryImport, HistorySession};
|
||||
pub use keybindings::Keybindings;
|
||||
pub use keybindings_default::KeybindingsDefault;
|
||||
pub use keybindings_list::KeybindingsList;
|
||||
|
85
crates/nu-cli/src/completions/attribute_completions.rs
Normal file
85
crates/nu-cli/src/completions/attribute_completions.rs
Normal file
@ -0,0 +1,85 @@
|
||||
use super::{SemanticSuggestion, completion_options::NuMatcher};
|
||||
use crate::{
|
||||
SuggestionKind,
|
||||
completions::{Completer, CompletionOptions},
|
||||
};
|
||||
use nu_protocol::{
|
||||
Span,
|
||||
engine::{Stack, StateWorkingSet},
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
|
||||
pub struct AttributeCompletion;
|
||||
pub struct AttributableCompletion;
|
||||
|
||||
impl Completer for AttributeCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
_stack: &Stack,
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let mut matcher = NuMatcher::new(prefix, options);
|
||||
|
||||
let attr_commands =
|
||||
working_set.find_commands_by_predicate(|s| s.starts_with(b"attr "), true);
|
||||
|
||||
for (decl_id, name, desc, ty) in attr_commands {
|
||||
let name = name.strip_prefix(b"attr ").unwrap_or(&name);
|
||||
matcher.add_semantic_suggestion(SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: String::from_utf8_lossy(name).into_owned(),
|
||||
description: desc,
|
||||
span: reedline::Span {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
},
|
||||
append_whitespace: false,
|
||||
..Default::default()
|
||||
},
|
||||
kind: Some(SuggestionKind::Command(ty, Some(decl_id))),
|
||||
});
|
||||
}
|
||||
|
||||
matcher.results()
|
||||
}
|
||||
}
|
||||
|
||||
impl Completer for AttributableCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
_stack: &Stack,
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let mut matcher = NuMatcher::new(prefix, options);
|
||||
|
||||
for s in ["def", "extern", "export def", "export extern"] {
|
||||
let decl_id = working_set
|
||||
.find_decl(s.as_bytes())
|
||||
.expect("internal error, builtin declaration not found");
|
||||
let cmd = working_set.get_decl(decl_id);
|
||||
matcher.add_semantic_suggestion(SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: cmd.name().into(),
|
||||
description: Some(cmd.description().into()),
|
||||
span: reedline::Span {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
},
|
||||
append_whitespace: false,
|
||||
..Default::default()
|
||||
},
|
||||
kind: Some(SuggestionKind::Command(cmd.command_type(), None)),
|
||||
});
|
||||
}
|
||||
|
||||
matcher.results()
|
||||
}
|
||||
}
|
@ -1,7 +1,7 @@
|
||||
use crate::completions::CompletionOptions;
|
||||
use nu_protocol::{
|
||||
DeclId, Span,
|
||||
engine::{Stack, StateWorkingSet},
|
||||
Span,
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
|
||||
@ -12,10 +12,9 @@ pub trait Completer {
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
stack: &Stack,
|
||||
prefix: Vec<u8>,
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
pos: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion>;
|
||||
}
|
||||
@ -29,8 +28,15 @@ pub struct SemanticSuggestion {
|
||||
// TODO: think about name: maybe suggestion context?
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub enum SuggestionKind {
|
||||
Command(nu_protocol::engine::CommandType),
|
||||
Type(nu_protocol::Type),
|
||||
Command(nu_protocol::engine::CommandType, Option<DeclId>),
|
||||
Value(nu_protocol::Type),
|
||||
CellPath,
|
||||
Directory,
|
||||
File,
|
||||
Flag,
|
||||
Module,
|
||||
Operator,
|
||||
Variable,
|
||||
}
|
||||
|
||||
impl From<Suggestion> for SemanticSuggestion {
|
||||
|
153
crates/nu-cli/src/completions/cell_path_completions.rs
Normal file
153
crates/nu-cli/src/completions/cell_path_completions.rs
Normal file
@ -0,0 +1,153 @@
|
||||
use std::borrow::Cow;
|
||||
|
||||
use crate::completions::{Completer, CompletionOptions, SemanticSuggestion, SuggestionKind};
|
||||
use nu_engine::{column::get_columns, eval_variable};
|
||||
use nu_protocol::{
|
||||
ShellError, Span, Value,
|
||||
ast::{Expr, Expression, FullCellPath, PathMember},
|
||||
engine::{Stack, StateWorkingSet},
|
||||
eval_const::eval_constant,
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
|
||||
use super::completion_options::NuMatcher;
|
||||
|
||||
pub struct CellPathCompletion<'a> {
|
||||
pub full_cell_path: &'a FullCellPath,
|
||||
pub position: usize,
|
||||
}
|
||||
|
||||
fn prefix_from_path_member(member: &PathMember, pos: usize) -> (String, Span) {
|
||||
let (prefix_str, start) = match member {
|
||||
PathMember::String { val, span, .. } => (val, span.start),
|
||||
PathMember::Int { val, span, .. } => (&val.to_string(), span.start),
|
||||
};
|
||||
let prefix_str = prefix_str.get(..pos + 1 - start).unwrap_or(prefix_str);
|
||||
// strip wrapping quotes
|
||||
let quotations = ['"', '\'', '`'];
|
||||
let prefix_str = prefix_str.strip_prefix(quotations).unwrap_or(prefix_str);
|
||||
(prefix_str.to_string(), Span::new(start, pos + 1))
|
||||
}
|
||||
|
||||
impl Completer for CellPathCompletion<'_> {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
stack: &Stack,
|
||||
_prefix: impl AsRef<str>,
|
||||
_span: Span,
|
||||
offset: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let mut prefix_str = String::new();
|
||||
// position at dots, e.g. `$env.config.<TAB>`
|
||||
let mut span = Span::new(self.position + 1, self.position + 1);
|
||||
let mut path_member_num_before_pos = 0;
|
||||
for member in self.full_cell_path.tail.iter() {
|
||||
if member.span().end <= self.position {
|
||||
path_member_num_before_pos += 1;
|
||||
} else if member.span().contains(self.position) {
|
||||
(prefix_str, span) = prefix_from_path_member(member, self.position);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let current_span = reedline::Span {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
};
|
||||
|
||||
let mut matcher = NuMatcher::new(prefix_str, options);
|
||||
let path_members = self
|
||||
.full_cell_path
|
||||
.tail
|
||||
.get(0..path_member_num_before_pos)
|
||||
.unwrap_or_default();
|
||||
let value = eval_cell_path(
|
||||
working_set,
|
||||
stack,
|
||||
&self.full_cell_path.head,
|
||||
path_members,
|
||||
span,
|
||||
)
|
||||
.unwrap_or_default();
|
||||
|
||||
for suggestion in get_suggestions_by_value(&value, current_span) {
|
||||
matcher.add_semantic_suggestion(suggestion);
|
||||
}
|
||||
matcher.results()
|
||||
}
|
||||
}
|
||||
|
||||
/// Follow cell path to get the value
|
||||
/// NOTE: This is a relatively lightweight implementation,
|
||||
/// so it may fail to get the exact value when the expression is complicated.
|
||||
/// One failing example would be `[$foo].0`
|
||||
pub(crate) fn eval_cell_path(
|
||||
working_set: &StateWorkingSet,
|
||||
stack: &Stack,
|
||||
head: &Expression,
|
||||
path_members: &[PathMember],
|
||||
span: Span,
|
||||
) -> Result<Value, ShellError> {
|
||||
// evaluate the head expression to get its value
|
||||
let head_value = if let Expr::Var(var_id) = head.expr {
|
||||
working_set
|
||||
.get_variable(var_id)
|
||||
.const_val
|
||||
.to_owned()
|
||||
.map_or_else(
|
||||
|| eval_variable(working_set.permanent_state, stack, var_id, span),
|
||||
Ok,
|
||||
)
|
||||
} else {
|
||||
eval_constant(working_set, head)
|
||||
}?;
|
||||
head_value
|
||||
.follow_cell_path(path_members)
|
||||
.map(Cow::into_owned)
|
||||
}
|
||||
|
||||
fn get_suggestions_by_value(
|
||||
value: &Value,
|
||||
current_span: reedline::Span,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let to_suggestion = |s: String, v: Option<&Value>| {
|
||||
// Check if the string needs quoting
|
||||
let value = if s.is_empty()
|
||||
|| s.chars()
|
||||
.any(|c: char| !(c.is_ascii_alphabetic() || ['_', '-'].contains(&c)))
|
||||
{
|
||||
format!("{:?}", s)
|
||||
} else {
|
||||
s
|
||||
};
|
||||
|
||||
SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value,
|
||||
span: current_span,
|
||||
description: v.map(|v| v.get_type().to_string()),
|
||||
..Suggestion::default()
|
||||
},
|
||||
kind: Some(SuggestionKind::CellPath),
|
||||
}
|
||||
};
|
||||
match value {
|
||||
Value::Record { val, .. } => val
|
||||
.columns()
|
||||
.map(|s| to_suggestion(s.to_string(), val.get(s)))
|
||||
.collect(),
|
||||
Value::List { vals, .. } => get_columns(vals.as_slice())
|
||||
.into_iter()
|
||||
.map(|s| {
|
||||
let sub_val = vals
|
||||
.first()
|
||||
.and_then(|v| v.as_record().ok())
|
||||
.and_then(|rv| rv.get(&s));
|
||||
to_suggestion(s, sub_val)
|
||||
})
|
||||
.collect(),
|
||||
_ => vec![],
|
||||
}
|
||||
}
|
@ -1,47 +1,37 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use crate::{
|
||||
completions::{Completer, CompletionOptions, MatchAlgorithm},
|
||||
SuggestionKind,
|
||||
completions::{Completer, CompletionOptions},
|
||||
};
|
||||
use nu_parser::FlatShape;
|
||||
use nu_protocol::{
|
||||
engine::{CachedFile, Stack, StateWorkingSet},
|
||||
Span,
|
||||
engine::{CommandType, Stack, StateWorkingSet},
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
|
||||
use super::{completion_common::sort_suggestions, SemanticSuggestion};
|
||||
use super::{SemanticSuggestion, completion_options::NuMatcher};
|
||||
|
||||
pub struct CommandCompletion {
|
||||
flattened: Vec<(Span, FlatShape)>,
|
||||
flat_shape: FlatShape,
|
||||
force_completion_after_space: bool,
|
||||
/// Whether to include internal commands
|
||||
pub internals: bool,
|
||||
/// Whether to include external commands
|
||||
pub externals: bool,
|
||||
}
|
||||
|
||||
impl CommandCompletion {
|
||||
pub fn new(
|
||||
flattened: Vec<(Span, FlatShape)>,
|
||||
flat_shape: FlatShape,
|
||||
force_completion_after_space: bool,
|
||||
) -> Self {
|
||||
Self {
|
||||
flattened,
|
||||
flat_shape,
|
||||
force_completion_after_space,
|
||||
}
|
||||
}
|
||||
|
||||
fn external_command_completion(
|
||||
&self,
|
||||
working_set: &StateWorkingSet,
|
||||
prefix: &str,
|
||||
match_algorithm: MatchAlgorithm,
|
||||
) -> Vec<String> {
|
||||
let mut executables = vec![];
|
||||
sugg_span: reedline::Span,
|
||||
matched_internal: impl Fn(&str) -> bool,
|
||||
matcher: &mut NuMatcher<String>,
|
||||
) -> HashMap<String, SemanticSuggestion> {
|
||||
let mut suggs = HashMap::new();
|
||||
|
||||
// os agnostic way to get the PATH env var
|
||||
let paths = working_set.permanent_state.get_path_env_var();
|
||||
let paths = working_set.permanent_state.get_env_var_insensitive("path");
|
||||
|
||||
if let Some(paths) = paths {
|
||||
if let Some((_, paths)) = paths {
|
||||
if let Ok(paths) = paths.as_list() {
|
||||
for path in paths {
|
||||
let path = path.coerce_str().unwrap_or_default();
|
||||
@ -54,24 +44,43 @@ impl CommandCompletion {
|
||||
.completions
|
||||
.external
|
||||
.max_results
|
||||
> executables.len() as i64
|
||||
&& !executables.contains(
|
||||
&item
|
||||
.path()
|
||||
.file_name()
|
||||
.map(|x| x.to_string_lossy().to_string())
|
||||
.unwrap_or_default(),
|
||||
)
|
||||
&& matches!(
|
||||
item.path().file_name().map(|x| match_algorithm
|
||||
.matches_str(&x.to_string_lossy(), prefix)),
|
||||
Some(true)
|
||||
)
|
||||
&& is_executable::is_executable(item.path())
|
||||
<= suggs.len() as i64
|
||||
{
|
||||
if let Ok(name) = item.file_name().into_string() {
|
||||
executables.push(name);
|
||||
}
|
||||
break;
|
||||
}
|
||||
let Ok(name) = item.file_name().into_string() else {
|
||||
continue;
|
||||
};
|
||||
let value = if matched_internal(&name) {
|
||||
format!("^{}", name)
|
||||
} else {
|
||||
name.clone()
|
||||
};
|
||||
if suggs.contains_key(&value) {
|
||||
continue;
|
||||
}
|
||||
// TODO: check name matching before a relative heavy IO involved
|
||||
// `is_executable` for performance consideration, should avoid
|
||||
// duplicated `match_aux` call for matched items in the future
|
||||
if matcher.matches(&name) && is_executable::is_executable(item.path()) {
|
||||
// If there's an internal command with the same name, adds ^cmd to the
|
||||
// matcher so that both the internal and external command are included
|
||||
matcher.add(&name, value.clone());
|
||||
suggs.insert(
|
||||
value.clone(),
|
||||
SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value,
|
||||
span: sugg_span,
|
||||
append_whitespace: true,
|
||||
..Default::default()
|
||||
},
|
||||
kind: Some(SuggestionKind::Command(
|
||||
CommandType::External,
|
||||
None,
|
||||
)),
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -79,77 +88,7 @@ impl CommandCompletion {
|
||||
}
|
||||
}
|
||||
|
||||
executables
|
||||
}
|
||||
|
||||
fn complete_commands(
|
||||
&self,
|
||||
working_set: &StateWorkingSet,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
find_externals: bool,
|
||||
match_algorithm: MatchAlgorithm,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let partial = working_set.get_span_contents(span);
|
||||
|
||||
let filter_predicate = |command: &[u8]| match_algorithm.matches_u8(command, partial);
|
||||
|
||||
let mut results = working_set
|
||||
.find_commands_by_predicate(filter_predicate, true)
|
||||
.into_iter()
|
||||
.map(move |x| SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: String::from_utf8_lossy(&x.0).to_string(),
|
||||
description: x.1,
|
||||
span: reedline::Span::new(span.start - offset, span.end - offset),
|
||||
append_whitespace: true,
|
||||
..Suggestion::default()
|
||||
},
|
||||
kind: Some(SuggestionKind::Command(x.2)),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let partial = working_set.get_span_contents(span);
|
||||
let partial = String::from_utf8_lossy(partial).to_string();
|
||||
|
||||
if find_externals {
|
||||
let results_external = self
|
||||
.external_command_completion(working_set, &partial, match_algorithm)
|
||||
.into_iter()
|
||||
.map(move |x| SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: x,
|
||||
span: reedline::Span::new(span.start - offset, span.end - offset),
|
||||
append_whitespace: true,
|
||||
..Suggestion::default()
|
||||
},
|
||||
// TODO: is there a way to create a test?
|
||||
kind: None,
|
||||
});
|
||||
|
||||
let results_strings: Vec<String> =
|
||||
results.iter().map(|x| x.suggestion.value.clone()).collect();
|
||||
|
||||
for external in results_external {
|
||||
if results_strings.contains(&external.suggestion.value) {
|
||||
results.push(SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: format!("^{}", external.suggestion.value),
|
||||
span: external.suggestion.span,
|
||||
append_whitespace: true,
|
||||
..Suggestion::default()
|
||||
},
|
||||
kind: external.kind,
|
||||
})
|
||||
} else {
|
||||
results.push(external)
|
||||
}
|
||||
}
|
||||
|
||||
results
|
||||
} else {
|
||||
results
|
||||
}
|
||||
suggs
|
||||
}
|
||||
}
|
||||
|
||||
@ -158,175 +97,62 @@ impl Completer for CommandCompletion {
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
_stack: &Stack,
|
||||
prefix: Vec<u8>,
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
pos: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let last = self
|
||||
.flattened
|
||||
.iter()
|
||||
.rev()
|
||||
.skip_while(|x| x.0.end > pos)
|
||||
.take_while(|x| {
|
||||
matches!(
|
||||
x.1,
|
||||
FlatShape::InternalCall(_)
|
||||
| FlatShape::External
|
||||
| FlatShape::ExternalArg
|
||||
| FlatShape::Literal
|
||||
| FlatShape::String
|
||||
)
|
||||
})
|
||||
.last();
|
||||
let mut matcher = NuMatcher::new(prefix, options);
|
||||
|
||||
// The last item here would be the earliest shape that could possible by part of this subcommand
|
||||
let subcommands = if let Some(last) = last {
|
||||
self.complete_commands(
|
||||
working_set,
|
||||
Span::new(last.0.start, pos),
|
||||
offset,
|
||||
false,
|
||||
options.match_algorithm,
|
||||
)
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
let sugg_span = reedline::Span::new(span.start - offset, span.end - offset);
|
||||
|
||||
if !subcommands.is_empty() {
|
||||
return sort_suggestions(&String::from_utf8_lossy(&prefix), subcommands, options);
|
||||
}
|
||||
|
||||
let config = working_set.get_config();
|
||||
let commands = if matches!(self.flat_shape, nu_parser::FlatShape::External)
|
||||
|| matches!(self.flat_shape, nu_parser::FlatShape::InternalCall(_))
|
||||
|| ((span.end - span.start) == 0)
|
||||
|| is_passthrough_command(working_set.delta.get_file_contents())
|
||||
{
|
||||
// we're in a gap or at a command
|
||||
if working_set.get_span_contents(span).is_empty() && !self.force_completion_after_space
|
||||
{
|
||||
return vec![];
|
||||
let mut internal_suggs = HashMap::new();
|
||||
if self.internals {
|
||||
let filtered_commands = working_set.find_commands_by_predicate(
|
||||
|name| {
|
||||
let name = String::from_utf8_lossy(name);
|
||||
matcher.add(&name, name.to_string())
|
||||
},
|
||||
true,
|
||||
);
|
||||
for (decl_id, name, description, typ) in filtered_commands {
|
||||
let name = String::from_utf8_lossy(&name);
|
||||
internal_suggs.insert(
|
||||
name.to_string(),
|
||||
SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: name.to_string(),
|
||||
description,
|
||||
span: sugg_span,
|
||||
append_whitespace: true,
|
||||
..Suggestion::default()
|
||||
},
|
||||
kind: Some(SuggestionKind::Command(typ, Some(decl_id))),
|
||||
},
|
||||
);
|
||||
}
|
||||
self.complete_commands(
|
||||
}
|
||||
|
||||
let mut external_suggs = if self.externals {
|
||||
self.external_command_completion(
|
||||
working_set,
|
||||
span,
|
||||
offset,
|
||||
config.completions.external.enable,
|
||||
options.match_algorithm,
|
||||
sugg_span,
|
||||
|name| internal_suggs.contains_key(name),
|
||||
&mut matcher,
|
||||
)
|
||||
} else {
|
||||
vec![]
|
||||
HashMap::new()
|
||||
};
|
||||
|
||||
sort_suggestions(&String::from_utf8_lossy(&prefix), commands, options)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn find_non_whitespace_index(contents: &[u8], start: usize) -> usize {
|
||||
match contents.get(start..) {
|
||||
Some(contents) => {
|
||||
contents
|
||||
.iter()
|
||||
.take_while(|x| x.is_ascii_whitespace())
|
||||
.count()
|
||||
+ start
|
||||
}
|
||||
None => start,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_passthrough_command(working_set_file_contents: &[CachedFile]) -> bool {
|
||||
for cached_file in working_set_file_contents {
|
||||
let contents = &cached_file.content;
|
||||
let last_pipe_pos_rev = contents.iter().rev().position(|x| x == &b'|');
|
||||
let last_pipe_pos = last_pipe_pos_rev.map(|x| contents.len() - x).unwrap_or(0);
|
||||
|
||||
let cur_pos = find_non_whitespace_index(contents, last_pipe_pos);
|
||||
|
||||
let result = match contents.get(cur_pos..) {
|
||||
Some(contents) => contents.starts_with(b"sudo ") || contents.starts_with(b"doas "),
|
||||
None => false,
|
||||
};
|
||||
if result {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
false
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod command_completions_tests {
|
||||
use super::*;
|
||||
use nu_protocol::engine::EngineState;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[test]
|
||||
fn test_find_non_whitespace_index() {
|
||||
let commands = [
|
||||
(" hello", 4),
|
||||
("sudo ", 0),
|
||||
(" sudo ", 2),
|
||||
(" sudo ", 2),
|
||||
(" hello ", 1),
|
||||
(" hello ", 3),
|
||||
(" hello | sudo ", 4),
|
||||
(" sudo|sudo", 5),
|
||||
("sudo | sudo ", 0),
|
||||
(" hello sud", 1),
|
||||
];
|
||||
for (idx, ele) in commands.iter().enumerate() {
|
||||
let index = find_non_whitespace_index(ele.0.as_bytes(), 0);
|
||||
assert_eq!(index, ele.1, "Failed on index {}", idx);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_is_last_command_passthrough() {
|
||||
let commands = [
|
||||
(" hello", false),
|
||||
(" sudo ", true),
|
||||
("sudo ", true),
|
||||
(" hello", false),
|
||||
(" sudo", false),
|
||||
(" sudo ", true),
|
||||
(" sudo ", true),
|
||||
(" sudo ", true),
|
||||
(" hello ", false),
|
||||
(" hello | sudo ", true),
|
||||
(" sudo|sudo", false),
|
||||
("sudo | sudo ", true),
|
||||
(" hello sud", false),
|
||||
(" sudo | sud ", false),
|
||||
(" sudo|sudo ", true),
|
||||
(" sudo | sudo ls | sudo ", true),
|
||||
];
|
||||
for (idx, ele) in commands.iter().enumerate() {
|
||||
let input = ele.0.as_bytes();
|
||||
|
||||
let mut engine_state = EngineState::new();
|
||||
engine_state.add_file("test.nu".into(), Arc::new([]));
|
||||
|
||||
let delta = {
|
||||
let mut working_set = StateWorkingSet::new(&engine_state);
|
||||
let _ = working_set.add_file("child.nu".into(), input);
|
||||
working_set.render()
|
||||
};
|
||||
|
||||
let result = engine_state.merge_delta(delta);
|
||||
assert!(
|
||||
result.is_ok(),
|
||||
"Merge delta has failed: {}",
|
||||
result.err().unwrap()
|
||||
);
|
||||
|
||||
let is_passthrough_command = is_passthrough_command(engine_state.get_file_contents());
|
||||
assert_eq!(
|
||||
is_passthrough_command, ele.1,
|
||||
"index for '{}': {}",
|
||||
ele.0, idx
|
||||
);
|
||||
}
|
||||
let mut res = Vec::new();
|
||||
for cmd_name in matcher.results() {
|
||||
if let Some(sugg) = internal_suggs
|
||||
.remove(&cmd_name)
|
||||
.or_else(|| external_suggs.remove(&cmd_name))
|
||||
{
|
||||
res.push(sugg);
|
||||
}
|
||||
}
|
||||
res
|
||||
}
|
||||
}
|
||||
|
File diff suppressed because it is too large
Load Diff
@ -1,22 +1,20 @@
|
||||
use super::MatchAlgorithm;
|
||||
use crate::{
|
||||
completions::{matches, CompletionOptions},
|
||||
SemanticSuggestion,
|
||||
};
|
||||
use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher};
|
||||
use super::{MatchAlgorithm, completion_options::NuMatcher};
|
||||
use crate::completions::CompletionOptions;
|
||||
use nu_ansi_term::Style;
|
||||
use nu_engine::env_to_string;
|
||||
use nu_path::dots::expand_ndots;
|
||||
use nu_path::{expand_to_real_path, home_dir};
|
||||
use nu_protocol::{
|
||||
Span,
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
CompletionSort, Span,
|
||||
};
|
||||
use nu_utils::IgnoreCaseExt;
|
||||
use nu_utils::get_ls_colors;
|
||||
use std::path::{is_separator, Component, Path, PathBuf, MAIN_SEPARATOR as SEP};
|
||||
use std::path::{Component, MAIN_SEPARATOR as SEP, Path, PathBuf, is_separator};
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct PathBuiltFromString {
|
||||
cwd: PathBuf,
|
||||
parts: Vec<String>,
|
||||
isdir: bool,
|
||||
}
|
||||
@ -24,90 +22,122 @@ pub struct PathBuiltFromString {
|
||||
/// Recursively goes through paths that match a given `partial`.
|
||||
/// built: State struct for a valid matching path built so far.
|
||||
///
|
||||
/// `want_directory`: Whether we want only directories as completion matches.
|
||||
/// Some commands like `cd` can only be run on directories whereas others
|
||||
/// like `ls` can be run on regular files as well.
|
||||
///
|
||||
/// `isdir`: whether the current partial path has a trailing slash.
|
||||
/// Parsing a path string into a pathbuf loses that bit of information.
|
||||
///
|
||||
/// want_directory: Whether we want only directories as completion matches.
|
||||
/// Some commands like `cd` can only be run on directories whereas others
|
||||
/// like `ls` can be run on regular files as well.
|
||||
pub fn complete_rec(
|
||||
/// `enable_exact_match`: Whether match algorithm is Prefix and all previous components
|
||||
/// of the path matched a directory exactly.
|
||||
fn complete_rec(
|
||||
partial: &[&str],
|
||||
built: &PathBuiltFromString,
|
||||
cwd: &Path,
|
||||
built_paths: &[PathBuiltFromString],
|
||||
options: &CompletionOptions,
|
||||
want_directory: bool,
|
||||
isdir: bool,
|
||||
enable_exact_match: bool,
|
||||
) -> Vec<PathBuiltFromString> {
|
||||
let mut completions = vec![];
|
||||
let has_more = !partial.is_empty() && (partial.len() > 1 || isdir);
|
||||
|
||||
if let Some((&base, rest)) = partial.split_first() {
|
||||
if base.chars().all(|c| c == '.') && (isdir || !rest.is_empty()) {
|
||||
let mut built = built.clone();
|
||||
built.parts.push(base.to_string());
|
||||
built.isdir = true;
|
||||
return complete_rec(rest, &built, cwd, options, want_directory, isdir);
|
||||
}
|
||||
}
|
||||
|
||||
let mut built_path = cwd.to_path_buf();
|
||||
for part in &built.parts {
|
||||
built_path.push(part);
|
||||
}
|
||||
|
||||
let Ok(result) = built_path.read_dir() else {
|
||||
return completions;
|
||||
};
|
||||
|
||||
let mut entries = Vec::new();
|
||||
for entry in result.filter_map(|e| e.ok()) {
|
||||
let entry_name = entry.file_name().to_string_lossy().into_owned();
|
||||
let entry_isdir = entry.path().is_dir();
|
||||
let mut built = built.clone();
|
||||
built.parts.push(entry_name.clone());
|
||||
built.isdir = entry_isdir;
|
||||
|
||||
if !want_directory || entry_isdir {
|
||||
entries.push((entry_name, built));
|
||||
if base.chars().all(|c| c == '.') && has_more {
|
||||
let built_paths: Vec<_> = built_paths
|
||||
.iter()
|
||||
.map(|built| {
|
||||
let mut built = built.clone();
|
||||
built.parts.push(base.to_string());
|
||||
built.isdir = true;
|
||||
built
|
||||
})
|
||||
.collect();
|
||||
return complete_rec(
|
||||
rest,
|
||||
&built_paths,
|
||||
options,
|
||||
want_directory,
|
||||
isdir,
|
||||
enable_exact_match,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let prefix = partial.first().unwrap_or(&"");
|
||||
let sorted_entries = sort_completions(prefix, entries, options, |(entry, _)| entry);
|
||||
let mut matcher = NuMatcher::new(prefix, options);
|
||||
|
||||
for (entry_name, built) in sorted_entries {
|
||||
match partial.split_first() {
|
||||
Some((base, rest)) => {
|
||||
if matches(base, &entry_name, options) {
|
||||
// We use `isdir` to confirm that the current component has
|
||||
// at least one next component or a slash.
|
||||
// Serves as confirmation to ignore longer completions for
|
||||
// components in between.
|
||||
if !rest.is_empty() || isdir {
|
||||
completions.extend(complete_rec(
|
||||
rest,
|
||||
&built,
|
||||
cwd,
|
||||
options,
|
||||
want_directory,
|
||||
isdir,
|
||||
));
|
||||
let mut exact_match = None;
|
||||
// Only relevant for case insensitive matching
|
||||
let mut multiple_exact_matches = false;
|
||||
for built in built_paths {
|
||||
let mut path = built.cwd.clone();
|
||||
for part in &built.parts {
|
||||
path.push(part);
|
||||
}
|
||||
|
||||
let Ok(result) = path.read_dir() else {
|
||||
continue;
|
||||
};
|
||||
|
||||
for entry in result.filter_map(|e| e.ok()) {
|
||||
let entry_name = entry.file_name().to_string_lossy().into_owned();
|
||||
let entry_isdir = entry.path().is_dir();
|
||||
let mut built = built.clone();
|
||||
built.parts.push(entry_name.clone());
|
||||
// Symlinks to directories shouldn't have a trailing slash (#13275)
|
||||
built.isdir = entry_isdir && !entry.path().is_symlink();
|
||||
|
||||
if !want_directory || entry_isdir {
|
||||
if enable_exact_match && !multiple_exact_matches && has_more {
|
||||
let matches = if options.case_sensitive {
|
||||
entry_name.eq(prefix)
|
||||
} else {
|
||||
completions.push(built);
|
||||
entry_name.eq_ignore_case(prefix)
|
||||
};
|
||||
if matches {
|
||||
if exact_match.is_none() {
|
||||
exact_match = Some(built.clone());
|
||||
} else {
|
||||
multiple_exact_matches = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
if entry_name.eq(base)
|
||||
&& matches!(options.match_algorithm, MatchAlgorithm::Prefix)
|
||||
&& isdir
|
||||
{
|
||||
break;
|
||||
}
|
||||
}
|
||||
None => {
|
||||
completions.push(built);
|
||||
|
||||
matcher.add(entry_name, built);
|
||||
}
|
||||
}
|
||||
}
|
||||
completions
|
||||
|
||||
// Don't show longer completions if we have a single exact match (#13204, #14794)
|
||||
if !multiple_exact_matches {
|
||||
if let Some(built) = exact_match {
|
||||
return complete_rec(
|
||||
&partial[1..],
|
||||
&[built],
|
||||
options,
|
||||
want_directory,
|
||||
isdir,
|
||||
true,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if has_more {
|
||||
let mut completions = vec![];
|
||||
for built in matcher.results() {
|
||||
completions.extend(complete_rec(
|
||||
&partial[1..],
|
||||
&[built],
|
||||
options,
|
||||
want_directory,
|
||||
isdir,
|
||||
false,
|
||||
));
|
||||
}
|
||||
completions
|
||||
} else {
|
||||
matcher.results()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@ -133,7 +163,7 @@ impl OriginalCwd {
|
||||
}
|
||||
}
|
||||
|
||||
fn surround_remove(partial: &str) -> String {
|
||||
pub fn surround_remove(partial: &str) -> String {
|
||||
for c in ['`', '"', '\''] {
|
||||
if partial.starts_with(c) {
|
||||
let ret = partial.strip_prefix(c).unwrap_or(partial);
|
||||
@ -147,15 +177,25 @@ fn surround_remove(partial: &str) -> String {
|
||||
partial.to_string()
|
||||
}
|
||||
|
||||
pub struct FileSuggestion {
|
||||
pub span: nu_protocol::Span,
|
||||
pub path: String,
|
||||
pub style: Option<Style>,
|
||||
pub is_dir: bool,
|
||||
}
|
||||
|
||||
/// # Parameters
|
||||
/// * `cwds` - A list of directories in which to search. The only reason this isn't a single string
|
||||
/// is because dotnu_completions searches in multiple directories at once
|
||||
pub fn complete_item(
|
||||
want_directory: bool,
|
||||
span: nu_protocol::Span,
|
||||
partial: &str,
|
||||
cwd: &str,
|
||||
cwds: &[impl AsRef<str>],
|
||||
options: &CompletionOptions,
|
||||
engine_state: &EngineState,
|
||||
stack: &Stack,
|
||||
) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
|
||||
) -> Vec<FileSuggestion> {
|
||||
let cleaned_partial = surround_remove(partial);
|
||||
let isdir = cleaned_partial.ends_with(is_separator);
|
||||
let expanded_partial = expand_ndots(Path::new(&cleaned_partial));
|
||||
@ -175,18 +215,20 @@ pub fn complete_item(
|
||||
partial.push_str(&format!("{path_separator}."));
|
||||
}
|
||||
|
||||
let cwd_pathbuf = Path::new(cwd).to_path_buf();
|
||||
let cwd_pathbufs: Vec<_> = cwds
|
||||
.iter()
|
||||
.map(|cwd| Path::new(cwd.as_ref()).to_path_buf())
|
||||
.collect();
|
||||
let ls_colors = (engine_state.config.completions.use_ls_colors
|
||||
&& engine_state.config.use_ansi_coloring)
|
||||
.then(|| {
|
||||
let ls_colors_env_str = match stack.get_env_var(engine_state, "LS_COLORS") {
|
||||
Some(v) => env_to_string("LS_COLORS", &v, engine_state, stack).ok(),
|
||||
None => None,
|
||||
};
|
||||
get_ls_colors(ls_colors_env_str)
|
||||
});
|
||||
&& engine_state.config.use_ansi_coloring.get(engine_state))
|
||||
.then(|| {
|
||||
let ls_colors_env_str = stack
|
||||
.get_env_var(engine_state, "LS_COLORS")
|
||||
.and_then(|v| env_to_string("LS_COLORS", v, engine_state, stack).ok());
|
||||
get_ls_colors(ls_colors_env_str)
|
||||
});
|
||||
|
||||
let mut cwd = cwd_pathbuf.clone();
|
||||
let mut cwds = cwd_pathbufs.clone();
|
||||
let mut prefix_len = 0;
|
||||
let mut original_cwd = OriginalCwd::None;
|
||||
|
||||
@ -194,19 +236,21 @@ pub fn complete_item(
|
||||
match components.peek().cloned() {
|
||||
Some(c @ Component::Prefix(..)) => {
|
||||
// windows only by definition
|
||||
cwd = [c, Component::RootDir].iter().collect();
|
||||
cwds = vec![[c, Component::RootDir].iter().collect()];
|
||||
prefix_len = c.as_os_str().len();
|
||||
original_cwd = OriginalCwd::Prefix(c.as_os_str().to_string_lossy().into_owned());
|
||||
}
|
||||
Some(c @ Component::RootDir) => {
|
||||
// This is kind of a hack. When joining an empty string with the rest,
|
||||
// we add the slash automagically
|
||||
cwd = PathBuf::from(c.as_os_str());
|
||||
cwds = vec![PathBuf::from(c.as_os_str())];
|
||||
prefix_len = 1;
|
||||
original_cwd = OriginalCwd::Prefix(String::new());
|
||||
}
|
||||
Some(Component::Normal(home)) if home.to_string_lossy() == "~" => {
|
||||
cwd = home_dir().map(Into::into).unwrap_or(cwd_pathbuf);
|
||||
cwds = home_dir()
|
||||
.map(|dir| vec![dir.into()])
|
||||
.unwrap_or(cwd_pathbufs);
|
||||
prefix_len = 1;
|
||||
original_cwd = OriginalCwd::Home;
|
||||
}
|
||||
@ -223,55 +267,68 @@ pub fn complete_item(
|
||||
|
||||
complete_rec(
|
||||
partial.as_slice(),
|
||||
&PathBuiltFromString::default(),
|
||||
&cwd,
|
||||
&cwds
|
||||
.into_iter()
|
||||
.map(|cwd| PathBuiltFromString {
|
||||
cwd,
|
||||
parts: Vec::new(),
|
||||
isdir: false,
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
options,
|
||||
want_directory,
|
||||
isdir,
|
||||
options.match_algorithm == MatchAlgorithm::Prefix,
|
||||
)
|
||||
.into_iter()
|
||||
.map(|mut p| {
|
||||
if should_collapse_dots {
|
||||
p = collapse_ndots(p);
|
||||
}
|
||||
let is_dir = p.isdir;
|
||||
let path = original_cwd.apply(p, path_separator);
|
||||
let real_path = expand_to_real_path(&path);
|
||||
let metadata = std::fs::symlink_metadata(&real_path).ok();
|
||||
let style = ls_colors.as_ref().map(|lsc| {
|
||||
lsc.style_for_path_with_metadata(
|
||||
&path,
|
||||
std::fs::symlink_metadata(expand_to_real_path(&path))
|
||||
.ok()
|
||||
.as_ref(),
|
||||
)
|
||||
.map(lscolors::Style::to_nu_ansi_term_style)
|
||||
.unwrap_or_default()
|
||||
lsc.style_for_path_with_metadata(&real_path, metadata.as_ref())
|
||||
.map(lscolors::Style::to_nu_ansi_term_style)
|
||||
.unwrap_or_default()
|
||||
});
|
||||
(span, escape_path(path, want_directory), style)
|
||||
FileSuggestion {
|
||||
span,
|
||||
path: escape_path(path),
|
||||
style,
|
||||
is_dir,
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
// Fix files or folders with quotes or hashes
|
||||
pub fn escape_path(path: String, dir: bool) -> String {
|
||||
pub fn escape_path(path: String) -> String {
|
||||
// make glob pattern have the highest priority.
|
||||
let glob_contaminated = path.contains(['[', '*', ']', '?']);
|
||||
if glob_contaminated {
|
||||
return if path.contains('\'') {
|
||||
// decide to use double quote, also need to escape `"` in path
|
||||
// or else users can't do anything with completed path either.
|
||||
format!("\"{}\"", path.replace('"', r#"\""#))
|
||||
if nu_glob::is_glob(path.as_str()) || path.contains('`') {
|
||||
// expand home `~` for https://github.com/nushell/nushell/issues/13905
|
||||
let pathbuf = nu_path::expand_tilde(path);
|
||||
let path = pathbuf.to_string_lossy();
|
||||
if path.contains('\'') {
|
||||
// decide to use double quotes
|
||||
// Path as Debug will do the escaping for `"`, `\`
|
||||
format!("{:?}", path)
|
||||
} else {
|
||||
format!("'{path}'")
|
||||
};
|
||||
}
|
||||
|
||||
let filename_contaminated = !dir && path.contains(['\'', '"', ' ', '#', '(', ')']);
|
||||
let dirname_contaminated = dir && path.contains(['\'', '"', ' ', '#']);
|
||||
let maybe_flag = path.starts_with('-');
|
||||
let maybe_number = path.parse::<f64>().is_ok();
|
||||
if filename_contaminated || dirname_contaminated || maybe_flag || maybe_number {
|
||||
format!("`{path}`")
|
||||
}
|
||||
} else {
|
||||
path
|
||||
let contaminated =
|
||||
path.contains(['\'', '"', ' ', '#', '(', ')', '{', '}', '[', ']', '|', ';']);
|
||||
let maybe_flag = path.starts_with('-');
|
||||
let maybe_variable = path.starts_with('$');
|
||||
let maybe_number = path.parse::<f64>().is_ok();
|
||||
if contaminated || maybe_flag || maybe_variable || maybe_number {
|
||||
format!("`{path}`")
|
||||
} else {
|
||||
path
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -282,12 +339,12 @@ pub struct AdjustView {
|
||||
}
|
||||
|
||||
pub fn adjust_if_intermediate(
|
||||
prefix: &[u8],
|
||||
prefix: &str,
|
||||
working_set: &StateWorkingSet,
|
||||
mut span: nu_protocol::Span,
|
||||
) -> AdjustView {
|
||||
let span_contents = String::from_utf8_lossy(working_set.get_span_contents(span)).to_string();
|
||||
let mut prefix = String::from_utf8_lossy(prefix).to_string();
|
||||
let mut prefix = prefix.to_string();
|
||||
|
||||
// A difference of 1 because of the cursor's unicode code point in between.
|
||||
// Using .chars().count() because unicode and Windows.
|
||||
@ -308,45 +365,6 @@ pub fn adjust_if_intermediate(
|
||||
}
|
||||
}
|
||||
|
||||
/// Convenience function to sort suggestions using [`sort_completions`]
|
||||
pub fn sort_suggestions(
|
||||
prefix: &str,
|
||||
items: Vec<SemanticSuggestion>,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
sort_completions(prefix, items, options, |it| &it.suggestion.value)
|
||||
}
|
||||
|
||||
/// # Arguments
|
||||
/// * `prefix` - What the user's typed, for sorting by fuzzy matcher score
|
||||
pub fn sort_completions<T>(
|
||||
prefix: &str,
|
||||
mut items: Vec<T>,
|
||||
options: &CompletionOptions,
|
||||
get_value: fn(&T) -> &str,
|
||||
) -> Vec<T> {
|
||||
// Sort items
|
||||
if options.sort == CompletionSort::Smart && options.match_algorithm == MatchAlgorithm::Fuzzy {
|
||||
let mut matcher = SkimMatcherV2::default();
|
||||
if options.case_sensitive {
|
||||
matcher = matcher.respect_case();
|
||||
} else {
|
||||
matcher = matcher.ignore_case();
|
||||
};
|
||||
items.sort_by(|a, b| {
|
||||
let a_str = get_value(a);
|
||||
let b_str = get_value(b);
|
||||
let a_score = matcher.fuzzy_match(a_str, prefix).unwrap_or_default();
|
||||
let b_score = matcher.fuzzy_match(b_str, prefix).unwrap_or_default();
|
||||
b_score.cmp(&a_score).then(a_str.cmp(b_str))
|
||||
});
|
||||
} else {
|
||||
items.sort_by(|a, b| get_value(a).cmp(get_value(b)));
|
||||
}
|
||||
|
||||
items
|
||||
}
|
||||
|
||||
/// Collapse multiple ".." components into n-dots.
|
||||
///
|
||||
/// It performs the reverse operation of `expand_ndots`, collapsing sequences of ".." into n-dots,
|
||||
@ -357,6 +375,7 @@ fn collapse_ndots(path: PathBuiltFromString) -> PathBuiltFromString {
|
||||
let mut result = PathBuiltFromString {
|
||||
parts: Vec::with_capacity(path.parts.len()),
|
||||
isdir: path.isdir,
|
||||
cwd: path.cwd,
|
||||
};
|
||||
|
||||
let mut dot_count = 0;
|
||||
|
@ -1,7 +1,13 @@
|
||||
use fuzzy_matcher::{skim::SkimMatcherV2, FuzzyMatcher};
use nu_parser::trim_quotes_str;
use nu_protocol::{CompletionAlgorithm, CompletionSort};
use std::fmt::Display;
use nu_utils::IgnoreCaseExt;
use nucleo_matcher::{
    Config, Matcher, Utf32Str,
    pattern::{Atom, AtomKind, CaseMatching, Normalization},
};
use std::{borrow::Cow, fmt::Display};

use super::SemanticSuggestion;

/// Describes how suggestions should be matched.
#[derive(Copy, Clone, Debug, PartialEq)]
@@ -12,6 +18,12 @@ pub enum MatchAlgorithm {
    /// "git switch" is matched by "git sw"
    Prefix,

    /// Only show suggestions which have a substring matching with the given input
    ///
    /// Example:
    /// "git checkout" is matched by "checkout"
    Substring,

    /// Only show suggestions which contain the input chars at any place
    ///
    /// Example:
@@ -19,39 +31,203 @@ pub enum MatchAlgorithm {
    Fuzzy,
}

impl MatchAlgorithm {
    /// Returns whether the `needle` search text matches the given `haystack`.
    pub fn matches_str(&self, haystack: &str, needle: &str) -> bool {
        let haystack = trim_quotes_str(haystack);
        let needle = trim_quotes_str(needle);
        match *self {
            MatchAlgorithm::Prefix => haystack.starts_with(needle),
pub struct NuMatcher<'a, T> {
    options: &'a CompletionOptions,
    needle: String,
    state: State<T>,
}

enum State<T> {
    Prefix {
        /// Holds (haystack, item)
        items: Vec<(String, T)>,
    },
    Substring {
        /// Holds (haystack, item)
        items: Vec<(String, T)>,
    },
    Fuzzy {
        matcher: Matcher,
        atom: Atom,
        /// Holds (haystack, item, score)
        items: Vec<(String, T, u16)>,
    },
}

/// Filters and sorts suggestions
impl<T> NuMatcher<'_, T> {
    /// # Arguments
    ///
    /// * `needle` - The text to search for
    pub fn new(needle: impl AsRef<str>, options: &CompletionOptions) -> NuMatcher<T> {
        let needle = trim_quotes_str(needle.as_ref());
        match options.match_algorithm {
            MatchAlgorithm::Prefix => {
                let lowercase_needle = if options.case_sensitive {
                    needle.to_owned()
                } else {
                    needle.to_folded_case()
                };
                NuMatcher {
                    options,
                    needle: lowercase_needle,
                    state: State::Prefix { items: Vec::new() },
                }
            }
            MatchAlgorithm::Substring => {
                let lowercase_needle = if options.case_sensitive {
                    needle.to_owned()
                } else {
                    needle.to_folded_case()
                };
                NuMatcher {
                    options,
                    needle: lowercase_needle,
                    state: State::Substring { items: Vec::new() },
                }
            }
            MatchAlgorithm::Fuzzy => {
                let matcher = SkimMatcherV2::default();
                matcher.fuzzy_match(haystack, needle).is_some()
                let atom = Atom::new(
                    needle,
                    if options.case_sensitive {
                        CaseMatching::Respect
                    } else {
                        CaseMatching::Ignore
                    },
                    Normalization::Smart,
                    AtomKind::Fuzzy,
                    false,
                );
                NuMatcher {
                    options,
                    needle: needle.to_owned(),
                    state: State::Fuzzy {
                        matcher: Matcher::new(Config::DEFAULT),
                        atom,
                        items: Vec::new(),
                    },
                }
            }
        }
    }

    /// Returns whether the `needle` search text matches the given `haystack`.
    pub fn matches_u8(&self, haystack: &[u8], needle: &[u8]) -> bool {
        match *self {
            MatchAlgorithm::Prefix => haystack.starts_with(needle),
            MatchAlgorithm::Fuzzy => {
                let haystack_str = String::from_utf8_lossy(haystack);
                let needle_str = String::from_utf8_lossy(needle);

                let matcher = SkimMatcherV2::default();
                matcher.fuzzy_match(&haystack_str, &needle_str).is_some()
    /// Returns whether or not the haystack matches the needle. If it does, `item` is added
    /// to the list of matches (if given).
    ///
    /// Helper to avoid code duplication between [NuMatcher::add] and [NuMatcher::matches].
    fn matches_aux(&mut self, haystack: &str, item: Option<T>) -> bool {
        let haystack = trim_quotes_str(haystack);
        match &mut self.state {
            State::Prefix { items } => {
                let haystack_folded = if self.options.case_sensitive {
                    Cow::Borrowed(haystack)
                } else {
                    Cow::Owned(haystack.to_folded_case())
                };
                let matches = haystack_folded.starts_with(self.needle.as_str());
                if matches {
                    if let Some(item) = item {
                        items.push((haystack.to_string(), item));
                    }
                }
                matches
            }
            State::Substring { items } => {
                let haystack_folded = if self.options.case_sensitive {
                    Cow::Borrowed(haystack)
                } else {
                    Cow::Owned(haystack.to_folded_case())
                };
                let matches = haystack_folded.contains(self.needle.as_str());
                if matches {
                    if let Some(item) = item {
                        items.push((haystack.to_string(), item));
                    }
                }
                matches
            }
            State::Fuzzy {
                matcher,
                atom,
                items,
            } => {
                let mut haystack_buf = Vec::new();
                let haystack_utf32 = Utf32Str::new(trim_quotes_str(haystack), &mut haystack_buf);
                let mut indices = Vec::new();
                let Some(score) = atom.indices(haystack_utf32, matcher, &mut indices) else {
                    return false;
                };
                if let Some(item) = item {
                    items.push((haystack.to_string(), item, score));
                }
                true
            }
        }
    }

    /// Add the given item if the given haystack matches the needle.
    ///
    /// Returns whether the item was added.
    pub fn add(&mut self, haystack: impl AsRef<str>, item: T) -> bool {
        self.matches_aux(haystack.as_ref(), Some(item))
    }

    /// Returns whether the haystack matches the needle.
    pub fn matches(&mut self, haystack: &str) -> bool {
        self.matches_aux(haystack, None)
    }

    /// Get all the items that matched (sorted)
    pub fn results(self) -> Vec<T> {
        match self.state {
            State::Prefix { mut items, .. } | State::Substring { mut items, .. } => {
                items.sort_by(|(haystack1, _), (haystack2, _)| {
                    let cmp_sensitive = haystack1.cmp(haystack2);
                    if self.options.case_sensitive {
                        cmp_sensitive
                    } else {
                        haystack1
                            .to_folded_case()
                            .cmp(&haystack2.to_folded_case())
                            .then(cmp_sensitive)
                    }
                });
                items.into_iter().map(|(_, item)| item).collect::<Vec<_>>()
            }
            State::Fuzzy { mut items, .. } => {
                match self.options.sort {
                    CompletionSort::Alphabetical => {
                        items.sort_by(|(haystack1, _, _), (haystack2, _, _)| {
                            haystack1.cmp(haystack2)
                        });
                    }
                    CompletionSort::Smart => {
                        items.sort_by(|(haystack1, _, score1), (haystack2, _, score2)| {
                            score2.cmp(score1).then(haystack1.cmp(haystack2))
                        });
                    }
                }
                items
                    .into_iter()
                    .map(|(_, item, _)| item)
                    .collect::<Vec<_>>()
            }
        }
    }
}

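// Editor's note (not part of the diff): a minimal sketch of how a completer
// is expected to drive `NuMatcher`, based on the API above and the tests
// further down. The candidate strings are invented for illustration only.
#[cfg(test)]
mod numatcher_usage_sketch {
    use super::{CompletionOptions, MatchAlgorithm, NuMatcher};

    #[test]
    fn fuzzy_usage() {
        let options = CompletionOptions {
            match_algorithm: MatchAlgorithm::Fuzzy,
            ..Default::default()
        };
        // The needle is whatever the user has typed so far.
        let mut matcher = NuMatcher::new("gs", &options);
        for candidate in ["git switch", "git status", "cargo build"] {
            // `add` keeps the candidate only if it matches the needle.
            matcher.add(candidate, candidate);
        }
        // `results` returns the surviving items, ordered per `options.sort`.
        let suggestions = matcher.results();
        assert!(suggestions.contains(&"git switch"));
        assert!(!suggestions.contains(&"cargo build"));
    }
}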
impl NuMatcher<'_, SemanticSuggestion> {
    pub fn add_semantic_suggestion(&mut self, sugg: SemanticSuggestion) -> bool {
        let value = sugg.suggestion.value.to_string();
        self.add(value, sugg)
    }
}

impl From<CompletionAlgorithm> for MatchAlgorithm {
    fn from(value: CompletionAlgorithm) -> Self {
        match value {
            CompletionAlgorithm::Prefix => MatchAlgorithm::Prefix,
            CompletionAlgorithm::Substring => MatchAlgorithm::Substring,
            CompletionAlgorithm::Fuzzy => MatchAlgorithm::Fuzzy,
        }
    }
@@ -63,6 +239,7 @@ impl TryFrom<String> for MatchAlgorithm {
    fn try_from(value: String) -> Result<Self, Self::Error> {
        match value.as_str() {
            "prefix" => Ok(Self::Prefix),
            "substring" => Ok(Self::Substring),
            "fuzzy" => Ok(Self::Fuzzy),
            _ => Err(InvalidMatchAlgorithm::Unknown),
        }
@@ -87,7 +264,6 @@ impl std::error::Error for InvalidMatchAlgorithm {}
#[derive(Clone)]
pub struct CompletionOptions {
    pub case_sensitive: bool,
    pub positional: bool,
    pub match_algorithm: MatchAlgorithm,
    pub sort: CompletionSort,
}
@@ -96,7 +272,6 @@ impl Default for CompletionOptions {
    fn default() -> Self {
        Self {
            case_sensitive: true,
            positional: true,
            match_algorithm: MatchAlgorithm::Prefix,
            sort: Default::default(),
        }
@@ -105,35 +280,70 @@ impl Default for CompletionOptions {
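// Editor's note (not part of the diff): user config supplies the match
// algorithm as a plain string, so it goes through the TryFrom impl above;
// e.g. `MatchAlgorithm::try_from("substring".to_string())` yields
// `Ok(MatchAlgorithm::Substring)`, while an unknown name yields
// `Err(InvalidMatchAlgorithm::Unknown)`.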
#[cfg(test)]
mod test {
    use super::MatchAlgorithm;
    use rstest::rstest;

    #[test]
    fn match_algorithm_prefix() {
        let algorithm = MatchAlgorithm::Prefix;
    use super::{CompletionOptions, MatchAlgorithm, NuMatcher};

        assert!(algorithm.matches_str("example text", ""));
        assert!(algorithm.matches_str("example text", "examp"));
        assert!(!algorithm.matches_str("example text", "text"));

        assert!(algorithm.matches_u8(&[1, 2, 3], &[]));
        assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 2]));
        assert!(!algorithm.matches_u8(&[1, 2, 3], &[2, 3]));
    #[rstest]
    #[case(MatchAlgorithm::Prefix, "example text", "", true)]
    #[case(MatchAlgorithm::Prefix, "example text", "examp", true)]
    #[case(MatchAlgorithm::Prefix, "example text", "text", false)]
    #[case(MatchAlgorithm::Substring, "example text", "", true)]
    #[case(MatchAlgorithm::Substring, "example text", "text", true)]
    #[case(MatchAlgorithm::Substring, "example text", "mplxt", false)]
    #[case(MatchAlgorithm::Fuzzy, "example text", "", true)]
    #[case(MatchAlgorithm::Fuzzy, "example text", "examp", true)]
    #[case(MatchAlgorithm::Fuzzy, "example text", "ext", true)]
    #[case(MatchAlgorithm::Fuzzy, "example text", "mplxt", true)]
    #[case(MatchAlgorithm::Fuzzy, "example text", "mpp", false)]
    fn match_algorithm_simple(
        #[case] match_algorithm: MatchAlgorithm,
        #[case] haystack: &str,
        #[case] needle: &str,
        #[case] should_match: bool,
    ) {
        let options = CompletionOptions {
            match_algorithm,
            ..Default::default()
        };
        let mut matcher = NuMatcher::new(needle, &options);
        matcher.add(haystack, haystack);
        if should_match {
            assert_eq!(vec![haystack], matcher.results());
        } else {
            assert_ne!(vec![haystack], matcher.results());
        }
    }

    #[test]
    fn match_algorithm_fuzzy() {
        let algorithm = MatchAlgorithm::Fuzzy;
    fn match_algorithm_fuzzy_sort_score() {
        let options = CompletionOptions {
            match_algorithm: MatchAlgorithm::Fuzzy,
            ..Default::default()
        };
        let mut matcher = NuMatcher::new("fob", &options);
        for item in ["foo/bar", "fob", "foo bar"] {
            matcher.add(item, item);
        }
        // Sort by score, then in alphabetical order
        assert_eq!(vec!["fob", "foo bar", "foo/bar"], matcher.results());
    }

        assert!(algorithm.matches_str("example text", ""));
        assert!(algorithm.matches_str("example text", "examp"));
        assert!(algorithm.matches_str("example text", "ext"));
        assert!(algorithm.matches_str("example text", "mplxt"));
        assert!(!algorithm.matches_str("example text", "mpp"));

        assert!(algorithm.matches_u8(&[1, 2, 3], &[]));
        assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 2]));
        assert!(algorithm.matches_u8(&[1, 2, 3], &[2, 3]));
        assert!(algorithm.matches_u8(&[1, 2, 3], &[1, 3]));
        assert!(!algorithm.matches_u8(&[1, 2, 3], &[2, 2]));
    #[test]
    fn match_algorithm_fuzzy_sort_strip() {
        let options = CompletionOptions {
            match_algorithm: MatchAlgorithm::Fuzzy,
            ..Default::default()
        };
        let mut matcher = NuMatcher::new("'love spaces' ", &options);
        for item in [
            "'i love spaces'",
            "'i love spaces' so much",
            "'lovespaces' ",
        ] {
            matcher.add(item, item);
        }
        // Make sure the spaces are respected
        assert_eq!(vec!["'i love spaces' so much"], matcher.results());
    }
}
@@ -1,79 +1,86 @@
|
||||
use crate::completions::{
|
||||
completer::map_value_completions, Completer, CompletionOptions, MatchAlgorithm,
|
||||
SemanticSuggestion,
|
||||
Completer, CompletionOptions, MatchAlgorithm, SemanticSuggestion,
|
||||
completer::map_value_completions,
|
||||
};
|
||||
use nu_engine::eval_call;
|
||||
use nu_protocol::{
|
||||
DeclId, PipelineData, Span, Type, Value,
|
||||
ast::{Argument, Call, Expr, Expression},
|
||||
debugger::WithoutDebug,
|
||||
engine::{Stack, StateWorkingSet},
|
||||
CompletionSort, PipelineData, Span, Type, Value,
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
};
|
||||
use nu_utils::IgnoreCaseExt;
|
||||
use std::collections::HashMap;
|
||||
|
||||
use super::completion_common::sort_suggestions;
|
||||
use super::completion_options::NuMatcher;
|
||||
|
||||
pub struct CustomCompletion {
|
||||
stack: Stack,
|
||||
decl_id: usize,
|
||||
pub struct CustomCompletion<T: Completer> {
|
||||
decl_id: DeclId,
|
||||
line: String,
|
||||
line_pos: usize,
|
||||
fallback: T,
|
||||
}
|
||||
|
||||
impl CustomCompletion {
|
||||
pub fn new(stack: Stack, decl_id: usize, line: String) -> Self {
|
||||
impl<T: Completer> CustomCompletion<T> {
|
||||
pub fn new(decl_id: DeclId, line: String, line_pos: usize, fallback: T) -> Self {
|
||||
Self {
|
||||
stack,
|
||||
decl_id,
|
||||
line,
|
||||
line_pos,
|
||||
fallback,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Completer for CustomCompletion {
|
||||
impl<T: Completer> Completer for CustomCompletion<T> {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
_stack: &Stack,
|
||||
prefix: Vec<u8>,
|
||||
stack: &Stack,
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
pos: usize,
|
||||
completion_options: &CompletionOptions,
|
||||
orig_options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
// Line position
|
||||
let line_pos = pos - offset;
|
||||
|
||||
// Call custom declaration
|
||||
let result = eval_call::<WithoutDebug>(
|
||||
working_set.permanent_state,
|
||||
&mut self.stack,
|
||||
&Call {
|
||||
decl_id: self.decl_id,
|
||||
head: span,
|
||||
arguments: vec![
|
||||
Argument::Positional(Expression::new_unknown(
|
||||
Expr::String(self.line.clone()),
|
||||
Span::unknown(),
|
||||
Type::String,
|
||||
)),
|
||||
Argument::Positional(Expression::new_unknown(
|
||||
Expr::Int(line_pos as i64),
|
||||
Span::unknown(),
|
||||
Type::Int,
|
||||
)),
|
||||
],
|
||||
parser_info: HashMap::new(),
|
||||
},
|
||||
PipelineData::empty(),
|
||||
);
|
||||
let mut stack_mut = stack.clone();
|
||||
let mut eval = |engine_state: &EngineState| {
|
||||
eval_call::<WithoutDebug>(
|
||||
engine_state,
|
||||
&mut stack_mut,
|
||||
&Call {
|
||||
decl_id: self.decl_id,
|
||||
head: span,
|
||||
arguments: vec![
|
||||
Argument::Positional(Expression::new_unknown(
|
||||
Expr::String(self.line.clone()),
|
||||
Span::unknown(),
|
||||
Type::String,
|
||||
)),
|
||||
Argument::Positional(Expression::new_unknown(
|
||||
Expr::Int(self.line_pos as i64),
|
||||
Span::unknown(),
|
||||
Type::Int,
|
||||
)),
|
||||
],
|
||||
parser_info: HashMap::new(),
|
||||
},
|
||||
PipelineData::empty(),
|
||||
)
|
||||
};
|
||||
let result = if self.decl_id.get() < working_set.permanent_state.num_decls() {
|
||||
eval(working_set.permanent_state)
|
||||
} else {
|
||||
let mut engine_state = working_set.permanent_state.clone();
|
||||
let _ = engine_state.merge_delta(working_set.delta.clone());
|
||||
eval(&engine_state)
|
||||
};
|
||||
|
||||
let mut custom_completion_options = None;
|
||||
let mut completion_options = orig_options.clone();
|
||||
let mut should_sort = true;
|
||||
|
||||
// Parse result
|
||||
let suggestions = result
|
||||
.and_then(|data| data.into_value(span))
|
||||
.map(|value| match &value {
|
||||
let suggestions = match result.and_then(|data| data.into_value(span)) {
|
||||
Ok(value) => match &value {
|
||||
Value::Record { val, .. } => {
|
||||
let completions = val
|
||||
.get("completions")
|
||||
@ -86,78 +93,76 @@ impl Completer for CustomCompletion {
|
||||
let options = val.get("options");
|
||||
|
||||
if let Some(Value::Record { val: options, .. }) = &options {
|
||||
let should_sort = options
|
||||
.get("sort")
|
||||
.and_then(|val| val.as_bool().ok())
|
||||
.unwrap_or(false);
|
||||
if let Some(sort) = options.get("sort").and_then(|val| val.as_bool().ok()) {
|
||||
should_sort = sort;
|
||||
}
|
||||
|
||||
custom_completion_options = Some(CompletionOptions {
|
||||
case_sensitive: options
|
||||
.get("case_sensitive")
|
||||
.and_then(|val| val.as_bool().ok())
|
||||
.unwrap_or(true),
|
||||
positional: options
|
||||
.get("positional")
|
||||
.and_then(|val| val.as_bool().ok())
|
||||
.unwrap_or(true),
|
||||
match_algorithm: match options.get("completion_algorithm") {
|
||||
Some(option) => option
|
||||
.coerce_string()
|
||||
.ok()
|
||||
.and_then(|option| option.try_into().ok())
|
||||
.unwrap_or(MatchAlgorithm::Prefix),
|
||||
None => completion_options.match_algorithm,
|
||||
},
|
||||
sort: if should_sort {
|
||||
CompletionSort::Alphabetical
|
||||
} else {
|
||||
CompletionSort::Smart
|
||||
},
|
||||
});
|
||||
if let Some(case_sensitive) = options
|
||||
.get("case_sensitive")
|
||||
.and_then(|val| val.as_bool().ok())
|
||||
{
|
||||
completion_options.case_sensitive = case_sensitive;
|
||||
}
|
||||
let positional =
|
||||
options.get("positional").and_then(|val| val.as_bool().ok());
|
||||
if positional.is_some() {
|
||||
log::warn!(
|
||||
"Use of the positional option is deprecated. Use the substring match algorithm instead."
|
||||
);
|
||||
}
|
||||
if let Some(algorithm) = options
|
||||
.get("completion_algorithm")
|
||||
.and_then(|option| option.coerce_string().ok())
|
||||
.and_then(|option| option.try_into().ok())
|
||||
{
|
||||
completion_options.match_algorithm = algorithm;
|
||||
if let Some(false) = positional {
|
||||
if completion_options.match_algorithm == MatchAlgorithm::Prefix {
|
||||
completion_options.match_algorithm = MatchAlgorithm::Substring
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
completions
|
||||
}
|
||||
Value::List { vals, .. } => map_value_completions(vals.iter(), span, offset),
|
||||
_ => vec![],
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
let options = custom_completion_options
|
||||
.as_ref()
|
||||
.unwrap_or(completion_options);
|
||||
let suggestions = filter(&prefix, suggestions, options);
|
||||
sort_suggestions(&String::from_utf8_lossy(&prefix), suggestions, options)
|
||||
}
|
||||
}
|
||||
|
||||
fn filter(
|
||||
prefix: &[u8],
|
||||
items: Vec<SemanticSuggestion>,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
items
|
||||
.into_iter()
|
||||
.filter(|it| match options.match_algorithm {
|
||||
MatchAlgorithm::Prefix => match (options.case_sensitive, options.positional) {
|
||||
(true, true) => it.suggestion.value.as_bytes().starts_with(prefix),
|
||||
(true, false) => it
|
||||
.suggestion
|
||||
.value
|
||||
.contains(std::str::from_utf8(prefix).unwrap_or("")),
|
||||
(false, positional) => {
|
||||
let value = it.suggestion.value.to_folded_case();
|
||||
let prefix = std::str::from_utf8(prefix).unwrap_or("").to_folded_case();
|
||||
if positional {
|
||||
value.starts_with(&prefix)
|
||||
} else {
|
||||
value.contains(&prefix)
|
||||
}
|
||||
Value::Nothing { .. } => {
|
||||
return self.fallback.fetch(
|
||||
working_set,
|
||||
stack,
|
||||
prefix,
|
||||
span,
|
||||
offset,
|
||||
orig_options,
|
||||
);
|
||||
}
|
||||
_ => {
|
||||
log::error!(
|
||||
"Custom completer returned invalid value of type {}",
|
||||
value.get_type().to_string()
|
||||
);
|
||||
return vec![];
|
||||
}
|
||||
},
|
||||
MatchAlgorithm::Fuzzy => options
|
||||
.match_algorithm
|
||||
.matches_u8(it.suggestion.value.as_bytes(), prefix),
|
||||
})
|
||||
.collect()
|
||||
Err(e) => {
|
||||
log::error!("Error getting custom completions: {e}");
|
||||
return vec![];
|
||||
}
|
||||
};
|
||||
|
||||
let mut matcher = NuMatcher::new(prefix, &completion_options);
|
||||
|
||||
if should_sort {
|
||||
for sugg in suggestions {
|
||||
matcher.add_semantic_suggestion(sugg);
|
||||
}
|
||||
matcher.results()
|
||||
} else {
|
||||
suggestions
|
||||
.into_iter()
|
||||
.filter(|sugg| matcher.matches(&sugg.suggestion.value))
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -1,38 +1,30 @@
use crate::completions::{
    completion_common::{adjust_if_intermediate, complete_item, AdjustView},
    Completer, CompletionOptions,
    completion_common::{AdjustView, adjust_if_intermediate, complete_item},
};
use nu_ansi_term::Style;
use nu_protocol::{
    engine::{EngineState, Stack, StateWorkingSet},
    Span,
    engine::{EngineState, Stack, StateWorkingSet},
};
use reedline::Suggestion;
use std::path::Path;

use super::SemanticSuggestion;
use super::{SemanticSuggestion, SuggestionKind, completion_common::FileSuggestion};

#[derive(Clone, Default)]
pub struct DirectoryCompletion {}

impl DirectoryCompletion {
    pub fn new() -> Self {
        Self::default()
    }
}
pub struct DirectoryCompletion;

impl Completer for DirectoryCompletion {
    fn fetch(
        &mut self,
        working_set: &StateWorkingSet,
        stack: &Stack,
        prefix: Vec<u8>,
        prefix: impl AsRef<str>,
        span: Span,
        offset: usize,
        _pos: usize,
        options: &CompletionOptions,
    ) -> Vec<SemanticSuggestion> {
        let AdjustView { prefix, span, .. } = adjust_if_intermediate(&prefix, working_set, span);
        let AdjustView { prefix, span, .. } =
            adjust_if_intermediate(prefix.as_ref(), working_set, span);

        // Filter only the folders
        #[allow(deprecated)]
@@ -47,16 +39,15 @@ impl Completer for DirectoryCompletion {
            .into_iter()
            .map(move |x| SemanticSuggestion {
                suggestion: Suggestion {
                    value: x.1,
                    style: x.2,
                    value: x.path,
                    style: x.style,
                    span: reedline::Span {
                        start: x.0.start - offset,
                        end: x.0.end - offset,
                        start: x.span.start - offset,
                        end: x.span.end - offset,
                    },
                    ..Suggestion::default()
                },
                // TODO????
                kind: None,
                kind: Some(SuggestionKind::Directory),
            })
            .collect();

@@ -92,6 +83,6 @@ pub fn directory_completion(
    options: &CompletionOptions,
    engine_state: &EngineState,
    stack: &Stack,
) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
    complete_item(true, span, partial, cwd, options, engine_state, stack)
) -> Vec<FileSuggestion> {
    complete_item(true, span, partial, &[cwd], options, engine_state, stack)
}
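// Editor's note (not part of the diff): the tuples returned previously
// (`x.0` span, `x.1` path, `x.2` style) are replaced by a named
// `FileSuggestion` struct defined in completion_common.rs (not shown in this
// diff). Inferred from how its fields are used here, its shape is roughly:
//
//     struct FileSuggestion {
//         span: nu_protocol::Span,
//         path: String,
//         style: Option<nu_ansi_term::Style>,
//         is_dir: bool,
//     }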
@@ -1,20 +1,23 @@
|
||||
use crate::completions::{file_path_completion, Completer, CompletionOptions};
|
||||
use crate::completions::{
|
||||
Completer, CompletionOptions, SemanticSuggestion, SuggestionKind,
|
||||
completion_common::{FileSuggestion, surround_remove},
|
||||
completion_options::NuMatcher,
|
||||
file_path_completion,
|
||||
};
|
||||
use nu_path::expand_tilde;
|
||||
use nu_protocol::{
|
||||
engine::{Stack, StateWorkingSet},
|
||||
Span,
|
||||
engine::{Stack, StateWorkingSet, VirtualPath},
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
use std::path::{is_separator, Path, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR};
|
||||
use std::{
|
||||
collections::HashSet,
|
||||
path::{MAIN_SEPARATOR_STR, PathBuf, is_separator},
|
||||
};
|
||||
|
||||
use super::{completion_common::sort_suggestions, SemanticSuggestion};
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
pub struct DotNuCompletion {}
|
||||
|
||||
impl DotNuCompletion {
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
pub struct DotNuCompletion {
|
||||
/// e.g. use std/a<tab>
|
||||
pub std_virtual_path: bool,
|
||||
}
|
||||
|
||||
impl Completer for DotNuCompletion {
|
||||
@ -22,114 +25,185 @@ impl Completer for DotNuCompletion {
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
stack: &Stack,
|
||||
prefix: Vec<u8>,
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
_pos: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let prefix_str = String::from_utf8_lossy(&prefix).replace('`', "");
|
||||
let mut search_dirs: Vec<String> = vec![];
|
||||
let prefix_str = prefix.as_ref();
|
||||
let start_with_backquote = prefix_str.starts_with('`');
|
||||
let end_with_backquote = prefix_str.ends_with('`');
|
||||
let prefix_str = prefix_str.replace('`', "");
|
||||
// e.g. `./`, `..\`, `/`
|
||||
let not_lib_dirs = prefix_str
|
||||
.chars()
|
||||
.find(|c| *c != '.')
|
||||
.is_some_and(is_separator);
|
||||
let mut search_dirs: Vec<PathBuf> = vec![];
|
||||
|
||||
// If prefix_str is only a word we want to search in the current dir
|
||||
let (base, partial) = prefix_str
|
||||
.rsplit_once(is_separator)
|
||||
.unwrap_or((".", &prefix_str));
|
||||
let (base, partial) = if let Some((parent, remain)) = prefix_str.rsplit_once(is_separator) {
|
||||
// If prefix_str is only a word we want to search in the current dir.
|
||||
// "/xx" should be split to "/" and "xx".
|
||||
if parent.is_empty() {
|
||||
(MAIN_SEPARATOR_STR, remain)
|
||||
} else {
|
||||
(parent, remain)
|
||||
}
|
||||
} else {
|
||||
(".", prefix_str.as_str())
|
||||
};
|
||||
let base_dir = base.replace(is_separator, MAIN_SEPARATOR_STR);
|
||||
let mut partial = partial.to_string();
|
||||
// On windows, this standardizes paths to use \
|
||||
let mut is_current_folder = false;
|
||||
|
||||
// Fetch the lib dirs
|
||||
let lib_dirs: Vec<String> = if let Some(lib_dirs) = working_set.get_env_var("NU_LIB_DIRS") {
|
||||
lib_dirs
|
||||
.as_list()
|
||||
.into_iter()
|
||||
.flat_map(|it| {
|
||||
it.iter().map(|x| {
|
||||
x.to_path()
|
||||
.expect("internal error: failed to convert lib path")
|
||||
})
|
||||
})
|
||||
.map(|it| {
|
||||
it.into_os_string()
|
||||
.into_string()
|
||||
.expect("internal error: failed to convert OS path")
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
// NOTE: 2 ways to setup `NU_LIB_DIRS`
|
||||
// 1. `const NU_LIB_DIRS = [paths]`, equal to `nu -I paths`
|
||||
// 2. `$env.NU_LIB_DIRS = [paths]`
|
||||
let const_lib_dirs = working_set
|
||||
.find_variable(b"$NU_LIB_DIRS")
|
||||
.and_then(|vid| working_set.get_variable(vid).const_val.as_ref());
|
||||
let env_lib_dirs = working_set.get_env_var("NU_LIB_DIRS");
|
||||
let lib_dirs: HashSet<PathBuf> = [const_lib_dirs, env_lib_dirs]
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.flat_map(|lib_dirs| {
|
||||
lib_dirs
|
||||
.as_list()
|
||||
.into_iter()
|
||||
.flat_map(|it| it.iter().filter_map(|x| x.to_path().ok()))
|
||||
.map(expand_tilde)
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Check if the base_dir is a folder
|
||||
// rsplit_once removes the separator
|
||||
let cwd = working_set.permanent_state.cwd(None);
|
||||
if base_dir != "." {
|
||||
// Add the base dir into the directories to be searched
|
||||
search_dirs.push(base_dir.clone());
|
||||
|
||||
// Reset the partial adding the basic dir back
|
||||
// in order to make the span replace work properly
|
||||
let mut base_dir_partial = base_dir;
|
||||
base_dir_partial.push_str(&partial);
|
||||
|
||||
partial = base_dir_partial;
|
||||
let expanded_base_dir = expand_tilde(&base_dir);
|
||||
let is_base_dir_relative = expanded_base_dir.is_relative();
|
||||
// Search in base_dir as well as lib_dirs.
|
||||
// After expanded, base_dir can be a relative path or absolute path.
|
||||
// If relative, we join "current working dir" with it to get subdirectory and add to search_dirs.
|
||||
// If absolute, we add it to search_dirs.
|
||||
if let Ok(mut cwd) = cwd {
|
||||
if is_base_dir_relative {
|
||||
cwd.push(&base_dir);
|
||||
search_dirs.push(cwd.into_std_path_buf());
|
||||
} else {
|
||||
search_dirs.push(expanded_base_dir);
|
||||
}
|
||||
}
|
||||
if !not_lib_dirs {
|
||||
search_dirs.extend(lib_dirs.into_iter().map(|mut dir| {
|
||||
dir.push(&base_dir);
|
||||
dir
|
||||
}));
|
||||
}
|
||||
} else {
|
||||
// Fetch the current folder
|
||||
#[allow(deprecated)]
|
||||
let current_folder = working_set.permanent_state.current_work_dir();
|
||||
is_current_folder = true;
|
||||
|
||||
// Add the current folder and the lib dirs into the
|
||||
// directories to be searched
|
||||
search_dirs.push(current_folder);
|
||||
search_dirs.extend(lib_dirs);
|
||||
if let Ok(cwd) = cwd {
|
||||
search_dirs.push(cwd.into_std_path_buf());
|
||||
}
|
||||
if !not_lib_dirs {
|
||||
search_dirs.extend(lib_dirs);
|
||||
}
|
||||
}
|
||||
|
||||
// Fetch the files filtering the ones that ends with .nu
|
||||
// and transform them into suggestions
|
||||
let output: Vec<SemanticSuggestion> = search_dirs
|
||||
.into_iter()
|
||||
.flat_map(|search_dir| {
|
||||
let completions = file_path_completion(
|
||||
span,
|
||||
&partial,
|
||||
&search_dir,
|
||||
options,
|
||||
working_set.permanent_state,
|
||||
stack,
|
||||
);
|
||||
completions
|
||||
.into_iter()
|
||||
.filter(move |it| {
|
||||
// Different base dir, so we list the .nu files or folders
|
||||
if !is_current_folder {
|
||||
it.1.ends_with(".nu") || it.1.ends_with(SEP)
|
||||
} else {
|
||||
// Lib dirs, so we filter only the .nu files or directory modules
|
||||
if it.1.ends_with(SEP) {
|
||||
Path::new(&search_dir).join(&it.1).join("mod.nu").exists()
|
||||
} else {
|
||||
it.1.ends_with(".nu")
|
||||
}
|
||||
}
|
||||
})
|
||||
.map(move |x| SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: x.1,
|
||||
style: x.2,
|
||||
span: reedline::Span {
|
||||
start: x.0.start - offset,
|
||||
end: x.0.end - offset,
|
||||
},
|
||||
append_whitespace: true,
|
||||
..Suggestion::default()
|
||||
},
|
||||
// TODO????
|
||||
kind: None,
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
let mut completions = file_path_completion(
|
||||
span,
|
||||
partial,
|
||||
&search_dirs
|
||||
.iter()
|
||||
.filter_map(|d| d.to_str())
|
||||
.collect::<Vec<_>>(),
|
||||
options,
|
||||
working_set.permanent_state,
|
||||
stack,
|
||||
);
|
||||
|
||||
sort_suggestions(&prefix_str, output, options)
|
||||
if self.std_virtual_path {
|
||||
let mut matcher = NuMatcher::new(partial, options);
|
||||
let base_dir = surround_remove(&base_dir);
|
||||
if base_dir == "." {
|
||||
let surround_prefix = partial
|
||||
.chars()
|
||||
.take_while(|c| "`'\"".contains(*c))
|
||||
.collect::<String>();
|
||||
for path in ["std", "std-rfc"] {
|
||||
let path = format!("{}{}", surround_prefix, path);
|
||||
matcher.add(
|
||||
path.clone(),
|
||||
FileSuggestion {
|
||||
span,
|
||||
path,
|
||||
style: None,
|
||||
is_dir: true,
|
||||
},
|
||||
);
|
||||
}
|
||||
} else if let Some(VirtualPath::Dir(sub_paths)) =
|
||||
working_set.find_virtual_path(&base_dir)
|
||||
{
|
||||
for sub_vp_id in sub_paths {
|
||||
let (path, sub_vp) = working_set.get_virtual_path(*sub_vp_id);
|
||||
let path = path
|
||||
.strip_prefix(&format!("{}/", base_dir))
|
||||
.unwrap_or(path)
|
||||
.to_string();
|
||||
matcher.add(
|
||||
path.clone(),
|
||||
FileSuggestion {
|
||||
path,
|
||||
span,
|
||||
style: None,
|
||||
is_dir: matches!(sub_vp, VirtualPath::Dir(_)),
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
completions.extend(matcher.results());
|
||||
}
|
||||
|
||||
completions
|
||||
.into_iter()
|
||||
// Different base dir, so we list the .nu files or folders
|
||||
.filter(|it| {
|
||||
// for paths with spaces in them
|
||||
let path = it.path.trim_end_matches('`');
|
||||
path.ends_with(".nu") || it.is_dir
|
||||
})
|
||||
.map(|x| {
|
||||
let append_whitespace = !x.is_dir && (!start_with_backquote || end_with_backquote);
|
||||
// Re-calculate the span to replace
|
||||
let mut span_offset = 0;
|
||||
let mut value = x.path.to_string();
|
||||
// Complete only the last path component
|
||||
if base_dir == MAIN_SEPARATOR_STR {
|
||||
span_offset = base_dir.len()
|
||||
} else if base_dir != "." {
|
||||
span_offset = base_dir.len() + 1
|
||||
}
|
||||
// Retain only one '`'
|
||||
if start_with_backquote {
|
||||
value = value.trim_start_matches('`').to_string();
|
||||
span_offset += 1;
|
||||
}
|
||||
// Add the backquote back
|
||||
if end_with_backquote && !value.ends_with('`') {
|
||||
value.push('`');
|
||||
}
|
||||
let end = x.span.end - offset;
|
||||
let start = std::cmp::min(end, x.span.start - offset + span_offset);
|
||||
SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value,
|
||||
style: x.style,
|
||||
span: reedline::Span { start, end },
|
||||
append_whitespace,
|
||||
..Suggestion::default()
|
||||
},
|
||||
kind: Some(SuggestionKind::Module),
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
}
|
||||
}
|
||||
|
crates/nu-cli/src/completions/exportable_completions.rs (new file, 112 lines)
@@ -0,0 +1,112 @@
use crate::completions::{
    Completer, CompletionOptions, SemanticSuggestion, SuggestionKind,
    completion_common::surround_remove, completion_options::NuMatcher,
};
use nu_protocol::{
    ModuleId, Span,
    engine::{Stack, StateWorkingSet},
};
use reedline::Suggestion;

pub struct ExportableCompletion<'a> {
    pub module_id: ModuleId,
    pub temp_working_set: Option<StateWorkingSet<'a>>,
}

/// If name contains space, wrap it in quotes
fn wrapped_name(name: String) -> String {
    if !name.contains(' ') {
        return name;
    }
    if name.contains('\'') {
        format!("\"{}\"", name.replace('"', r#"\""#))
    } else {
        format!("'{name}'")
    }
}

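// Editor's note (not part of the diff): illustrative cases for `wrapped_name`
// above, derived directly from its branches.
#[cfg(test)]
mod wrapped_name_examples {
    use super::wrapped_name;

    #[test]
    fn quoting_examples() {
        assert_eq!(wrapped_name("ls".into()), "ls"); // no space: returned as-is
        assert_eq!(wrapped_name("my cmd".into()), "'my cmd'"); // space: single-quoted
        assert_eq!(wrapped_name("it's here".into()), "\"it's here\""); // space and single quote: double-quoted
    }
}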
impl Completer for ExportableCompletion<'_> {
    fn fetch(
        &mut self,
        working_set: &StateWorkingSet,
        _stack: &Stack,
        prefix: impl AsRef<str>,
        span: Span,
        offset: usize,
        options: &CompletionOptions,
    ) -> Vec<SemanticSuggestion> {
        let mut matcher = NuMatcher::<()>::new(surround_remove(prefix.as_ref()), options);
        let mut results = Vec::new();
        let span = reedline::Span {
            start: span.start - offset,
            end: span.end - offset,
        };
        // TODO: use matcher.add_lazy to lazy evaluate an item if it matches the prefix
        let mut add_suggestion = |value: String,
                                  description: Option<String>,
                                  extra: Option<Vec<String>>,
                                  kind: SuggestionKind| {
            results.push(SemanticSuggestion {
                suggestion: Suggestion {
                    value,
                    span,
                    description,
                    extra,
                    ..Suggestion::default()
                },
                kind: Some(kind),
            });
        };

        let working_set = self.temp_working_set.as_ref().unwrap_or(working_set);
        let module = working_set.get_module(self.module_id);

        for (name, decl_id) in &module.decls {
            let name = String::from_utf8_lossy(name).to_string();
            if matcher.matches(&name) {
                let cmd = working_set.get_decl(*decl_id);
                add_suggestion(
                    wrapped_name(name),
                    Some(cmd.description().to_string()),
                    None,
                    // `None` here avoids arguments being expanded by snippet edit style for lsp
                    SuggestionKind::Command(cmd.command_type(), None),
                );
            }
        }
        for (name, module_id) in &module.submodules {
            let name = String::from_utf8_lossy(name).to_string();
            if matcher.matches(&name) {
                let comments = working_set.get_module_comments(*module_id).map(|spans| {
                    spans
                        .iter()
                        .map(|sp| {
                            String::from_utf8_lossy(working_set.get_span_contents(*sp)).into()
                        })
                        .collect::<Vec<String>>()
                });
                add_suggestion(
                    wrapped_name(name),
                    Some("Submodule".into()),
                    comments,
                    SuggestionKind::Module,
                );
            }
        }
        for (name, var_id) in &module.constants {
            let name = String::from_utf8_lossy(name).to_string();
            if matcher.matches(&name) {
                let var = working_set.get_variable(*var_id);
                add_suggestion(
                    wrapped_name(name),
                    var.const_val
                        .as_ref()
                        .and_then(|v| v.clone().coerce_into_string().ok()),
                    None,
                    SuggestionKind::Variable,
                );
            }
        }
        results
    }
}
@@ -1,50 +1,40 @@
use crate::completions::{
    completion_common::{adjust_if_intermediate, complete_item, AdjustView},
    Completer, CompletionOptions,
    completion_common::{AdjustView, adjust_if_intermediate, complete_item},
};
use nu_ansi_term::Style;
use nu_protocol::{
    engine::{EngineState, Stack, StateWorkingSet},
    Span,
    engine::{EngineState, Stack, StateWorkingSet},
};
use nu_utils::IgnoreCaseExt;
use reedline::Suggestion;
use std::path::Path;

use super::SemanticSuggestion;
use super::{SemanticSuggestion, SuggestionKind, completion_common::FileSuggestion};

#[derive(Clone, Default)]
pub struct FileCompletion {}

impl FileCompletion {
    pub fn new() -> Self {
        Self::default()
    }
}
pub struct FileCompletion;

impl Completer for FileCompletion {
    fn fetch(
        &mut self,
        working_set: &StateWorkingSet,
        stack: &Stack,
        prefix: Vec<u8>,
        prefix: impl AsRef<str>,
        span: Span,
        offset: usize,
        _pos: usize,
        options: &CompletionOptions,
    ) -> Vec<SemanticSuggestion> {
        let AdjustView {
            prefix,
            span,
            readjusted,
        } = adjust_if_intermediate(&prefix, working_set, span);
        } = adjust_if_intermediate(prefix.as_ref(), working_set, span);

        #[allow(deprecated)]
        let items: Vec<_> = complete_item(
            readjusted,
            span,
            &prefix,
            &working_set.permanent_state.current_work_dir(),
            &[&working_set.permanent_state.current_work_dir()],
            options,
            working_set.permanent_state,
            stack,
@@ -52,16 +42,19 @@ impl Completer for FileCompletion {
            .into_iter()
            .map(move |x| SemanticSuggestion {
                suggestion: Suggestion {
                    value: x.1,
                    style: x.2,
                    value: x.path,
                    style: x.style,
                    span: reedline::Span {
                        start: x.0.start - offset,
                        end: x.0.end - offset,
                        start: x.span.start - offset,
                        end: x.span.end - offset,
                    },
                    ..Suggestion::default()
                },
                // TODO????
                kind: None,
                kind: Some(if x.is_dir {
                    SuggestionKind::Directory
                } else {
                    SuggestionKind::File
                }),
            })
            .collect();

@@ -95,21 +88,10 @@ impl Completer for FileCompletion {
pub fn file_path_completion(
    span: nu_protocol::Span,
    partial: &str,
    cwd: &str,
    cwds: &[impl AsRef<str>],
    options: &CompletionOptions,
    engine_state: &EngineState,
    stack: &Stack,
) -> Vec<(nu_protocol::Span, String, Option<Style>)> {
    complete_item(false, span, partial, cwd, options, engine_state, stack)
}

pub fn matches(partial: &str, from: &str, options: &CompletionOptions) -> bool {
    // Check for case sensitive
    if !options.case_sensitive {
        return options
            .match_algorithm
            .matches_str(&from.to_folded_case(), &partial.to_folded_case());
    }

    options.match_algorithm.matches_str(from, partial)
) -> Vec<FileSuggestion> {
    complete_item(false, span, partial, cwds, options, engine_state, stack)
}
@@ -1,22 +1,15 @@
use crate::completions::{completion_common::sort_suggestions, Completer, CompletionOptions};
use crate::completions::{
    Completer, CompletionOptions, SemanticSuggestion, SuggestionKind, completion_options::NuMatcher,
};
use nu_protocol::{
    ast::{Expr, Expression},
    DeclId, Span,
    engine::{Stack, StateWorkingSet},
    Span,
};
use reedline::Suggestion;

use super::SemanticSuggestion;

#[derive(Clone)]
pub struct FlagCompletion {
    expression: Expression,
}

impl FlagCompletion {
    pub fn new(expression: Expression) -> Self {
        Self { expression }
    }
    pub decl_id: DeclId,
}

impl Completer for FlagCompletion {
@@ -24,73 +17,42 @@ impl Completer for FlagCompletion {
        &mut self,
        working_set: &StateWorkingSet,
        _stack: &Stack,
        prefix: Vec<u8>,
        prefix: impl AsRef<str>,
        span: Span,
        offset: usize,
        _pos: usize,
        options: &CompletionOptions,
    ) -> Vec<SemanticSuggestion> {
        // Check if it's a flag
        if let Expr::Call(call) = &self.expression.expr {
            let decl = working_set.get_decl(call.decl_id);
            let sig = decl.signature();
        let mut matcher = NuMatcher::new(prefix, options);
        let mut add_suggestion = |value: String, description: String| {
            matcher.add_semantic_suggestion(SemanticSuggestion {
                suggestion: Suggestion {
                    value,
                    description: Some(description),
                    span: reedline::Span {
                        start: span.start - offset,
                        end: span.end - offset,
                    },
                    append_whitespace: true,
                    ..Suggestion::default()
                },
                kind: Some(SuggestionKind::Flag),
            });
        };

            let mut output = vec![];

            for named in &sig.named {
                let flag_desc = &named.desc;
                if let Some(short) = named.short {
                    let mut named = vec![0; short.len_utf8()];
                    short.encode_utf8(&mut named);
                    named.insert(0, b'-');

                    if options.match_algorithm.matches_u8(&named, &prefix) {
                        output.push(SemanticSuggestion {
                            suggestion: Suggestion {
                                value: String::from_utf8_lossy(&named).to_string(),
                                description: Some(flag_desc.to_string()),
                                span: reedline::Span {
                                    start: span.start - offset,
                                    end: span.end - offset,
                                },
                                append_whitespace: true,
                                ..Suggestion::default()
                            },
                            // TODO????
                            kind: None,
                        });
                    }
                }

                if named.long.is_empty() {
                    continue;
                }

                let mut named = named.long.as_bytes().to_vec();
                named.insert(0, b'-');
                named.insert(0, b'-');

                if options.match_algorithm.matches_u8(&named, &prefix) {
                    output.push(SemanticSuggestion {
                        suggestion: Suggestion {
                            value: String::from_utf8_lossy(&named).to_string(),
                            description: Some(flag_desc.to_string()),
                            span: reedline::Span {
                                start: span.start - offset,
                                end: span.end - offset,
                            },
                            append_whitespace: true,
                            ..Suggestion::default()
                        },
                        // TODO????
                        kind: None,
                    });
                }
        let decl = working_set.get_decl(self.decl_id);
        let sig = decl.signature();
        for named in &sig.named {
            if let Some(short) = named.short {
                let mut name = String::from("-");
                name.push(short);
                add_suggestion(name, named.desc.clone());
            }

            return sort_suggestions(&String::from_utf8_lossy(&prefix), output, options);
            if named.long.is_empty() {
                continue;
            }
            add_suggestion(format!("--{}", named.long), named.desc.clone());
        }

        vec![]
        matcher.results()
    }
}
@@ -1,4 +1,6 @@
mod attribute_completions;
mod base;
mod cell_path_completions;
mod command_completions;
mod completer;
mod completion_common;
@@ -6,17 +8,23 @@ mod completion_options;
mod custom_completions;
mod directory_completions;
mod dotnu_completions;
mod exportable_completions;
mod file_completions;
mod flag_completions;
mod operator_completions;
mod variable_completions;

pub use attribute_completions::{AttributableCompletion, AttributeCompletion};
pub use base::{Completer, SemanticSuggestion, SuggestionKind};
pub use cell_path_completions::CellPathCompletion;
pub use command_completions::CommandCompletion;
pub use completer::NuCompleter;
pub use completion_options::{CompletionOptions, MatchAlgorithm};
pub use custom_completions::CustomCompletion;
pub use directory_completions::DirectoryCompletion;
pub use dotnu_completions::DotNuCompletion;
pub use file_completions::{file_path_completion, matches, FileCompletion};
pub use exportable_completions::ExportableCompletion;
pub use file_completions::{FileCompletion, file_path_completion};
pub use flag_completions::FlagCompletion;
pub use operator_completions::OperatorCompletion;
pub use variable_completions::VariableCompletion;
crates/nu-cli/src/completions/operator_completions.rs (new file, 277 lines)
@@ -0,0 +1,277 @@
|
||||
use crate::completions::{
|
||||
Completer, CompletionOptions, SemanticSuggestion, SuggestionKind, completion_options::NuMatcher,
|
||||
};
|
||||
use nu_protocol::{
|
||||
ENV_VARIABLE_ID, Span, Type, Value,
|
||||
ast::{self, Comparison, Expr, Expression},
|
||||
engine::{Stack, StateWorkingSet},
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
use strum::{EnumMessage, IntoEnumIterator};
|
||||
|
||||
use super::cell_path_completions::eval_cell_path;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct OperatorCompletion<'a> {
|
||||
pub left_hand_side: &'a Expression,
|
||||
}
|
||||
|
||||
struct OperatorItem {
|
||||
pub symbols: String,
|
||||
pub description: String,
|
||||
}
|
||||
|
||||
fn operator_to_item<T: EnumMessage + AsRef<str>>(op: T) -> OperatorItem {
|
||||
OperatorItem {
|
||||
symbols: op.as_ref().into(),
|
||||
description: op.get_message().unwrap_or_default().into(),
|
||||
}
|
||||
}
|
||||
|
||||
fn common_comparison_ops() -> Vec<OperatorItem> {
|
||||
vec![
|
||||
operator_to_item(Comparison::In),
|
||||
operator_to_item(Comparison::NotIn),
|
||||
operator_to_item(Comparison::Equal),
|
||||
operator_to_item(Comparison::NotEqual),
|
||||
]
|
||||
}
|
||||
|
||||
fn all_ops_for_immutable() -> Vec<OperatorItem> {
|
||||
ast::Comparison::iter()
|
||||
.map(operator_to_item)
|
||||
.chain(ast::Math::iter().map(operator_to_item))
|
||||
.chain(ast::Boolean::iter().map(operator_to_item))
|
||||
.chain(ast::Bits::iter().map(operator_to_item))
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn collection_comparison_ops() -> Vec<OperatorItem> {
|
||||
let mut ops = common_comparison_ops();
|
||||
ops.push(operator_to_item(Comparison::Has));
|
||||
ops.push(operator_to_item(Comparison::NotHas));
|
||||
ops
|
||||
}
|
||||
|
||||
fn number_comparison_ops() -> Vec<OperatorItem> {
|
||||
Comparison::iter()
|
||||
.filter(|op| {
|
||||
!matches!(
|
||||
op,
|
||||
Comparison::RegexMatch
|
||||
| Comparison::NotRegexMatch
|
||||
| Comparison::StartsWith
|
||||
| Comparison::EndsWith
|
||||
| Comparison::Has
|
||||
| Comparison::NotHas
|
||||
)
|
||||
})
|
||||
.map(operator_to_item)
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn math_ops() -> Vec<OperatorItem> {
|
||||
ast::Math::iter()
|
||||
.filter(|op| !matches!(op, ast::Math::Concatenate | ast::Math::Pow))
|
||||
.map(operator_to_item)
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn bit_ops() -> Vec<OperatorItem> {
|
||||
ast::Bits::iter().map(operator_to_item).collect()
|
||||
}
|
||||
|
||||
fn all_assignment_ops() -> Vec<OperatorItem> {
|
||||
ast::Assignment::iter().map(operator_to_item).collect()
|
||||
}
|
||||
|
||||
fn numeric_assignment_ops() -> Vec<OperatorItem> {
|
||||
ast::Assignment::iter()
|
||||
.filter(|op| !matches!(op, ast::Assignment::ConcatenateAssign))
|
||||
.map(operator_to_item)
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn concat_assignment_ops() -> Vec<OperatorItem> {
|
||||
vec![
|
||||
operator_to_item(ast::Assignment::Assign),
|
||||
operator_to_item(ast::Assignment::ConcatenateAssign),
|
||||
]
|
||||
}
|
||||
|
||||
fn valid_int_ops() -> Vec<OperatorItem> {
|
||||
let mut ops = valid_float_ops();
|
||||
ops.extend(bit_ops());
|
||||
ops
|
||||
}
|
||||
|
||||
fn valid_float_ops() -> Vec<OperatorItem> {
|
||||
let mut ops = valid_value_with_unit_ops();
|
||||
ops.push(operator_to_item(ast::Math::Pow));
|
||||
ops
|
||||
}
|
||||
|
||||
fn valid_string_ops() -> Vec<OperatorItem> {
|
||||
let mut ops: Vec<OperatorItem> = Comparison::iter().map(operator_to_item).collect();
|
||||
ops.push(operator_to_item(ast::Math::Concatenate));
|
||||
ops.push(OperatorItem {
|
||||
symbols: "like".into(),
|
||||
description: Comparison::RegexMatch
|
||||
.get_message()
|
||||
.unwrap_or_default()
|
||||
.into(),
|
||||
});
|
||||
ops.push(OperatorItem {
|
||||
symbols: "not-like".into(),
|
||||
description: Comparison::NotRegexMatch
|
||||
.get_message()
|
||||
.unwrap_or_default()
|
||||
.into(),
|
||||
});
|
||||
ops
|
||||
}
|
||||
|
||||
fn valid_list_ops() -> Vec<OperatorItem> {
|
||||
let mut ops = collection_comparison_ops();
|
||||
ops.push(operator_to_item(ast::Math::Concatenate));
|
||||
ops
|
||||
}
|
||||
|
||||
fn valid_binary_ops() -> Vec<OperatorItem> {
|
||||
let mut ops = number_comparison_ops();
|
||||
ops.extend(bit_ops());
|
||||
ops.push(operator_to_item(ast::Math::Concatenate));
|
||||
ops
|
||||
}
|
||||
|
||||
fn valid_bool_ops() -> Vec<OperatorItem> {
|
||||
let mut ops: Vec<OperatorItem> = ast::Boolean::iter().map(operator_to_item).collect();
|
||||
ops.extend(common_comparison_ops());
|
||||
ops
|
||||
}
|
||||
|
||||
fn valid_value_with_unit_ops() -> Vec<OperatorItem> {
|
||||
let mut ops = number_comparison_ops();
|
||||
ops.extend(math_ops());
|
||||
ops
|
||||
}
|
||||
|
||||
fn ops_by_value(value: &Value, mutable: bool) -> Vec<OperatorItem> {
|
||||
let mut ops = match value {
|
||||
Value::Int { .. } => valid_int_ops(),
|
||||
Value::Float { .. } => valid_float_ops(),
|
||||
Value::String { .. } => valid_string_ops(),
|
||||
Value::Binary { .. } => valid_binary_ops(),
|
||||
Value::Bool { .. } => valid_bool_ops(),
|
||||
Value::Date { .. } => number_comparison_ops(),
|
||||
Value::Filesize { .. } | Value::Duration { .. } => valid_value_with_unit_ops(),
|
||||
Value::Range { .. } | Value::Record { .. } => collection_comparison_ops(),
|
||||
Value::List { .. } => valid_list_ops(),
|
||||
_ => all_ops_for_immutable(),
|
||||
};
|
||||
if mutable {
|
||||
ops.extend(match value {
|
||||
Value::Int { .. }
|
||||
| Value::Float { .. }
|
||||
| Value::Filesize { .. }
|
||||
| Value::Duration { .. } => numeric_assignment_ops(),
|
||||
Value::String { .. } | Value::Binary { .. } | Value::List { .. } => {
|
||||
concat_assignment_ops()
|
||||
}
|
||||
Value::Bool { .. }
|
||||
| Value::Date { .. }
|
||||
| Value::Range { .. }
|
||||
| Value::Record { .. } => vec![operator_to_item(ast::Assignment::Assign)],
|
||||
_ => all_assignment_ops(),
|
||||
})
|
||||
}
|
||||
ops
|
||||
}
|
||||
|
||||
fn is_expression_mutable(expr: &Expr, working_set: &StateWorkingSet) -> bool {
|
||||
let Expr::FullCellPath(path) = expr else {
|
||||
return false;
|
||||
};
|
||||
let Expr::Var(id) = path.head.expr else {
|
||||
return false;
|
||||
};
|
||||
if id == ENV_VARIABLE_ID {
|
||||
return true;
|
||||
}
|
||||
let var = working_set.get_variable(id);
|
||||
var.mutable
|
||||
}
|
||||
|
||||
impl Completer for OperatorCompletion<'_> {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
stack: &Stack,
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let mut needs_assignment_ops = true;
|
||||
// Complete according expression type
|
||||
// TODO: type inference on self.left_hand_side to get more accurate completions
|
||||
let mut possible_operations: Vec<OperatorItem> = match &self.left_hand_side.ty {
|
||||
Type::Int | Type::Number => valid_int_ops(),
|
||||
Type::Float => valid_float_ops(),
|
||||
Type::String => valid_string_ops(),
|
||||
Type::Binary => valid_binary_ops(),
|
||||
Type::Bool => valid_bool_ops(),
|
||||
Type::Date => number_comparison_ops(),
|
||||
Type::Filesize | Type::Duration => valid_value_with_unit_ops(),
|
||||
Type::Record(_) | Type::Range => collection_comparison_ops(),
|
||||
Type::List(_) | Type::Table(_) => valid_list_ops(),
|
||||
// Unknown type, resort to evaluated values
|
||||
Type::Any => match &self.left_hand_side.expr {
|
||||
Expr::FullCellPath(path) => {
|
||||
// for `$ <tab>`
|
||||
if matches!(path.head.expr, Expr::Garbage) {
|
||||
return vec![];
|
||||
}
|
||||
let value =
|
||||
eval_cell_path(working_set, stack, &path.head, &path.tail, path.head.span)
|
||||
.unwrap_or_default();
|
||||
let mutable = is_expression_mutable(&self.left_hand_side.expr, working_set);
|
||||
// to avoid duplication
|
||||
needs_assignment_ops = false;
|
||||
ops_by_value(&value, mutable)
|
||||
}
|
||||
_ => all_ops_for_immutable(),
|
||||
},
|
||||
_ => common_comparison_ops(),
|
||||
};
|
||||
// If the left hand side is a variable, add assignment operators if mutable
|
||||
if needs_assignment_ops && is_expression_mutable(&self.left_hand_side.expr, working_set) {
|
||||
possible_operations.extend(match &self.left_hand_side.ty {
|
||||
Type::Int | Type::Float | Type::Number => numeric_assignment_ops(),
|
||||
Type::Filesize | Type::Duration => numeric_assignment_ops(),
|
||||
Type::String | Type::Binary | Type::List(_) => concat_assignment_ops(),
|
||||
Type::Any => all_assignment_ops(),
|
||||
_ => vec![operator_to_item(ast::Assignment::Assign)],
|
||||
});
|
||||
}
|
||||
|
||||
let mut matcher = NuMatcher::new(prefix, options);
|
||||
for OperatorItem {
|
||||
symbols,
|
||||
description,
|
||||
} in possible_operations
|
||||
{
|
||||
matcher.add_semantic_suggestion(SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: symbols.to_owned(),
|
||||
description: Some(description.to_owned()),
|
||||
span: reedline::Span::new(span.start - offset, span.end - offset),
|
||||
append_whitespace: true,
|
||||
..Suggestion::default()
|
||||
},
|
||||
kind: Some(SuggestionKind::Operator),
|
||||
});
|
||||
}
|
||||
matcher.results()
|
||||
}
|
||||
}
|
@@ -1,195 +1,67 @@
|
||||
use crate::completions::{
|
||||
Completer, CompletionOptions, MatchAlgorithm, SemanticSuggestion, SuggestionKind,
|
||||
};
|
||||
use nu_engine::{column::get_columns, eval_variable};
|
||||
use crate::completions::{Completer, CompletionOptions, SemanticSuggestion, SuggestionKind};
|
||||
use nu_protocol::{
|
||||
Span, VarId,
|
||||
engine::{Stack, StateWorkingSet},
|
||||
Span, Value,
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
use std::str;
|
||||
|
||||
use super::completion_common::sort_suggestions;
|
||||
use super::completion_options::NuMatcher;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct VariableCompletion {
|
||||
var_context: (Vec<u8>, Vec<Vec<u8>>), // tuple with $var and the sublevels (.b.c.d)
|
||||
}
|
||||
|
||||
impl VariableCompletion {
|
||||
pub fn new(var_context: (Vec<u8>, Vec<Vec<u8>>)) -> Self {
|
||||
Self { var_context }
|
||||
}
|
||||
}
|
||||
pub struct VariableCompletion;
|
||||
|
||||
impl Completer for VariableCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
stack: &Stack,
|
||||
prefix: Vec<u8>,
|
||||
_stack: &Stack,
|
||||
prefix: impl AsRef<str>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
_pos: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let mut output = vec![];
|
||||
let builtins = ["$nu", "$in", "$env"];
|
||||
let var_str = std::str::from_utf8(&self.var_context.0).unwrap_or("");
|
||||
let var_id = working_set.find_variable(&self.var_context.0);
|
||||
let mut matcher = NuMatcher::new(prefix, options);
|
||||
let current_span = reedline::Span {
|
||||
start: span.start - offset,
|
||||
end: span.end - offset,
|
||||
};
|
||||
let sublevels_count = self.var_context.1.len();
|
||||
let prefix_str = String::from_utf8_lossy(&prefix);
|
||||
|
||||
// Completions for the given variable
|
||||
if !var_str.is_empty() {
|
||||
// Completion for $env.<tab>
|
||||
if var_str == "$env" {
|
||||
let env_vars = stack.get_env_vars(working_set.permanent_state);
|
||||
|
||||
// Return nested values
|
||||
if sublevels_count > 0 {
|
||||
// Extract the target var ($env.<target-var>)
|
||||
let target_var = self.var_context.1[0].clone();
|
||||
let target_var_str =
|
||||
str::from_utf8(&target_var).unwrap_or_default().to_string();
|
||||
|
||||
// Everything after the target var is the nested level ($env.<target-var>.<nested_levels>...)
|
||||
let nested_levels: Vec<Vec<u8>> =
|
||||
self.var_context.1.clone().into_iter().skip(1).collect();
|
||||
|
||||
if let Some(val) = env_vars.get(&target_var_str) {
|
||||
for suggestion in nested_suggestions(val, &nested_levels, current_span) {
|
||||
if options.match_algorithm.matches_u8_insensitive(
|
||||
options.case_sensitive,
|
||||
suggestion.suggestion.value.as_bytes(),
|
||||
&prefix,
|
||||
) {
|
||||
output.push(suggestion);
|
||||
}
|
||||
}
|
||||
|
||||
return sort_suggestions(&prefix_str, output, options);
|
||||
}
|
||||
} else {
|
||||
// No nesting provided, return all env vars
|
||||
for env_var in env_vars {
|
||||
if options.match_algorithm.matches_u8_insensitive(
|
||||
options.case_sensitive,
|
||||
env_var.0.as_bytes(),
|
||||
&prefix,
|
||||
) {
|
||||
output.push(SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
value: env_var.0,
|
||||
span: current_span,
|
||||
..Suggestion::default()
|
||||
},
|
||||
kind: Some(SuggestionKind::Type(env_var.1.get_type())),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return sort_suggestions(&prefix_str, output, options);
|
||||
}
|
||||
}
|
||||
|
||||
// Completions for $nu.<tab>
|
||||
if var_str == "$nu" {
|
||||
// Eval nu var
|
||||
if let Ok(nuval) = eval_variable(
|
||||
working_set.permanent_state,
|
||||
stack,
|
||||
nu_protocol::NU_VARIABLE_ID,
|
||||
nu_protocol::Span::new(current_span.start, current_span.end),
|
||||
) {
|
||||
for suggestion in nested_suggestions(&nuval, &self.var_context.1, current_span)
|
||||
{
|
||||
if options.match_algorithm.matches_u8_insensitive(
|
||||
options.case_sensitive,
|
||||
suggestion.suggestion.value.as_bytes(),
|
||||
&prefix,
|
||||
) {
|
||||
output.push(suggestion);
|
||||
}
|
||||
}
|
||||
|
||||
return sort_suggestions(&prefix_str, output, options);
|
||||
}
|
||||
}
|
||||
|
||||
// Completion other variable types
|
||||
if let Some(var_id) = var_id {
|
||||
// Extract the variable value from the stack
|
||||
let var = stack.get_var(var_id, Span::new(span.start, span.end));
|
||||
|
||||
// If the value exists and it's of type Record
|
||||
if let Ok(value) = var {
|
||||
for suggestion in nested_suggestions(&value, &self.var_context.1, current_span)
|
||||
{
|
||||
if options.match_algorithm.matches_u8_insensitive(
|
||||
options.case_sensitive,
|
||||
suggestion.suggestion.value.as_bytes(),
|
||||
&prefix,
|
||||
) {
|
||||
output.push(suggestion);
|
||||
}
|
||||
}
|
||||
|
||||
return sort_suggestions(&prefix_str, output, options);
|
||||
}
|
||||
}
|
||||
}
|
||||

        // Variable completion (e.g: $en<tab> to complete $env)
        let builtins = ["$nu", "$in", "$env"];
        for builtin in builtins {
            if options.match_algorithm.matches_u8_insensitive(
                options.case_sensitive,
                builtin.as_bytes(),
                &prefix,
            ) {
                output.push(SemanticSuggestion {
                    suggestion: Suggestion {
                        value: builtin.to_string(),
                        span: current_span,
                        ..Suggestion::default()
                    },
                    // TODO is there a way to get the VarId to get the type???
                    kind: None,
                });
            }
            matcher.add_semantic_suggestion(SemanticSuggestion {
                suggestion: Suggestion {
                    value: builtin.to_string(),
                    span: current_span,
                    description: Some("reserved".into()),
                    ..Suggestion::default()
                },
                kind: Some(SuggestionKind::Variable),
            });
        }

        let mut add_candidate = |name, var_id: &VarId| {
            matcher.add_semantic_suggestion(SemanticSuggestion {
                suggestion: Suggestion {
                    value: String::from_utf8_lossy(name).to_string(),
                    span: current_span,
                    description: Some(working_set.get_variable(*var_id).ty.to_string()),
                    ..Suggestion::default()
                },
                kind: Some(SuggestionKind::Variable),
            })
        };

        // TODO: The following can be refactored (see find_commands_by_predicate() used in
        // command_completions).
        let mut removed_overlays = vec![];
        // Working set scope vars
        for scope_frame in working_set.delta.scope.iter().rev() {
            for overlay_frame in scope_frame.active_overlays(&mut removed_overlays).rev() {
                for v in &overlay_frame.vars {
                    if options.match_algorithm.matches_u8_insensitive(
                        options.case_sensitive,
                        v.0,
                        &prefix,
                    ) {
                        output.push(SemanticSuggestion {
                            suggestion: Suggestion {
                                value: String::from_utf8_lossy(v.0).to_string(),
                                span: current_span,
                                ..Suggestion::default()
                            },
                            kind: Some(SuggestionKind::Type(
                                working_set.get_variable(*v.1).ty.clone(),
                            )),
                        });
                    }
                for (name, var_id) in &overlay_frame.vars {
                    add_candidate(name, var_id);
                }
            }
        }

        // Permanent state vars
        // for scope in &self.engine_state.scope {
        for overlay_frame in working_set
@@ -197,118 +69,11 @@ impl Completer for VariableCompletion {
            .active_overlays(&removed_overlays)
            .rev()
        {
            for v in &overlay_frame.vars {
                if options.match_algorithm.matches_u8_insensitive(
                    options.case_sensitive,
                    v.0,
                    &prefix,
                ) {
                    output.push(SemanticSuggestion {
                        suggestion: Suggestion {
                            value: String::from_utf8_lossy(v.0).to_string(),
                            span: current_span,
                            ..Suggestion::default()
                        },
                        kind: Some(SuggestionKind::Type(
                            working_set.get_variable(*v.1).ty.clone(),
                        )),
                    });
                }
            for (name, var_id) in &overlay_frame.vars {
                add_candidate(name, var_id);
            }
        }

        output = sort_suggestions(&prefix_str, output, options);

        output.dedup(); // TODO: Removes only consecutive duplicates, is it intended?

        output
    }
}

// Find recursively the values for sublevels
// if no sublevels are set it returns the current value
fn nested_suggestions(
    val: &Value,
    sublevels: &[Vec<u8>],
    current_span: reedline::Span,
) -> Vec<SemanticSuggestion> {
    let mut output: Vec<SemanticSuggestion> = vec![];
    let value = recursive_value(val, sublevels).unwrap_or_else(Value::nothing);

    let kind = SuggestionKind::Type(value.get_type());
    match value {
        Value::Record { val, .. } => {
            // Add all the columns as completion
            for col in val.columns() {
                output.push(SemanticSuggestion {
                    suggestion: Suggestion {
                        value: col.clone(),
                        span: current_span,
                        ..Suggestion::default()
                    },
                    kind: Some(kind.clone()),
                });
            }

            output
        }
        Value::List { vals, .. } => {
            for column_name in get_columns(vals.as_slice()) {
                output.push(SemanticSuggestion {
                    suggestion: Suggestion {
                        value: column_name,
                        span: current_span,
                        ..Suggestion::default()
                    },
                    kind: Some(kind.clone()),
                });
            }

            output
        }
        _ => output,
    }
}

// Extracts the recursive value (e.g: $var.a.b.c)
fn recursive_value(val: &Value, sublevels: &[Vec<u8>]) -> Result<Value, Span> {
    // Go to next sublevel
    if let Some((sublevel, next_sublevels)) = sublevels.split_first() {
        let span = val.span();
        match val {
            Value::Record { val, .. } => {
                if let Some((_, value)) = val.iter().find(|(key, _)| key.as_bytes() == sublevel) {
                    // If matches try to fetch recursively the next
                    recursive_value(value, next_sublevels)
                } else {
                    // Current sublevel value not found
                    Err(span)
                }
            }
            Value::List { vals, .. } => {
                for col in get_columns(vals.as_slice()) {
                    if col.as_bytes() == *sublevel {
                        let val = val.get_data_by_key(&col).ok_or(span)?;
                        return recursive_value(&val, next_sublevels);
                    }
                }

                // Current sublevel value not found
                Err(span)
            }
            _ => Ok(val.clone()),
        }
    } else {
        Ok(val.clone())
    }
}
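The drill-down above is easiest to see in isolation. Below is a minimal, self-contained sketch of the same idea using a stand-in value type instead of nu_protocol::Value; the names (Val, drill) are illustrative and do not exist in the codebase.

    use std::collections::BTreeMap;

    // Stand-in for a nested record value, i.e. what `$var.a.b.c` drills into.
    #[derive(Clone, Debug)]
    enum Val {
        Str(String),
        Record(BTreeMap<String, Val>),
    }

    // Walk the path one key at a time; give up as soon as a key is missing
    // or the current value is not a record.
    fn drill<'a>(val: &'a Val, path: &[&str]) -> Option<&'a Val> {
        match path.split_first() {
            None => Some(val),
            Some((key, rest)) => match val {
                Val::Record(fields) => fields.get(*key).and_then(|v| drill(v, rest)),
                _ => None,
            },
        }
    }

    fn main() {
        let mut inner = BTreeMap::new();
        inner.insert("c".to_string(), Val::Str("hit".to_string()));
        let mut outer = BTreeMap::new();
        outer.insert("b".to_string(), Val::Record(inner));
        let root = Val::Record(outer);

        // Mirrors resolving `$var.b.c` before offering column completions.
        println!("{:?}", drill(&root, &["b", "c"]));
    }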

impl MatchAlgorithm {
    pub fn matches_u8_insensitive(&self, sensitive: bool, haystack: &[u8], needle: &[u8]) -> bool {
        if sensitive {
            self.matches_u8(haystack, needle)
        } else {
            self.matches_u8(&haystack.to_ascii_lowercase(), &needle.to_ascii_lowercase())
        }
        matcher.results()
    }
}
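For intuition, here is a tiny, self-contained example of the lowercase-both-sides approach used above. The matches_insensitive function is illustrative only (a plain prefix check), not the crate's MatchAlgorithm API.

    // Case-insensitive byte matching by ASCII-lowercasing both sides.
    fn matches_insensitive(case_sensitive: bool, haystack: &[u8], needle: &[u8]) -> bool {
        if case_sensitive {
            haystack.starts_with(needle)
        } else {
            haystack
                .to_ascii_lowercase()
                .starts_with(&needle.to_ascii_lowercase())
        }
    }

    fn main() {
        assert!(matches_insensitive(false, b"PATH", b"pa"));
        assert!(!matches_insensitive(true, b"PATH", b"pa"));
        println!("ok");
    }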
|
@ -2,10 +2,11 @@ use crate::util::eval_source;
|
||||
#[cfg(feature = "plugin")]
|
||||
use nu_path::canonicalize_with;
|
||||
#[cfg(feature = "plugin")]
|
||||
use nu_protocol::{engine::StateWorkingSet, ParseError, PluginRegistryFile, Spanned};
|
||||
use nu_protocol::{ParseError, PluginRegistryFile, Spanned, engine::StateWorkingSet};
|
||||
use nu_protocol::{
|
||||
PipelineData,
|
||||
engine::{EngineState, Stack},
|
||||
report_shell_error, HistoryFileFormat, PipelineData,
|
||||
report_shell_error,
|
||||
};
|
||||
#[cfg(feature = "plugin")]
|
||||
use nu_utils::perf;
|
||||
@ -16,16 +17,9 @@ const PLUGIN_FILE: &str = "plugin.msgpackz";
|
||||
#[cfg(feature = "plugin")]
|
||||
const OLD_PLUGIN_FILE: &str = "plugin.nu";
|
||||
|
||||
const HISTORY_FILE_TXT: &str = "history.txt";
|
||||
const HISTORY_FILE_SQLITE: &str = "history.sqlite3";
|
||||
|
||||
#[cfg(feature = "plugin")]
|
||||
pub fn read_plugin_file(
|
||||
engine_state: &mut EngineState,
|
||||
plugin_file: Option<Spanned<String>>,
|
||||
storage_path: &str,
|
||||
) {
|
||||
use nu_protocol::ShellError;
|
||||
pub fn read_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Spanned<String>>) {
|
||||
use nu_protocol::{ShellError, shell_error::io::IoError};
|
||||
use std::path::Path;
|
||||
|
||||
let span = plugin_file.as_ref().map(|s| s.span);
|
||||
@ -52,11 +46,14 @@ pub fn read_plugin_file(
|
||||
let mut start_time = std::time::Instant::now();
|
||||
// Reading signatures from plugin registry file
|
||||
// The plugin.msgpackz file stores the parsed signature collected from each registered plugin
|
||||
add_plugin_file(engine_state, plugin_file.clone(), storage_path);
|
||||
add_plugin_file(engine_state, plugin_file.clone());
|
||||
perf!(
|
||||
"add plugin file to engine_state",
|
||||
start_time,
|
||||
engine_state.get_config().use_ansi_coloring
|
||||
engine_state
|
||||
.get_config()
|
||||
.use_ansi_coloring
|
||||
.get(engine_state)
|
||||
);
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
@ -70,8 +67,7 @@ pub fn read_plugin_file(
|
||||
log::warn!("Plugin file not found: {}", plugin_path.display());
|
||||
|
||||
// Try migration of an old plugin file if this wasn't a custom plugin file
|
||||
if plugin_file.is_none() && migrate_old_plugin_file(engine_state, storage_path)
|
||||
{
|
||||
if plugin_file.is_none() && migrate_old_plugin_file(engine_state) {
|
||||
let Ok(file) = std::fs::File::open(&plugin_path) else {
|
||||
log::warn!("Failed to load newly migrated plugin file");
|
||||
return;
|
||||
@ -83,16 +79,12 @@ pub fn read_plugin_file(
|
||||
} else {
|
||||
report_shell_error(
|
||||
engine_state,
|
||||
&ShellError::GenericError {
|
||||
error: format!(
|
||||
"Error while opening plugin registry file: {}",
|
||||
plugin_path.display()
|
||||
),
|
||||
msg: "plugin path defined here".into(),
|
||||
span,
|
||||
help: None,
|
||||
inner: vec![err.into()],
|
||||
},
|
||||
&ShellError::Io(IoError::new_internal_with_path(
|
||||
err,
|
||||
"Could not open plugin registry file",
|
||||
nu_protocol::location!(),
|
||||
plugin_path,
|
||||
)),
|
||||
);
|
||||
return;
|
||||
}
|
||||
@ -137,7 +129,10 @@ pub fn read_plugin_file(
|
||||
perf!(
|
||||
&format!("read plugin file {}", plugin_path.display()),
|
||||
start_time,
|
||||
engine_state.get_config().use_ansi_coloring
|
||||
engine_state
|
||||
.get_config()
|
||||
.use_ansi_coloring
|
||||
.get(engine_state)
|
||||
);
|
||||
start_time = std::time::Instant::now();
|
||||
|
||||
@ -153,17 +148,16 @@ pub fn read_plugin_file(
|
||||
perf!(
|
||||
&format!("load plugin file {}", plugin_path.display()),
|
||||
start_time,
|
||||
engine_state.get_config().use_ansi_coloring
|
||||
engine_state
|
||||
.get_config()
|
||||
.use_ansi_coloring
|
||||
.get(engine_state)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "plugin")]
|
||||
pub fn add_plugin_file(
|
||||
engine_state: &mut EngineState,
|
||||
plugin_file: Option<Spanned<String>>,
|
||||
storage_path: &str,
|
||||
) {
|
||||
pub fn add_plugin_file(engine_state: &mut EngineState, plugin_file: Option<Spanned<String>>) {
|
||||
use std::path::Path;
|
||||
|
||||
use nu_protocol::report_parse_error;
|
||||
@ -189,9 +183,8 @@ pub fn add_plugin_file(
|
||||
),
|
||||
);
|
||||
}
|
||||
} else if let Some(mut plugin_path) = nu_path::config_dir() {
|
||||
} else if let Some(plugin_path) = nu_path::nu_config_dir() {
|
||||
// Path to store plugins signatures
|
||||
plugin_path.push(storage_path);
|
||||
let mut plugin_path =
|
||||
canonicalize_with(&plugin_path, &cwd).unwrap_or(plugin_path.into());
|
||||
plugin_path.push(PLUGIN_FILE);
|
||||
@ -228,36 +221,18 @@ pub fn eval_config_contents(
|
||||
engine_state.file = prev_file;
|
||||
|
||||
// Merge the environment in case env vars changed in the config
|
||||
match engine_state.cwd(Some(stack)) {
|
||||
Ok(cwd) => {
|
||||
if let Err(e) = engine_state.merge_env(stack, cwd) {
|
||||
report_shell_error(engine_state, &e);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
report_shell_error(engine_state, &e);
|
||||
}
|
||||
if let Err(e) = engine_state.merge_env(stack) {
|
||||
report_shell_error(engine_state, &e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn get_history_path(storage_path: &str, mode: HistoryFileFormat) -> Option<PathBuf> {
|
||||
nu_path::config_dir().map(|mut history_path| {
|
||||
history_path.push(storage_path);
|
||||
history_path.push(match mode {
|
||||
HistoryFileFormat::Plaintext => HISTORY_FILE_TXT,
|
||||
HistoryFileFormat::Sqlite => HISTORY_FILE_SQLITE,
|
||||
});
|
||||
history_path.into()
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(feature = "plugin")]
|
||||
pub fn migrate_old_plugin_file(engine_state: &EngineState, storage_path: &str) -> bool {
|
||||
pub fn migrate_old_plugin_file(engine_state: &EngineState) -> bool {
|
||||
use nu_protocol::{
|
||||
PluginExample, PluginIdentity, PluginRegistryItem, PluginRegistryItemData, PluginSignature,
|
||||
ShellError,
|
||||
ShellError, shell_error::io::IoError,
|
||||
};
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
@ -267,10 +242,9 @@ pub fn migrate_old_plugin_file(engine_state: &EngineState, storage_path: &str) -
|
||||
return false;
|
||||
};
|
||||
|
||||
let Some(config_dir) = nu_path::config_dir().and_then(|mut dir| {
|
||||
dir.push(storage_path);
|
||||
nu_path::canonicalize_with(dir, &cwd).ok()
|
||||
}) else {
|
||||
let Some(config_dir) =
|
||||
nu_path::nu_config_dir().and_then(|dir| nu_path::canonicalize_with(dir, &cwd).ok())
|
||||
else {
|
||||
return false;
|
||||
};
|
||||
|
||||
@ -347,7 +321,15 @@ pub fn migrate_old_plugin_file(engine_state: &EngineState, storage_path: &str) -
|
||||
// Write the new file
|
||||
let new_plugin_file_path = config_dir.join(PLUGIN_FILE);
|
||||
if let Err(err) = std::fs::File::create(&new_plugin_file_path)
|
||||
.map_err(|e| e.into())
|
||||
.map_err(|err| {
|
||||
IoError::new_internal_with_path(
|
||||
err,
|
||||
"Could not create new plugin file",
|
||||
nu_protocol::location!(),
|
||||
new_plugin_file_path.clone(),
|
||||
)
|
||||
})
|
||||
.map_err(ShellError::from)
|
||||
.and_then(|file| contents.write_to(file, None))
|
||||
{
|
||||
report_shell_error(
|
||||
@ -377,7 +359,10 @@ pub fn migrate_old_plugin_file(engine_state: &EngineState, storage_path: &str) -
|
||||
perf!(
|
||||
"migrate old plugin file",
|
||||
start_time,
|
||||
engine_state.get_config().use_ansi_coloring
|
||||
engine_state
|
||||
.get_config()
|
||||
.use_ansi_coloring
|
||||
.get(&engine_state)
|
||||
);
|
||||
true
|
||||
}
|
||||
|
@@ -1,14 +1,17 @@
use log::info;
use nu_engine::{convert_env_values, eval_block};
use nu_engine::eval_block;
use nu_parser::parse;
use nu_protocol::{
    PipelineData, ShellError, Spanned, Value,
    cli_error::report_compile_error,
    debugger::WithoutDebug,
    engine::{EngineState, Stack, StateWorkingSet},
    report_parse_error, report_parse_warning, PipelineData, ShellError, Spanned, Value,
    report_parse_error, report_parse_warning,
};
use std::sync::Arc;

use crate::util::print_pipeline;

#[derive(Default)]
pub struct EvaluateCommandsOpts {
    pub table_mode: Option<Value>,
@@ -48,9 +51,6 @@ pub fn evaluate_commands(
        }
    }

    // Translate environment variables from Strings to Values
    convert_env_values(engine_state, stack)?;

    // Parse the source code
    let (block, delta) = {
        if let Some(ref t_mode) = table_mode {
@@ -72,7 +72,7 @@ pub fn evaluate_commands(

        if let Some(err) = working_set.compile_errors.first() {
            report_compile_error(&working_set, err);
            // Not a fatal error, for now
            std::process::exit(1);
        }

        (output, working_set.render())
@@ -93,7 +93,7 @@ pub fn evaluate_commands(
            t_mode.coerce_str()?.parse().unwrap_or_default();
    }

    pipeline.print(engine_state, stack, no_newline, false)?;
    print_pipeline(engine_state, stack, pipeline, no_newline)?;

    info!("evaluate {}:{}:{}", file!(), line!(), column!());

|
@ -1,15 +1,17 @@
|
||||
use crate::util::eval_source;
|
||||
use crate::util::{eval_source, print_pipeline};
|
||||
use log::{info, trace};
|
||||
use nu_engine::{convert_env_values, eval_block};
|
||||
use nu_engine::eval_block;
|
||||
use nu_parser::parse;
|
||||
use nu_path::canonicalize_with;
|
||||
use nu_protocol::{
|
||||
PipelineData, ShellError, Span, Value,
|
||||
cli_error::report_compile_error,
|
||||
debugger::WithoutDebug,
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
report_parse_error, report_parse_warning, PipelineData, ShellError, Span, Value,
|
||||
report_parse_error, report_parse_warning,
|
||||
shell_error::io::*,
|
||||
};
|
||||
use std::sync::Arc;
|
||||
use std::{path::PathBuf, sync::Arc};
|
||||
|
||||
/// Entry point for evaluating a file.
|
||||
///
|
||||
@ -22,16 +24,16 @@ pub fn evaluate_file(
|
||||
stack: &mut Stack,
|
||||
input: PipelineData,
|
||||
) -> Result<(), ShellError> {
|
||||
// Convert environment variables from Strings to Values and store them in the engine state.
|
||||
convert_env_values(engine_state, stack)?;
|
||||
|
||||
let cwd = engine_state.cwd_as_string(Some(stack))?;
|
||||
|
||||
let file_path =
|
||||
canonicalize_with(&path, cwd).map_err(|err| ShellError::FileNotFoundCustom {
|
||||
msg: format!("Could not access file '{path}': {err}"),
|
||||
span: Span::unknown(),
|
||||
})?;
|
||||
let file_path = canonicalize_with(&path, cwd).map_err(|err| {
|
||||
IoError::new_internal_with_path(
|
||||
err.not_found_as(NotFound::File),
|
||||
"Could not access file",
|
||||
nu_protocol::location!(),
|
||||
PathBuf::from(&path),
|
||||
)
|
||||
})?;
|
||||
|
||||
let file_path_str = file_path
|
||||
.to_str()
|
||||
@ -43,18 +45,24 @@ pub fn evaluate_file(
|
||||
span: Span::unknown(),
|
||||
})?;
|
||||
|
||||
let file = std::fs::read(&file_path).map_err(|err| ShellError::FileNotFoundCustom {
|
||||
msg: format!("Could not read file '{file_path_str}': {err}"),
|
||||
span: Span::unknown(),
|
||||
let file = std::fs::read(&file_path).map_err(|err| {
|
||||
IoError::new_internal_with_path(
|
||||
err.not_found_as(NotFound::File),
|
||||
"Could not read file",
|
||||
nu_protocol::location!(),
|
||||
file_path.clone(),
|
||||
)
|
||||
})?;
|
||||
engine_state.file = Some(file_path.clone());
|
||||
|
||||
let parent = file_path
|
||||
.parent()
|
||||
.ok_or_else(|| ShellError::FileNotFoundCustom {
|
||||
msg: format!("The file path '{file_path_str}' does not have a parent"),
|
||||
span: Span::unknown(),
|
||||
})?;
|
||||
let parent = file_path.parent().ok_or_else(|| {
|
||||
IoError::new_internal_with_path(
|
||||
ErrorKind::DirectoryNotFound,
|
||||
"The file path does not have a parent",
|
||||
nu_protocol::location!(),
|
||||
file_path.clone(),
|
||||
)
|
||||
})?;
|
||||
|
||||
stack.add_env_var(
|
||||
"FILE_PWD".to_string(),
|
||||
@ -89,7 +97,7 @@ pub fn evaluate_file(
|
||||
|
||||
if let Some(err) = working_set.compile_errors.first() {
|
||||
report_compile_error(&working_set, err);
|
||||
// Not a fatal error, for now
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
// Look for blocks whose name starts with "main" and replace it with the filename.
|
||||
@ -119,7 +127,7 @@ pub fn evaluate_file(
|
||||
};
|
||||
|
||||
// Print the pipeline output of the last command of the file.
|
||||
pipeline.print(engine_state, stack, true, false)?;
|
||||
print_pipeline(engine_state, stack, pipeline, true)?;
|
||||
|
||||
// Invoke the main command with arguments.
|
||||
// Arguments with whitespaces are quoted, thus can be safely concatenated by whitespace.
|
||||
|
@@ -18,10 +18,9 @@ mod validation;
pub use commands::add_cli_context;
pub use completions::{FileCompletion, NuCompleter, SemanticSuggestion, SuggestionKind};
pub use config_files::eval_config_contents;
pub use eval_cmds::{evaluate_commands, EvaluateCommandsOpts};
pub use eval_cmds::{EvaluateCommandsOpts, evaluate_commands};
pub use eval_file::evaluate_file;
pub use menus::NuHelpCompleter;
pub use nu_cmd_base::util::get_init_cwd;
pub use nu_highlight::NuHighlight;
pub use print::Print;
pub use prompt::NushellPrompt;
|
@@ -1,5 +1,5 @@
use nu_engine::documentation::{get_flags_section, HelpStyle};
use nu_protocol::{engine::EngineState, levenshtein_distance, Config};
use nu_engine::documentation::{HelpStyle, get_flags_section};
use nu_protocol::{Config, engine::EngineState, levenshtein_distance};
use nu_utils::IgnoreCaseExt;
use reedline::{Completer, Suggestion};
use std::{fmt::Write, sync::Arc};
|
@@ -1,16 +1,16 @@
use nu_engine::eval_block;
use nu_protocol::{
    BlockId, IntoPipelineData, Span, Value,
    debugger::WithoutDebug,
    engine::{EngineState, Stack},
    IntoPipelineData, Span, Value,
};
use reedline::{menu_functions::parse_selection_char, Completer, Suggestion};
use reedline::{Completer, Suggestion, menu_functions::parse_selection_char};
use std::sync::Arc;

const SELECTION_CHAR: char = '!';

pub struct NuMenuCompleter {
    block_id: usize,
    block_id: BlockId,
    span: Span,
    stack: Stack,
    engine_state: Arc<EngineState>,
@@ -19,7 +19,7 @@ pub struct NuMenuCompleter {

impl NuMenuCompleter {
    pub fn new(
        block_id: usize,
        block_id: BlockId,
        span: Span,
        stack: Stack,
        engine_state: Arc<EngineState>,
@@ -28,7 +28,7 @@ impl NuMenuCompleter {
        Self {
            block_id,
            span,
            stack: stack.reset_out_dest().capture(),
            stack: stack.reset_out_dest().collect_value(),
            engine_state,
            only_buffer_difference,
        }
|
@@ -1,4 +1,5 @@
use nu_engine::command_prelude::*;
use nu_protocol::ByteStreamSource;

#[derive(Clone)]
pub struct Print;
@@ -50,7 +51,7 @@ Since this command has no output, there is no point in piping it with other comm
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        input: PipelineData,
        mut input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        let args: Vec<Value> = call.rest(engine_state, stack, 0)?;
        let no_newline = call.has_flag(engine_state, stack, "no-newline")?;
@@ -64,15 +65,24 @@ Since this command has no output, there is no point in piping it with other comm
                arg.into_pipeline_data()
                    .print_raw(engine_state, no_newline, to_stderr)?;
            } else {
                arg.into_pipeline_data()
                    .print(engine_state, stack, no_newline, to_stderr)?;
                arg.into_pipeline_data().print_table(
                    engine_state,
                    stack,
                    no_newline,
                    to_stderr,
                )?;
            }
        }
    } else if !input.is_nothing() {
        if let PipelineData::ByteStream(stream, _) = &mut input {
            if let ByteStreamSource::Child(child) = stream.source_mut() {
                child.ignore_error(true);
            }
        }
        if raw {
            input.print_raw(engine_state, no_newline, to_stderr)?;
        } else {
            input.print(engine_state, stack, no_newline, to_stderr)?;
            input.print_table(engine_state, stack, no_newline, to_stderr)?;
        }
    }

@@ -1,10 +1,7 @@
use crate::prompt_update::{
    POST_PROMPT_MARKER, PRE_PROMPT_MARKER, VSCODE_POST_PROMPT_MARKER, VSCODE_PRE_PROMPT_MARKER,
};
use nu_protocol::{
    engine::{EngineState, Stack},
    Value,
};
use nu_protocol::engine::{EngineState, Stack};
#[cfg(windows)]
use nu_utils::enable_vt_processing;
use reedline::{
@@ -124,8 +121,11 @@ impl Prompt for NushellPrompt {
            .replace('\n', "\r\n");

        if self.shell_integration_osc633 {
            if self.stack.get_env_var(&self.engine_state, "TERM_PROGRAM")
                == Some(Value::test_string("vscode"))
            if self
                .stack
                .get_env_var(&self.engine_state, "TERM_PROGRAM")
                .and_then(|v| v.as_str().ok())
                == Some("vscode")
            {
                // We're in vscode and we have osc633 enabled
                format!("{VSCODE_PRE_PROMPT_MARKER}{prompt}{VSCODE_POST_PROMPT_MARKER}").into()
@ -1,9 +1,10 @@
|
||||
use crate::NushellPrompt;
|
||||
use log::trace;
|
||||
use log::{trace, warn};
|
||||
use nu_engine::ClosureEvalOnce;
|
||||
use nu_protocol::{
|
||||
Config, PipelineData, Value,
|
||||
engine::{EngineState, Stack},
|
||||
report_shell_error, Config, PipelineData, Value,
|
||||
report_shell_error,
|
||||
};
|
||||
use reedline::Prompt;
|
||||
|
||||
@ -30,30 +31,21 @@ pub(crate) const TRANSIENT_PROMPT_MULTILINE_INDICATOR: &str =
|
||||
pub(crate) const PRE_PROMPT_MARKER: &str = "\x1b]133;A\x1b\\";
|
||||
pub(crate) const POST_PROMPT_MARKER: &str = "\x1b]133;B\x1b\\";
|
||||
pub(crate) const PRE_EXECUTION_MARKER: &str = "\x1b]133;C\x1b\\";
|
||||
#[allow(dead_code)]
|
||||
pub(crate) const POST_EXECUTION_MARKER_PREFIX: &str = "\x1b]133;D;";
|
||||
#[allow(dead_code)]
|
||||
pub(crate) const POST_EXECUTION_MARKER_SUFFIX: &str = "\x1b\\";
|
||||
|
||||
// OSC633 is the same as OSC133 but specifically for VSCode
|
||||
pub(crate) const VSCODE_PRE_PROMPT_MARKER: &str = "\x1b]633;A\x1b\\";
|
||||
pub(crate) const VSCODE_POST_PROMPT_MARKER: &str = "\x1b]633;B\x1b\\";
|
||||
#[allow(dead_code)]
|
||||
pub(crate) const VSCODE_PRE_EXECUTION_MARKER: &str = "\x1b]633;C\x1b\\";
|
||||
#[allow(dead_code)]
|
||||
//"\x1b]633;D;{}\x1b\\"
|
||||
pub(crate) const VSCODE_POST_EXECUTION_MARKER_PREFIX: &str = "\x1b]633;D;";
|
||||
#[allow(dead_code)]
|
||||
pub(crate) const VSCODE_POST_EXECUTION_MARKER_SUFFIX: &str = "\x1b\\";
|
||||
#[allow(dead_code)]
|
||||
//"\x1b]633;E;{}\x1b\\"
|
||||
pub(crate) const VSCODE_COMMANDLINE_MARKER_PREFIX: &str = "\x1b]633;E;";
|
||||
#[allow(dead_code)]
|
||||
pub(crate) const VSCODE_COMMANDLINE_MARKER_SUFFIX: &str = "\x1b\\";
|
||||
#[allow(dead_code)]
|
||||
// "\x1b]633;P;Cwd={}\x1b\\"
|
||||
pub(crate) const VSCODE_CWD_PROPERTY_MARKER_PREFIX: &str = "\x1b]633;P;Cwd=";
|
||||
#[allow(dead_code)]
|
||||
pub(crate) const VSCODE_CWD_PROPERTY_MARKER_SUFFIX: &str = "\x1b\\";
|
||||
|
||||
pub(crate) const RESET_APPLICATION_MODE: &str = "\x1b[?1l";
|
||||
@ -68,7 +60,7 @@ fn get_prompt_string(
|
||||
.get_env_var(engine_state, prompt)
|
||||
.and_then(|v| match v {
|
||||
Value::Closure { val, .. } => {
|
||||
let result = ClosureEvalOnce::new(engine_state, stack, *val)
|
||||
let result = ClosureEvalOnce::new(engine_state, stack, val.as_ref().clone())
|
||||
.run_with_input(PipelineData::Empty);
|
||||
|
||||
trace!(
|
||||
@ -89,18 +81,19 @@ fn get_prompt_string(
|
||||
})
|
||||
.and_then(|pipeline_data| {
|
||||
let output = pipeline_data.collect_string("", config).ok();
|
||||
let ansi_output = output.map(|mut x| {
|
||||
// Always reset the color at the start of the right prompt
|
||||
// to ensure there is no ansi bleed over
|
||||
if x.is_empty() && prompt == PROMPT_COMMAND_RIGHT {
|
||||
x.insert_str(0, "\x1b[0m")
|
||||
};
|
||||
|
||||
output.map(|mut x| {
|
||||
// Just remove the very last newline.
|
||||
if x.ends_with('\n') {
|
||||
x.pop();
|
||||
}
|
||||
|
||||
if x.ends_with('\r') {
|
||||
x.pop();
|
||||
}
|
||||
x
|
||||
})
|
||||
});
|
||||
// Let's keep this for debugging purposes with nu --log-level warn
|
||||
warn!("{}:{}:{} {:?}", file!(), line!(), column!(), ansi_output);
|
||||
|
||||
ansi_output
|
||||
})
|
||||
}
|
||||
|
||||
@ -119,7 +112,11 @@ pub(crate) fn update_prompt(
|
||||
// Now that we have the prompt string lets ansify it.
|
||||
// <133 A><prompt><133 B><command><133 C><command output>
|
||||
let left_prompt_string = if config.shell_integration.osc633 {
|
||||
if stack.get_env_var(engine_state, "TERM_PROGRAM") == Some(Value::test_string("vscode")) {
|
||||
if stack
|
||||
.get_env_var(engine_state, "TERM_PROGRAM")
|
||||
.and_then(|v| v.as_str().ok())
|
||||
== Some("vscode")
|
||||
{
|
||||
// We're in vscode and we have osc633 enabled
|
||||
Some(format!(
|
||||
"{VSCODE_PRE_PROMPT_MARKER}{configured_left_prompt_string}{VSCODE_POST_PROMPT_MARKER}"
|
||||
|
@ -1,20 +1,20 @@
|
||||
use crate::{menus::NuMenuCompleter, NuHelpCompleter};
|
||||
use crate::{NuHelpCompleter, menus::NuMenuCompleter};
|
||||
use crossterm::event::{KeyCode, KeyModifiers};
|
||||
use nu_ansi_term::Style;
|
||||
use nu_color_config::{color_record_to_nustyle, lookup_ansi_color_style};
|
||||
use nu_engine::eval_block;
|
||||
use nu_parser::parse;
|
||||
use nu_protocol::{
|
||||
create_menus,
|
||||
Config, EditBindings, FromValue, ParsedKeybinding, ParsedMenu, PipelineData, Record,
|
||||
ShellError, Span, Type, Value,
|
||||
debugger::WithoutDebug,
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
extract_value, Config, EditBindings, ParsedKeybinding, ParsedMenu, PipelineData, Record,
|
||||
ShellError, Span, Value,
|
||||
extract_value,
|
||||
};
|
||||
use reedline::{
|
||||
default_emacs_keybindings, default_vi_insert_keybindings, default_vi_normal_keybindings,
|
||||
ColumnarMenu, DescriptionMenu, DescriptionMode, EditCommand, IdeMenu, Keybindings, ListMenu,
|
||||
MenuBuilder, Reedline, ReedlineEvent, ReedlineMenu,
|
||||
MenuBuilder, Reedline, ReedlineEvent, ReedlineMenu, default_emacs_keybindings,
|
||||
default_vi_insert_keybindings, default_vi_normal_keybindings,
|
||||
};
|
||||
use std::sync::Arc;
|
||||
|
||||
@ -36,6 +36,41 @@ const DEFAULT_COMPLETION_MENU: &str = r#"
|
||||
}
|
||||
}"#;
|
||||
|
||||
const DEFAULT_IDE_COMPLETION_MENU: &str = r#"
|
||||
{
|
||||
name: ide_completion_menu
|
||||
only_buffer_difference: false
|
||||
marker: "| "
|
||||
type: {
|
||||
layout: ide
|
||||
min_completion_width: 0,
|
||||
max_completion_width: 50,
|
||||
max_completion_height: 10, # will be limited by the available lines in the terminal
|
||||
padding: 0,
|
||||
border: true,
|
||||
cursor_offset: 0,
|
||||
description_mode: "prefer_right"
|
||||
min_description_width: 0
|
||||
max_description_width: 50
|
||||
max_description_height: 10
|
||||
description_offset: 1
|
||||
# If true, the cursor pos will be corrected, so the suggestions match up with the typed text
|
||||
#
|
||||
# C:\> str
|
||||
# str join
|
||||
# str trim
|
||||
# str split
|
||||
correct_cursor_pos: false
|
||||
}
|
||||
style: {
|
||||
text: green
|
||||
selected_text: { attr: r }
|
||||
description_text: yellow
|
||||
match_text: { attr: u }
|
||||
selected_match_text: { attr: ur }
|
||||
}
|
||||
}"#;
|
||||
|
||||
const DEFAULT_HISTORY_MENU: &str = r#"
|
||||
{
|
||||
name: history_menu
|
||||
@ -95,6 +130,7 @@ pub(crate) fn add_menus(
|
||||
// Checking if the default menus have been added from the config file
|
||||
let default_menus = [
|
||||
("completion_menu", DEFAULT_COMPLETION_MENU),
|
||||
("ide_completion_menu", DEFAULT_IDE_COMPLETION_MENU),
|
||||
("history_menu", DEFAULT_HISTORY_MENU),
|
||||
("help_menu", DEFAULT_HELP_MENU),
|
||||
];
|
||||
@ -122,7 +158,7 @@ pub(crate) fn add_menus(
|
||||
|
||||
engine_state.merge_delta(delta)?;
|
||||
|
||||
let mut temp_stack = Stack::new().capture();
|
||||
let mut temp_stack = Stack::new().collect_value();
|
||||
let input = PipelineData::Empty;
|
||||
menu_eval_results.push(eval_block::<WithoutDebug>(
|
||||
&engine_state,
|
||||
@ -137,15 +173,13 @@ pub(crate) fn add_menus(
|
||||
|
||||
for res in menu_eval_results.into_iter() {
|
||||
if let PipelineData::Value(value, None) = res {
|
||||
for menu in create_menus(&value)? {
|
||||
line_editor = add_menu(
|
||||
line_editor,
|
||||
&menu,
|
||||
new_engine_state_ref.clone(),
|
||||
stack,
|
||||
config.clone(),
|
||||
)?;
|
||||
}
|
||||
line_editor = add_menu(
|
||||
line_editor,
|
||||
&ParsedMenu::from_value(value)?,
|
||||
new_engine_state_ref.clone(),
|
||||
stack,
|
||||
config.clone(),
|
||||
)?;
|
||||
}
|
||||
}
|
||||
|
||||
@ -168,22 +202,22 @@ fn add_menu(
|
||||
"list" => add_list_menu(line_editor, menu, engine_state, stack, config),
|
||||
"ide" => add_ide_menu(line_editor, menu, engine_state, stack, config),
|
||||
"description" => add_description_menu(line_editor, menu, engine_state, stack, config),
|
||||
_ => Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "columnar, list, ide or description".to_string(),
|
||||
value: menu.r#type.to_abbreviated_string(&config),
|
||||
span: menu.r#type.span(),
|
||||
str => Err(ShellError::InvalidValue {
|
||||
valid: "'columnar', 'list', 'ide', or 'description'".into(),
|
||||
actual: format!("'{str}'"),
|
||||
span,
|
||||
}),
|
||||
}
|
||||
} else {
|
||||
Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "only record type".to_string(),
|
||||
value: menu.r#type.to_abbreviated_string(&config),
|
||||
span: menu.r#type.span(),
|
||||
Err(ShellError::RuntimeTypeMismatch {
|
||||
expected: Type::record(),
|
||||
actual: menu.r#type.get_type(),
|
||||
span,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn get_style(record: &Record, name: &str, span: Span) -> Option<Style> {
|
||||
fn get_style(record: &Record, name: &'static str, span: Span) -> Option<Style> {
|
||||
extract_value(name, record, span)
|
||||
.ok()
|
||||
.map(|text| match text {
|
||||
@ -262,30 +296,23 @@ pub(crate) fn add_columnar_menu(
|
||||
let only_buffer_difference = menu.only_buffer_difference.as_bool()?;
|
||||
columnar_menu = columnar_menu.with_only_buffer_difference(only_buffer_difference);
|
||||
|
||||
let span = menu.source.span();
|
||||
match &menu.source {
|
||||
Value::Nothing { .. } => {
|
||||
Ok(line_editor.with_menu(ReedlineMenu::EngineCompleter(Box::new(columnar_menu))))
|
||||
}
|
||||
Value::Closure { val, .. } => {
|
||||
let menu_completer = NuMenuCompleter::new(
|
||||
val.block_id,
|
||||
span,
|
||||
stack.captures_to_stack(val.captures.clone()),
|
||||
engine_state,
|
||||
only_buffer_difference,
|
||||
);
|
||||
Ok(line_editor.with_menu(ReedlineMenu::WithCompleter {
|
||||
menu: Box::new(columnar_menu),
|
||||
completer: Box::new(menu_completer),
|
||||
}))
|
||||
}
|
||||
_ => Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "block or omitted value".to_string(),
|
||||
value: menu.source.to_abbreviated_string(config),
|
||||
let completer = if let Some(closure) = &menu.source {
|
||||
let menu_completer = NuMenuCompleter::new(
|
||||
closure.block_id,
|
||||
span,
|
||||
}),
|
||||
}
|
||||
stack.captures_to_stack(closure.captures.clone()),
|
||||
engine_state,
|
||||
only_buffer_difference,
|
||||
);
|
||||
ReedlineMenu::WithCompleter {
|
||||
menu: Box::new(columnar_menu),
|
||||
completer: Box::new(menu_completer),
|
||||
}
|
||||
} else {
|
||||
ReedlineMenu::EngineCompleter(Box::new(columnar_menu))
|
||||
};
|
||||
|
||||
Ok(line_editor.with_menu(completer))
|
||||
}
|
||||
|
||||
// Adds a search menu to the line editor
|
||||
@ -318,30 +345,23 @@ pub(crate) fn add_list_menu(
|
||||
let only_buffer_difference = menu.only_buffer_difference.as_bool()?;
|
||||
list_menu = list_menu.with_only_buffer_difference(only_buffer_difference);
|
||||
|
||||
let span = menu.source.span();
|
||||
match &menu.source {
|
||||
Value::Nothing { .. } => {
|
||||
Ok(line_editor.with_menu(ReedlineMenu::HistoryMenu(Box::new(list_menu))))
|
||||
let completer = if let Some(closure) = &menu.source {
|
||||
let menu_completer = NuMenuCompleter::new(
|
||||
closure.block_id,
|
||||
span,
|
||||
stack.captures_to_stack(closure.captures.clone()),
|
||||
engine_state,
|
||||
only_buffer_difference,
|
||||
);
|
||||
ReedlineMenu::WithCompleter {
|
||||
menu: Box::new(list_menu),
|
||||
completer: Box::new(menu_completer),
|
||||
}
|
||||
Value::Closure { val, .. } => {
|
||||
let menu_completer = NuMenuCompleter::new(
|
||||
val.block_id,
|
||||
span,
|
||||
stack.captures_to_stack(val.captures.clone()),
|
||||
engine_state,
|
||||
only_buffer_difference,
|
||||
);
|
||||
Ok(line_editor.with_menu(ReedlineMenu::WithCompleter {
|
||||
menu: Box::new(list_menu),
|
||||
completer: Box::new(menu_completer),
|
||||
}))
|
||||
}
|
||||
_ => Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "block or omitted value".to_string(),
|
||||
value: menu.source.to_abbreviated_string(&config),
|
||||
span: menu.source.span(),
|
||||
}),
|
||||
}
|
||||
} else {
|
||||
ReedlineMenu::HistoryMenu(Box::new(list_menu))
|
||||
};
|
||||
|
||||
Ok(line_editor.with_menu(completer))
|
||||
}
|
||||
|
||||
// Adds an IDE menu to the line editor
|
||||
@ -416,9 +436,9 @@ pub(crate) fn add_ide_menu(
|
||||
vertical,
|
||||
)
|
||||
} else {
|
||||
return Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "bool or record".to_string(),
|
||||
value: border.to_abbreviated_string(&config),
|
||||
return Err(ShellError::RuntimeTypeMismatch {
|
||||
expected: Type::custom("bool or record"),
|
||||
actual: border.get_type(),
|
||||
span: border.span(),
|
||||
});
|
||||
}
|
||||
@ -439,10 +459,10 @@ pub(crate) fn add_ide_menu(
|
||||
"left" => ide_menu.with_description_mode(DescriptionMode::Left),
|
||||
"right" => ide_menu.with_description_mode(DescriptionMode::Right),
|
||||
"prefer_right" => ide_menu.with_description_mode(DescriptionMode::PreferRight),
|
||||
_ => {
|
||||
return Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "\"left\", \"right\" or \"prefer_right\"".to_string(),
|
||||
value: description_mode.to_abbreviated_string(&config),
|
||||
str => {
|
||||
return Err(ShellError::InvalidValue {
|
||||
valid: "'left', 'right', or 'prefer_right'".into(),
|
||||
actual: format!("'{str}'"),
|
||||
span: description_mode.span(),
|
||||
});
|
||||
}
|
||||
@ -499,30 +519,23 @@ pub(crate) fn add_ide_menu(
|
||||
let only_buffer_difference = menu.only_buffer_difference.as_bool()?;
|
||||
ide_menu = ide_menu.with_only_buffer_difference(only_buffer_difference);
|
||||
|
||||
let span = menu.source.span();
|
||||
match &menu.source {
|
||||
Value::Nothing { .. } => {
|
||||
Ok(line_editor.with_menu(ReedlineMenu::EngineCompleter(Box::new(ide_menu))))
|
||||
}
|
||||
Value::Closure { val, .. } => {
|
||||
let menu_completer = NuMenuCompleter::new(
|
||||
val.block_id,
|
||||
span,
|
||||
stack.captures_to_stack(val.captures.clone()),
|
||||
engine_state,
|
||||
only_buffer_difference,
|
||||
);
|
||||
Ok(line_editor.with_menu(ReedlineMenu::WithCompleter {
|
||||
menu: Box::new(ide_menu),
|
||||
completer: Box::new(menu_completer),
|
||||
}))
|
||||
}
|
||||
_ => Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "block or omitted value".to_string(),
|
||||
value: menu.source.to_abbreviated_string(&config),
|
||||
let completer = if let Some(closure) = &menu.source {
|
||||
let menu_completer = NuMenuCompleter::new(
|
||||
closure.block_id,
|
||||
span,
|
||||
}),
|
||||
}
|
||||
stack.captures_to_stack(closure.captures.clone()),
|
||||
engine_state,
|
||||
only_buffer_difference,
|
||||
);
|
||||
ReedlineMenu::WithCompleter {
|
||||
menu: Box::new(ide_menu),
|
||||
completer: Box::new(menu_completer),
|
||||
}
|
||||
} else {
|
||||
ReedlineMenu::EngineCompleter(Box::new(ide_menu))
|
||||
};
|
||||
|
||||
Ok(line_editor.with_menu(completer))
|
||||
}
|
||||
|
||||
// Adds a description menu to the line editor
|
||||
@ -587,34 +600,27 @@ pub(crate) fn add_description_menu(
|
||||
let only_buffer_difference = menu.only_buffer_difference.as_bool()?;
|
||||
description_menu = description_menu.with_only_buffer_difference(only_buffer_difference);
|
||||
|
||||
let span = menu.source.span();
|
||||
match &menu.source {
|
||||
Value::Nothing { .. } => {
|
||||
let completer = Box::new(NuHelpCompleter::new(engine_state, config));
|
||||
Ok(line_editor.with_menu(ReedlineMenu::WithCompleter {
|
||||
menu: Box::new(description_menu),
|
||||
completer,
|
||||
}))
|
||||
let completer = if let Some(closure) = &menu.source {
|
||||
let menu_completer = NuMenuCompleter::new(
|
||||
closure.block_id,
|
||||
span,
|
||||
stack.captures_to_stack(closure.captures.clone()),
|
||||
engine_state,
|
||||
only_buffer_difference,
|
||||
);
|
||||
ReedlineMenu::WithCompleter {
|
||||
menu: Box::new(description_menu),
|
||||
completer: Box::new(menu_completer),
|
||||
}
|
||||
Value::Closure { val, .. } => {
|
||||
let menu_completer = NuMenuCompleter::new(
|
||||
val.block_id,
|
||||
span,
|
||||
stack.captures_to_stack(val.captures.clone()),
|
||||
engine_state,
|
||||
only_buffer_difference,
|
||||
);
|
||||
Ok(line_editor.with_menu(ReedlineMenu::WithCompleter {
|
||||
menu: Box::new(description_menu),
|
||||
completer: Box::new(menu_completer),
|
||||
}))
|
||||
} else {
|
||||
let menu_completer = NuHelpCompleter::new(engine_state, config);
|
||||
ReedlineMenu::WithCompleter {
|
||||
menu: Box::new(description_menu),
|
||||
completer: Box::new(menu_completer),
|
||||
}
|
||||
_ => Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "closure or omitted value".to_string(),
|
||||
value: menu.source.to_abbreviated_string(&config),
|
||||
span: menu.source.span(),
|
||||
}),
|
||||
}
|
||||
};
|
||||
|
||||
Ok(line_editor.with_menu(completer))
|
||||
}
|
||||
|
||||
fn add_menu_keybindings(keybindings: &mut Keybindings) {
|
||||
@ -629,6 +635,16 @@ fn add_menu_keybindings(keybindings: &mut Keybindings) {
|
||||
]),
|
||||
);
|
||||
|
||||
keybindings.add_binding(
|
||||
KeyModifiers::CONTROL,
|
||||
KeyCode::Char(' '),
|
||||
ReedlineEvent::UntilFound(vec![
|
||||
ReedlineEvent::Menu("ide_completion_menu".to_string()),
|
||||
ReedlineEvent::MenuNext,
|
||||
ReedlineEvent::Edit(vec![EditCommand::Complete]),
|
||||
]),
|
||||
);
|
||||
|
||||
keybindings.add_binding(
|
||||
KeyModifiers::SHIFT,
|
||||
KeyCode::BackTab,
|
||||
@ -725,12 +741,18 @@ fn add_keybinding(
|
||||
let span = mode.span();
|
||||
match &mode {
|
||||
Value::String { val, .. } => match val.as_str() {
|
||||
"emacs" => add_parsed_keybinding(emacs_keybindings, keybinding, config),
|
||||
"vi_insert" => add_parsed_keybinding(insert_keybindings, keybinding, config),
|
||||
"vi_normal" => add_parsed_keybinding(normal_keybindings, keybinding, config),
|
||||
m => Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "emacs, vi_insert or vi_normal".to_string(),
|
||||
value: m.to_string(),
|
||||
str if str.eq_ignore_ascii_case("emacs") => {
|
||||
add_parsed_keybinding(emacs_keybindings, keybinding, config)
|
||||
}
|
||||
str if str.eq_ignore_ascii_case("vi_insert") => {
|
||||
add_parsed_keybinding(insert_keybindings, keybinding, config)
|
||||
}
|
||||
str if str.eq_ignore_ascii_case("vi_normal") => {
|
||||
add_parsed_keybinding(normal_keybindings, keybinding, config)
|
||||
}
|
||||
str => Err(ShellError::InvalidValue {
|
||||
valid: "'emacs', 'vi_insert', or 'vi_normal'".into(),
|
||||
actual: format!("'{str}'"),
|
||||
span,
|
||||
}),
|
||||
},
|
||||
@ -748,9 +770,9 @@ fn add_keybinding(
|
||||
|
||||
Ok(())
|
||||
}
|
||||
v => Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "string or list of strings".to_string(),
|
||||
value: v.to_abbreviated_string(config),
|
||||
v => Err(ShellError::RuntimeTypeMismatch {
|
||||
expected: Type::custom("string or list<string>"),
|
||||
actual: v.get_type(),
|
||||
span: v.span(),
|
||||
}),
|
||||
}
|
||||
@ -761,91 +783,107 @@ fn add_parsed_keybinding(
|
||||
keybinding: &ParsedKeybinding,
|
||||
config: &Config,
|
||||
) -> Result<(), ShellError> {
|
||||
let modifier = match keybinding
|
||||
.modifier
|
||||
.to_expanded_string("", config)
|
||||
.to_ascii_lowercase()
|
||||
.as_str()
|
||||
{
|
||||
"control" => KeyModifiers::CONTROL,
|
||||
"shift" => KeyModifiers::SHIFT,
|
||||
"alt" => KeyModifiers::ALT,
|
||||
"none" => KeyModifiers::NONE,
|
||||
"shift_alt" | "alt_shift" => KeyModifiers::SHIFT | KeyModifiers::ALT,
|
||||
"control_shift" | "shift_control" => KeyModifiers::CONTROL | KeyModifiers::SHIFT,
|
||||
"control_alt" | "alt_control" => KeyModifiers::CONTROL | KeyModifiers::ALT,
|
||||
"control_alt_shift" | "control_shift_alt" => {
|
||||
KeyModifiers::CONTROL | KeyModifiers::ALT | KeyModifiers::SHIFT
|
||||
}
|
||||
_ => {
|
||||
return Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "CONTROL, SHIFT, ALT or NONE".to_string(),
|
||||
value: keybinding.modifier.to_abbreviated_string(config),
|
||||
span: keybinding.modifier.span(),
|
||||
})
|
||||
}
|
||||
let Ok(modifier_str) = keybinding.modifier.as_str() else {
|
||||
return Err(ShellError::RuntimeTypeMismatch {
|
||||
expected: Type::String,
|
||||
actual: keybinding.modifier.get_type(),
|
||||
span: keybinding.modifier.span(),
|
||||
});
|
||||
};
|
||||
|
||||
let keycode = match keybinding
|
||||
.keycode
|
||||
.to_expanded_string("", config)
|
||||
.to_ascii_lowercase()
|
||||
.as_str()
|
||||
{
|
||||
"backspace" => KeyCode::Backspace,
|
||||
"enter" => KeyCode::Enter,
|
||||
c if c.starts_with("char_") => {
|
||||
let mut char_iter = c.chars().skip(5);
|
||||
let pos1 = char_iter.next();
|
||||
let pos2 = char_iter.next();
|
||||
|
||||
let char = if let (Some(char), None) = (pos1, pos2) {
|
||||
char
|
||||
} else {
|
||||
return Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "char_<CHAR: unicode codepoint>".to_string(),
|
||||
value: c.to_string(),
|
||||
span: keybinding.keycode.span(),
|
||||
});
|
||||
};
|
||||
|
||||
KeyCode::Char(char)
|
||||
let mut modifier = KeyModifiers::NONE;
|
||||
if !str::eq_ignore_ascii_case(modifier_str, "none") {
|
||||
for part in modifier_str.split('_') {
|
||||
match part.to_ascii_lowercase().as_str() {
|
||||
"control" => modifier |= KeyModifiers::CONTROL,
|
||||
"shift" => modifier |= KeyModifiers::SHIFT,
|
||||
"alt" => modifier |= KeyModifiers::ALT,
|
||||
"super" => modifier |= KeyModifiers::SUPER,
|
||||
"hyper" => modifier |= KeyModifiers::HYPER,
|
||||
"meta" => modifier |= KeyModifiers::META,
|
||||
_ => {
|
||||
return Err(ShellError::InvalidValue {
|
||||
valid: "'control', 'shift', 'alt', 'super', 'hyper', 'meta', or 'none'"
|
||||
.into(),
|
||||
actual: format!("'{part}'"),
|
||||
span: keybinding.modifier.span(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
"space" => KeyCode::Char(' '),
|
||||
"down" => KeyCode::Down,
|
||||
"up" => KeyCode::Up,
|
||||
"left" => KeyCode::Left,
|
||||
"right" => KeyCode::Right,
|
||||
"home" => KeyCode::Home,
|
||||
"end" => KeyCode::End,
|
||||
"pageup" => KeyCode::PageUp,
|
||||
"pagedown" => KeyCode::PageDown,
|
||||
"tab" => KeyCode::Tab,
|
||||
"backtab" => KeyCode::BackTab,
|
||||
"delete" => KeyCode::Delete,
|
||||
"insert" => KeyCode::Insert,
|
||||
c if c.starts_with('f') => {
|
||||
let fn_num: u8 = c[1..]
|
||||
}
|
||||
|
||||
let Ok(keycode) = keybinding.keycode.as_str() else {
|
||||
return Err(ShellError::RuntimeTypeMismatch {
|
||||
expected: Type::String,
|
||||
actual: keybinding.keycode.get_type(),
|
||||
span: keybinding.keycode.span(),
|
||||
});
|
||||
};
|
||||
|
||||
let keycode_lower = keycode.to_ascii_lowercase();
|
||||
|
||||
let keycode = if let Some(rest) = keycode_lower.strip_prefix("char_") {
|
||||
let error = |valid: &str, actual: &str| ShellError::InvalidValue {
|
||||
valid: valid.into(),
|
||||
actual: actual.into(),
|
||||
span: keybinding.keycode.span(),
|
||||
};
|
||||
|
||||
let mut char_iter = rest.chars();
|
||||
let char = match (char_iter.next(), char_iter.next()) {
|
||||
(Some(char), None) => char,
|
||||
(Some('u'), Some(_)) => {
|
||||
// This will never panic as we know there are at least two symbols
|
||||
let Ok(code_point) = u32::from_str_radix(&rest[1..], 16) else {
|
||||
return Err(error("a valid hex code", keycode));
|
||||
};
|
||||
|
||||
char::from_u32(code_point).ok_or(error("a valid Unicode code point", keycode))?
|
||||
}
|
||||
_ => return Err(error("'char_<char>' or 'char_u<hex code>'", keycode)),
|
||||
};
|
||||
|
||||
KeyCode::Char(char)
|
||||
} else {
|
||||
match keycode_lower.as_str() {
|
||||
"backspace" => KeyCode::Backspace,
|
||||
"enter" => KeyCode::Enter,
|
||||
"space" => KeyCode::Char(' '),
|
||||
"down" => KeyCode::Down,
|
||||
"up" => KeyCode::Up,
|
||||
"left" => KeyCode::Left,
|
||||
"right" => KeyCode::Right,
|
||||
"home" => KeyCode::Home,
|
||||
"end" => KeyCode::End,
|
||||
"pageup" => KeyCode::PageUp,
|
||||
"pagedown" => KeyCode::PageDown,
|
||||
"tab" => KeyCode::Tab,
|
||||
"backtab" => KeyCode::BackTab,
|
||||
"delete" => KeyCode::Delete,
|
||||
"insert" => KeyCode::Insert,
|
||||
c if c.starts_with('f') => c[1..]
|
||||
.parse()
|
||||
.ok()
|
||||
.filter(|num| matches!(num, 1..=20))
|
||||
.ok_or(ShellError::UnsupportedConfigValue {
|
||||
expected: "(f1|f2|...|f20)".to_string(),
|
||||
value: format!("unknown function key: {c}"),
|
||||
.filter(|num| (1..=35).contains(num))
|
||||
.map(KeyCode::F)
|
||||
.ok_or(ShellError::InvalidValue {
|
||||
valid: "'f1', 'f2', ..., or 'f35'".into(),
|
||||
actual: format!("'{keycode}'"),
|
||||
span: keybinding.keycode.span(),
|
||||
})?;
|
||||
KeyCode::F(fn_num)
|
||||
}
|
||||
"null" => KeyCode::Null,
|
||||
"esc" | "escape" => KeyCode::Esc,
|
||||
_ => {
|
||||
return Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "crossterm KeyCode".to_string(),
|
||||
value: keybinding.keycode.to_abbreviated_string(config),
|
||||
span: keybinding.keycode.span(),
|
||||
})
|
||||
})?,
|
||||
"null" => KeyCode::Null,
|
||||
"esc" | "escape" => KeyCode::Esc,
|
||||
_ => {
|
||||
return Err(ShellError::InvalidValue {
|
||||
valid: "a crossterm KeyCode".into(),
|
||||
actual: format!("'{keycode}'"),
|
||||
span: keybinding.keycode.span(),
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(event) = parse_event(&keybinding.event, config)? {
|
||||
keybindings.add_binding(modifier, keycode, event);
|
||||
} else {
|
||||
@ -867,8 +905,8 @@ impl<'config> EventType<'config> {
|
||||
.map(Self::Send)
|
||||
.or_else(|_| extract_value("edit", record, span).map(Self::Edit))
|
||||
.or_else(|_| extract_value("until", record, span).map(Self::Until))
|
||||
.map_err(|_| ShellError::MissingConfigValue {
|
||||
missing_value: "send, edit or until".to_string(),
|
||||
.map_err(|_| ShellError::MissingRequiredColumn {
|
||||
column: "'send', 'edit', or 'until'",
|
||||
span,
|
||||
})
|
||||
}
|
||||
@ -906,9 +944,9 @@ fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>,
|
||||
.iter()
|
||||
.map(|value| match parse_event(value, config) {
|
||||
Ok(inner) => match inner {
|
||||
None => Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "List containing valid events".to_string(),
|
||||
value: "Nothing value (null)".to_string(),
|
||||
None => Err(ShellError::RuntimeTypeMismatch {
|
||||
expected: Type::custom("record or table"),
|
||||
actual: value.get_type(),
|
||||
span: value.span(),
|
||||
}),
|
||||
Some(event) => Ok(event),
|
||||
@ -919,9 +957,9 @@ fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>,
|
||||
|
||||
Ok(Some(ReedlineEvent::UntilFound(events)))
|
||||
}
|
||||
v => Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "list of events".to_string(),
|
||||
value: v.to_abbreviated_string(config),
|
||||
v => Err(ShellError::RuntimeTypeMismatch {
|
||||
expected: Type::list(Type::Any),
|
||||
actual: v.get_type(),
|
||||
span: v.span(),
|
||||
}),
|
||||
},
|
||||
@ -931,9 +969,9 @@ fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>,
|
||||
.iter()
|
||||
.map(|value| match parse_event(value, config) {
|
||||
Ok(inner) => match inner {
|
||||
None => Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "List containing valid events".to_string(),
|
||||
value: "Nothing value (null)".to_string(),
|
||||
None => Err(ShellError::RuntimeTypeMismatch {
|
||||
expected: Type::custom("record or table"),
|
||||
actual: value.get_type(),
|
||||
span: value.span(),
|
||||
}),
|
||||
Some(event) => Ok(event),
|
||||
@ -945,9 +983,9 @@ fn parse_event(value: &Value, config: &Config) -> Result<Option<ReedlineEvent>,
|
||||
Ok(Some(ReedlineEvent::Multiple(events)))
|
||||
}
|
||||
Value::Nothing { .. } => Ok(None),
|
||||
v => Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "record or list of records, null to unbind key".to_string(),
|
||||
value: v.to_abbreviated_string(config),
|
||||
v => Err(ShellError::RuntimeTypeMismatch {
|
||||
expected: Type::custom("record, table, or nothing"),
|
||||
actual: v.get_type(),
|
||||
span: v.span(),
|
||||
}),
|
||||
}
|
||||
@ -961,47 +999,60 @@ fn event_from_record(
|
||||
) -> Result<ReedlineEvent, ShellError> {
|
||||
let event = match name {
|
||||
"none" => ReedlineEvent::None,
|
||||
"clearscreen" => ReedlineEvent::ClearScreen,
|
||||
"clearscrollback" => ReedlineEvent::ClearScrollback,
|
||||
"historyhintcomplete" => ReedlineEvent::HistoryHintComplete,
|
||||
"historyhintwordcomplete" => ReedlineEvent::HistoryHintWordComplete,
|
||||
"ctrld" => ReedlineEvent::CtrlD,
|
||||
"ctrlc" => ReedlineEvent::CtrlC,
|
||||
"clearscreen" => ReedlineEvent::ClearScreen,
|
||||
"clearscrollback" => ReedlineEvent::ClearScrollback,
|
||||
"enter" => ReedlineEvent::Enter,
|
||||
"submit" => ReedlineEvent::Submit,
|
||||
"submitornewline" => ReedlineEvent::SubmitOrNewline,
|
||||
"esc" | "escape" => ReedlineEvent::Esc,
|
||||
// Non-sensical for user configuration:
|
||||
//
|
||||
// `ReedlineEvent::Mouse` - itself a no-op
|
||||
// `ReedlineEvent::Resize` - requires size info specifically from the ANSI resize
|
||||
// event
|
||||
//
|
||||
// Handled above in `parse_event`:
|
||||
//
|
||||
// `ReedlineEvent::Edit`
|
||||
"repaint" => ReedlineEvent::Repaint,
|
||||
"previoushistory" => ReedlineEvent::PreviousHistory,
|
||||
"up" => ReedlineEvent::Up,
|
||||
"down" => ReedlineEvent::Down,
|
||||
"right" => ReedlineEvent::Right,
|
||||
"left" => ReedlineEvent::Left,
|
||||
"searchhistory" => ReedlineEvent::SearchHistory,
|
||||
"nexthistory" => ReedlineEvent::NextHistory,
|
||||
"previoushistory" => ReedlineEvent::PreviousHistory,
|
||||
"repaint" => ReedlineEvent::Repaint,
|
||||
"menudown" => ReedlineEvent::MenuDown,
|
||||
"menuup" => ReedlineEvent::MenuUp,
|
||||
"menuleft" => ReedlineEvent::MenuLeft,
|
||||
"menuright" => ReedlineEvent::MenuRight,
|
||||
"menunext" => ReedlineEvent::MenuNext,
|
||||
"menuprevious" => ReedlineEvent::MenuPrevious,
|
||||
"menupagenext" => ReedlineEvent::MenuPageNext,
|
||||
"menupageprevious" => ReedlineEvent::MenuPagePrevious,
|
||||
"openeditor" => ReedlineEvent::OpenEditor,
|
||||
"searchhistory" => ReedlineEvent::SearchHistory,
|
||||
// Handled above in `parse_event`:
|
||||
//
|
||||
// `ReedlineEvent::Multiple`
|
||||
// `ReedlineEvent::UntilFound`
|
||||
"menu" => {
|
||||
let menu = extract_value("name", record, span)?;
|
||||
ReedlineEvent::Menu(menu.to_expanded_string("", config))
|
||||
}
|
||||
"menunext" => ReedlineEvent::MenuNext,
|
||||
"menuprevious" => ReedlineEvent::MenuPrevious,
|
||||
"menuup" => ReedlineEvent::MenuUp,
|
||||
"menudown" => ReedlineEvent::MenuDown,
|
||||
"menuleft" => ReedlineEvent::MenuLeft,
|
||||
"menuright" => ReedlineEvent::MenuRight,
|
||||
"menupagenext" => ReedlineEvent::MenuPageNext,
|
||||
"menupageprevious" => ReedlineEvent::MenuPagePrevious,
|
||||
"executehostcommand" => {
|
||||
let cmd = extract_value("cmd", record, span)?;
|
||||
ReedlineEvent::ExecuteHostCommand(cmd.to_expanded_string("", config))
|
||||
}
|
||||
v => {
|
||||
return Err(ShellError::UnsupportedConfigValue {
|
||||
expected: "Reedline event".to_string(),
|
||||
value: v.to_string(),
|
||||
"openeditor" => ReedlineEvent::OpenEditor,
|
||||
str => {
|
||||
return Err(ShellError::InvalidValue {
|
||||
valid: "a reedline event".into(),
|
||||
actual: format!("'{str}'"),
|
||||
span,
|
||||
})
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
@ -1025,7 +1076,6 @@ fn edit_from_record(
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false),
|
||||
},
|
||||
|
||||
"movetoend" => EditCommand::MoveToEnd {
|
||||
select: extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
@ -1061,16 +1111,6 @@ fn edit_from_record(
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false),
|
||||
},
|
||||
"movewordrightend" => EditCommand::MoveWordRightEnd {
|
||||
select: extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false),
|
||||
},
|
||||
"movebigwordrightend" => EditCommand::MoveBigWordRightEnd {
|
||||
select: extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false),
|
||||
},
|
||||
"movewordrightstart" => EditCommand::MoveWordRightStart {
|
||||
select: extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
@ -1081,6 +1121,16 @@ fn edit_from_record(
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false),
|
||||
},
|
||||
"movewordrightend" => EditCommand::MoveWordRightEnd {
|
||||
select: extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false),
|
||||
},
|
||||
"movebigwordrightend" => EditCommand::MoveBigWordRightEnd {
|
||||
select: extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false),
|
||||
},
|
||||
"movetoposition" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let select = extract_value("select", record, span)
|
||||
@ -1094,7 +1144,7 @@ fn edit_from_record(
|
||||
}
|
||||
"insertchar" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value, config)?;
|
||||
let char = extract_char(value)?;
|
||||
EditCommand::InsertChar(char)
|
||||
}
|
||||
"insertstring" => {
|
||||
@ -1102,6 +1152,13 @@ fn edit_from_record(
|
||||
EditCommand::InsertString(value.to_expanded_string("", config))
|
||||
}
|
||||
"insertnewline" => EditCommand::InsertNewline,
|
||||
"replacechar" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value)?;
|
||||
EditCommand::ReplaceChar(char)
|
||||
}
|
||||
// `EditCommand::ReplaceChars` - Internal hack not sanely implementable as a
|
||||
// standalone binding
|
||||
"backspace" => EditCommand::Backspace,
|
||||
"delete" => EditCommand::Delete,
|
||||
"cutchar" => EditCommand::CutChar,
|
||||
@ -1109,6 +1166,7 @@ fn edit_from_record(
|
||||
"deleteword" => EditCommand::DeleteWord,
|
||||
"clear" => EditCommand::Clear,
|
||||
"cleartolineend" => EditCommand::ClearToLineEnd,
|
||||
"complete" => EditCommand::Complete,
|
||||
"cutcurrentline" => EditCommand::CutCurrentLine,
|
||||
"cutfromstart" => EditCommand::CutFromStart,
|
||||
"cutfromlinestart" => EditCommand::CutFromLineStart,
|
||||
@ -1125,23 +1183,24 @@ fn edit_from_record(
|
||||
"uppercaseword" => EditCommand::UppercaseWord,
|
||||
"lowercaseword" => EditCommand::LowercaseWord,
|
||||
"capitalizechar" => EditCommand::CapitalizeChar,
|
||||
"switchcasechar" => EditCommand::SwitchcaseChar,
|
||||
"swapwords" => EditCommand::SwapWords,
|
||||
"swapgraphemes" => EditCommand::SwapGraphemes,
|
||||
"undo" => EditCommand::Undo,
|
||||
"redo" => EditCommand::Redo,
|
||||
"cutrightuntil" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value, config)?;
|
||||
let char = extract_char(value)?;
|
||||
EditCommand::CutRightUntil(char)
|
||||
}
|
||||
"cutrightbefore" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value, config)?;
|
||||
let char = extract_char(value)?;
|
||||
EditCommand::CutRightBefore(char)
|
||||
}
|
||||
"moverightuntil" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value, config)?;
|
||||
let char = extract_char(value)?;
|
||||
let select = extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false);
|
||||
@ -1149,7 +1208,7 @@ fn edit_from_record(
|
||||
}
|
||||
"moverightbefore" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value, config)?;
|
||||
let char = extract_char(value)?;
|
||||
let select = extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false);
|
||||
@ -1157,17 +1216,17 @@ fn edit_from_record(
|
||||
}
|
||||
"cutleftuntil" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value, config)?;
|
||||
let char = extract_char(value)?;
|
||||
EditCommand::CutLeftUntil(char)
|
||||
}
|
||||
"cutleftbefore" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value, config)?;
|
||||
let char = extract_char(value)?;
|
||||
EditCommand::CutLeftBefore(char)
|
||||
}
|
||||
"moveleftuntil" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value, config)?;
|
||||
let char = extract_char(value)?;
|
||||
let select = extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false);
|
||||
@ -1175,45 +1234,100 @@ fn edit_from_record(
|
||||
}
|
||||
"moveleftbefore" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value, config)?;
|
||||
let char = extract_char(value)?;
|
||||
let select = extract_value("select", record, span)
|
||||
.and_then(|value| value.as_bool())
|
||||
.unwrap_or(false);
|
||||
EditCommand::MoveLeftBefore { c: char, select }
|
||||
}
|
||||
"complete" => EditCommand::Complete,
|
||||
"selectall" => EditCommand::SelectAll,
|
||||
"cutselection" => EditCommand::CutSelection,
|
||||
"copyselection" => EditCommand::CopySelection,
|
||||
"paste" => EditCommand::Paste,
|
||||
"copyfromstart" => EditCommand::CopyFromStart,
|
||||
"copyfromlinestart" => EditCommand::CopyFromLineStart,
|
||||
"copytoend" => EditCommand::CopyToEnd,
|
||||
"copytolineend" => EditCommand::CopyToLineEnd,
|
||||
"copycurrentline" => EditCommand::CopyCurrentLine,
|
||||
"copywordleft" => EditCommand::CopyWordLeft,
|
||||
"copybigwordleft" => EditCommand::CopyBigWordLeft,
|
||||
"copywordright" => EditCommand::CopyWordRight,
|
||||
"copybigwordright" => EditCommand::CopyBigWordRight,
|
||||
"copywordrighttonext" => EditCommand::CopyWordRightToNext,
|
||||
"copybigwordrighttonext" => EditCommand::CopyBigWordRightToNext,
|
||||
"copyleft" => EditCommand::CopyLeft,
|
||||
"copyright" => EditCommand::CopyRight,
|
||||
"copyrightuntil" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value)?;
|
||||
EditCommand::CopyRightUntil(char)
|
||||
}
|
||||
"copyrightbefore" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value)?;
|
||||
EditCommand::CopyRightBefore(char)
|
||||
}
|
||||
"copyleftuntil" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value)?;
|
||||
EditCommand::CopyLeftUntil(char)
|
||||
}
|
||||
"copyleftbefore" => {
|
||||
let value = extract_value("value", record, span)?;
|
||||
let char = extract_char(value)?;
|
||||
EditCommand::CopyLeftBefore(char)
|
||||
}
|
||||
"swapcursorandanchor" => EditCommand::SwapCursorAndAnchor,
|
||||
#[cfg(feature = "system-clipboard")]
|
||||
"cutselectionsystem" => EditCommand::CutSelectionSystem,
|
||||
"copyselection" => EditCommand::CopySelection,
|
||||
#[cfg(feature = "system-clipboard")]
|
||||
"copyselectionsystem" => EditCommand::CopySelectionSystem,
|
||||
"paste" => EditCommand::Paste,
|
||||
#[cfg(feature = "system-clipboard")]
|
||||
"pastesystem" => EditCommand::PasteSystem,
|
||||
"selectall" => EditCommand::SelectAll,
|
||||
e => {
return Err(ShellError::UnsupportedConfigValue {
expected: "reedline EditCommand".to_string(),
value: e.to_string(),
"cutinside" => {
let value = extract_value("left", record, span)?;
let left = extract_char(value)?;
let value = extract_value("right", record, span)?;
let right = extract_char(value)?;
EditCommand::CutInside { left, right }
}
"yankinside" => {
let value = extract_value("left", record, span)?;
let left = extract_char(value)?;
let value = extract_value("right", record, span)?;
let right = extract_char(value)?;
EditCommand::YankInside { left, right }
}
str => {
return Err(ShellError::InvalidValue {
valid: "a reedline EditCommand".into(),
actual: format!("'{str}'"),
span,
})
});
}
};

Ok(edit)
}

fn extract_char(value: &Value, config: &Config) -> Result<char, ShellError> {
let span = value.span();
value
.to_expanded_string("", config)
.chars()
.next()
.ok_or_else(|| ShellError::MissingConfigValue {
missing_value: "char to insert".to_string(),
span,
fn extract_char(value: &Value) -> Result<char, ShellError> {
if let Ok(str) = value.as_str() {
let mut chars = str.chars();
match (chars.next(), chars.next()) {
(Some(c), None) => Ok(c),
_ => Err(ShellError::InvalidValue {
valid: "a single character".into(),
actual: format!("'{str}'"),
span: value.span(),
}),
}
} else {
Err(ShellError::RuntimeTypeMismatch {
expected: Type::String,
actual: value.get_type(),
span: value.span(),
})
}
}

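As a reading aid for this hunk (not part of the diff): the old extract_char silently took the first character of the value's expanded string, while the new version accepts only a string of exactly one character and otherwise reports InvalidValue or RuntimeTypeMismatch. A minimal sketch of that contract, using the Value test constructors that already exist in nu_protocol; illustrative only:

// Illustrative sketch only; mirrors the behavior of the new extract_char above.
use nu_protocol::Value;

fn extract_char_examples() {
    assert_eq!(extract_char(&Value::test_string("x")).unwrap(), 'x'); // single char: Ok
    assert!(extract_char(&Value::test_string("xy")).is_err()); // too long: InvalidValue
    assert!(extract_char(&Value::test_int(1)).is_err()); // not a string: RuntimeTypeMismatch
}
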
#[cfg(test)]
|
||||
@ -1342,7 +1456,7 @@ mod test {
|
||||
|
||||
let span = Span::test_data();
|
||||
let b = EventType::try_from_record(&event, span);
|
||||
assert!(matches!(b, Err(ShellError::MissingConfigValue { .. })));
|
||||
assert!(matches!(b, Err(ShellError::MissingRequiredColumn { .. })));
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -6,43 +6,44 @@ use crate::prompt_update::{
|
||||
VSCODE_PRE_EXECUTION_MARKER,
|
||||
};
|
||||
use crate::{
|
||||
NuHighlighter, NuValidator, NushellPrompt,
|
||||
completions::NuCompleter,
|
||||
nu_highlight::NoOpHighlighter,
|
||||
prompt_update,
|
||||
reedline_config::{add_menus, create_keybindings, KeybindingsMode},
|
||||
reedline_config::{KeybindingsMode, add_menus, create_keybindings},
|
||||
util::eval_source,
|
||||
NuHighlighter, NuValidator, NushellPrompt,
|
||||
};
|
||||
use crossterm::cursor::SetCursorStyle;
|
||||
use log::{error, trace, warn};
|
||||
use miette::{ErrReport, IntoDiagnostic, Result};
|
||||
use nu_cmd_base::{
|
||||
hook::eval_hook,
|
||||
util::{get_editor, get_guaranteed_cwd},
|
||||
};
|
||||
use nu_cmd_base::util::get_editor;
|
||||
use nu_color_config::StyleComputer;
|
||||
#[allow(deprecated)]
|
||||
use nu_engine::{convert_env_values, current_dir_str, env_to_strings};
|
||||
use nu_engine::env_to_strings;
|
||||
use nu_engine::exit::cleanup_exit;
|
||||
use nu_parser::{lex, parse, trim_quotes_str};
|
||||
use nu_protocol::shell_error;
|
||||
use nu_protocol::shell_error::io::IoError;
|
||||
use nu_protocol::{
|
||||
HistoryConfig, HistoryFileFormat, PipelineData, ShellError, Span, Spanned, Value,
|
||||
config::NuCursorShape,
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
report_shell_error, HistoryConfig, HistoryFileFormat, PipelineData, ShellError, Span, Spanned,
|
||||
Value,
|
||||
report_shell_error,
|
||||
};
|
||||
use nu_utils::{
|
||||
filesystem::{have_permission, PermissionResult},
|
||||
filesystem::{PermissionResult, have_permission},
|
||||
perf,
|
||||
};
|
||||
use reedline::{
|
||||
CursorConfig, CwdAwareHinter, DefaultCompleter, EditCommand, Emacs, FileBackedHistory,
|
||||
HistorySessionId, Reedline, SqliteBackedHistory, Vi,
|
||||
};
|
||||
use std::sync::atomic::Ordering;
|
||||
use std::{
|
||||
collections::HashMap,
|
||||
env::temp_dir,
|
||||
io::{self, IsTerminal, Write},
|
||||
panic::{catch_unwind, AssertUnwindSafe},
|
||||
panic::{AssertUnwindSafe, catch_unwind},
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
time::{Duration, Instant},
|
||||
@ -53,7 +54,6 @@ use sysinfo::System;
|
||||
pub fn evaluate_repl(
|
||||
engine_state: &mut EngineState,
|
||||
stack: Stack,
|
||||
nushell_path: &str,
|
||||
prerun_command: Option<Spanned<String>>,
|
||||
load_std_lib: Option<Spanned<String>>,
|
||||
entire_start_time: Instant,
|
||||
@ -65,9 +65,7 @@ pub fn evaluate_repl(
|
||||
// from the Arc. This lets us avoid copying stack variables needlessly
|
||||
let mut unique_stack = stack.clone();
|
||||
let config = engine_state.get_config();
|
||||
let use_color = config.use_ansi_coloring;
|
||||
|
||||
confirm_stdin_is_terminal()?;
|
||||
let use_color = config.use_ansi_coloring.get(engine_state);
|
||||
|
||||
let mut entry_num = 0;
|
||||
|
||||
@ -85,13 +83,6 @@ pub fn evaluate_repl(
|
||||
stack.clone(),
|
||||
);
|
||||
|
||||
let start_time = std::time::Instant::now();
|
||||
// Translate environment variables from Strings to Values
|
||||
if let Err(e) = convert_env_values(engine_state, &unique_stack) {
|
||||
report_shell_error(engine_state, &e);
|
||||
}
|
||||
perf!("translate env vars", start_time, use_color);
|
||||
|
||||
// seed env vars
|
||||
unique_stack.add_env_var(
|
||||
"CMD_DURATION_MS".into(),
|
||||
@ -100,7 +91,7 @@ pub fn evaluate_repl(
|
||||
|
||||
unique_stack.set_last_exit_code(0, Span::unknown());
|
||||
|
||||
let mut line_editor = get_line_editor(engine_state, nushell_path, use_color)?;
|
||||
let mut line_editor = get_line_editor(engine_state, use_color)?;
|
||||
let temp_file = temp_dir().join(format!("{}.nu", uuid::Uuid::new_v4()));
|
||||
|
||||
if let Some(s) = prerun_command {
|
||||
@ -112,10 +103,11 @@ pub fn evaluate_repl(
|
||||
PipelineData::empty(),
|
||||
false,
|
||||
);
|
||||
let cwd = get_guaranteed_cwd(engine_state, &unique_stack);
|
||||
engine_state.merge_env(&mut unique_stack, cwd)?;
|
||||
engine_state.merge_env(&mut unique_stack)?;
|
||||
}
|
||||
|
||||
confirm_stdin_is_terminal()?;
|
||||
|
||||
let hostname = System::host_name();
|
||||
if shell_integration_osc2 {
|
||||
run_shell_integration_osc2(None, engine_state, &mut unique_stack, use_color);
|
||||
@ -136,15 +128,7 @@ pub fn evaluate_repl(
|
||||
// https://code.visualstudio.com/docs/terminal/shell-integration#_vs-code-custom-sequences-osc-633-st
|
||||
let cmd_text = line_editor.current_buffer_contents().to_string();
|
||||
|
||||
let replaced_cmd_text = cmd_text
|
||||
.chars()
|
||||
.map(|c| match c {
|
||||
'\n' => '\x0a',
|
||||
'\r' => '\x0d',
|
||||
'\x1b' => '\x1b',
|
||||
_ => c,
|
||||
})
|
||||
.collect();
|
||||
let replaced_cmd_text = escape_special_vscode_bytes(&cmd_text)?;
|
||||
|
||||
run_shell_integration_osc633(
|
||||
engine_state,
|
||||
@ -159,15 +143,30 @@ pub fn evaluate_repl(
|
||||
// Regenerate the $nu constant to contain the startup time and any other potential updates
|
||||
engine_state.generate_nu_constant();
|
||||
|
||||
if load_std_lib.is_none() && engine_state.get_config().show_banner {
|
||||
eval_source(
|
||||
engine_state,
|
||||
&mut unique_stack,
|
||||
r#"use std banner; banner"#.as_bytes(),
|
||||
"show_banner",
|
||||
PipelineData::empty(),
|
||||
false,
|
||||
);
|
||||
if load_std_lib.is_none() {
|
||||
match engine_state.get_config().show_banner {
|
||||
Value::Bool { val: false, .. } => {}
|
||||
Value::String { ref val, .. } if val == "short" => {
|
||||
eval_source(
|
||||
engine_state,
|
||||
&mut unique_stack,
|
||||
r#"banner --short"#.as_bytes(),
|
||||
"show short banner",
|
||||
PipelineData::empty(),
|
||||
false,
|
||||
);
|
||||
}
|
||||
_ => {
|
||||
eval_source(
|
||||
engine_state,
|
||||
&mut unique_stack,
|
||||
r#"banner"#.as_bytes(),
|
||||
"show_banner",
|
||||
PipelineData::empty(),
|
||||
false,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
kitty_protocol_healthcheck(engine_state);
|
||||
@ -220,7 +219,7 @@ pub fn evaluate_repl(
|
||||
}
|
||||
Err(_) => {
|
||||
// line_editor is lost in the error case so reconstruct a new one
|
||||
line_editor = get_line_editor(engine_state, nushell_path, use_color)?;
|
||||
line_editor = get_line_editor(engine_state, use_color)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -228,11 +227,44 @@ pub fn evaluate_repl(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_line_editor(
engine_state: &mut EngineState,
nushell_path: &str,
use_color: bool,
) -> Result<Reedline> {
fn escape_special_vscode_bytes(input: &str) -> Result<String, ShellError> {
let bytes = input
.chars()
.flat_map(|c| {
let mut buf = [0; 4]; // Buffer to hold UTF-8 bytes of the character
let c_bytes = c.encode_utf8(&mut buf); // Get UTF-8 bytes for the character

if c_bytes.len() == 1 {
let byte = c_bytes.as_bytes()[0];

match byte {
// Escape bytes below 0x20
b if b < 0x20 => format!("\\x{:02X}", byte).into_bytes(),
// Escape semicolon as \x3B
b';' => "\\x3B".to_string().into_bytes(),
// Escape backslash as \\
b'\\' => "\\\\".to_string().into_bytes(),
// Otherwise, return the character unchanged
_ => vec![byte],
}
} else {
// pass through multi-byte characters unchanged
c_bytes.bytes().collect()
}
})
.collect();

String::from_utf8(bytes).map_err(|err| ShellError::CantConvert {
to_type: "string".to_string(),
from_type: "bytes".to_string(),
span: Span::unknown(),
help: Some(format!(
"Error {err}, Unable to convert {input} to escaped bytes"
)),
})
}

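A quick illustration of what escape_special_vscode_bytes produces (the escape_vscode_* tests near the end of this diff exercise the same cases); this is only a sketch and assumes it sits next to the function above:

// Illustrative sketch only: control bytes, ';' and '\' are escaped,
// multi-byte characters pass through untouched.
fn escape_examples() -> Result<(), ShellError> {
    assert_eq!(escape_special_vscode_bytes("now;is")?, r"now\x3Bis");
    assert_eq!(escape_special_vscode_bytes("now\nis")?, r"now\x0Ais");
    assert_eq!(escape_special_vscode_bytes("now🍪is")?, "now🍪is");
    Ok(())
}
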
fn get_line_editor(engine_state: &mut EngineState, use_color: bool) -> Result<Reedline> {
let mut start_time = std::time::Instant::now();
let mut line_editor = Reedline::create();

@ -243,7 +275,7 @@ fn get_line_editor(
|
||||
if let Some(history) = engine_state.history_config() {
|
||||
start_time = std::time::Instant::now();
|
||||
|
||||
line_editor = setup_history(nushell_path, engine_state, line_editor, history)?;
|
||||
line_editor = setup_history(engine_state, line_editor, history)?;
|
||||
|
||||
perf!("setup history", start_time, use_color);
|
||||
}
|
||||
@ -280,17 +312,12 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
||||
hostname,
|
||||
} = ctx;
|
||||
|
||||
let cwd = get_guaranteed_cwd(engine_state, &stack);
|
||||
|
||||
let mut start_time = std::time::Instant::now();
|
||||
// Before doing anything, merge the environment from the previous REPL iteration into the
|
||||
// permanent state.
|
||||
if let Err(err) = engine_state.merge_env(&mut stack, cwd) {
|
||||
if let Err(err) = engine_state.merge_env(&mut stack) {
|
||||
report_shell_error(engine_state, &err);
|
||||
}
|
||||
// Check whether $env.NU_DISABLE_IR is set, so that the user can change it in the REPL
|
||||
// Temporary while IR eval is optional
|
||||
stack.use_ir = !stack.has_env_var(engine_state, "NU_DISABLE_IR");
|
||||
perf!("merge env", start_time, use_color);
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
@ -298,20 +325,26 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
||||
perf!("reset signals", start_time, use_color);
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
// Right before we start our prompt and take input from the user,
|
||||
// fire the "pre_prompt" hook
|
||||
if let Some(hook) = engine_state.get_config().hooks.pre_prompt.clone() {
|
||||
if let Err(err) = eval_hook(engine_state, &mut stack, None, vec![], &hook, "pre_prompt") {
|
||||
report_shell_error(engine_state, &err);
|
||||
}
|
||||
// Right before we start our prompt and take input from the user, fire the "pre_prompt" hook
|
||||
if let Err(err) = hook::eval_hooks(
|
||||
engine_state,
|
||||
&mut stack,
|
||||
vec![],
|
||||
&engine_state.get_config().hooks.pre_prompt.clone(),
|
||||
"pre_prompt",
|
||||
) {
|
||||
report_shell_error(engine_state, &err);
|
||||
}
|
||||
perf!("pre-prompt hook", start_time, use_color);
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
// Next, check all the environment variables they ask for
|
||||
// fire the "env_change" hook
|
||||
let env_change = engine_state.get_config().hooks.env_change.clone();
|
||||
if let Err(error) = hook::eval_env_change_hook(env_change, engine_state, &mut stack) {
|
||||
if let Err(error) = hook::eval_env_change_hook(
|
||||
&engine_state.get_config().hooks.env_change.clone(),
|
||||
engine_state,
|
||||
&mut stack,
|
||||
) {
|
||||
report_shell_error(engine_state, &error)
|
||||
}
|
||||
perf!("env-change hook", start_time, use_color);
|
||||
@ -354,7 +387,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
||||
)))
|
||||
.with_quick_completions(config.completions.quick)
|
||||
.with_partial_completions(config.completions.partial)
|
||||
.with_ansi_colors(config.use_ansi_coloring)
|
||||
.with_ansi_colors(config.use_ansi_coloring.get(engine_state))
|
||||
.with_cwd(Some(
|
||||
engine_state
|
||||
.cwd(None)
|
||||
@ -363,14 +396,18 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
||||
.to_string_lossy()
|
||||
.to_string(),
|
||||
))
|
||||
.with_cursor_config(cursor_config);
|
||||
.with_cursor_config(cursor_config)
|
||||
.with_visual_selection_style(nu_ansi_term::Style {
|
||||
is_reverse: true,
|
||||
..Default::default()
|
||||
});
|
||||
|
||||
perf!("reedline builder", start_time, use_color);
|
||||
|
||||
let style_computer = StyleComputer::from_config(engine_state, &stack_arc);
|
||||
|
||||
start_time = std::time::Instant::now();
|
||||
line_editor = if config.use_ansi_coloring {
|
||||
line_editor = if config.use_ansi_coloring.get(engine_state) {
|
||||
line_editor.with_hinter(Box::new({
|
||||
// As of Nov 2022, "hints" color_config closures only get `null` passed in.
|
||||
let style = style_computer.compute("hints", &Value::nothing(Span::unknown()));
|
||||
@ -492,18 +529,17 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
||||
|
||||
// Right before we start running the code the user gave us, fire the `pre_execution`
|
||||
// hook
|
||||
if let Some(hook) = config.hooks.pre_execution.clone() {
|
||||
{
|
||||
// Set the REPL buffer to the current command for the "pre_execution" hook
|
||||
let mut repl = engine_state.repl_state.lock().expect("repl state mutex");
|
||||
repl.buffer = repl_cmd_line_text.to_string();
|
||||
drop(repl);
|
||||
|
||||
if let Err(err) = eval_hook(
|
||||
if let Err(err) = hook::eval_hooks(
|
||||
engine_state,
|
||||
&mut stack,
|
||||
None,
|
||||
vec![],
|
||||
&hook,
|
||||
&engine_state.get_config().hooks.pre_execution.clone(),
|
||||
"pre_execution",
|
||||
) {
|
||||
report_shell_error(engine_state, &err);
|
||||
@ -518,8 +554,10 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
||||
drop(repl);
|
||||
|
||||
if shell_integration_osc633 {
|
||||
if stack.get_env_var(engine_state, "TERM_PROGRAM")
|
||||
== Some(Value::test_string("vscode"))
|
||||
if stack
|
||||
.get_env_var(engine_state, "TERM_PROGRAM")
|
||||
.and_then(|v| v.as_str().ok())
|
||||
== Some("vscode")
|
||||
{
|
||||
start_time = Instant::now();
|
||||
|
||||
@ -657,7 +695,11 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
|
||||
);
|
||||
|
||||
println!();
|
||||
return (false, stack, line_editor);
|
||||
|
||||
cleanup_exit((), engine_state, 0);
|
||||
|
||||
// if cleanup_exit didn't exit, we should keep running
|
||||
return (true, stack, line_editor);
|
||||
}
|
||||
Err(err) => {
|
||||
let message = err.to_string();
|
||||
@ -736,7 +778,7 @@ fn fill_in_result_related_history_metadata(
|
||||
c.duration = Some(cmd_duration);
|
||||
c.exit_status = stack
|
||||
.get_env_var(engine_state, "LAST_EXIT_CODE")
|
||||
.and_then(|e| e.as_i64().ok());
|
||||
.and_then(|e| e.as_int().ok());
|
||||
c
|
||||
})
|
||||
.into_diagnostic()?; // todo: don't stop repl if error here?
|
||||
@ -775,8 +817,10 @@ fn parse_operation(
|
||||
) -> Result<ReplOperation, ErrReport> {
|
||||
let tokens = lex(s.as_bytes(), 0, &[], &[], false);
|
||||
// Check if this is a single call to a directory, if so auto-cd
|
||||
#[allow(deprecated)]
|
||||
let cwd = nu_engine::env::current_dir_str(engine_state, stack).unwrap_or_default();
|
||||
let cwd = engine_state
|
||||
.cwd(Some(stack))
|
||||
.map(|p| p.to_string_lossy().to_string())
|
||||
.unwrap_or_default();
|
||||
let mut orig = s.clone();
|
||||
if orig.starts_with('`') {
|
||||
orig = trim_quotes_str(&orig).to_string()
|
||||
@ -810,21 +854,26 @@ fn do_auto_cd(
|
||||
if !path.exists() {
|
||||
report_shell_error(
|
||||
engine_state,
|
||||
&ShellError::DirectoryNotFound {
|
||||
dir: path.to_string_lossy().to_string(),
|
||||
&ShellError::Io(IoError::new_with_additional_context(
|
||||
shell_error::io::ErrorKind::DirectoryNotFound,
|
||||
span,
|
||||
},
|
||||
PathBuf::from(&path),
|
||||
"Cannot change directory",
|
||||
)),
|
||||
);
|
||||
}
|
||||
path.to_string_lossy().to_string()
|
||||
};
|
||||
|
||||
if let PermissionResult::PermissionDenied(reason) = have_permission(path.clone()) {
|
||||
if let PermissionResult::PermissionDenied = have_permission(path.clone()) {
|
||||
report_shell_error(
|
||||
engine_state,
|
||||
&ShellError::IOError {
|
||||
msg: format!("Cannot change directory to {path}: {reason}"),
|
||||
},
|
||||
&ShellError::Io(IoError::new_with_additional_context(
|
||||
shell_error::io::ErrorKind::from_std(std::io::ErrorKind::PermissionDenied),
|
||||
span,
|
||||
PathBuf::from(path),
|
||||
"Cannot change directory",
|
||||
)),
|
||||
);
|
||||
return;
|
||||
}
|
||||
@ -841,7 +890,7 @@ fn do_auto_cd(
|
||||
|
||||
let shells = stack.get_env_var(engine_state, "NUSHELL_SHELLS");
|
||||
let mut shells = if let Some(v) = shells {
|
||||
v.into_list().unwrap_or_else(|_| vec![cwd])
|
||||
v.clone().into_list().unwrap_or_else(|_| vec![cwd])
|
||||
} else {
|
||||
vec![cwd]
|
||||
};
|
||||
@ -888,6 +937,9 @@ fn do_run_cmd(
|
||||
trace!("eval source: {}", s);
|
||||
|
||||
let mut cmds = s.split_whitespace();
|
||||
|
||||
let had_warning_before = engine_state.exit_warning_given.load(Ordering::SeqCst);
|
||||
|
||||
if let Some("exit") = cmds.next() {
|
||||
let mut working_set = StateWorkingSet::new(engine_state);
|
||||
let _ = parse(&mut working_set, None, s.as_bytes(), false);
|
||||
@ -896,13 +948,11 @@ fn do_run_cmd(
|
||||
match cmds.next() {
|
||||
Some(s) => {
|
||||
if let Ok(n) = s.parse::<i32>() {
|
||||
drop(line_editor);
|
||||
std::process::exit(n);
|
||||
return cleanup_exit(line_editor, engine_state, n);
|
||||
}
|
||||
}
|
||||
None => {
|
||||
drop(line_editor);
|
||||
std::process::exit(0);
|
||||
return cleanup_exit(line_editor, engine_state, 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -921,6 +971,14 @@ fn do_run_cmd(
|
||||
false,
|
||||
);
|
||||
|
||||
// if there was a warning before, and we got to this point, it means
|
||||
// the possible call to cleanup_exit did not occur.
|
||||
if had_warning_before && engine_state.is_interactive {
|
||||
engine_state
|
||||
.exit_warning_given
|
||||
.store(false, Ordering::SeqCst);
|
||||
}
|
||||
|
||||
line_editor
|
||||
}
|
||||
|
||||
@ -935,8 +993,7 @@ fn run_shell_integration_osc2(
|
||||
stack: &mut Stack,
|
||||
use_color: bool,
|
||||
) {
|
||||
#[allow(deprecated)]
|
||||
if let Ok(path) = current_dir_str(engine_state, stack) {
|
||||
if let Ok(path) = engine_state.cwd_as_string(Some(stack)) {
|
||||
let start_time = Instant::now();
|
||||
|
||||
// Try to abbreviate string for windows title
|
||||
@ -980,8 +1037,7 @@ fn run_shell_integration_osc7(
|
||||
stack: &mut Stack,
|
||||
use_color: bool,
|
||||
) {
|
||||
#[allow(deprecated)]
|
||||
if let Ok(path) = current_dir_str(engine_state, stack) {
|
||||
if let Ok(path) = engine_state.cwd_as_string(Some(stack)) {
|
||||
let start_time = Instant::now();
|
||||
|
||||
// Otherwise, communicate the path as OSC 7 (often used for spawning new tabs in the same dir)
|
||||
@ -1004,8 +1060,7 @@ fn run_shell_integration_osc7(
|
||||
}
|
||||
|
||||
fn run_shell_integration_osc9_9(engine_state: &EngineState, stack: &mut Stack, use_color: bool) {
|
||||
#[allow(deprecated)]
|
||||
if let Ok(path) = current_dir_str(engine_state, stack) {
|
||||
if let Ok(path) = engine_state.cwd_as_string(Some(stack)) {
|
||||
let start_time = Instant::now();
|
||||
|
||||
// Otherwise, communicate the path as OSC 9;9 from ConEmu (often used for spawning new tabs in the same dir)
|
||||
@ -1029,11 +1084,14 @@ fn run_shell_integration_osc633(
|
||||
use_color: bool,
|
||||
repl_cmd_line_text: String,
|
||||
) {
|
||||
#[allow(deprecated)]
|
||||
if let Ok(path) = current_dir_str(engine_state, stack) {
|
||||
if let Ok(path) = engine_state.cwd_as_string(Some(stack)) {
|
||||
// Supported escape sequences of Microsoft's Visual Studio Code (vscode)
|
||||
// https://code.visualstudio.com/docs/terminal/shell-integration#_supported-escape-sequences
|
||||
if stack.get_env_var(engine_state, "TERM_PROGRAM") == Some(Value::test_string("vscode")) {
|
||||
if stack
|
||||
.get_env_var(engine_state, "TERM_PROGRAM")
|
||||
.and_then(|v| v.as_str().ok())
|
||||
== Some("vscode")
|
||||
{
|
||||
let start_time = Instant::now();
|
||||
|
||||
// If we're in vscode, run their specific ansi escape sequence.
|
||||
@ -1051,16 +1109,8 @@ fn run_shell_integration_osc633(
|
||||
|
||||
// escape a few things because this says so
|
||||
// https://code.visualstudio.com/docs/terminal/shell-integration#_vs-code-custom-sequences-osc-633-st
|
||||
|
||||
let replaced_cmd_text: String = repl_cmd_line_text
|
||||
.chars()
|
||||
.map(|c| match c {
|
||||
'\n' => '\x0a',
|
||||
'\r' => '\x0d',
|
||||
'\x1b' => '\x1b',
|
||||
_ => c,
|
||||
})
|
||||
.collect();
|
||||
let replaced_cmd_text =
|
||||
escape_special_vscode_bytes(&repl_cmd_line_text).unwrap_or(repl_cmd_line_text);
|
||||
|
||||
//OSC 633 ; E ; <commandline> [; <nonce] ST - Explicitly set the command line with an optional nonce.
|
||||
run_ansi_sequence(&format!(
|
||||
@ -1098,7 +1148,6 @@ fn flush_engine_state_repl_buffer(engine_state: &mut EngineState, line_editor: &
|
||||
/// Setup history management for Reedline
|
||||
///
|
||||
fn setup_history(
|
||||
nushell_path: &str,
|
||||
engine_state: &mut EngineState,
|
||||
line_editor: Reedline,
|
||||
history: HistoryConfig,
|
||||
@ -1110,7 +1159,7 @@ fn setup_history(
|
||||
None
|
||||
};
|
||||
|
||||
if let Some(path) = crate::config_files::get_history_path(nushell_path, history.file_format) {
|
||||
if let Some(path) = history.file_path() {
|
||||
return update_line_editor_history(
|
||||
engine_state,
|
||||
path,
|
||||
@ -1126,7 +1175,7 @@ fn setup_history(
|
||||
/// Setup Reedline keybindingds based on the provided config
|
||||
///
|
||||
fn setup_keybindings(engine_state: &EngineState, line_editor: Reedline) -> Reedline {
|
||||
return match create_keybindings(engine_state.get_config()) {
|
||||
match create_keybindings(engine_state.get_config()) {
|
||||
Ok(keybindings) => match keybindings {
|
||||
KeybindingsMode::Emacs(keybindings) => {
|
||||
let edit_mode = Box::new(Emacs::new(keybindings));
|
||||
@ -1144,7 +1193,7 @@ fn setup_keybindings(engine_state: &EngineState, line_editor: Reedline) -> Reedl
|
||||
report_shell_error(engine_state, &e);
|
||||
line_editor
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
///
|
||||
@ -1228,10 +1277,14 @@ fn get_command_finished_marker(
|
||||
) -> String {
|
||||
let exit_code = stack
|
||||
.get_env_var(engine_state, "LAST_EXIT_CODE")
|
||||
.and_then(|e| e.as_i64().ok());
|
||||
.and_then(|e| e.as_int().ok());
|
||||
|
||||
if shell_integration_osc633 {
|
||||
if stack.get_env_var(engine_state, "TERM_PROGRAM") == Some(Value::test_string("vscode")) {
|
||||
if stack
|
||||
.get_env_var(engine_state, "TERM_PROGRAM")
|
||||
.and_then(|v| v.as_str().ok())
|
||||
== Some("vscode")
|
||||
{
|
||||
// We're in vscode and we have osc633 enabled
|
||||
format!(
|
||||
"{}{}{}",
|
||||
@ -1280,7 +1333,11 @@ fn run_finaliziation_ansi_sequence(
|
||||
) {
|
||||
if shell_integration_osc633 {
|
||||
// Only run osc633 if we are in vscode
|
||||
if stack.get_env_var(engine_state, "TERM_PROGRAM") == Some(Value::test_string("vscode")) {
|
||||
if stack
|
||||
.get_env_var(engine_state, "TERM_PROGRAM")
|
||||
.and_then(|v| v.as_str().ok())
|
||||
== Some("vscode")
|
||||
{
|
||||
let start_time = Instant::now();
|
||||
|
||||
run_ansi_sequence(&get_command_finished_marker(
|
||||
@ -1331,10 +1388,9 @@ fn run_finaliziation_ansi_sequence(
|
||||
|
||||
// Absolute paths with a drive letter, like 'C:', 'D:\', 'E:\foo'
|
||||
#[cfg(windows)]
|
||||
static DRIVE_PATH_REGEX: once_cell::sync::Lazy<fancy_regex::Regex> =
|
||||
once_cell::sync::Lazy::new(|| {
|
||||
fancy_regex::Regex::new(r"^[a-zA-Z]:[/\\]?").expect("Internal error: regex creation")
|
||||
});
|
||||
static DRIVE_PATH_REGEX: std::sync::LazyLock<fancy_regex::Regex> = std::sync::LazyLock::new(|| {
|
||||
fancy_regex::Regex::new(r"^[a-zA-Z]:[/\\]?").expect("Internal error: regex creation")
|
||||
});
|
||||
|
||||
// A best-effort "does this string look kinda like a path?" function to determine whether to auto-cd
|
||||
fn looks_like_path(orig: &str) -> bool {
|
||||
@ -1378,8 +1434,7 @@ fn trailing_slash_looks_like_path() {
|
||||
fn are_session_ids_in_sync() {
|
||||
let engine_state = &mut EngineState::new();
|
||||
let history = engine_state.history_config().unwrap();
|
||||
let history_path =
|
||||
crate::config_files::get_history_path("nushell", history.file_format).unwrap();
|
||||
let history_path = history.file_path().unwrap();
|
||||
let line_editor = reedline::Reedline::create();
|
||||
let history_session_id = reedline::Reedline::create_history_session_id();
|
||||
let line_editor = update_line_editor_history(
|
||||
@ -1397,7 +1452,7 @@ fn are_session_ids_in_sync() {
|
||||
|
||||
#[cfg(test)]
|
||||
mod test_auto_cd {
|
||||
use super::{do_auto_cd, parse_operation, ReplOperation};
|
||||
use super::{ReplOperation, do_auto_cd, escape_special_vscode_bytes, parse_operation};
|
||||
use nu_path::AbsolutePath;
|
||||
use nu_protocol::engine::{EngineState, Stack};
|
||||
use tempfile::tempdir;
|
||||
@ -1535,6 +1590,13 @@ mod test_auto_cd {
|
||||
symlink(&dir, &link).unwrap();
|
||||
let input = if cfg!(windows) { r".\link" } else { "./link" };
|
||||
check(tempdir, input, link);
|
||||
|
||||
let dir = tempdir.join("foo").join("bar");
|
||||
std::fs::create_dir_all(&dir).unwrap();
|
||||
let link = tempdir.join("link2");
|
||||
symlink(&dir, &link).unwrap();
|
||||
let input = "..";
|
||||
check(link, input, tempdir);
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -1547,4 +1609,43 @@ mod test_auto_cd {
|
||||
let input = if cfg!(windows) { r"foo\" } else { "foo/" };
|
||||
check(tempdir, input, dir);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn escape_vscode_semicolon_test() {
|
||||
let input = r#"now;is"#;
|
||||
let expected = r#"now\x3Bis"#;
|
||||
let actual = escape_special_vscode_bytes(input).unwrap();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn escape_vscode_backslash_test() {
|
||||
let input = r#"now\is"#;
|
||||
let expected = r#"now\\is"#;
|
||||
let actual = escape_special_vscode_bytes(input).unwrap();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn escape_vscode_linefeed_test() {
|
||||
let input = "now\nis";
|
||||
let expected = r#"now\x0Ais"#;
|
||||
let actual = escape_special_vscode_bytes(input).unwrap();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn escape_vscode_tab_null_cr_test() {
|
||||
let input = "now\t\0\ris";
|
||||
let expected = r#"now\x09\x00\x0Dis"#;
|
||||
let actual = escape_special_vscode_bytes(input).unwrap();
|
||||
assert_eq!(expected, actual);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn escape_vscode_multibyte_ok() {
|
||||
let input = "now🍪is";
|
||||
let actual = escape_special_vscode_bytes(input).unwrap();
|
||||
assert_eq!(input, actual);
|
||||
}
|
||||
}
|
||||
|
@ -2,11 +2,11 @@ use log::trace;
|
||||
use nu_ansi_term::Style;
|
||||
use nu_color_config::{get_matching_brackets_style, get_shape_color};
|
||||
use nu_engine::env;
|
||||
use nu_parser::{flatten_block, parse, FlatShape};
|
||||
use nu_parser::{FlatShape, flatten_block, parse};
|
||||
use nu_protocol::{
|
||||
Span,
|
||||
ast::{Block, Expr, Expression, PipelineRedirection, RecordItem},
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
Span,
|
||||
};
|
||||
use reedline::{Highlighter, StyledText};
|
||||
use std::sync::Arc;
|
||||
@ -144,8 +144,6 @@ impl Highlighter for NuHighlighter {
|
||||
}
|
||||
FlatShape::Flag => add_colored_token(&shape.1, next_token),
|
||||
FlatShape::Pipe => add_colored_token(&shape.1, next_token),
|
||||
FlatShape::And => add_colored_token(&shape.1, next_token),
|
||||
FlatShape::Or => add_colored_token(&shape.1, next_token),
|
||||
FlatShape::Redirection => add_colored_token(&shape.1, next_token),
|
||||
FlatShape::Custom(..) => add_colored_token(&shape.1, next_token),
|
||||
FlatShape::MatchPattern => add_colored_token(&shape.1, next_token),
|
||||
@ -311,6 +309,7 @@ fn find_matching_block_end_in_expr(
|
||||
.unwrap_or(expression.span.start);
|
||||
|
||||
return match &expression.expr {
|
||||
// TODO: Can't these be handled with an `_ => None` branch? Refactor
|
||||
Expr::Bool(_) => None,
|
||||
Expr::Int(_) => None,
|
||||
Expr::Float(_) => None,
|
||||
@ -337,6 +336,28 @@ fn find_matching_block_end_in_expr(
|
||||
Expr::Nothing => None,
|
||||
Expr::Garbage => None,
|
||||
|
||||
Expr::AttributeBlock(ab) => ab
|
||||
.attributes
|
||||
.iter()
|
||||
.find_map(|attr| {
|
||||
find_matching_block_end_in_expr(
|
||||
line,
|
||||
working_set,
|
||||
&attr.expr,
|
||||
global_span_offset,
|
||||
global_cursor_offset,
|
||||
)
|
||||
})
|
||||
.or_else(|| {
|
||||
find_matching_block_end_in_expr(
|
||||
line,
|
||||
working_set,
|
||||
&ab.item,
|
||||
global_span_offset,
|
||||
global_cursor_offset,
|
||||
)
|
||||
}),
|
||||
|
||||
Expr::Table(table) => {
|
||||
if expr_last == global_cursor_offset {
|
||||
// cursor is at table end
|
||||
|
@ -1,16 +1,18 @@
|
||||
#![allow(clippy::byte_char_slices)]
|
||||
|
||||
use nu_cmd_base::hook::eval_hook;
|
||||
use nu_engine::{eval_block, eval_block_with_early_return};
|
||||
use nu_parser::{escape_quote_string, lex, parse, unescape_unquote_string, Token, TokenContents};
|
||||
use nu_parser::{Token, TokenContents, lex, parse, unescape_unquote_string};
|
||||
use nu_protocol::{
|
||||
PipelineData, ShellError, Span, Value,
|
||||
cli_error::report_compile_error,
|
||||
debugger::WithoutDebug,
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
report_parse_error, report_parse_warning, report_shell_error, PipelineData, ShellError, Span,
|
||||
Value,
|
||||
report_parse_error, report_parse_warning, report_shell_error,
|
||||
};
|
||||
#[cfg(windows)]
|
||||
use nu_utils::enable_vt_processing;
|
||||
use nu_utils::perf;
|
||||
use nu_utils::{escape_quote_string, perf};
|
||||
use std::path::Path;
|
||||
|
||||
// This will collect environment variables from std::env and adds them to a stack.
|
||||
@ -130,7 +132,7 @@ fn gather_env_vars(
|
||||
working_set.error(err);
|
||||
}
|
||||
|
||||
if working_set.parse_errors.first().is_some() {
|
||||
if !working_set.parse_errors.is_empty() {
|
||||
report_capture_error(
|
||||
engine_state,
|
||||
&String::from_utf8_lossy(contents),
|
||||
@ -174,7 +176,7 @@ fn gather_env_vars(
|
||||
working_set.error(err);
|
||||
}
|
||||
|
||||
if working_set.parse_errors.first().is_some() {
|
||||
if !working_set.parse_errors.is_empty() {
|
||||
report_capture_error(
|
||||
engine_state,
|
||||
&String::from_utf8_lossy(contents),
|
||||
@ -201,6 +203,35 @@ fn gather_env_vars(
|
||||
}
|
||||
}
|
||||
|
||||
/// Print a pipeline with formatting applied based on display_output hook.
///
/// This function should be preferred when printing values resulting from a completed evaluation.
/// For values printed as part of a command's execution, such as values printed by the `print` command,
/// the `PipelineData::print_table` function should be preferred instead as it is not config-dependent.
///
/// `no_newline` controls if we need to attach newline character to output.
pub fn print_pipeline(
engine_state: &mut EngineState,
stack: &mut Stack,
pipeline: PipelineData,
no_newline: bool,
) -> Result<(), ShellError> {
if let Some(hook) = engine_state.get_config().hooks.display_output.clone() {
let pipeline = eval_hook(
engine_state,
stack,
Some(pipeline),
vec![],
&hook,
"display_output",
)?;
pipeline.print_raw(engine_state, no_newline, false)
} else {
// if display_output isn't set, we should still prefer to print with some formatting
pipeline.print_table(engine_state, stack, no_newline, false)
}
}

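A minimal usage sketch of the new print_pipeline helper (illustrative only; the evaluate_source change further down in this diff is the real call site and does exactly this):

// Illustrative sketch only: print a finished evaluation through the
// display_output hook, skipping the trailing newline for byte streams.
fn print_eval_result(
    engine_state: &mut EngineState,
    stack: &mut Stack,
    pipeline: PipelineData,
) -> Result<(), ShellError> {
    let no_newline = matches!(&pipeline, &PipelineData::ByteStream(..));
    print_pipeline(engine_state, stack, pipeline, no_newline)
}
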
pub fn eval_source(
|
||||
engine_state: &mut EngineState,
|
||||
stack: &mut Stack,
|
||||
@ -221,7 +252,7 @@ pub fn eval_source(
|
||||
report_shell_error(engine_state, &err);
|
||||
let code = err.exit_code();
|
||||
stack.set_last_error(&err);
|
||||
code
|
||||
code.unwrap_or(0)
|
||||
}
|
||||
};
|
||||
|
||||
@ -234,7 +265,10 @@ pub fn eval_source(
|
||||
perf!(
|
||||
&format!("eval_source {}", &fname),
|
||||
start_time,
|
||||
engine_state.get_config().use_ansi_coloring
|
||||
engine_state
|
||||
.get_config()
|
||||
.use_ansi_coloring
|
||||
.get(engine_state)
|
||||
);
|
||||
|
||||
exit_code
|
||||
@ -267,7 +301,7 @@ fn evaluate_source(
|
||||
|
||||
if let Some(err) = working_set.compile_errors.first() {
|
||||
report_compile_error(&working_set, err);
|
||||
// Not a fatal error, for now
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
(output, working_set.render())
|
||||
@ -281,21 +315,8 @@ fn evaluate_source(
|
||||
eval_block::<WithoutDebug>(engine_state, stack, &block, input)
|
||||
}?;
|
||||
|
||||
if let PipelineData::ByteStream(..) = pipeline {
|
||||
pipeline.print(engine_state, stack, false, false)
|
||||
} else if let Some(hook) = engine_state.get_config().hooks.display_output.clone() {
|
||||
let pipeline = eval_hook(
|
||||
engine_state,
|
||||
stack,
|
||||
Some(pipeline),
|
||||
vec![],
|
||||
&hook,
|
||||
"display_output",
|
||||
)?;
|
||||
pipeline.print(engine_state, stack, false, false)
|
||||
} else {
|
||||
pipeline.print(engine_state, stack, true, false)
|
||||
}?;
|
||||
let no_newline = matches!(&pipeline, &PipelineData::ByteStream(..));
|
||||
print_pipeline(engine_state, stack, pipeline, no_newline)?;
|
||||
|
||||
Ok(false)
|
||||
}
|
||||
|
@ -1,7 +1,7 @@
|
||||
use nu_parser::parse;
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, StateWorkingSet},
|
||||
ParseError,
|
||||
engine::{EngineState, StateWorkingSet},
|
||||
};
|
||||
use reedline::{ValidationResult, Validator};
|
||||
use std::sync::Arc;
|
||||
|
crates/nu-cli/tests/commands/history_import.rs (new file)
@ -0,0 +1,296 @@
|
||||
use nu_protocol::HistoryFileFormat;
|
||||
use nu_test_support::{Outcome, nu};
|
||||
use reedline::{
|
||||
FileBackedHistory, History, HistoryItem, HistoryItemId, ReedlineError, SearchQuery,
|
||||
SqliteBackedHistory,
|
||||
};
|
||||
use rstest::rstest;
|
||||
use tempfile::TempDir;
|
||||
|
||||
struct Test {
|
||||
cfg_dir: TempDir,
|
||||
}
|
||||
|
||||
impl Test {
|
||||
fn new(history_format: &'static str) -> Self {
|
||||
let cfg_dir = tempfile::Builder::new()
|
||||
.prefix("history_import_test")
|
||||
.tempdir()
|
||||
.unwrap();
|
||||
// Assigning to $env.config.history.file_format seems to work only in startup
|
||||
// configuration.
|
||||
std::fs::write(
|
||||
cfg_dir.path().join("env.nu"),
|
||||
format!("$env.config.history.file_format = {history_format:?}"),
|
||||
)
|
||||
.unwrap();
|
||||
Self { cfg_dir }
|
||||
}
|
||||
|
||||
fn nu(&self, cmd: impl AsRef<str>) -> Outcome {
|
||||
let env = [(
|
||||
"XDG_CONFIG_HOME".to_string(),
|
||||
self.cfg_dir.path().to_str().unwrap().to_string(),
|
||||
)];
|
||||
let env_config = self.cfg_dir.path().join("env.nu");
|
||||
nu!(envs: env, env_config: env_config, cmd.as_ref())
|
||||
}
|
||||
|
||||
fn open_plaintext(&self) -> Result<FileBackedHistory, ReedlineError> {
|
||||
FileBackedHistory::with_file(
|
||||
100,
|
||||
self.cfg_dir
|
||||
.path()
|
||||
.join("nushell")
|
||||
.join(HistoryFileFormat::Plaintext.default_file_name()),
|
||||
)
|
||||
}
|
||||
|
||||
fn open_sqlite(&self) -> Result<SqliteBackedHistory, ReedlineError> {
|
||||
SqliteBackedHistory::with_file(
|
||||
self.cfg_dir
|
||||
.path()
|
||||
.join("nushell")
|
||||
.join(HistoryFileFormat::Sqlite.default_file_name()),
|
||||
None,
|
||||
None,
|
||||
)
|
||||
}
|
||||
|
||||
fn open_backend(&self, format: HistoryFileFormat) -> Result<Box<dyn History>, ReedlineError> {
|
||||
fn boxed(be: impl History + 'static) -> Box<dyn History> {
|
||||
Box::new(be)
|
||||
}
|
||||
use HistoryFileFormat::*;
|
||||
match format {
|
||||
Plaintext => self.open_plaintext().map(boxed),
|
||||
Sqlite => self.open_sqlite().map(boxed),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum HistorySource {
|
||||
Vec(Vec<HistoryItem>),
|
||||
Command(&'static str),
|
||||
}
|
||||
|
||||
struct TestCase {
|
||||
dst_format: HistoryFileFormat,
|
||||
dst_history: Vec<HistoryItem>,
|
||||
src_history: HistorySource,
|
||||
want_history: Vec<HistoryItem>,
|
||||
}
|
||||
|
||||
const EMPTY_TEST_CASE: TestCase = TestCase {
|
||||
dst_format: HistoryFileFormat::Plaintext,
|
||||
dst_history: Vec::new(),
|
||||
src_history: HistorySource::Vec(Vec::new()),
|
||||
want_history: Vec::new(),
|
||||
};
|
||||
|
||||
impl TestCase {
|
||||
fn run(self) {
|
||||
use HistoryFileFormat::*;
|
||||
let test = Test::new(match self.dst_format {
|
||||
Plaintext => "plaintext",
|
||||
Sqlite => "sqlite",
|
||||
});
|
||||
save_all(
|
||||
&mut *test.open_backend(self.dst_format).unwrap(),
|
||||
self.dst_history,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let outcome = match self.src_history {
|
||||
HistorySource::Vec(src_history) => {
|
||||
let src_format = match self.dst_format {
|
||||
Plaintext => Sqlite,
|
||||
Sqlite => Plaintext,
|
||||
};
|
||||
save_all(&mut *test.open_backend(src_format).unwrap(), src_history).unwrap();
|
||||
test.nu("history import")
|
||||
}
|
||||
HistorySource::Command(cmd) => {
|
||||
let mut cmd = cmd.to_string();
|
||||
cmd.push_str(" | history import");
|
||||
test.nu(cmd)
|
||||
}
|
||||
};
|
||||
assert!(outcome.status.success());
|
||||
let got = query_all(&*test.open_backend(self.dst_format).unwrap()).unwrap();
|
||||
|
||||
// Compare just the commands first, for readability.
|
||||
fn commands_only(items: &[HistoryItem]) -> Vec<&str> {
|
||||
items
|
||||
.iter()
|
||||
.map(|item| item.command_line.as_str())
|
||||
.collect()
|
||||
}
|
||||
assert_eq!(commands_only(&got), commands_only(&self.want_history));
|
||||
// If commands match, compare full items.
|
||||
assert_eq!(got, self.want_history);
|
||||
}
|
||||
}
|
||||
|
||||
fn query_all(history: &dyn History) -> Result<Vec<HistoryItem>, ReedlineError> {
|
||||
history.search(SearchQuery::everything(
|
||||
reedline::SearchDirection::Forward,
|
||||
None,
|
||||
))
|
||||
}
|
||||
|
||||
fn save_all(history: &mut dyn History, items: Vec<HistoryItem>) -> Result<(), ReedlineError> {
|
||||
for item in items {
|
||||
history.save(item)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
const EMPTY_ITEM: HistoryItem = HistoryItem {
|
||||
command_line: String::new(),
|
||||
id: None,
|
||||
start_timestamp: None,
|
||||
session_id: None,
|
||||
hostname: None,
|
||||
cwd: None,
|
||||
duration: None,
|
||||
exit_status: None,
|
||||
more_info: None,
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn history_import_pipe_string() {
|
||||
TestCase {
|
||||
dst_format: HistoryFileFormat::Plaintext,
|
||||
src_history: HistorySource::Command("echo bar"),
|
||||
want_history: vec![HistoryItem {
|
||||
id: Some(HistoryItemId::new(0)),
|
||||
command_line: "bar".to_string(),
|
||||
..EMPTY_ITEM
|
||||
}],
|
||||
..EMPTY_TEST_CASE
|
||||
}
|
||||
.run();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn history_import_pipe_record() {
|
||||
TestCase {
|
||||
dst_format: HistoryFileFormat::Sqlite,
|
||||
src_history: HistorySource::Command("[[cwd command]; [/tmp some_command]]"),
|
||||
want_history: vec![HistoryItem {
|
||||
id: Some(HistoryItemId::new(1)),
|
||||
command_line: "some_command".to_string(),
|
||||
cwd: Some("/tmp".to_string()),
|
||||
..EMPTY_ITEM
|
||||
}],
|
||||
..EMPTY_TEST_CASE
|
||||
}
|
||||
.run();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn to_empty_plaintext() {
|
||||
TestCase {
|
||||
dst_format: HistoryFileFormat::Plaintext,
|
||||
src_history: HistorySource::Vec(vec![
|
||||
HistoryItem {
|
||||
command_line: "foo".to_string(),
|
||||
..EMPTY_ITEM
|
||||
},
|
||||
HistoryItem {
|
||||
command_line: "bar".to_string(),
|
||||
..EMPTY_ITEM
|
||||
},
|
||||
]),
|
||||
want_history: vec![
|
||||
HistoryItem {
|
||||
id: Some(HistoryItemId::new(0)),
|
||||
command_line: "foo".to_string(),
|
||||
..EMPTY_ITEM
|
||||
},
|
||||
HistoryItem {
|
||||
id: Some(HistoryItemId::new(1)),
|
||||
command_line: "bar".to_string(),
|
||||
..EMPTY_ITEM
|
||||
},
|
||||
],
|
||||
..EMPTY_TEST_CASE
|
||||
}
|
||||
.run()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn to_empty_sqlite() {
|
||||
TestCase {
|
||||
dst_format: HistoryFileFormat::Sqlite,
|
||||
src_history: HistorySource::Vec(vec![
|
||||
HistoryItem {
|
||||
command_line: "foo".to_string(),
|
||||
..EMPTY_ITEM
|
||||
},
|
||||
HistoryItem {
|
||||
command_line: "bar".to_string(),
|
||||
..EMPTY_ITEM
|
||||
},
|
||||
]),
|
||||
want_history: vec![
|
||||
HistoryItem {
|
||||
id: Some(HistoryItemId::new(1)),
|
||||
command_line: "foo".to_string(),
|
||||
..EMPTY_ITEM
|
||||
},
|
||||
HistoryItem {
|
||||
id: Some(HistoryItemId::new(2)),
|
||||
command_line: "bar".to_string(),
|
||||
..EMPTY_ITEM
|
||||
},
|
||||
],
|
||||
..EMPTY_TEST_CASE
|
||||
}
|
||||
.run()
|
||||
}
|
||||
|
||||
#[rstest]
|
||||
#[case::plaintext(HistoryFileFormat::Plaintext)]
|
||||
#[case::sqlite(HistoryFileFormat::Sqlite)]
|
||||
fn to_existing(#[case] dst_format: HistoryFileFormat) {
|
||||
TestCase {
|
||||
dst_format,
|
||||
dst_history: vec![
|
||||
HistoryItem {
|
||||
id: Some(HistoryItemId::new(0)),
|
||||
command_line: "original-1".to_string(),
|
||||
..EMPTY_ITEM
|
||||
},
|
||||
HistoryItem {
|
||||
id: Some(HistoryItemId::new(1)),
|
||||
command_line: "original-2".to_string(),
|
||||
..EMPTY_ITEM
|
||||
},
|
||||
],
|
||||
src_history: HistorySource::Vec(vec![HistoryItem {
|
||||
id: Some(HistoryItemId::new(1)),
|
||||
command_line: "new".to_string(),
|
||||
..EMPTY_ITEM
|
||||
}]),
|
||||
want_history: vec![
|
||||
HistoryItem {
|
||||
id: Some(HistoryItemId::new(0)),
|
||||
command_line: "original-1".to_string(),
|
||||
..EMPTY_ITEM
|
||||
},
|
||||
HistoryItem {
|
||||
id: Some(HistoryItemId::new(1)),
|
||||
command_line: "original-2".to_string(),
|
||||
..EMPTY_ITEM
|
||||
},
|
||||
HistoryItem {
|
||||
id: Some(HistoryItemId::new(2)),
|
||||
command_line: "new".to_string(),
|
||||
..EMPTY_ITEM
|
||||
},
|
||||
],
|
||||
}
|
||||
.run()
|
||||
}
|
@ -1,2 +1,3 @@
|
||||
mod history_import;
|
||||
mod keybindings_list;
|
||||
mod nu_highlight;
|
||||
|
File diff suppressed because it is too large
@ -2,9 +2,9 @@ use nu_engine::eval_block;
|
||||
use nu_parser::parse;
|
||||
use nu_path::{AbsolutePathBuf, PathBuf};
|
||||
use nu_protocol::{
|
||||
PipelineData, ShellError, Span, Value,
|
||||
debugger::WithoutDebug,
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
PipelineData, ShellError, Span, Value,
|
||||
};
|
||||
use nu_test_support::fs;
|
||||
use reedline::Suggestion;
|
||||
@ -14,11 +14,8 @@ fn create_default_context() -> EngineState {
|
||||
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
|
||||
}
|
||||
|
||||
// creates a new engine with the current path into the completions fixtures folder
|
||||
pub fn new_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
// Target folder inside assets
|
||||
let dir = fs::fixtures().join("completions");
|
||||
let dir_str = dir
|
||||
pub fn new_engine_helper(pwd: AbsolutePathBuf) -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
let pwd_str = pwd
|
||||
.clone()
|
||||
.into_os_string()
|
||||
.into_string()
|
||||
@ -36,13 +33,13 @@ pub fn new_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
// Add pwd as env var
|
||||
stack.add_env_var(
|
||||
"PWD".to_string(),
|
||||
Value::string(dir_str.clone(), nu_protocol::Span::new(0, dir_str.len())),
|
||||
Value::string(pwd_str.clone(), nu_protocol::Span::new(0, pwd_str.len())),
|
||||
);
|
||||
stack.add_env_var(
|
||||
"TEST".to_string(),
|
||||
Value::string(
|
||||
"NUSHELL".to_string(),
|
||||
nu_protocol::Span::new(0, dir_str.len()),
|
||||
nu_protocol::Span::new(0, pwd_str.len()),
|
||||
),
|
||||
);
|
||||
#[cfg(windows)]
|
||||
@ -50,7 +47,7 @@ pub fn new_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
"Path".to_string(),
|
||||
Value::string(
|
||||
"c:\\some\\path;c:\\some\\other\\path".to_string(),
|
||||
nu_protocol::Span::new(0, dir_str.len()),
|
||||
nu_protocol::Span::new(0, pwd_str.len()),
|
||||
),
|
||||
);
|
||||
#[cfg(not(windows))]
|
||||
@ -58,135 +55,90 @@ pub fn new_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
"PATH".to_string(),
|
||||
Value::string(
|
||||
"/some/path:/some/other/path".to_string(),
|
||||
nu_protocol::Span::new(0, dir_str.len()),
|
||||
nu_protocol::Span::new(0, pwd_str.len()),
|
||||
),
|
||||
);
|
||||
|
||||
// Merge environment into the permanent state
|
||||
let merge_result = engine_state.merge_env(&mut stack, &dir);
|
||||
let merge_result = engine_state.merge_env(&mut stack);
|
||||
assert!(merge_result.is_ok());
|
||||
|
||||
(dir, dir_str, engine_state, stack)
|
||||
(pwd, pwd_str, engine_state, stack)
|
||||
}
|
||||
|
||||
// creates a new engine with the current path into the completions fixtures folder
|
||||
/// creates a new engine with the current path in the completions fixtures folder
|
||||
pub fn new_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
new_engine_helper(fs::fixtures().join("completions"))
|
||||
}
|
||||
|
||||
/// Adds pseudo PATH env for external completion tests
|
||||
pub fn new_external_engine() -> EngineState {
|
||||
let mut engine = create_default_context();
|
||||
let dir = fs::fixtures().join("external_completions").join("path");
|
||||
let dir_str = dir.to_string_lossy().to_string();
|
||||
let internal_span = nu_protocol::Span::new(0, dir_str.len());
|
||||
engine.add_env_var(
|
||||
"PATH".to_string(),
|
||||
Value::List {
|
||||
vals: vec![Value::String {
|
||||
val: dir_str,
|
||||
internal_span,
|
||||
}],
|
||||
internal_span,
|
||||
},
|
||||
);
|
||||
engine
|
||||
}
|
||||
|
||||
/// creates a new engine with the current path in the dotnu_completions fixtures folder
|
||||
pub fn new_dotnu_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
|
||||
// Target folder inside assets
|
||||
let dir = fs::fixtures().join("dotnu_completions");
|
||||
let dir_str = dir
|
||||
.clone()
|
||||
.into_os_string()
|
||||
.into_string()
|
||||
.unwrap_or_default();
|
||||
let (dir, dir_str, mut engine_state, mut stack) = new_engine_helper(dir);
|
||||
let dir_span = nu_protocol::Span::new(0, dir_str.len());
|
||||
|
||||
// Create a new engine with default context
|
||||
let mut engine_state = create_default_context();
|
||||
|
||||
// Add $nu
|
||||
engine_state.generate_nu_constant();
|
||||
|
||||
// New stack
|
||||
let mut stack = Stack::new();
|
||||
|
||||
// Add pwd as env var
|
||||
stack.add_env_var("PWD".to_string(), Value::string(dir_str.clone(), dir_span));
|
||||
stack.add_env_var(
|
||||
"TEST".to_string(),
|
||||
Value::string("NUSHELL".to_string(), dir_span),
|
||||
// const $NU_LIB_DIRS
|
||||
let mut working_set = StateWorkingSet::new(&engine_state);
|
||||
let var_id = working_set.add_variable(
|
||||
b"$NU_LIB_DIRS".into(),
|
||||
Span::unknown(),
|
||||
nu_protocol::Type::List(Box::new(nu_protocol::Type::String)),
|
||||
false,
|
||||
);
|
||||
working_set.set_variable_const_val(
|
||||
var_id,
|
||||
Value::test_list(vec![
|
||||
Value::string(file(dir.join("lib-dir1")), dir_span),
|
||||
Value::string(file(dir.join("lib-dir3")), dir_span),
|
||||
]),
|
||||
);
|
||||
let _ = engine_state.merge_delta(working_set.render());
|
||||
|
||||
stack.add_env_var(
|
||||
"NU_LIB_DIRS".to_string(),
|
||||
Value::List {
|
||||
vals: vec![
|
||||
Value::string(file(dir.join("lib-dir1")), dir_span),
|
||||
Value::string(file(dir.join("lib-dir2")), dir_span),
|
||||
Value::string(file(dir.join("lib-dir3")), dir_span),
|
||||
],
|
||||
internal_span: dir_span,
|
||||
},
|
||||
"NU_LIB_DIRS".into(),
|
||||
Value::test_list(vec![
|
||||
Value::string(file(dir.join("lib-dir2")), dir_span),
|
||||
Value::string(file(dir.join("lib-dir3")), dir_span),
|
||||
]),
|
||||
);
|
||||
|
||||
// Merge environment into the permanent state
|
||||
let merge_result = engine_state.merge_env(&mut stack, &dir);
|
||||
let merge_result = engine_state.merge_env(&mut stack);
|
||||
assert!(merge_result.is_ok());
|
||||
|
||||
(dir, dir_str, engine_state, stack)
|
||||
}
|
||||
|
||||
pub fn new_quote_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
// Target folder inside assets
let dir = fs::fixtures().join("quoted_completions");
let dir_str = dir
.clone()
.into_os_string()
.into_string()
.unwrap_or_default();

// Create a new engine with default context
let mut engine_state = create_default_context();

// New stack
let mut stack = Stack::new();

// Add pwd as env var
stack.add_env_var(
"PWD".to_string(),
Value::string(dir_str.clone(), nu_protocol::Span::new(0, dir_str.len())),
);
stack.add_env_var(
"TEST".to_string(),
Value::string(
"NUSHELL".to_string(),
nu_protocol::Span::new(0, dir_str.len()),
),
);

// Merge environment into the permanent state
let merge_result = engine_state.merge_env(&mut stack, &dir);
assert!(merge_result.is_ok());

(dir, dir_str, engine_state, stack)
new_engine_helper(fs::fixtures().join("quoted_completions"))
}

pub fn new_partial_engine() -> (AbsolutePathBuf, String, EngineState, Stack) {
// Target folder inside assets
let dir = fs::fixtures().join("partial_completions");
let dir_str = dir
.clone()
.into_os_string()
.into_string()
.unwrap_or_default();

// Create a new engine with default context
let mut engine_state = create_default_context();

// New stack
let mut stack = Stack::new();

// Add pwd as env var
stack.add_env_var(
"PWD".to_string(),
Value::string(dir_str.clone(), nu_protocol::Span::new(0, dir_str.len())),
);
stack.add_env_var(
"TEST".to_string(),
Value::string(
"NUSHELL".to_string(),
nu_protocol::Span::new(0, dir_str.len()),
),
);

// Merge environment into the permanent state
let merge_result = engine_state.merge_env(&mut stack, &dir);
assert!(merge_result.is_ok());

(dir, dir_str, engine_state, stack)
new_engine_helper(fs::fixtures().join("partial_completions"))
}
// match a list of suggestions with the expected values
pub fn match_suggestions(expected: &Vec<String>, suggestions: &Vec<Suggestion>) {
/// match a list of suggestions with the expected values
pub fn match_suggestions(expected: &Vec<&str>, suggestions: &Vec<Suggestion>) {
let expected_len = expected.len();
let suggestions_len = suggestions.len();
if expected_len != suggestions_len {

@@ -197,33 +149,38 @@ pub fn match_suggestions(expected: &Vec<String>, suggestions: &Vec<Suggestion>)
)
}

let suggestoins_str = suggestions
let suggestions_str = suggestions
.iter()
.map(|it| it.value.clone())
.map(|it| it.value.as_str())
.collect::<Vec<_>>();

assert_eq!(expected, &suggestoins_str);
assert_eq!(expected, &suggestions_str);
}

// append the separator to the converted path
/// match a list of suggestions with the expected values
pub fn match_suggestions_by_string(expected: &[String], suggestions: &Vec<Suggestion>) {
let expected = expected.iter().map(|it| it.as_str()).collect::<Vec<_>>();
match_suggestions(&expected, suggestions);
}

/// append the separator to the converted path
pub fn folder(path: impl Into<PathBuf>) -> String {
let mut converted_path = file(path);
converted_path.push(MAIN_SEPARATOR);
converted_path
}

// convert a given path to string
/// convert a given path to string
pub fn file(path: impl Into<PathBuf>) -> String {
path.into().into_os_string().into_string().unwrap()
}

// merge_input executes the given input into the engine
// and merges the state
/// merge_input executes the given input into the engine
/// and merges the state
pub fn merge_input(
input: &[u8],
engine_state: &mut EngineState,
stack: &mut Stack,
dir: AbsolutePathBuf,
) -> Result<(), ShellError> {
let (block, delta) = {
let mut working_set = StateWorkingSet::new(engine_state);

@@ -237,14 +194,16 @@ pub fn merge_input(

engine_state.merge_delta(delta)?;

assert!(eval_block::<WithoutDebug>(
engine_state,
stack,
&block,
PipelineData::Value(Value::nothing(Span::unknown()), None),
)
.is_ok());
assert!(
eval_block::<WithoutDebug>(
engine_state,
stack,
&block,
PipelineData::Value(Value::nothing(Span::unknown()), None),
)
.is_ok()
);

// Merge environment into the permanent state
engine_state.merge_env(stack, &dir)
engine_state.merge_env(stack)
}

@@ -1,3 +1,5 @@
pub mod completions_helpers;

pub use completions_helpers::{file, folder, match_suggestions, merge_input, new_engine};
pub use completions_helpers::{
file, folder, match_suggestions, match_suggestions_by_string, merge_input, new_engine,
};
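The hunk above changes `match_suggestions` to compare `&Vec<&str>` instead of `&Vec<String>`. A minimal, std-only sketch of the comparison it performs; `FakeSuggestion` is an illustrative stand-in for reedline's `Suggestion`, not the real type:

```rust
// Stand-in for reedline's `Suggestion`; only the `value` field matters here.
struct FakeSuggestion {
    value: String,
}

// Borrow each suggestion's value as `&str` and assert equality with the expected list.
fn match_suggestions(expected: &Vec<&str>, suggestions: &Vec<FakeSuggestion>) {
    let actual: Vec<_> = suggestions.iter().map(|s| s.value.as_str()).collect();
    assert_eq!(expected, &actual);
}

fn main() {
    let suggestions = vec![
        FakeSuggestion { value: "lib-dir1/".into() },
        FakeSuggestion { value: "lib-dir3/".into() },
    ];
    match_suggestions(&vec!["lib-dir1/", "lib-dir3/"], &suggestions);
    println!("suggestions match");
}
```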
@@ -1,11 +1,11 @@
[package]
authors = ["The Nushell Project Developers"]
description = "The foundation tools to build Nushell commands."
edition = "2021"
edition = "2024"
license = "MIT"
name = "nu-cmd-base"
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-base"
version = "0.98.0"
version = "0.105.0"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@@ -13,12 +13,12 @@ version = "0.98.0"
workspace = true

[dependencies]
nu-engine = { path = "../nu-engine", version = "0.98.0" }
nu-parser = { path = "../nu-parser", version = "0.98.0" }
nu-path = { path = "../nu-path", version = "0.98.0" }
nu-protocol = { path = "../nu-protocol", version = "0.98.0" }
nu-engine = { path = "../nu-engine", version = "0.105.0", default-features = false }
nu-parser = { path = "../nu-parser", version = "0.105.0" }
nu-path = { path = "../nu-path", version = "0.105.0" }
nu-protocol = { path = "../nu-protocol", version = "0.105.0", default-features = false }

indexmap = { workspace = true }
miette = { workspace = true }

[dev-dependencies]
[dev-dependencies]

@@ -1,4 +1,4 @@
use indexmap::{indexset, IndexSet};
use indexmap::{IndexSet, indexset};
use nu_protocol::Value;

pub fn merge_descriptors(values: &[Value]) -> Vec<String> {
@@ -1,61 +1,61 @@
use crate::util::get_guaranteed_cwd;
use miette::Result;
use nu_engine::{eval_block, eval_block_with_early_return};
use nu_parser::parse;
use nu_protocol::{
PipelineData, PositionalArg, ShellError, Span, Type, Value, VarId,
cli_error::{report_parse_error, report_shell_error},
debugger::WithoutDebug,
engine::{Closure, EngineState, Stack, StateWorkingSet},
PipelineData, PositionalArg, ShellError, Span, Type, Value, VarId,
};
use std::sync::Arc;
use std::{collections::HashMap, sync::Arc};

pub fn eval_env_change_hook(
env_change_hook: Option<Value>,
env_change_hook: &HashMap<String, Vec<Value>>,
engine_state: &mut EngineState,
stack: &mut Stack,
) -> Result<(), ShellError> {
if let Some(hook) = env_change_hook {
match hook {
Value::Record { val, .. } => {
for (env_name, hook_value) in &*val {
let before = engine_state
.previous_env_vars
.get(env_name)
.cloned()
.unwrap_or_default();
for (env, hooks) in env_change_hook {
let before = engine_state.previous_env_vars.get(env);
let after = stack.get_env_var(engine_state, env);
if before != after {
let before = before.cloned().unwrap_or_default();
let after = after.cloned().unwrap_or_default();

let after = stack
.get_env_var(engine_state, env_name)
.unwrap_or_default();
eval_hooks(
engine_state,
stack,
vec![("$before".into(), before), ("$after".into(), after.clone())],
hooks,
"env_change",
)?;

if before != after {
eval_hook(
engine_state,
stack,
None,
vec![("$before".into(), before), ("$after".into(), after.clone())],
hook_value,
"env_change",
)?;

Arc::make_mut(&mut engine_state.previous_env_vars)
.insert(env_name.to_string(), after);
}
}
}
x => {
return Err(ShellError::TypeMismatch {
err_message: "record for the 'env_change' hook".to_string(),
span: x.span(),
});
}
Arc::make_mut(&mut engine_state.previous_env_vars).insert(env.clone(), after);
}
}

Ok(())
}

pub fn eval_hooks(
engine_state: &mut EngineState,
stack: &mut Stack,
arguments: Vec<(String, Value)>,
hooks: &[Value],
hook_name: &str,
) -> Result<(), ShellError> {
for hook in hooks {
eval_hook(
engine_state,
stack,
None,
arguments.clone(),
hook,
&format!("{hook_name} list, recursive"),
)?;
}
Ok(())
}

pub fn eval_hook(
engine_state: &mut EngineState,
stack: &mut Stack,

@@ -92,11 +92,12 @@ pub fn eval_hook(
);
if let Some(err) = working_set.parse_errors.first() {
report_parse_error(&working_set, err);

return Err(ShellError::UnsupportedConfigValue {
expected: "valid source code".into(),
value: "source code with syntax errors".into(),
span,
return Err(ShellError::GenericError {
error: format!("Failed to run {hook_name} hook"),
msg: "source code has errors".into(),
span: Some(span),
help: None,
inner: Vec::new(),
});
}

@@ -132,16 +133,7 @@ pub fn eval_hook(
}
}
Value::List { vals, .. } => {
for val in vals {
eval_hook(
engine_state,
stack,
None,
arguments.clone(),
val,
&format!("{hook_name} list, recursive"),
)?;
}
eval_hooks(engine_state, stack, arguments, vals, hook_name)?;
}
Value::Record { val, .. } => {
// Hooks can optionally be a record in this form:

@@ -167,10 +159,10 @@ pub fn eval_hook(
{
val
} else {
return Err(ShellError::UnsupportedConfigValue {
expected: "boolean output".to_string(),
value: "other PipelineData variant".to_string(),
span: other_span,
return Err(ShellError::RuntimeTypeMismatch {
expected: Type::Bool,
actual: pipeline_data.get_type(),
span: pipeline_data.span().unwrap_or(other_span),
});
}
}

@@ -179,9 +171,9 @@ pub fn eval_hook(
}
}
} else {
return Err(ShellError::UnsupportedConfigValue {
expected: "block".to_string(),
value: format!("{}", condition.get_type()),
return Err(ShellError::RuntimeTypeMismatch {
expected: Type::Closure,
actual: condition.get_type(),
span: other_span,
});
}

@@ -224,11 +216,12 @@ pub fn eval_hook(
);
if let Some(err) = working_set.parse_errors.first() {
report_parse_error(&working_set, err);

return Err(ShellError::UnsupportedConfigValue {
expected: "valid source code".into(),
value: "source code with syntax errors".into(),
span: source_span,
return Err(ShellError::GenericError {
error: format!("Failed to run {hook_name} hook"),
msg: "source code has errors".into(),
span: Some(span),
help: None,
inner: Vec::new(),
});
}

@@ -263,9 +256,9 @@ pub fn eval_hook(
run_hook(engine_state, stack, val, input, arguments, source_span)?;
}
other => {
return Err(ShellError::UnsupportedConfigValue {
expected: "block or string".to_string(),
value: format!("{}", other.get_type()),
return Err(ShellError::RuntimeTypeMismatch {
expected: Type::custom("string or closure"),
actual: other.get_type(),
span: source_span,
});
}

@@ -276,16 +269,15 @@ pub fn eval_hook(
output = run_hook(engine_state, stack, val, input, arguments, span)?;
}
other => {
return Err(ShellError::UnsupportedConfigValue {
expected: "string, block, record, or list of commands".into(),
value: format!("{}", other.get_type()),
return Err(ShellError::RuntimeTypeMismatch {
expected: Type::custom("string, closure, record, or list"),
actual: other.get_type(),
span: other.span(),
});
}
}

let cwd = get_guaranteed_cwd(engine_state, stack);
engine_state.merge_env(stack, cwd)?;
engine_state.merge_env(stack)?;

Ok(output)
}
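For orientation, here is a hedged, std-only sketch of the control flow the reworked `eval_env_change_hook` above follows: for each watched environment variable, compare the previously recorded value with the current one, run the registered hooks only when they differ, then record the new value. All names and types below are illustrative stand-ins, not the nu-protocol API:

```rust
use std::collections::HashMap;

// Run the hooks registered for each environment variable whose value changed.
fn run_env_change_hooks(
    previous: &mut HashMap<String, String>,
    current: &HashMap<String, String>,
    hooks: &HashMap<String, Vec<fn(&str, &str)>>,
) {
    for (env, hook_list) in hooks {
        let before = previous.get(env).cloned().unwrap_or_default();
        let after = current.get(env).cloned().unwrap_or_default();
        // Only fire the hooks (cf. `eval_hooks`) when the value actually changed,
        // then remember the new value for the next comparison.
        if before != after {
            for hook in hook_list {
                hook(&before, &after);
            }
            previous.insert(env.clone(), after);
        }
    }
}

fn print_change(before: &str, after: &str) {
    println!("PWD changed: {before} -> {after}");
}

fn main() {
    let mut previous = HashMap::from([("PWD".to_string(), "/old".to_string())]);
    let current = HashMap::from([("PWD".to_string(), "/new".to_string())]);
    let hooks: HashMap<String, Vec<fn(&str, &str)>> =
        HashMap::from([("PWD".to_string(), vec![print_change as fn(&str, &str)])]);
    run_env_change_hooks(&mut previous, &current, &hooks);
    assert_eq!(previous.get("PWD").map(String::as_str), Some("/new"));
}
```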
@@ -1,4 +1,4 @@
use nu_protocol::{ast::CellPath, PipelineData, ShellError, Signals, Span, Value};
use nu_protocol::{PipelineData, ShellError, Signals, Span, Value, ast::CellPath};
use std::sync::Arc;

pub trait CmdArgument {

@@ -3,3 +3,6 @@ pub mod formats;
pub mod hook;
pub mod input_handler;
pub mod util;
mod wrap_call;

pub use wrap_call::*;
@@ -1,30 +1,9 @@
use nu_path::AbsolutePathBuf;
use nu_protocol::{
engine::{EngineState, Stack},
Range, ShellError, Span, Value,
engine::{EngineState, Stack},
};
use std::ops::Bound;

pub fn get_init_cwd() -> AbsolutePathBuf {
std::env::current_dir()
.ok()
.and_then(|path| AbsolutePathBuf::try_from(path).ok())
.or_else(|| {
std::env::var("PWD")
.ok()
.and_then(|path| AbsolutePathBuf::try_from(path).ok())
})
.or_else(nu_path::home_dir)
.expect("Failed to get current working directory")
}

pub fn get_guaranteed_cwd(engine_state: &EngineState, stack: &Stack) -> AbsolutePathBuf {
engine_state
.cwd(Some(stack))
.ok()
.unwrap_or_else(get_init_cwd)
}

type MakeRangeError = fn(&str, Span) -> ShellError;

/// Returns a inclusive pair of boundary in given `range`.

@@ -99,10 +78,10 @@ pub fn get_editor(
get_editor_commandline(&config.buffer_editor, "$env.config.buffer_editor")
{
Ok(buff_editor)
} else if let Some(value) = env_vars.get("EDITOR") {
get_editor_commandline(value, "$env.EDITOR")
} else if let Some(value) = env_vars.get("VISUAL") {
get_editor_commandline(value, "$env.VISUAL")
} else if let Some(value) = env_vars.get("EDITOR") {
get_editor_commandline(value, "$env.EDITOR")
} else {
Err(ShellError::GenericError {
error: "No editor configured".into(),

crates/nu-cmd-base/src/wrap_call.rs (new file, 101 lines)
@@ -0,0 +1,101 @@
use nu_engine::CallExt;
use nu_protocol::{
DeclId, FromValue, ShellError, Span,
engine::{Call, EngineState, Stack, StateWorkingSet},
};

/// A helper utility to aid in implementing commands which have the same behavior for `run` and `run_const`.
///
/// Only supports functions in [`Call`] and [`CallExt`] which have a `const` suffix.
///
/// To use, the actual command logic should be moved to a function. Then, `eval` and `eval_const` can be implemented like this:
/// ```rust
/// # use nu_engine::command_prelude::*;
/// # use nu_cmd_base::WrapCall;
/// # fn do_command_logic(call: WrapCall) -> Result<PipelineData, ShellError> { Ok(PipelineData::Empty) }
///
/// # struct Command {}
/// # impl Command {
/// fn run(&self, engine_state: &EngineState, stack: &mut Stack, call: &Call) -> Result<PipelineData, ShellError> {
/// let call = WrapCall::Eval(engine_state, stack, call);
/// do_command_logic(call)
/// }
///
/// fn run_const(&self, working_set: &StateWorkingSet, call: &Call) -> Result<PipelineData, ShellError> {
/// let call = WrapCall::ConstEval(working_set, call);
/// do_command_logic(call)
/// }
/// # }
/// ```
///
/// Then, the typical [`Call`] and [`CallExt`] operations can be called using destructuring:
///
/// ```rust
/// # use nu_engine::command_prelude::*;
/// # use nu_cmd_base::WrapCall;
/// # let call = WrapCall::Eval(&EngineState::new(), &mut Stack::new(), &Call::new(Span::unknown()));
/// # fn do_command_logic(call: WrapCall) -> Result<(), ShellError> {
/// let (call, required): (_, String) = call.req(0)?;
/// let (call, flag): (_, Option<i64>) = call.get_flag("number")?;
/// # Ok(())
/// # }
/// ```
///
/// A new `WrapCall` instance has to be returned after each function to ensure
/// that there is only ever one copy of mutable [`Stack`] reference.
pub enum WrapCall<'a> {
Eval(&'a EngineState, &'a mut Stack, &'a Call<'a>),
ConstEval(&'a StateWorkingSet<'a>, &'a Call<'a>),
}

/// Macro to choose between the non-const and const versions of each [`Call`]/[`CallExt`] function
macro_rules! proxy {
($self:ident , $eval:ident , $const:ident , $( $args:expr ),*) => {
match $self {
WrapCall::Eval(engine_state, stack, call) => {
Call::$eval(call, engine_state, stack, $( $args ),*)
.map(|val| (WrapCall::Eval(engine_state, stack, call), val))
},
WrapCall::ConstEval(working_set, call) => {
Call::$const(call, working_set, $( $args ),*)
.map(|val| (WrapCall::ConstEval(working_set, call), val))
},
}
};
}

impl WrapCall<'_> {
pub fn head(&self) -> Span {
match self {
WrapCall::Eval(_, _, call) => call.head,
WrapCall::ConstEval(_, call) => call.head,
}
}

pub fn decl_id(&self) -> DeclId {
match self {
WrapCall::Eval(_, _, call) => call.decl_id,
WrapCall::ConstEval(_, call) => call.decl_id,
}
}

pub fn has_flag<T: FromValue>(self, flag_name: &str) -> Result<(Self, bool), ShellError> {
proxy!(self, has_flag, has_flag_const, flag_name)
}

pub fn get_flag<T: FromValue>(self, name: &str) -> Result<(Self, Option<T>), ShellError> {
proxy!(self, get_flag, get_flag_const, name)
}

pub fn req<T: FromValue>(self, pos: usize) -> Result<(Self, T), ShellError> {
proxy!(self, req, req_const, pos)
}

pub fn rest<T: FromValue>(self, pos: usize) -> Result<(Self, Vec<T>), ShellError> {
proxy!(self, rest, rest_const, pos)
}

pub fn opt<T: FromValue>(self, pos: usize) -> Result<(Self, Option<T>), ShellError> {
proxy!(self, opt, opt_const, pos)
}
}
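As a side note on the design above: a hedged, std-only sketch of the consume-and-return pattern `WrapCall` relies on, where each accessor takes `self` and hands back a fresh wrapper alongside the extracted value so only one copy of the mutable state reference is alive at a time. `Wrapper` and `take` are illustrative names, not part of the crate:

```rust
// A wrapper holding an exclusive borrow of some state, mirroring WrapCall::Eval.
struct Wrapper<'a> {
    state: &'a mut Vec<String>,
}

impl<'a> Wrapper<'a> {
    // Consume `self`, read a value, and return a new wrapper plus the value,
    // so callers thread the single mutable borrow through each call.
    fn take(self, idx: usize) -> (Self, Option<String>) {
        let val = self.state.get(idx).cloned();
        (self, val)
    }
}

fn main() {
    let mut args = vec!["--flag".to_string(), "value".to_string()];
    let w = Wrapper { state: &mut args };
    let (w, first) = w.take(0);
    let (_w, second) = w.take(1);
    println!("{first:?} {second:?}");
}
```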
@ -1,11 +1,11 @@
|
||||
[package]
|
||||
authors = ["The Nushell Project Developers"]
|
||||
description = "Nushell's extra commands that are not part of the 1.0 api standard."
|
||||
edition = "2021"
|
||||
edition = "2024"
|
||||
license = "MIT"
|
||||
name = "nu-cmd-extra"
|
||||
repository = "https://github.com/nushell/nushell/tree/main/crates/nu-cmd-extra"
|
||||
version = "0.98.0"
|
||||
version = "0.105.0"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
@ -16,13 +16,13 @@ bench = false
|
||||
workspace = true
|
||||
|
||||
[dependencies]
|
||||
nu-cmd-base = { path = "../nu-cmd-base", version = "0.98.0" }
|
||||
nu-engine = { path = "../nu-engine", version = "0.98.0" }
|
||||
nu-json = { version = "0.98.0", path = "../nu-json" }
|
||||
nu-parser = { path = "../nu-parser", version = "0.98.0" }
|
||||
nu-pretty-hex = { version = "0.98.0", path = "../nu-pretty-hex" }
|
||||
nu-protocol = { path = "../nu-protocol", version = "0.98.0" }
|
||||
nu-utils = { path = "../nu-utils", version = "0.98.0" }
|
||||
nu-cmd-base = { path = "../nu-cmd-base", version = "0.105.0" }
|
||||
nu-engine = { path = "../nu-engine", version = "0.105.0", default-features = false }
|
||||
nu-json = { version = "0.105.0", path = "../nu-json" }
|
||||
nu-parser = { path = "../nu-parser", version = "0.105.0" }
|
||||
nu-pretty-hex = { version = "0.105.0", path = "../nu-pretty-hex" }
|
||||
nu-protocol = { path = "../nu-protocol", version = "0.105.0", default-features = false }
|
||||
nu-utils = { path = "../nu-utils", version = "0.105.0", default-features = false }
|
||||
|
||||
# Potential dependencies for extras
|
||||
heck = { workspace = true }
|
||||
@ -34,8 +34,9 @@ serde = { workspace = true }
|
||||
serde_urlencoded = { workspace = true }
|
||||
v_htmlescape = { workspace = true }
|
||||
itertools = { workspace = true }
|
||||
mime = { workspace = true }
|
||||
|
||||
[dev-dependencies]
|
||||
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.98.0" }
|
||||
nu-command = { path = "../nu-command", version = "0.98.0" }
|
||||
nu-test-support = { path = "../nu-test-support", version = "0.98.0" }
|
||||
nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.105.0" }
|
||||
nu-command = { path = "../nu-command", version = "0.105.0" }
|
||||
nu-test-support = { path = "../nu-test-support", version = "0.105.0" }
|
||||
|
@ -16,8 +16,8 @@ mod test_examples {
|
||||
};
|
||||
|
||||
use nu_protocol::{
|
||||
engine::{Command, EngineState, StateWorkingSet},
|
||||
Type,
|
||||
engine::{Command, EngineState, StateWorkingSet},
|
||||
};
|
||||
use std::collections::HashSet;
|
||||
|
||||
@ -43,7 +43,12 @@ mod test_examples {
|
||||
signature.operates_on_cell_paths(),
|
||||
),
|
||||
);
|
||||
check_example_evaluates_to_expected_output(&example, cwd.as_path(), &mut engine_state);
|
||||
check_example_evaluates_to_expected_output(
|
||||
cmd.name(),
|
||||
&example,
|
||||
cwd.as_path(),
|
||||
&mut engine_state,
|
||||
);
|
||||
}
|
||||
|
||||
check_all_signature_input_output_types_entries_have_examples(
|
||||
|
@ -26,7 +26,7 @@ impl Command for BitsAnd {
|
||||
.required(
|
||||
"target",
|
||||
SyntaxShape::OneOf(vec![SyntaxShape::Binary, SyntaxShape::Int]),
|
||||
"right-hand side of the operation",
|
||||
"Right-hand side of the operation.",
|
||||
)
|
||||
.named(
|
||||
"endian",
|
||||
@ -65,7 +65,7 @@ impl Command for BitsAnd {
|
||||
return Err(ShellError::TypeMismatch {
|
||||
err_message: "Endian must be one of native, little, big".to_string(),
|
||||
span: endian.span,
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@ -113,8 +113,7 @@ impl Command for BitsAnd {
|
||||
])),
|
||||
},
|
||||
Example {
|
||||
description:
|
||||
"Apply bitwise and to binary data of varying lengths with specified endianness",
|
||||
description: "Apply bitwise and to binary data of varying lengths with specified endianness",
|
||||
example: "0x[c0 ff ee] | bits and 0x[ff] --endian big",
|
||||
result: Some(Value::test_binary(vec![0x00, 0x00, 0xee])),
|
||||
},
|
||||
|
@ -1,6 +1,5 @@
|
||||
mod and;
|
||||
mod bits_;
|
||||
mod into;
|
||||
mod not;
|
||||
mod or;
|
||||
mod rotate_left;
|
||||
@ -11,7 +10,6 @@ mod xor;
|
||||
|
||||
pub use and::BitsAnd;
|
||||
pub use bits_::Bits;
|
||||
pub use into::BitsInto;
|
||||
pub use not::BitsNot;
|
||||
pub use or::BitsOr;
|
||||
pub use rotate_left::BitsRol;
|
||||
@ -137,7 +135,7 @@ where
|
||||
(min, max) => (rhs, lhs, max, min),
|
||||
};
|
||||
|
||||
let pad = iter::repeat(0).take(max_len - min_len);
|
||||
let pad = iter::repeat_n(0, max_len - min_len);
|
||||
|
||||
let mut a;
|
||||
let mut b;
|
||||
@ -161,9 +159,10 @@ where
|
||||
}
|
||||
(Value::Binary { .. }, Value::Int { .. }) | (Value::Int { .. }, Value::Binary { .. }) => {
|
||||
Value::error(
|
||||
ShellError::PipelineMismatch {
|
||||
ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "input, and argument, to be both int or both binary"
|
||||
.to_string(),
|
||||
wrong_type: "int and binary".to_string(),
|
||||
dst_span: rhs.span(),
|
||||
src_span: span,
|
||||
},
|
||||
|
@ -1,5 +1,5 @@
|
||||
use super::{get_number_bytes, NumberBytes};
|
||||
use nu_cmd_base::input_handler::{operate, CmdArgument};
|
||||
use super::{NumberBytes, get_number_bytes};
|
||||
use nu_cmd_base::input_handler::{CmdArgument, operate};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
@ -100,8 +100,7 @@ impl Command for BitsNot {
|
||||
)),
|
||||
},
|
||||
Example {
|
||||
description:
|
||||
"Apply logical negation to a list of numbers, treat input as 2 bytes number",
|
||||
description: "Apply logical negation to a list of numbers, treat input as 2 bytes number",
|
||||
example: "[4 3 2] | bits not --number-bytes 2",
|
||||
result: Some(Value::list(
|
||||
vec![
|
||||
@ -113,8 +112,7 @@ impl Command for BitsNot {
|
||||
)),
|
||||
},
|
||||
Example {
|
||||
description:
|
||||
"Apply logical negation to a list of numbers, treat input as signed number",
|
||||
description: "Apply logical negation to a list of numbers, treat input as signed number",
|
||||
example: "[4 3 2] | bits not --signed",
|
||||
result: Some(Value::list(
|
||||
vec![
|
||||
|
@ -27,7 +27,7 @@ impl Command for BitsOr {
|
||||
.required(
|
||||
"target",
|
||||
SyntaxShape::OneOf(vec![SyntaxShape::Binary, SyntaxShape::Int]),
|
||||
"right-hand side of the operation",
|
||||
"Right-hand side of the operation.",
|
||||
)
|
||||
.named(
|
||||
"endian",
|
||||
@ -66,7 +66,7 @@ impl Command for BitsOr {
|
||||
return Err(ShellError::TypeMismatch {
|
||||
err_message: "Endian must be one of native, little, big".to_string(),
|
||||
span: endian.span,
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@ -106,8 +106,7 @@ impl Command for BitsOr {
|
||||
result: Some(Value::test_binary(vec![0xca, 0xfe])),
|
||||
},
|
||||
Example {
|
||||
description:
|
||||
"Apply bitwise or to binary data of varying lengths with specified endianness",
|
||||
description: "Apply bitwise or to binary data of varying lengths with specified endianness",
|
||||
example: "0x[c0 ff ee] | bits or 0x[ff] --endian big",
|
||||
result: Some(Value::test_binary(vec![0xc0, 0xff, 0xff])),
|
||||
},
|
||||
|
@ -1,5 +1,5 @@
|
||||
use super::{get_input_num_type, get_number_bytes, InputNumType, NumberBytes};
|
||||
use nu_cmd_base::input_handler::{operate, CmdArgument};
|
||||
use super::{InputNumType, NumberBytes, get_input_num_type, get_number_bytes};
|
||||
use nu_cmd_base::input_handler::{CmdArgument, operate};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
struct Arguments {
|
||||
@ -37,7 +37,7 @@ impl Command for BitsRol {
|
||||
),
|
||||
])
|
||||
.allow_variants_without_examples(true)
|
||||
.required("bits", SyntaxShape::Int, "number of bits to rotate left")
|
||||
.required("bits", SyntaxShape::Int, "Number of bits to rotate left.")
|
||||
.switch(
|
||||
"signed",
|
||||
"always treat input number as a signed number",
|
||||
@ -222,7 +222,8 @@ fn rotate_bytes_and_bits_left(data: &[u8], byte_shift: usize, bit_shift: usize)
|
||||
debug_assert!(byte_shift < data.len());
|
||||
debug_assert!(
|
||||
(1..8).contains(&bit_shift),
|
||||
"Bit shifts of 0 can't be handled by this impl and everything else should be part of the byteshift");
|
||||
"Bit shifts of 0 can't be handled by this impl and everything else should be part of the byteshift"
|
||||
);
|
||||
let mut bytes = Vec::with_capacity(data.len());
|
||||
let mut next_index = byte_shift;
|
||||
for _ in 0..data.len() {
|
||||
|
@ -1,5 +1,5 @@
|
||||
use super::{get_input_num_type, get_number_bytes, InputNumType, NumberBytes};
|
||||
use nu_cmd_base::input_handler::{operate, CmdArgument};
|
||||
use super::{InputNumType, NumberBytes, get_input_num_type, get_number_bytes};
|
||||
use nu_cmd_base::input_handler::{CmdArgument, operate};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
struct Arguments {
|
||||
@ -37,7 +37,7 @@ impl Command for BitsRor {
|
||||
),
|
||||
])
|
||||
.allow_variants_without_examples(true)
|
||||
.required("bits", SyntaxShape::Int, "number of bits to rotate right")
|
||||
.required("bits", SyntaxShape::Int, "Number of bits to rotate right.")
|
||||
.switch(
|
||||
"signed",
|
||||
"always treat input number as a signed number",
|
||||
|
@ -1,6 +1,6 @@
|
||||
use super::{get_input_num_type, get_number_bytes, InputNumType, NumberBytes};
|
||||
use super::{InputNumType, NumberBytes, get_input_num_type, get_number_bytes};
|
||||
use itertools::Itertools;
|
||||
use nu_cmd_base::input_handler::{operate, CmdArgument};
|
||||
use nu_cmd_base::input_handler::{CmdArgument, operate};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
use std::iter;
|
||||
@ -40,7 +40,7 @@ impl Command for BitsShl {
|
||||
),
|
||||
])
|
||||
.allow_variants_without_examples(true)
|
||||
.required("bits", SyntaxShape::Int, "number of bits to shift left")
|
||||
.required("bits", SyntaxShape::Int, "Number of bits to shift left.")
|
||||
.switch(
|
||||
"signed",
|
||||
"always treat input number as a signed number",
|
||||
@ -237,7 +237,8 @@ fn shift_bytes_left(data: &[u8], byte_shift: usize) -> Vec<u8> {
|
||||
|
||||
fn shift_bytes_and_bits_left(data: &[u8], byte_shift: usize, bit_shift: usize) -> Vec<u8> {
|
||||
use itertools::Position::*;
|
||||
debug_assert!((1..8).contains(&bit_shift),
|
||||
debug_assert!(
|
||||
(1..8).contains(&bit_shift),
|
||||
"Bit shifts of 0 can't be handled by this impl and everything else should be part of the byteshift"
|
||||
);
|
||||
data.iter()
|
||||
@ -249,7 +250,7 @@ fn shift_bytes_and_bits_left(data: &[u8], byte_shift: usize, bit_shift: usize) -
|
||||
Last | Only => lhs << bit_shift,
|
||||
_ => (lhs << bit_shift) | (rhs >> (8 - bit_shift)),
|
||||
})
|
||||
.chain(iter::repeat(0).take(byte_shift))
|
||||
.chain(iter::repeat_n(0, byte_shift))
|
||||
.collect::<Vec<u8>>()
|
||||
}
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
use super::{get_input_num_type, get_number_bytes, InputNumType, NumberBytes};
|
||||
use nu_cmd_base::input_handler::{operate, CmdArgument};
|
||||
use super::{InputNumType, NumberBytes, get_input_num_type, get_number_bytes};
|
||||
use nu_cmd_base::input_handler::{CmdArgument, operate};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
struct Arguments {
|
||||
@ -37,7 +37,7 @@ impl Command for BitsShr {
|
||||
),
|
||||
])
|
||||
.allow_variants_without_examples(true)
|
||||
.required("bits", SyntaxShape::Int, "number of bits to shift right")
|
||||
.required("bits", SyntaxShape::Int, "Number of bits to shift right.")
|
||||
.switch(
|
||||
"signed",
|
||||
"always treat input number as a signed number",
|
||||
|
@ -27,7 +27,7 @@ impl Command for BitsXor {
|
||||
.required(
|
||||
"target",
|
||||
SyntaxShape::OneOf(vec![SyntaxShape::Binary, SyntaxShape::Int]),
|
||||
"right-hand side of the operation",
|
||||
"Right-hand side of the operation.",
|
||||
)
|
||||
.named(
|
||||
"endian",
|
||||
@ -66,7 +66,7 @@ impl Command for BitsXor {
|
||||
return Err(ShellError::TypeMismatch {
|
||||
err_message: "Endian must be one of native, little, big".to_string(),
|
||||
span: endian.span,
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@ -106,8 +106,7 @@ impl Command for BitsXor {
|
||||
result: Some(Value::test_binary(vec![0x70, 0x40])),
|
||||
},
|
||||
Example {
|
||||
description:
|
||||
"Apply bitwise xor to binary data of varying lengths with specified endianness",
|
||||
description: "Apply bitwise xor to binary data of varying lengths with specified endianness",
|
||||
example: "0x[ca fe] | bits xor 0x[aa] --endian big",
|
||||
result: Some(Value::test_binary(vec![0xca, 0x54])),
|
||||
},
|
||||
|
@ -1,3 +0,0 @@
|
||||
mod fmt;
|
||||
|
||||
pub(crate) use fmt::Fmt;
|
@ -1,4 +1,4 @@
|
||||
use nu_engine::{command_prelude::*, ClosureEval, ClosureEvalOnce};
|
||||
use nu_engine::{ClosureEval, ClosureEvalOnce, command_prelude::*};
|
||||
use nu_protocol::engine::Closure;
|
||||
|
||||
#[derive(Clone)]
|
||||
@ -25,8 +25,8 @@ impl Command for EachWhile {
|
||||
)])
|
||||
.required(
|
||||
"closure",
|
||||
SyntaxShape::Closure(Some(vec![SyntaxShape::Any, SyntaxShape::Int])),
|
||||
"the closure to run",
|
||||
SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
|
||||
"The closure to run.",
|
||||
)
|
||||
.category(Category::Filters)
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
use super::{vertical_rotate_value, VerticalDirection};
|
||||
use super::{VerticalDirection, vertical_rotate_value};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
|
@ -1,4 +1,4 @@
|
||||
use super::{horizontal_rotate_value, HorizontalDirection};
|
||||
use super::{HorizontalDirection, horizontal_rotate_value};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
|
@ -1,4 +1,4 @@
|
||||
use super::{horizontal_rotate_value, HorizontalDirection};
|
||||
use super::{HorizontalDirection, horizontal_rotate_value};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
|
@ -1,4 +1,4 @@
|
||||
use super::{vertical_rotate_value, VerticalDirection};
|
||||
use super::{VerticalDirection, vertical_rotate_value};
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
|
@ -13,14 +13,17 @@ impl Command for Rotate {
|
||||
.input_output_types(vec![
|
||||
(Type::record(), Type::table()),
|
||||
(Type::table(), Type::table()),
|
||||
(Type::list(Type::Any), Type::table()),
|
||||
(Type::String, Type::table()),
|
||||
])
|
||||
.switch("ccw", "rotate counter clockwise", None)
|
||||
.rest(
|
||||
"rest",
|
||||
SyntaxShape::String,
|
||||
"the names to give columns once rotated",
|
||||
"The names to give columns once rotated.",
|
||||
)
|
||||
.category(Category::Filters)
|
||||
.allow_variants_without_examples(true)
|
||||
}
|
||||
|
||||
fn description(&self) -> &str {
|
||||
@ -33,104 +36,93 @@ impl Command for Rotate {
|
||||
description: "Rotate a record clockwise, producing a table (like `transpose` but with column order reversed)",
|
||||
example: "{a:1, b:2} | rotate",
|
||||
result: Some(Value::test_list(vec![
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_int(1),
|
||||
"column1" => Value::test_string("a"),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_int(2),
|
||||
"column1" => Value::test_string("b"),
|
||||
}),
|
||||
],
|
||||
)),
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_int(1),
|
||||
"column1" => Value::test_string("a"),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_int(2),
|
||||
"column1" => Value::test_string("b"),
|
||||
}),
|
||||
])),
|
||||
},
|
||||
Example {
|
||||
description: "Rotate 2x3 table clockwise",
|
||||
example: "[[a b]; [1 2] [3 4] [5 6]] | rotate",
|
||||
result: Some(Value::test_list(
|
||||
vec![
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_int(5),
|
||||
"column1" => Value::test_int(3),
|
||||
"column2" => Value::test_int(1),
|
||||
"column3" => Value::test_string("a"),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_int(6),
|
||||
"column1" => Value::test_int(4),
|
||||
"column2" => Value::test_int(2),
|
||||
"column3" => Value::test_string("b"),
|
||||
}),
|
||||
],
|
||||
)),
|
||||
result: Some(Value::test_list(vec![
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_int(5),
|
||||
"column1" => Value::test_int(3),
|
||||
"column2" => Value::test_int(1),
|
||||
"column3" => Value::test_string("a"),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_int(6),
|
||||
"column1" => Value::test_int(4),
|
||||
"column2" => Value::test_int(2),
|
||||
"column3" => Value::test_string("b"),
|
||||
}),
|
||||
])),
|
||||
},
|
||||
Example {
|
||||
description: "Rotate table clockwise and change columns names",
|
||||
example: "[[a b]; [1 2]] | rotate col_a col_b",
|
||||
result: Some(Value::test_list(
|
||||
vec![
|
||||
Value::test_record(record! {
|
||||
"col_a" => Value::test_int(1),
|
||||
"col_b" => Value::test_string("a"),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"col_a" => Value::test_int(2),
|
||||
"col_b" => Value::test_string("b"),
|
||||
}),
|
||||
],
|
||||
)),
|
||||
result: Some(Value::test_list(vec![
|
||||
Value::test_record(record! {
|
||||
"col_a" => Value::test_int(1),
|
||||
"col_b" => Value::test_string("a"),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"col_a" => Value::test_int(2),
|
||||
"col_b" => Value::test_string("b"),
|
||||
}),
|
||||
])),
|
||||
},
|
||||
Example {
|
||||
description: "Rotate table counter clockwise",
|
||||
example: "[[a b]; [1 2]] | rotate --ccw",
|
||||
result: Some(Value::test_list(
|
||||
vec![
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_string("b"),
|
||||
"column1" => Value::test_int(2),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_string("a"),
|
||||
"column1" => Value::test_int(1),
|
||||
}),
|
||||
],
|
||||
)),
|
||||
result: Some(Value::test_list(vec![
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_string("b"),
|
||||
"column1" => Value::test_int(2),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_string("a"),
|
||||
"column1" => Value::test_int(1),
|
||||
}),
|
||||
])),
|
||||
},
|
||||
Example {
|
||||
description: "Rotate table counter-clockwise",
|
||||
example: "[[a b]; [1 2] [3 4] [5 6]] | rotate --ccw",
|
||||
result: Some(Value::test_list(
|
||||
vec![
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_string("b"),
|
||||
"column1" => Value::test_int(2),
|
||||
"column2" => Value::test_int(4),
|
||||
"column3" => Value::test_int(6),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_string("a"),
|
||||
"column1" => Value::test_int(1),
|
||||
"column2" => Value::test_int(3),
|
||||
"column3" => Value::test_int(5),
|
||||
}),
|
||||
],
|
||||
)),
|
||||
result: Some(Value::test_list(vec![
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_string("b"),
|
||||
"column1" => Value::test_int(2),
|
||||
"column2" => Value::test_int(4),
|
||||
"column3" => Value::test_int(6),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"column0" => Value::test_string("a"),
|
||||
"column1" => Value::test_int(1),
|
||||
"column2" => Value::test_int(3),
|
||||
"column3" => Value::test_int(5),
|
||||
}),
|
||||
])),
|
||||
},
|
||||
Example {
|
||||
description: "Rotate table counter-clockwise and change columns names",
|
||||
example: "[[a b]; [1 2]] | rotate --ccw col_a col_b",
|
||||
result: Some(Value::test_list(
|
||||
vec![
|
||||
Value::test_record(record! {
|
||||
"col_a" => Value::test_string("b"),
|
||||
"col_b" => Value::test_int(2),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"col_a" => Value::test_string("a"),
|
||||
"col_b" => Value::test_int(1),
|
||||
}),
|
||||
],
|
||||
)),
|
||||
result: Some(Value::test_list(vec![
|
||||
Value::test_record(record! {
|
||||
"col_a" => Value::test_string("b"),
|
||||
"col_b" => Value::test_int(2),
|
||||
}),
|
||||
Value::test_record(record! {
|
||||
"col_a" => Value::test_string("a"),
|
||||
"col_b" => Value::test_int(1),
|
||||
}),
|
||||
])),
|
||||
},
|
||||
]
|
||||
}
|
||||
|
@ -1,5 +1,5 @@
|
||||
use nu_engine::{command_prelude::*, ClosureEval};
|
||||
use nu_protocol::{engine::Closure, PipelineIterator};
|
||||
use nu_engine::{ClosureEval, command_prelude::*};
|
||||
use nu_protocol::{PipelineIterator, engine::Closure};
|
||||
use std::collections::HashSet;
|
||||
|
||||
#[derive(Clone)]
|
||||
@ -16,7 +16,7 @@ impl Command for UpdateCells {
|
||||
.required(
|
||||
"closure",
|
||||
SyntaxShape::Closure(Some(vec![SyntaxShape::Any])),
|
||||
"the closure to run an update for each cell",
|
||||
"The closure to run an update for each cell.",
|
||||
)
|
||||
.named(
|
||||
"columns",
|
||||
|
@ -2,4 +2,4 @@ mod from;
|
||||
mod to;
|
||||
|
||||
pub(crate) use from::url::FromUrl;
|
||||
pub(crate) use to::html::ToHtml;
|
||||
pub use to::html::ToHtml;
|
||||
|
@ -101,7 +101,7 @@ impl Command for ToHtml {
|
||||
.named(
|
||||
"theme",
|
||||
SyntaxShape::String,
|
||||
"the name of the theme to use (github, blulocolight, ...)",
|
||||
"the name of the theme to use (github, blulocolight, ...); case-insensitive",
|
||||
Some('t'),
|
||||
)
|
||||
.switch(
|
||||
@ -163,9 +163,16 @@ fn get_theme_from_asset_file(
|
||||
) -> Result<HashMap<&'static str, String>, ShellError> {
|
||||
let theme_name = match theme {
|
||||
Some(s) => &s.item,
|
||||
None => "default", // There is no theme named "default" so this will be HtmlTheme::default(), which is "nu_default".
|
||||
None => {
|
||||
return Ok(convert_html_theme_to_hash_map(
|
||||
is_dark,
|
||||
&HtmlTheme::default(),
|
||||
));
|
||||
}
|
||||
};
|
||||
|
||||
let theme_span = theme.map(|s| s.span).unwrap_or(Span::unknown());
|
||||
|
||||
// 228 themes come from
|
||||
// https://github.com/mbadolato/iTerm2-Color-Schemes/tree/master/windowsterminal
|
||||
// we should find a hit on any name in there
|
||||
@ -175,8 +182,17 @@ fn get_theme_from_asset_file(
|
||||
let th = asset
|
||||
.themes
|
||||
.into_iter()
|
||||
.find(|n| n.name.eq_ignore_case(theme_name)) // case insensitive search
|
||||
.unwrap_or_default();
|
||||
.find(|n| n.name.eq_ignore_case(theme_name)); // case insensitive search
|
||||
|
||||
let th = match th {
|
||||
Some(t) => t,
|
||||
None => {
|
||||
return Err(ShellError::TypeMismatch {
|
||||
err_message: format!("Unknown HTML theme '{}'", theme_name),
|
||||
span: theme_span,
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
Ok(convert_html_theme_to_hash_map(is_dark, &th))
|
||||
}
|
||||
@ -257,18 +273,20 @@ fn to_html(
|
||||
None => head,
|
||||
};
|
||||
|
||||
let color_hm = get_theme_from_asset_file(dark, theme.as_ref());
|
||||
let color_hm = match color_hm {
|
||||
let color_hm = match get_theme_from_asset_file(dark, theme.as_ref()) {
|
||||
Ok(c) => c,
|
||||
_ => {
|
||||
return Err(ShellError::GenericError {
|
||||
error: "Error finding theme name".into(),
|
||||
msg: "Error finding theme name".into(),
|
||||
span: Some(theme_span),
|
||||
help: None,
|
||||
inner: vec![],
|
||||
})
|
||||
}
|
||||
Err(e) => match e {
|
||||
ShellError::TypeMismatch {
|
||||
err_message,
|
||||
span: _,
|
||||
} => {
|
||||
return Err(ShellError::TypeMismatch {
|
||||
err_message,
|
||||
span: theme_span,
|
||||
});
|
||||
}
|
||||
_ => return Err(e),
|
||||
},
|
||||
};
|
||||
|
||||
// change the color of the page
|
||||
@ -330,7 +348,12 @@ fn to_html(
|
||||
output_string = run_regexes(®ex_hm, &output_string);
|
||||
}
|
||||
|
||||
Ok(Value::string(output_string, head).into_pipeline_data())
|
||||
let metadata = PipelineMetadata {
|
||||
data_source: nu_protocol::DataSource::None,
|
||||
content_type: Some(mime::TEXT_HTML_UTF_8.to_string()),
|
||||
};
|
||||
|
||||
Ok(Value::string(output_string, head).into_pipeline_data_with_metadata(metadata))
|
||||
}
|
||||
|
||||
fn theme_demo(span: Span) -> PipelineData {
|
||||
@ -698,4 +721,90 @@ mod tests {
|
||||
|
||||
test_examples(ToHtml {})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn get_theme_from_asset_file_returns_default() {
|
||||
let result = super::get_theme_from_asset_file(false, None);
|
||||
|
||||
assert!(result.is_ok(), "Expected Ok result for None theme");
|
||||
|
||||
let theme_map = result.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
theme_map.get("background").map(String::as_str),
|
||||
Some("white"),
|
||||
"Expected default background color to be white"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
theme_map.get("foreground").map(String::as_str),
|
||||
Some("black"),
|
||||
"Expected default foreground color to be black"
|
||||
);
|
||||
|
||||
assert!(
|
||||
theme_map.contains_key("red"),
|
||||
"Expected default theme to have a 'red' color"
|
||||
);
|
||||
|
||||
assert!(
|
||||
theme_map.contains_key("bold_green"),
|
||||
"Expected default theme to have a 'bold_green' color"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn returns_a_valid_theme() {
|
||||
let theme_name = "Dracula".to_string().into_spanned(Span::new(0, 7));
|
||||
let result = super::get_theme_from_asset_file(false, Some(&theme_name));
|
||||
|
||||
assert!(result.is_ok(), "Expected Ok result for valid theme");
|
||||
let theme_map = result.unwrap();
|
||||
let required_keys = [
|
||||
"background",
|
||||
"foreground",
|
||||
"red",
|
||||
"green",
|
||||
"blue",
|
||||
"bold_red",
|
||||
"bold_green",
|
||||
"bold_blue",
|
||||
];
|
||||
|
||||
for key in required_keys {
|
||||
assert!(
|
||||
theme_map.contains_key(key),
|
||||
"Expected theme to contain key '{}'",
|
||||
key
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn fails_with_unknown_theme_name() {
|
||||
let result = super::get_theme_from_asset_file(
|
||||
false,
|
||||
Some(&"doesnt-exist".to_string().into_spanned(Span::new(0, 13))),
|
||||
);
|
||||
|
||||
assert!(result.is_err(), "Expected error for invalid theme name");
|
||||
|
||||
if let Err(err) = result {
|
||||
assert!(
|
||||
matches!(err, ShellError::TypeMismatch { .. }),
|
||||
"Expected TypeMismatch error, got: {:?}",
|
||||
err
|
||||
);
|
||||
|
||||
if let ShellError::TypeMismatch { err_message, span } = err {
|
||||
assert!(
|
||||
err_message.contains("doesnt-exist"),
|
||||
"Error message should mention theme name, got: {}",
|
||||
err_message
|
||||
);
|
||||
assert_eq!(span.start, 0);
|
||||
assert_eq!(span.end, 13);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,9 +1,9 @@
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SubCommand;
|
||||
pub struct MathArcCos;
|
||||
|
||||
impl Command for SubCommand {
|
||||
impl Command for MathArcCos {
|
||||
fn name(&self) -> &str {
|
||||
"math arccos"
|
||||
}
|
||||
@ -114,6 +114,6 @@ mod test {
|
||||
fn test_examples() {
|
||||
use crate::test_examples;
|
||||
|
||||
test_examples(SubCommand {})
|
||||
test_examples(MathArcCos {})
|
||||
}
|
||||
}
|
||||
|
@ -1,9 +1,9 @@
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SubCommand;
|
||||
pub struct MathArcCosH;
|
||||
|
||||
impl Command for SubCommand {
|
||||
impl Command for MathArcCosH {
|
||||
fn name(&self) -> &str {
|
||||
"math arccosh"
|
||||
}
|
||||
@ -100,6 +100,6 @@ mod test {
|
||||
fn test_examples() {
|
||||
use crate::test_examples;
|
||||
|
||||
test_examples(SubCommand {})
|
||||
test_examples(MathArcCosH {})
|
||||
}
|
||||
}
|
||||
|
@ -1,9 +1,9 @@
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SubCommand;
|
||||
pub struct MathArcSin;
|
||||
|
||||
impl Command for SubCommand {
|
||||
impl Command for MathArcSin {
|
||||
fn name(&self) -> &str {
|
||||
"math arcsin"
|
||||
}
|
||||
@ -115,6 +115,6 @@ mod test {
|
||||
fn test_examples() {
|
||||
use crate::test_examples;
|
||||
|
||||
test_examples(SubCommand {})
|
||||
test_examples(MathArcSin {})
|
||||
}
|
||||
}
|
||||
|
@ -1,9 +1,9 @@
|
||||
use nu_engine::command_prelude::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SubCommand;
|
||||
pub struct MathArcSinH;
|
||||
|
||||
impl Command for SubCommand {
|
||||
impl Command for MathArcSinH {
|
||||
fn name(&self) -> &str {
|
||||
"math arcsinh"
|
||||
}
|
||||
@ -88,6 +88,6 @@ mod test {
|
||||
fn test_examples() {
|
||||
use crate::test_examples;
|
||||
|
||||
test_examples(SubCommand {})
|
||||
test_examples(MathArcSinH {})
|
||||
}
|
||||
}
|
||||
|
Some files were not shown because too many files have changed in this diff.