forked from extern/nushell
Compare commits
731 Commits
Author | SHA1 | Date | |
---|---|---|---|
41ebc6b42d | |||
b574dc6365 | |||
4af9e1de41 | |||
77d856fd53 | |||
6dceabf389 | |||
5919c6c433 | |||
339a2de0eb | |||
3e3cb15f3d | |||
5e31851070 | |||
0f626dd076 | |||
aa577bf9bf | |||
25298d35e4 | |||
78016446dc | |||
b304de8199 | |||
72838cc083 | |||
8093612cac | |||
f37f29b441 | |||
dba82ac530 | |||
0615adac94 | |||
21e508009f | |||
a9317d939f | |||
65d843c2a1 | |||
f6c62bf121 | |||
b4bc5fe9af | |||
10368d7060 | |||
68a314b5cb | |||
3c7633ae9f | |||
dba347ad00 | |||
bfba2c57f8 | |||
c69bf9f46f | |||
7ce1ddc6fd | |||
e7ce6f2fcd | |||
0c786bb890 | |||
8d31c32bda | |||
e7fb15be59 | |||
be7550822c | |||
0ce216eec4 | |||
1fe85cb91e | |||
8cadc5a4ac | |||
f9da7f7d58 | |||
367f11a62e | |||
8a45ca9cc3 | |||
e336930fd8 | |||
172ccc910e | |||
a8425daf14 | |||
b629136528 | |||
91ebb7f718 | |||
96484161c0 | |||
d21ddeeae6 | |||
4322d373e6 | |||
08571392e6 | |||
f52235b1c1 | |||
a66147da47 | |||
df778afd1f | |||
d7ddaa376b | |||
2ce892c6f0 | |||
28179ef450 | |||
2c6336c806 | |||
761fc9ae73 | |||
314c3c4a97 | |||
f7f1fba94f | |||
14817ef229 | |||
98233dcec1 | |||
6540509911 | |||
594eae1cbc | |||
5e961815fc | |||
fa9329c8e3 | |||
6c577e18ca | |||
4034129dba | |||
52cf65c19e | |||
cbbb246a6d | |||
87cc6d6f01 | |||
4b9ef5a9d0 | |||
31c703891a | |||
550bda477b | |||
219b7e64cd | |||
98c59f77b2 | |||
e8800fdd0c | |||
09f903c37a | |||
57af9b5040 | |||
16272b1b20 | |||
1dcbd89a89 | |||
eb6ef02ad1 | |||
17586bdfbd | |||
0e98cf3f1e | |||
e2a95c3e1d | |||
5cb7df57fc | |||
88f899d341 | |||
7d70b5feda | |||
fd6ee03391 | |||
9f702fe01a | |||
c9d9eec7f8 | |||
38cbfdb8a9 | |||
f9b7376949 | |||
e98ed1b43d | |||
251c3e103d | |||
d26e938436 | |||
dbadf9499e | |||
28df1559ea | |||
91784218c0 | |||
eeec5e10c3 | |||
0515ed976c | |||
f653992b4a | |||
b5f8c1cc50 | |||
f9a46ce1e7 | |||
b6ba7f97fd | |||
7a47905f11 | |||
683f4c35d9 | |||
dfa5173cf4 | |||
04b214bef6 | |||
37cb7fec77 | |||
8833969e4a | |||
bda238267c | |||
d07dc57537 | |||
d0a2888e88 | |||
cec2eff933 | |||
38b7a3e32b | |||
9dfb6c023f | |||
cde92a9fb9 | |||
5622bbdd48 | |||
3d79a9c37a | |||
a2a5b30568 | |||
768adb84a4 | |||
26b0250e22 | |||
6893850fce | |||
8834e6905e | |||
1d5f13ddca | |||
d12c16a331 | |||
ecf47bb3ab | |||
a4bb5d4ff5 | |||
e9ee7bda46 | |||
1d196394f6 | |||
cfda67ff82 | |||
59510a85d1 | |||
35edf22ac3 | |||
871fc72892 | |||
1fcf671ca4 | |||
ecebe1314a | |||
bda5db59c8 | |||
4526d757b6 | |||
e5405d7f5c | |||
201506a5ad | |||
49f9253ca2 | |||
efc879b955 | |||
3fa03eb7a4 | |||
24bad78607 | |||
8de4c9dbb7 | |||
f858e854bf | |||
87dbd3d5ac | |||
fe66b4c8ea | |||
8390cc97e1 | |||
c0a7d4e2a7 | |||
ce23a672d9 | |||
9851317aeb | |||
3fb4a5d6e6 | |||
340e701124 | |||
36938a4407 | |||
6a6589a357 | |||
b94a32e523 | |||
7db3c69984 | |||
5406450c42 | |||
d6a6e16d21 | |||
ea1b65916d | |||
cd9d9ad50b | |||
552272b37e | |||
388ce738e3 | |||
ef7fbcbe9f | |||
80941ace37 | |||
f317500873 | |||
911414a190 | |||
cca6360bcc | |||
f68503fa21 | |||
911b69dff0 | |||
4115634bfc | |||
8a0bdde17a | |||
a1e21828d6 | |||
0f193c2337 | |||
526d94d862 | |||
2fdafa52b1 | |||
f52c0655c7 | |||
97331c7b25 | |||
1fb5a419a7 | |||
4e9afd6698 | |||
8f9dd6516e | |||
e4226def16 | |||
c199a84dbb | |||
5a4ca11362 | |||
f2968c8385 | |||
8d01b019f4 | |||
bf87330d6e | |||
2bb85bdbd4 | |||
8f34c6eeda | |||
ac5543bad9 | |||
e4c56a25c6 | |||
11ff8190b1 | |||
9bd25d7427 | |||
5676713b1f | |||
b59231d32b | |||
e530cf0a9d | |||
6bfb4207c4 | |||
c63ad610f5 | |||
e38a4323b4 | |||
d40aea5d0a | |||
1ba69e4b11 | |||
f10390b1be | |||
c2b1908644 | |||
0a93335f6d | |||
fbb65cde44 | |||
8e7acd1094 | |||
c6ee6273db | |||
c77059f891 | |||
5bdda06ca6 | |||
d8303dd6d6 | |||
60ec68b097 | |||
deae66c194 | |||
0bdb6e735a | |||
7933e01e77 | |||
b443a2d713 | |||
7a28ababd1 | |||
ddb9d3a864 | |||
186b75a848 | |||
8cedd2ee5b | |||
0845572878 | |||
2e4b0b0b17 | |||
9f42d7693f | |||
3424334ce5 | |||
c68d236fd7 | |||
7c6e82c990 | |||
eb5d0d295b | |||
2eae5a2a89 | |||
595c9f2999 | |||
70d63e34e9 | |||
83ac65ced3 | |||
be140382cf | |||
d320ffe742 | |||
fbc6f01cfb | |||
3008434c0f | |||
5fbea31d15 | |||
f70c6d5d48 | |||
71e7eb7cfc | |||
339ec46961 | |||
fe53c37654 | |||
06857fbc52 | |||
1c830b5c95 | |||
a74145961e | |||
91698b2657 | |||
40fd8070a9 | |||
4d5f1f6023 | |||
bc2d65cd2e | |||
1a0b339897 | |||
8d3a937413 | |||
e85e1b2c9e | |||
c8aa8cb842 | |||
88c4473283 | |||
f4d9975dab | |||
6e8b768d79 | |||
cdb0eeafa2 | |||
388fc24191 | |||
b3c021899c | |||
bff50c6987 | |||
111fcf188e | |||
015693aea7 | |||
03a52f1988 | |||
372f6c16b3 | |||
c04da4c232 | |||
a070cb8154 | |||
bf4273776f | |||
95ca3ed4fa | |||
54c0603263 | |||
c598cd4255 | |||
2bb03d9813 | |||
9c41f581a9 | |||
6231367bc8 | |||
a7d7098b1a | |||
90aeb700ea | |||
9dfc647386 | |||
f992f5de95 | |||
946f7256e4 | |||
57d425d929 | |||
dd36bf07f4 | |||
406fb8d1d9 | |||
2d4a225e2a | |||
db218e06dc | |||
17e8a5ce38 | |||
07db14f72e | |||
412831cb9c | |||
f4dc79f4ba | |||
9cb573b3b4 | |||
ce106bfda9 | |||
a3ffc4baf0 | |||
3c3637b674 | |||
bcecd08825 | |||
55f99073ad | |||
008c60651c | |||
63667d9e46 | |||
08b770719c | |||
e0d27ebf84 | |||
0756145caf | |||
036860770b | |||
aa1ef39da3 | |||
7c8969d4ea | |||
87d58535ff | |||
1060ba2206 | |||
0401087175 | |||
f8dc06ef49 | |||
282cb46ff1 | |||
a3ff5f1246 | |||
5bb822dcd4 | |||
00b3c2036a | |||
3163b0d362 | |||
21f48577ae | |||
11e4410d1c | |||
27a950d28e | |||
f3d056110a | |||
b39c2e2f75 | |||
7cf3c6eb95 | |||
cdec0254ec | |||
02f3330812 | |||
6f013d0225 | |||
1f06f57de3 | |||
0f405f24c7 | |||
5a8128dd30 | |||
50616cc62c | |||
9d345cab07 | |||
59ab11e932 | |||
df302d4bac | |||
6bbfd0f4f6 | |||
943e0045e7 | |||
62a5250554 | |||
9043970e97 | |||
d32c9ce1b6 | |||
73d8478678 | |||
bab58576b4 | |||
41212c1ad1 | |||
4a6122905b | |||
15986c598a | |||
078342442d | |||
8855c54391 | |||
5dfc81a157 | |||
c42d97fb97 | |||
13314ad1e7 | |||
ff6026ca79 | |||
c6c6c0f295 | |||
1cca5557b1 | |||
76208110b9 | |||
56dd0282f0 | |||
c01b602b86 | |||
d6f46236e9 | |||
104b30142f | |||
f3a885d920 | |||
60445b0559 | |||
01d6287a8f | |||
0462b2db80 | |||
4cb399ed70 | |||
7ef9f7702f | |||
44a1686a76 | |||
15c6d24178 | |||
3b84e3ccfe | |||
da7d6beb22 | |||
f012eb7bdd | |||
f966394b63 | |||
889d2bb378 | |||
a2c4e485ba | |||
8860d8de8d | |||
d7b768ee9f | |||
6ea8e42331 | |||
1b784cb77a | |||
4a0ec1207c | |||
ffb2fedca9 | |||
382b1ba85f | |||
3b42655b51 | |||
e43e906f86 | |||
e51d9d0935 | |||
f57489ed92 | |||
503e521820 | |||
c317094947 | |||
243df63978 | |||
05ff102e09 | |||
cd30fac050 | |||
f589d3c795 | |||
51879d022e | |||
2260b3dda3 | |||
aa64442453 | |||
129ee45944 | |||
2fe7d105b0 | |||
136c8acba6 | |||
e92d4b2ccb | |||
6e91c96dd7 | |||
7801c03e2d | |||
763bbe1c01 | |||
0f67569cc3 | |||
0ea3527544 | |||
20dfca073f | |||
a3679f0f4e | |||
e75fdc2865 | |||
4be88ff572 | |||
992789af26 | |||
b822e13f12 | |||
cd058db046 | |||
1b3143d3d4 | |||
e31ed66610 | |||
7f18ff10b2 | |||
65ae24fbf1 | |||
b54ce921dd | |||
4935129c5a | |||
7614ce4b49 | |||
9d34ec9153 | |||
fd92271884 | |||
cea8fab307 | |||
2d44b7d296 | |||
faccb0627f | |||
a9cd6b4f7a | |||
81691e07c6 | |||
26f40dcabc | |||
3820fef801 | |||
392ff286b2 | |||
b6824d8b88 | |||
e09160e80d | |||
8ba5388438 | |||
30b6eac03d | |||
17ad07ce27 | |||
53911ebecd | |||
bc309705a9 | |||
1de80aeac3 | |||
1eaaf368ee | |||
36e40ebb85 | |||
3f600c5b82 | |||
fbd980f8b0 | |||
7d383421c6 | |||
aed386b3cd | |||
540cc4016e | |||
1b3a09495d | |||
b7af34371b | |||
105762e1c3 | |||
2706ae076d | |||
07ceec3e0b | |||
72fd1b047f | |||
178b6d4d8d | |||
d160e834eb | |||
3e8b9e7e8b | |||
c34ebfe739 | |||
571b33a11c | |||
07b90f4b4b | |||
f1630da2cc | |||
16751b5dee | |||
29ec9a436a | |||
6a7c00eaef | |||
82b24d9beb | |||
a317072e4e | |||
5b701cd197 | |||
8f035616a0 | |||
81f8ba9e4c | |||
380ab19910 | |||
4329629ee9 | |||
39fde52d8e | |||
0611f56776 | |||
8923e91e39 | |||
d6e6811bb9 | |||
f24bc5c826 | |||
c209d0d487 | |||
74dddc880d | |||
f3c41bbdf1 | |||
c45ddc8f22 | |||
84a98995bf | |||
ed83449514 | |||
9eda573a43 | |||
4f91d2512a | |||
2f5eeab567 | |||
f9fbb0eb3c | |||
43fbf4345d | |||
8262c2dd33 | |||
0e86430ea3 | |||
fc1301c92d | |||
e913e26c01 | |||
5ce4b12cc1 | |||
94429d781f | |||
321629a693 | |||
f21405399c | |||
305ca11eb5 | |||
9b1ff9b566 | |||
a0ed6ea3c8 | |||
4a6529973e | |||
9a02fac0e5 | |||
2c6a9e9e48 | |||
d91b735442 | |||
7d3025176f | |||
74111dddb7 | |||
74b0e4e541 | |||
587bb13be5 | |||
79d3237bf5 | |||
f8d44e732b | |||
0d2044e72e | |||
1bb301aafa | |||
5635b8378d | |||
294c2c600d | |||
b4c639a5d9 | |||
e7b37bee08 | |||
d32e97b812 | |||
81affaa584 | |||
f2d54f201d | |||
0373006710 | |||
ec2e35ad81 | |||
821ee5e726 | |||
5ed1ed54a6 | |||
96ef478fbc | |||
3f60c9d416 | |||
ed39377840 | |||
e250a3f213 | |||
3a99456371 | |||
bd6d8189f8 | |||
452b5c58e8 | |||
d1ebc55ed7 | |||
f20f3f56c7 | |||
65008bb912 | |||
d21389d549 | |||
f858a127ad | |||
de12393eaf | |||
b2c53a0967 | |||
65546646a7 | |||
ee8cd671cb | |||
d4df70c53f | |||
43ead45db6 | |||
22d2360c4b | |||
d38b8cf851 | |||
43cf52275b | |||
104b7824f5 | |||
a9293f62a8 | |||
0b210ce5bf | |||
38225d0dba | |||
473b6f727c | |||
63039666b0 | |||
a4a1588fbc | |||
4eafb22d5b | |||
aa09967173 | |||
7c40aed738 | |||
6c0bf6e0ab | |||
20e891db6e | |||
38b5979881 | |||
8422d40e2c | |||
de1c4e6c88 | |||
648d4865b1 | |||
7d4fec4db3 | |||
0f7e73646f | |||
bd6ca75032 | |||
341cc1ea63 | |||
2716bb020f | |||
193b00764b | |||
8ca678440a | |||
439889dcef | |||
5ec6bac7d9 | |||
af2ec60980 | |||
f0ca0312f3 | |||
3317b137e5 | |||
c2c10e2bc0 | |||
d2eb6f6646 | |||
1ad9d6f199 | |||
f8d337ad29 | |||
47150efc14 | |||
3e14de158b | |||
e18892000a | |||
c8671c719f | |||
0412c3a2f8 | |||
ef3e8eb778 | |||
fb8cfeb70d | |||
4d70255696 | |||
77c34acb03 | |||
e72bc8ea8b | |||
a882e640e4 | |||
c09d866a77 | |||
4467e59122 | |||
9c096d320a | |||
93ae5043cc | |||
b134394319 | |||
b163775112 | |||
8bd035f51d | |||
9f15017032 | |||
81fec11f88 | |||
8a6a688131 | |||
77a4de31fa | |||
09e88d127e | |||
7ff5734d5d | |||
1d19595996 | |||
7d115da782 | |||
b066775630 | |||
8bb6bcb6eb | |||
20031861b9 | |||
eb297d3b8f | |||
8faa0126eb | |||
6aec03708f | |||
2f7b1e4282 | |||
7492131142 | |||
3c6ee63e59 | |||
45ad18f654 | |||
01829f04d5 | |||
cc1c471877 | |||
de14f9fce8 | |||
6c3ed1dbc2 | |||
cf0fa3141a | |||
539e232f3c | |||
9ed889ccbb | |||
872e26b524 | |||
5bfff0c39b | |||
0505a9d6f7 | |||
9181a046ec | |||
1b0eaac470 | |||
e54cd98a9c | |||
f3eb4fb24e | |||
04854d5d99 | |||
124a814f4d | |||
2e1670fcb8 | |||
7d2747ea9a | |||
36f2b09cad | |||
be51aad9ad | |||
97695b74dd | |||
9d84e47214 | |||
9fb9adb6b4 | |||
91e6d31dc6 | |||
9a1c537854 | |||
2476c8d579 | |||
27e59ea49c | |||
27882efd6b | |||
5e98751c66 | |||
8dec2da564 | |||
27272d3754 | |||
f689434bbc | |||
03728c1868 | |||
ce771903e5 | |||
c78bce2af4 | |||
0b3c9b760e | |||
7e7eba8f4d | |||
a77c222db0 | |||
149961e8f1 | |||
caf3015e66 | |||
459bfdd783 | |||
a2f1cca85c | |||
c09b4b045f | |||
94d81445eb | |||
94744c626c | |||
e62a2509ae | |||
417ac4b69e | |||
b7bf31df99 | |||
a7a0f48286 | |||
1bf0f7110a | |||
ad53eb4e76 | |||
c81d20a069 | |||
08df76486d | |||
fe3753ea68 | |||
abf671da1b | |||
91b4d27931 | |||
310897897e | |||
8ba917b704 | |||
219da892b2 | |||
bbb4cc7d5f | |||
9d04a7cc40 | |||
70d0ae7b42 | |||
ce9e4a61e7 | |||
af8e2f6961 | |||
093b9c1c5b | |||
348d75112f | |||
3c7b1ba854 | |||
b7a8758845 | |||
3812037e2a | |||
c5fdbdb8a1 | |||
c15b5df674 | |||
00f0fd2873 | |||
7269cf7427 | |||
83d82a09b2 | |||
64345b2985 | |||
9c23d78513 | |||
ff92123d93 | |||
e1357a9541 | |||
3b7aa5124c | |||
ce947d70b0 | |||
caed87c125 | |||
e12ba5be8f | |||
d52e087453 | |||
982ebacddd | |||
ee2f54fbb0 | |||
4f5c0314cf | |||
542a3995ea | |||
4af0dbe441 | |||
78ccd4181c | |||
680aeb12c2 | |||
ddcf0b4f5f | |||
74e60fbef8 | |||
b4c783f23d | |||
6f6d2abdac | |||
b123f35d4b | |||
02d6614ae2 | |||
20de0ea01f | |||
9f352ace23 | |||
48cbc5b23c | |||
aa495f4d74 | |||
0d8768b827 | |||
0d076d97be | |||
5b5c33a86f | |||
12f34cc698 | |||
ac116f4f7c | |||
6617731d5b | |||
f7d5ddbc07 | |||
ba778eaff9 | |||
1801c006ec | |||
7a124518c3 | |||
1183d28b15 | |||
1da6ac8de7 | |||
2b89ddfb9e | |||
29734a1dce | |||
def33206d9 | |||
6aad0b8443 | |||
9891e5ab81 | |||
7113c702ff | |||
54edf571af | |||
f85968aba4 | |||
440f553aa8 | |||
a492b019fe | |||
2941740df6 | |||
f0b638063d | |||
0377efdc16 | |||
3d89d2961c | |||
8c240ca3fd | |||
85cd03f899 | |||
3480cdb3b4 | |||
fec83e5164 | |||
837d12decd | |||
ffa536bea3 | |||
a1f26d947d | |||
e6bdef696d | |||
707af3f3ca | |||
480467447e | |||
fa859f1461 | |||
556f4b2f12 |
@ -3,12 +3,27 @@ trigger:
|
||||
|
||||
strategy:
|
||||
matrix:
|
||||
linux-nightly:
|
||||
linux-stable:
|
||||
image: ubuntu-16.04
|
||||
macos-nightly:
|
||||
style: 'unflagged'
|
||||
macos-stable:
|
||||
image: macos-10.14
|
||||
windows-nightly:
|
||||
style: 'unflagged'
|
||||
windows-stable:
|
||||
image: vs2017-win2016
|
||||
style: 'unflagged'
|
||||
linux-nightly-canary:
|
||||
image: ubuntu-16.04
|
||||
style: 'canary'
|
||||
macos-nightly-canary:
|
||||
image: macos-10.14
|
||||
style: 'canary'
|
||||
windows-nightly-canary:
|
||||
image: vs2017-win2016
|
||||
style: 'canary'
|
||||
fmt:
|
||||
image: ubuntu-16.04
|
||||
style: 'fmt'
|
||||
|
||||
pool:
|
||||
vmImage: $(image)
|
||||
@ -16,13 +31,29 @@ pool:
|
||||
steps:
|
||||
- bash: |
|
||||
set -e
|
||||
curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain `cat rust-toolchain`
|
||||
if [ -e /etc/debian_version ]
|
||||
then
|
||||
sudo apt-get -y install libxcb-composite0-dev libx11-dev
|
||||
fi
|
||||
curl https://sh.rustup.rs -sSf | sh -s -- -y --no-modify-path --default-toolchain "stable"
|
||||
export PATH=$HOME/.cargo/bin:$PATH
|
||||
rustup update
|
||||
rustc -Vv
|
||||
echo "##vso[task.prependpath]$HOME/.cargo/bin"
|
||||
rustup component add rustfmt --toolchain `cat rust-toolchain`
|
||||
rustup component add rustfmt --toolchain "stable"
|
||||
displayName: Install Rust
|
||||
- bash: RUSTFLAGS="-D warnings" cargo test --all-features
|
||||
- bash: RUSTFLAGS="-D warnings" cargo test --all --features=stable
|
||||
condition: eq(variables['style'], 'unflagged')
|
||||
displayName: Run tests
|
||||
- bash: RUSTFLAGS="-D warnings" cargo clippy --all --features=stable -- -D clippy::result_unwrap_used -D clippy::option_unwrap_used
|
||||
condition: eq(variables['style'], 'unflagged')
|
||||
displayName: Check clippy lints
|
||||
- bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all --features=stable
|
||||
condition: eq(variables['style'], 'canary')
|
||||
displayName: Run tests
|
||||
- bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo clippy --all --features=stable -- -D clippy::result_unwrap_used -D clippy::option_unwrap_used
|
||||
condition: eq(variables['style'], 'canary')
|
||||
displayName: Check clippy lints
|
||||
- bash: cargo fmt --all -- --check
|
||||
condition: eq(variables['style'], 'fmt')
|
||||
displayName: Lint
|
||||
|
@ -0,0 +1,3 @@
|
||||
[build]
|
||||
|
||||
#rustflags = ["--cfg", "coloring_in_tokens"]
|
30
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
30
.github/ISSUE_TEMPLATE/bug_report.md
vendored
Normal file
@ -0,0 +1,30 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Create a report to help us improve
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Describe the bug**
|
||||
A clear and concise description of what the bug is.
|
||||
|
||||
**To Reproduce**
|
||||
Steps to reproduce the behavior:
|
||||
1.
|
||||
2.
|
||||
3.
|
||||
|
||||
**Expected behavior**
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
**Screenshots**
|
||||
If applicable, add screenshots to help explain your problem.
|
||||
|
||||
**Configuration (please complete the following information):**
|
||||
- OS: [e.g. Windows]
|
||||
- Version [e.g. 0.4.0]
|
||||
- Optional features (if any)
|
||||
|
||||
Add any other context about the problem here.
|
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
Normal file
@ -0,0 +1,20 @@
|
||||
---
|
||||
name: Feature request
|
||||
about: Suggest an idea for this project
|
||||
title: ''
|
||||
labels: ''
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Is your feature request related to a problem? Please describe.**
|
||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
||||
|
||||
**Describe the solution you'd like**
|
||||
A clear and concise description of what you want to happen.
|
||||
|
||||
**Describe alternatives you've considered**
|
||||
A clear and concise description of any alternative solutions or features you've considered.
|
||||
|
||||
**Additional context**
|
||||
Add any other context or screenshots about the feature request here.
|
70
.github/workflows/docker-publish.yml
vendored
70
.github/workflows/docker-publish.yml
vendored
@ -2,7 +2,7 @@ name: Publish consumable Docker images
|
||||
|
||||
on:
|
||||
push:
|
||||
tags: ['*.*.*']
|
||||
tags: ['v?[0-9]+.[0-9]+.[0-9]+*']
|
||||
|
||||
jobs:
|
||||
compile:
|
||||
@ -14,7 +14,11 @@ jobs:
|
||||
- x86_64-unknown-linux-gnu
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- run: cargo install cross
|
||||
- name: Install rust-embedded/cross
|
||||
env: { VERSION: v0.1.16 }
|
||||
run: >-
|
||||
wget -nv https://github.com/rust-embedded/cross/releases/download/${VERSION}/cross-${VERSION}-x86_64-unknown-linux-gnu.tar.gz
|
||||
-O- | sudo tar xz -C /usr/local/bin/
|
||||
- name: compile for specific target
|
||||
env: { arch: '${{ matrix.arch }}' }
|
||||
run: |
|
||||
@ -31,6 +35,10 @@ jobs:
|
||||
name: Build and publish docker images
|
||||
needs: compile
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
DOCKER_REGISTRY: quay.io/nushell
|
||||
DOCKER_PASSWORD: ${{ secrets.DOCKER_REGISTRY }}
|
||||
DOCKER_USER: ${{ secrets.DOCKER_USER }}
|
||||
strategy:
|
||||
matrix:
|
||||
tag:
|
||||
@ -58,41 +66,53 @@ jobs:
|
||||
- uses: actions/download-artifact@master
|
||||
with: { name: '${{ matrix.arch }}', path: target/release }
|
||||
- name: Build and publish exact version
|
||||
run: |
|
||||
REGISTRY=${REGISTRY,,}; export TAG=${GITHUB_REF##*/}-${{ matrix.tag }};
|
||||
run: |-
|
||||
export DOCKER_TAG=${GITHUB_REF##*/}-${{ matrix.tag }}
|
||||
export NU_BINS=target/release/$( [ ${{ matrix.plugin }} = true ] && echo nu* || echo nu )
|
||||
export PATCH=$([ ${{ matrix.use-patch }} = true ] && echo .${{ matrix.tag }} || echo '')
|
||||
chmod +x $NU_BINS
|
||||
|
||||
echo ${{ secrets.DOCKER_REGISTRY }} | docker login docker.pkg.github.com -u ${{ github.actor }} --password-stdin
|
||||
echo ${DOCKER_PASSWORD} | docker login ${DOCKER_REGISTRY} -u ${DOCKER_USER} --password-stdin
|
||||
docker-compose --file docker/docker-compose.package.yml build
|
||||
docker-compose --file docker/docker-compose.package.yml push # exact version
|
||||
env:
|
||||
BASE_IMAGE: ${{ matrix.base-image }}
|
||||
REGISTRY: docker.pkg.github.com/${{ github.repository }}
|
||||
|
||||
#region semantics tagging
|
||||
- name: Retag and push without suffixing version
|
||||
run: |
|
||||
- name: Retag and push with suffixed version
|
||||
run: |-
|
||||
VERSION=${GITHUB_REF##*/}
|
||||
docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${{ matrix.tag }}
|
||||
docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION%%.*}-${{ matrix.tag }}
|
||||
docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION%.*}-${{ matrix.tag }}
|
||||
docker push ${REGISTRY,,}/nu:${VERSION%.*}-${{ matrix.tag }} # latest patch
|
||||
docker push ${REGISTRY,,}/nu:${VERSION%%.*}-${{ matrix.tag }} # latest features
|
||||
docker push ${REGISTRY,,}/nu:${{ matrix.tag }} # latest version
|
||||
env: { REGISTRY: 'docker.pkg.github.com/${{ github.repository }}' }
|
||||
|
||||
latest_version=${VERSION%%%.*}-${{ matrix.tag }}
|
||||
latest_feature=${VERSION%%.*}-${{ matrix.tag }}
|
||||
latest_patch=${VERSION%.*}-${{ matrix.tag }}
|
||||
exact_version=${VERSION}-${{ matrix.tag }}
|
||||
|
||||
tags=( ${latest_version} ${latest_feature} ${latest_patch} ${exact_version} )
|
||||
|
||||
for tag in ${tags[@]}; do
|
||||
docker tag ${DOCKER_REGISTRY}/nu:${VERSION}-${{ matrix.tag }} ${DOCKER_REGISTRY}/nu:${tag}
|
||||
docker push ${DOCKER_REGISTRY}/nu:${tag}
|
||||
done
|
||||
|
||||
# latest version
|
||||
docker tag ${DOCKER_REGISTRY}/nu:${VERSION}-${{ matrix.tag }} ${DOCKER_REGISTRY}/nu:${{ matrix.tag }}
|
||||
docker push ${DOCKER_REGISTRY}/nu:${{ matrix.tag }}
|
||||
|
||||
- name: Retag and push debian as latest
|
||||
if: matrix.tag == 'debian'
|
||||
run: |
|
||||
run: |-
|
||||
VERSION=${GITHUB_REF##*/}
|
||||
docker tag ${REGISTRY,,}/nu:${{ matrix.tag }} ${REGISTRY,,}/nu:latest
|
||||
docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION%.*}
|
||||
docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION%%.*}
|
||||
docker tag ${REGISTRY,,}/nu:${VERSION}-${{ matrix.tag }} ${REGISTRY,,}/nu:${VERSION}
|
||||
docker push ${REGISTRY,,}/nu:${VERSION} # exact version
|
||||
docker push ${REGISTRY,,}/nu:${VERSION%%.*} # latest features
|
||||
docker push ${REGISTRY,,}/nu:${VERSION%.*} # latest patch
|
||||
docker push ${REGISTRY,,}/nu:latest # latest version
|
||||
env: { REGISTRY: 'docker.pkg.github.com/${{ github.repository }}' }
|
||||
|
||||
# ${latest features} ${latest patch} ${exact version}
|
||||
tags=( ${VERSION%%.*} ${VERSION%.*} ${VERSION} )
|
||||
|
||||
for tag in ${tags[@]}; do
|
||||
docker tag ${DOCKER_REGISTRY}/nu:${VERSION}-${{ matrix.tag }} ${DOCKER_REGISTRY}/nu:${tag}
|
||||
docker push ${DOCKER_REGISTRY}/nu:${tag}
|
||||
done
|
||||
|
||||
# latest version
|
||||
docker tag ${DOCKER_REGISTRY}/nu:${{ matrix.tag }} ${DOCKER_REGISTRY}/nu:latest
|
||||
docker push ${DOCKER_REGISTRY}/nu:latest
|
||||
#endregion semantics tagging
|
||||
|
7
.gitpod.Dockerfile
vendored
Normal file
7
.gitpod.Dockerfile
vendored
Normal file
@ -0,0 +1,7 @@
|
||||
FROM gitpod/workspace-full
|
||||
USER root
|
||||
RUN apt-get update && apt-get install -y libssl-dev \
|
||||
libxcb-composite0-dev \
|
||||
pkg-config \
|
||||
curl \
|
||||
rustc
|
21
.gitpod.yml
Normal file
21
.gitpod.yml
Normal file
@ -0,0 +1,21 @@
|
||||
image:
|
||||
file: .gitpod.Dockerfile
|
||||
tasks:
|
||||
- init: cargo install nu --features=stable
|
||||
command: nu
|
||||
github:
|
||||
prebuilds:
|
||||
# enable for the master/default branch (defaults to true)
|
||||
master: true
|
||||
# enable for all branches in this repo (defaults to false)
|
||||
branches: true
|
||||
# enable for pull requests coming from this repo (defaults to true)
|
||||
pullRequests: true
|
||||
# enable for pull requests coming from forks (defaults to false)
|
||||
pullRequestsFromForks: true
|
||||
# add a "Review in Gitpod" button as a comment to pull requests (defaults to true)
|
||||
addComment: true
|
||||
# add a "Review in Gitpod" button to pull requests (defaults to false)
|
||||
addBadge: false
|
||||
# add a label once the prebuild is ready to pull requests (defaults to false)
|
||||
addLabel: prebuilt-in-gitpod
|
3801
Cargo.lock
generated
3801
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
239
Cargo.toml
239
Cargo.toml
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "nu"
|
||||
version = "0.3.0"
|
||||
version = "0.8.0"
|
||||
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
||||
description = "A shell for the GitHub era"
|
||||
license = "MIT"
|
||||
@ -8,91 +8,155 @@ edition = "2018"
|
||||
readme = "README.md"
|
||||
default-run = "nu"
|
||||
repository = "https://github.com/nushell/nushell"
|
||||
homepage = "http://nushell.sh"
|
||||
documentation = "https://book.nushell.sh"
|
||||
homepage = "https://www.nushell.sh"
|
||||
documentation = "https://www.nushell.sh/book/"
|
||||
|
||||
[workspace]
|
||||
|
||||
members = [
|
||||
"crates/nu-macros",
|
||||
"crates/nu-errors",
|
||||
"crates/nu-source",
|
||||
"crates/nu_plugin_average",
|
||||
"crates/nu_plugin_binaryview",
|
||||
"crates/nu_plugin_fetch",
|
||||
"crates/nu_plugin_inc",
|
||||
"crates/nu_plugin_match",
|
||||
"crates/nu_plugin_post",
|
||||
"crates/nu_plugin_ps",
|
||||
"crates/nu_plugin_str",
|
||||
"crates/nu_plugin_sum",
|
||||
"crates/nu_plugin_sys",
|
||||
"crates/nu_plugin_textview",
|
||||
"crates/nu_plugin_tree",
|
||||
"crates/nu-protocol",
|
||||
"crates/nu-plugin",
|
||||
"crates/nu-parser",
|
||||
"crates/nu-value-ext",
|
||||
"crates/nu-build"
|
||||
]
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
rustyline = "5.0.3"
|
||||
chrono = { version = "0.4.9", features = ["serde"] }
|
||||
nu-source = { version = "0.8.0", path = "./crates/nu-source" }
|
||||
nu-plugin = { version = "0.8.0", path = "./crates/nu-plugin" }
|
||||
nu-protocol = { version = "0.8.0", path = "./crates/nu-protocol" }
|
||||
nu-errors = { version = "0.8.0", path = "./crates/nu-errors" }
|
||||
nu-parser = { version = "0.8.0", path = "./crates/nu-parser" }
|
||||
nu-value-ext = { version = "0.8.0", path = "./crates/nu-value-ext" }
|
||||
nu_plugin_average = {version = "0.8.0", path = "./crates/nu_plugin_average", optional=true}
|
||||
nu_plugin_binaryview = {version = "0.8.0", path = "./crates/nu_plugin_binaryview", optional=true}
|
||||
nu_plugin_fetch = {version = "0.8.0", path = "./crates/nu_plugin_fetch", optional=true}
|
||||
nu_plugin_inc = {version = "0.8.0", path = "./crates/nu_plugin_inc", optional=true}
|
||||
nu_plugin_match = {version = "0.8.0", path = "./crates/nu_plugin_match", optional=true}
|
||||
nu_plugin_post = {version = "0.8.0", path = "./crates/nu_plugin_post", optional=true}
|
||||
nu_plugin_ps = {version = "0.8.0", path = "./crates/nu_plugin_ps", optional=true}
|
||||
nu_plugin_str = {version = "0.8.0", path = "./crates/nu_plugin_str", optional=true}
|
||||
nu_plugin_sum = {version = "0.8.0", path = "./crates/nu_plugin_sum", optional=true}
|
||||
nu_plugin_sys = {version = "0.8.0", path = "./crates/nu_plugin_sys", optional=true}
|
||||
nu_plugin_textview = {version = "0.8.0", path = "./crates/nu_plugin_textview", optional=true}
|
||||
nu_plugin_tree = {version = "0.8.0", path = "./crates/nu_plugin_tree", optional=true}
|
||||
nu-macros = { version = "0.8.0", path = "./crates/nu-macros" }
|
||||
|
||||
|
||||
query_interface = "0.3.5"
|
||||
typetag = "0.1.4"
|
||||
rustyline = "5.0.6"
|
||||
chrono = { version = "0.4.10", features = ["serde"] }
|
||||
derive-new = "0.5.8"
|
||||
prettytable-rs = "0.8.0"
|
||||
itertools = "0.8.0"
|
||||
itertools = "0.8.2"
|
||||
ansi_term = "0.12.1"
|
||||
nom = "5.0.0"
|
||||
nom = "5.0.1"
|
||||
dunce = "1.0.0"
|
||||
indexmap = { version = "1.2.0", features = ["serde-1"] }
|
||||
indexmap = { version = "1.3.0", features = ["serde-1"] }
|
||||
chrono-humanize = "0.0.11"
|
||||
byte-unit = "3.0.1"
|
||||
base64 = "0.10.1"
|
||||
futures-preview = { version = "=0.3.0-alpha.18", features = ["compat", "io-compat"] }
|
||||
futures-async-stream = "=0.1.0-alpha.5"
|
||||
byte-unit = "3.0.3"
|
||||
base64 = "0.11"
|
||||
futures-preview = { version = "=0.3.0-alpha.19", features = ["compat", "io-compat"] }
|
||||
async-stream = "0.1.2"
|
||||
futures_codec = "0.2.5"
|
||||
num-traits = "0.2.8"
|
||||
num-traits = "0.2.10"
|
||||
term = "0.5.2"
|
||||
bytes = "0.4.12"
|
||||
log = "0.4.8"
|
||||
pretty_env_logger = "0.3.1"
|
||||
serde = { version = "1.0.100", features = ["derive"] }
|
||||
serde = { version = "1.0.103", features = ["derive"] }
|
||||
bson = { version = "0.14.0", features = ["decimal128"] }
|
||||
serde_json = "1.0.40"
|
||||
serde_json = "1.0.44"
|
||||
serde-hjson = "0.9.1"
|
||||
serde_yaml = "0.8"
|
||||
serde_bytes = "0.11.2"
|
||||
getset = "0.0.8"
|
||||
language-reporting = "0.3.1"
|
||||
serde_bytes = "0.11.3"
|
||||
getset = "0.0.9"
|
||||
language-reporting = "0.4.0"
|
||||
app_dirs = "1.2.1"
|
||||
csv = "1.1"
|
||||
toml = "0.5.3"
|
||||
toml = "0.5.5"
|
||||
clap = "2.33.0"
|
||||
git2 = { version = "0.10.1", default_features = false }
|
||||
git2 = { version = "0.10.2", default_features = false }
|
||||
dirs = "2.0.2"
|
||||
glob = "0.3.0"
|
||||
ctrlc = "3.1.3"
|
||||
surf = "1.0.2"
|
||||
url = "2.1.0"
|
||||
roxmltree = "0.7.0"
|
||||
roxmltree = "0.7.3"
|
||||
nom_locate = "1.0.0"
|
||||
enum-utils = "0.1.1"
|
||||
nom-tracable = "0.4.1"
|
||||
unicode-xid = "0.2.0"
|
||||
serde_ini = "0.2.0"
|
||||
subprocess = "0.1.18"
|
||||
mime = "0.3.14"
|
||||
pretty-hex = "0.1.0"
|
||||
hex = "0.3.2"
|
||||
pretty-hex = "0.1.1"
|
||||
hex = "0.4"
|
||||
tempfile = "3.1.0"
|
||||
semver = "0.9.0"
|
||||
which = "2.0.1"
|
||||
uuid = {version = "0.7.4", features = [ "v4", "serde" ]}
|
||||
ichwh = "0.2"
|
||||
textwrap = {version = "0.11.0", features = ["term_size"]}
|
||||
shellexpand = "1.0.0"
|
||||
futures-timer = "0.4.0"
|
||||
pin-utils = "0.1.0-alpha.4"
|
||||
num-bigint = { version = "0.2.3", features = ["serde"] }
|
||||
bigdecimal = { version = "0.1.0", features = ["serde"] }
|
||||
natural = "0.3.0"
|
||||
serde_urlencoded = "0.6.1"
|
||||
sublime_fuzzy = "0.5"
|
||||
trash = "1.0.0"
|
||||
regex = "1"
|
||||
cfg-if = "0.1"
|
||||
strip-ansi-escapes = "0.1.0"
|
||||
calamine = "0.16"
|
||||
umask = "0.1"
|
||||
futures-util = "0.3.1"
|
||||
termcolor = "1.0.5"
|
||||
natural = "0.3.0"
|
||||
parking_lot = "0.10.0"
|
||||
|
||||
neso = { version = "0.5.0", optional = true }
|
||||
crossterm = { version = "0.10.2", optional = true }
|
||||
clipboard = {version = "0.5", optional = true }
|
||||
ptree = {version = "0.2" }
|
||||
starship = { version = "0.28", optional = true}
|
||||
heim = {version = "0.0.9", optional = true}
|
||||
battery = {version = "0.7.5", optional = true}
|
||||
syntect = {version = "3.2.0", optional = true }
|
||||
onig_sys = {version = "=69.1.0", optional = true }
|
||||
heim = {version = "0.0.8-alpha.1", optional = true }
|
||||
battery = {version = "0.7.4", optional = true }
|
||||
rawkey = {version = "0.1.2", optional = true }
|
||||
clipboard = {version = "0.5", optional = true }
|
||||
ptree = {version = "0.2", optional = true }
|
||||
image = { version = "0.22.2", default_features = false, features = ["png_codec", "jpeg"], optional = true }
|
||||
crossterm = {version = "0.10.2", optional = true}
|
||||
futures-timer = {version = "1.0.2", optional = true}
|
||||
url = {version = "2.1.0", optional = true}
|
||||
|
||||
[features]
|
||||
default = ["textview", "sys", "ps"]
|
||||
raw-key = ["rawkey", "neso"]
|
||||
textview = ["syntect", "onig_sys", "crossterm"]
|
||||
binaryview = ["image", "crossterm"]
|
||||
default = ["sys", "ps", "textview", "inc", "str"]
|
||||
stable = ["sys", "ps", "textview", "inc", "str", "starship-prompt", "binaryview", "match", "tree", "average", "sum", "post", "fetch", "clipboard"]
|
||||
|
||||
# Default
|
||||
sys = ["heim", "battery"]
|
||||
ps = ["heim"]
|
||||
ps = ["heim", "futures-timer"]
|
||||
textview = ["crossterm", "syntect", "onig_sys", "url"]
|
||||
inc = ["nu_plugin_inc"]
|
||||
str = ["nu_plugin_str"]
|
||||
|
||||
# Stable
|
||||
average = ["nu_plugin_average"]
|
||||
binaryview = ["nu_plugin_binaryview"]
|
||||
fetch = ["nu_plugin_fetch"]
|
||||
match = ["nu_plugin_match"]
|
||||
post = ["nu_plugin_post"]
|
||||
starship-prompt = ["starship"]
|
||||
sum = ["nu_plugin_sum"]
|
||||
trace = ["nu-parser/trace"]
|
||||
tree = ["nu_plugin_tree"]
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "0.20.0"
|
||||
@ -100,64 +164,47 @@ features = ["bundled", "blob"]
|
||||
|
||||
[dev-dependencies]
|
||||
pretty_assertions = "0.6.1"
|
||||
nu-test-support = { version = "0.8.0", path = "./crates/nu-test-support" }
|
||||
|
||||
[build-dependencies]
|
||||
toml = "0.5.5"
|
||||
serde = { version = "1.0.103", features = ["derive"] }
|
||||
nu-build = { version = "0.8.0", path = "./crates/nu-build" }
|
||||
|
||||
[lib]
|
||||
name = "nu"
|
||||
doctest = false
|
||||
path = "src/lib.rs"
|
||||
|
||||
# Core plugins that ship with `cargo install nu` by default
|
||||
# Currently, Cargo limits us to installing only one binary
|
||||
# unless we use [[bin]], so we use this as a workaround
|
||||
[[bin]]
|
||||
name = "nu_plugin_inc"
|
||||
path = "src/plugins/inc.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_sum"
|
||||
path = "src/plugins/sum.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_embed"
|
||||
path = "src/plugins/embed.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_add"
|
||||
path = "src/plugins/add.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_edit"
|
||||
path = "src/plugins/edit.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_str"
|
||||
path = "src/plugins/str.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_skip"
|
||||
path = "src/plugins/skip.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_sys"
|
||||
path = "src/plugins/sys.rs"
|
||||
required-features = ["sys"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_ps"
|
||||
path = "src/plugins/ps.rs"
|
||||
required-features = ["ps"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_tree"
|
||||
path = "src/plugins/tree.rs"
|
||||
required-features = ["tree"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_binaryview"
|
||||
path = "src/plugins/binaryview.rs"
|
||||
required-features = ["binaryview"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_textview"
|
||||
path = "src/plugins/textview.rs"
|
||||
name = "nu_plugin_core_textview"
|
||||
path = "src/plugins/nu_plugin_core_textview.rs"
|
||||
required-features = ["textview"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_core_inc"
|
||||
path = "src/plugins/nu_plugin_core_inc.rs"
|
||||
required-features = ["inc"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_core_ps"
|
||||
path = "src/plugins/nu_plugin_core_ps.rs"
|
||||
required-features = ["ps"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_core_str"
|
||||
path = "src/plugins/nu_plugin_core_str.rs"
|
||||
required-features = ["str"]
|
||||
|
||||
[[bin]]
|
||||
name = "nu_plugin_core_sys"
|
||||
path = "src/plugins/nu_plugin_core_sys.rs"
|
||||
required-features = ["sys"]
|
||||
|
||||
# Main nu binary
|
||||
[[bin]]
|
||||
name = "nu"
|
||||
path = "src/main.rs"
|
||||
|
@ -4,21 +4,12 @@ command = "lalrpop"
|
||||
args = ["src/parser/parser.lalrpop"]
|
||||
|
||||
[tasks.baseline]
|
||||
dependencies = ["lalrpop"]
|
||||
|
||||
[tasks.build]
|
||||
command = "cargo"
|
||||
args = ["build"]
|
||||
dependencies = ["lalrpop"]
|
||||
args = ["build", "--bins"]
|
||||
|
||||
[tasks.run]
|
||||
command = "cargo"
|
||||
args = ["run", "--release"]
|
||||
dependencies = ["baseline"]
|
||||
|
||||
[tasks.release]
|
||||
command = "cargo"
|
||||
args = ["build", "--release"]
|
||||
args = ["run"]
|
||||
dependencies = ["baseline"]
|
||||
|
||||
[tasks.test]
|
||||
|
129
README.md
129
README.md
@ -1,35 +1,40 @@
|
||||
[](https://crates.io/crates/nu)
|
||||
[](https://dev.azure.com/nushell/nushell/_build/latest?definitionId=2&branchName=master)
|
||||
[](https://dev.azure.com/nushell/nushell/_build/latest?definitionId=2&branchName=master)
|
||||
[](https://discord.gg/NtAbbGn)
|
||||
[](https://changelog.com/podcast/363)
|
||||
|
||||
|
||||
|
||||
# Nu Shell
|
||||
|
||||
A modern shell for the GitHub era
|
||||
A modern shell for the GitHub era.
|
||||
|
||||

|
||||
|
||||
# Status
|
||||
|
||||
This project has reached a minimum-viable product level of quality. While contributors dogfood it as their daily driver, it may be unstable for some commands. Future releases will work fill out missing features and improve stability. Its design is also subject to change as it matures.
|
||||
This project has reached a minimum-viable product level of quality. While contributors dogfood it as their daily driver, it may be unstable for some commands. Future releases will work to fill out missing features and improve stability. Its design is also subject to change as it matures.
|
||||
|
||||
Nu comes with a set of built-in commands (listed below). If a command is unknown, the command will shell-out and execute it (using cmd on Windows or bash on Linux and MacOS), correctly passing through stdin, stdout and stderr, so things like your daily git workflows and even `vim` will work just fine.
|
||||
Nu comes with a set of built-in commands (listed below). If a command is unknown, the command will shell-out and execute it (using cmd on Windows or bash on Linux and macOS), correctly passing through stdin, stdout, and stderr, so things like your daily git workflows and even `vim` will work just fine.
|
||||
|
||||
# Learning more
|
||||
|
||||
There are a few good resources to learn about Nu. First, there is a [book](https://book.nushell.sh) about Nu, currently in progress. The book focuses on using Nu and its core concepts.
|
||||
There are a few good resources to learn about Nu. There is a [book](https://www.nushell.sh/book/) about Nu that is currently in progress. The book focuses on using Nu and its core concepts.
|
||||
|
||||
If you're a developer who would like to contribute to Nu, we're also working on a [book for developers](https://github.com/nushell/contributor-book/tree/master/en) to help get started. There are also [good first issues](https://github.com/nushell/nushell/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) to help you dive in.
|
||||
If you're a developer who would like to contribute to Nu, we're also working on a [book for developers](https://www.nushell.sh/contributor-book/) to help you get started. There are also [good first issues](https://github.com/nushell/nushell/issues?q=is%3Aopen+is%3Aissue+label%3A%22good+first+issue%22) to help you dive in.
|
||||
|
||||
We also have an active [discord](https://discord.gg/NtAbbGn) and [twitter](https://twitter.com/nu_shell) if you'd like to come chat with us.
|
||||
We also have an active [Discord](https://discord.gg/NtAbbGn) and [Twitter](https://twitter.com/nu_shell) if you'd like to come and chat with us.
|
||||
|
||||
Try it in Gitpod.
|
||||
|
||||
[](https://gitpod.io/#https://github.com/nushell/nushell)
|
||||
|
||||
# Installation
|
||||
|
||||
## Local
|
||||
## Local
|
||||
|
||||
Up-to-date installation instructions can be found in the [installation chapter of the book](https://book.nushell.sh/en/installation).
|
||||
Up-to-date installation instructions can be found in the [installation chapter of the book](https://www.nushell.sh/book/en/installation.html). **Windows users**: please note that Nu works on Windows 10 and does not currently have Windows 7/8.1 support.
|
||||
|
||||
To build Nu, you will need to use the **nightly** version of the compiler.
|
||||
To build Nu, you will need to use the **latest stable (1.39 or later)** version of the compiler.
|
||||
|
||||
Required dependencies:
|
||||
|
||||
@ -41,16 +46,16 @@ Optional dependencies:
|
||||
* To use Nu with all possible optional features enabled, you'll also need the following:
|
||||
* on Linux (on Debian/Ubuntu): `apt install libxcb-composite0-dev libx11-dev`
|
||||
|
||||
To install Nu via cargo:
|
||||
To install Nu via cargo (make sure you have installed [rustup](https://rustup.rs/) and the latest stable compiler via `rustup install stable`):
|
||||
|
||||
```
|
||||
cargo +nightly install nu
|
||||
cargo install nu
|
||||
```
|
||||
|
||||
You can also install Nu with all the bells and whistles:
|
||||
You can also build Nu yourself with all the bells and whistles (be sure to have installed the [dependencies](https://www.nushell.sh/book/en/installation.html#dependencies) for your platform), once you have checked out this repo with git:
|
||||
|
||||
```
|
||||
cargo +nightly install nu --all-features
|
||||
cargo build --all --features=stable
|
||||
```
|
||||
|
||||
## Docker
|
||||
@ -71,13 +76,13 @@ To build the base image:
|
||||
|
||||
```bash
|
||||
$ docker build -f docker/Dockerfile.nu-base -t nushell/nu-base .
|
||||
```
|
||||
```
|
||||
|
||||
And then to build the smaller container (using a Multistage build):
|
||||
|
||||
```bash
|
||||
$ docker build -f docker/Dockerfile -t nushell/nu .
|
||||
```
|
||||
```
|
||||
|
||||
Either way, you can run either container as follows:
|
||||
|
||||
@ -87,42 +92,44 @@ $ docker run -it nushell/nu
|
||||
/> exit
|
||||
```
|
||||
|
||||
The second container is a bit smaller, if size is important to you.
|
||||
The second container is a bit smaller if the size is important to you.
|
||||
|
||||
## Packaging status
|
||||
|
||||
[](https://repology.org/project/nushell/versions)
|
||||
|
||||
### Fedora
|
||||
|
||||
[COPR repo](https://copr.fedorainfracloud.org/coprs/atim/nushell/): `sudo dnf copr enable atim/nushell -y && sudo dnf install nushell -y`
|
||||
|
||||
# Philosophy
|
||||
|
||||
Nu draws inspiration from projects like PowerShell, functional programming languages, and modern cli tools. Rather than thinking of files and services as raw streams of text, Nu looks at each input as something with structure. For example, when you list the contents of a directory, what you get back is a table of rows, where each row represents an item in that directory. These values can be piped through a series of steps, in a series of commands called a 'pipeline'.
|
||||
Nu draws inspiration from projects like PowerShell, functional programming languages, and modern CLI tools. Rather than thinking of files and services as raw streams of text, Nu looks at each input as something with structure. For example, when you list the contents of a directory, what you get back is a table of rows, where each row represents an item in that directory. These values can be piped through a series of steps, in a series of commands called a 'pipeline'.
|
||||
|
||||
## Pipelines
|
||||
|
||||
In Unix, it's common to pipe between commands to split up a sophisticated command over multiple steps. Nu takes this a step further and builds heavily on the idea of _pipelines_. Just as the Unix philosophy, Nu allows commands to output from stdout and read from stdin. Additionally, commands can output structured data (you can think of this as a third kind of stream). Commands that work in the pipeline fit into one of three categories
|
||||
In Unix, it's common to pipe between commands to split up a sophisticated command over multiple steps. Nu takes this a step further and builds heavily on the idea of _pipelines_. Just as the Unix philosophy, Nu allows commands to output from stdout and read from stdin. Additionally, commands can output structured data (you can think of this as a third kind of stream). Commands that work in the pipeline fit into one of three categories:
|
||||
|
||||
* Commands that produce a stream (eg, `ls`)
|
||||
* Commands that filter a stream (eg, `where type == "Directory"`)
|
||||
* Commands that consumes the output of the pipeline (eg, `autoview`)
|
||||
* Commands that consume the output of the pipeline (eg, `autoview`)
|
||||
|
||||
Commands are separated by the pipe symbol (`|`) to denote a pipeline flowing left to right.
|
||||
|
||||
```
|
||||
/home/jonathan/Source/nushell(master)> ls | where type == "Directory" | autoview
|
||||
━━━━┯━━━━━━━━━━━┯━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━┯━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━
|
||||
# │ name │ type │ readonly │ size │ accessed │ modified
|
||||
# │ name │ type │ readonly │ size │ accessed │ modified
|
||||
────┼───────────┼───────────┼──────────┼────────┼──────────────┼────────────────
|
||||
0 │ .azure │ Directory │ │ 4.1 KB │ 2 months ago │ a day ago
|
||||
1 │ target │ Directory │ │ 4.1 KB │ 3 days ago │ 3 days ago
|
||||
2 │ images │ Directory │ │ 4.1 KB │ 2 months ago │ 2 weeks ago
|
||||
3 │ tests │ Directory │ │ 4.1 KB │ 2 months ago │ 37 minutes ago
|
||||
4 │ tmp │ Directory │ │ 4.1 KB │ 2 weeks ago │ 2 weeks ago
|
||||
5 │ src │ Directory │ │ 4.1 KB │ 2 months ago │ 37 minutes ago
|
||||
6 │ assets │ Directory │ │ 4.1 KB │ a month ago │ a month ago
|
||||
0 │ .azure │ Directory │ │ 4.1 KB │ 2 months ago │ a day ago
|
||||
1 │ target │ Directory │ │ 4.1 KB │ 3 days ago │ 3 days ago
|
||||
2 │ images │ Directory │ │ 4.1 KB │ 2 months ago │ 2 weeks ago
|
||||
3 │ tests │ Directory │ │ 4.1 KB │ 2 months ago │ 37 minutes ago
|
||||
4 │ tmp │ Directory │ │ 4.1 KB │ 2 weeks ago │ 2 weeks ago
|
||||
5 │ src │ Directory │ │ 4.1 KB │ 2 months ago │ 37 minutes ago
|
||||
6 │ assets │ Directory │ │ 4.1 KB │ a month ago │ a month ago
|
||||
7 │ docs │ Directory │ │ 4.1 KB │ 2 months ago │ 2 months ago
|
||||
━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━
|
||||
━━━━┷━━━━━━━━━━━┷━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━┷━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
Because most of the time you'll want to see the output of a pipeline, `autoview` is assumed. We could have also written the above:
|
||||
@ -136,12 +143,12 @@ Being able to use the same commands and compose them differently is an important
|
||||
```text
|
||||
/home/jonathan/Source/nushell(master)> ps | where cpu > 0
|
||||
━━━┯━━━━━━━┯━━━━━━━━━━━━━━━━━┯━━━━━━━━━━┯━━━━━━━━━━
|
||||
# │ pid │ name │ status │ cpu
|
||||
# │ pid │ name │ status │ cpu
|
||||
───┼───────┼─────────────────┼──────────┼──────────
|
||||
0 │ 992 │ chrome │ Sleeping │ 6.988768
|
||||
1 │ 4240 │ chrome │ Sleeping │ 5.645982
|
||||
2 │ 13973 │ qemu-system-x86 │ Sleeping │ 4.996551
|
||||
3 │ 15746 │ nu │ Sleeping │ 84.59905
|
||||
0 │ 992 │ chrome │ Sleeping │ 6.988768
|
||||
1 │ 4240 │ chrome │ Sleeping │ 5.645982
|
||||
2 │ 13973 │ qemu-system-x86 │ Sleeping │ 4.996551
|
||||
3 │ 15746 │ nu │ Sleeping │ 84.59905
|
||||
━━━┷━━━━━━━┷━━━━━━━━━━━━━━━━━┷━━━━━━━━━━┷━━━━━━━━━━
|
||||
|
||||
```
|
||||
@ -153,9 +160,9 @@ Nu can load file and URL contents as raw text or as structured data (if it recog
|
||||
```
|
||||
/home/jonathan/Source/nushell(master)> open Cargo.toml
|
||||
━━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━
|
||||
bin │ dependencies │ dev-dependencies
|
||||
bin │ dependencies │ dev-dependencies
|
||||
──────────────────┼────────────────┼──────────────────
|
||||
[table: 12 rows] │ [table: 1 row] │ [table: 1 row]
|
||||
[table: 12 rows] │ [table: 1 row] │ [table: 1 row]
|
||||
━━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━
|
||||
```
|
||||
|
||||
@ -164,9 +171,9 @@ We can pipeline this into a command that gets the contents of one of the columns
|
||||
```
|
||||
/home/jonathan/Source/nushell(master)> open Cargo.toml | get package
|
||||
━━━━━━━━━━━━━━━━━┯━━━━━━━━━━━━━━━━━━━━━━━━━━━━┯━━━━━━━━━┯━━━━━━━━━┯━━━━━━┯━━━━━━━━━
|
||||
authors │ description │ edition │ license │ name │ version
|
||||
authors │ description │ edition │ license │ name │ version
|
||||
─────────────────┼────────────────────────────┼─────────┼─────────┼──────┼─────────
|
||||
[table: 3 rows] │ A shell for the GitHub era │ 2018 │ ISC │ nu │ 0.3.0
|
||||
[table: 3 rows] │ A shell for the GitHub era │ 2018 │ MIT │ nu │ 0.6.1
|
||||
━━━━━━━━━━━━━━━━━┷━━━━━━━━━━━━━━━━━━━━━━━━━━━━┷━━━━━━━━━┷━━━━━━━━━┷━━━━━━┷━━━━━━━━━
|
||||
```
|
||||
|
||||
@ -174,14 +181,33 @@ Finally, we can use commands outside of Nu once we have the data we want:
|
||||
|
||||
```
|
||||
/home/jonathan/Source/nushell(master)> open Cargo.toml | get package.version | echo $it
|
||||
0.3.0
|
||||
0.6.1
|
||||
```
|
||||
|
||||
Here we use the variable `$it` to refer to the value being piped to the external command.
|
||||
|
||||
## Configuration
|
||||
|
||||
Nu has early support for configuring the shell. It currently supports the following settings:
|
||||
|
||||
| Variable | Type | Description |
|
||||
| ------------- | ------------- | ----- |
|
||||
| path | table of strings | PATH to use to find binaries |
|
||||
| env | row | the environment variables to pass to external commands |
|
||||
| ctrlc_exit | boolean | whether or not to exit Nu after multiple ctrl-c presses |
|
||||
| table_mode | "light" or other | enable lightweight or normal tables |
|
||||
| edit_mode | "vi" or "emacs" | changes line editing to "vi" or "emacs" mode |
|
||||
|
||||
To set one of these variables, you can use `config --set`. For example:
|
||||
|
||||
```
|
||||
> config --set [edit_mode "vi"]
|
||||
> config --set [path $nu.path]
|
||||
```
|
||||
|
||||
## Shells
|
||||
|
||||
By default, Nu will work inside of a single directory and allow you to navigate around your filesystem. Sometimes, you'll want to work in multiple directories at the same time. For this, Nu offers a way of adding additional working directories that you can jump between.
|
||||
Nu will work inside of a single directory and allow you to navigate around your filesystem by default. Nu also offers a way of adding additional working directories that you can jump between, allowing you to work in multiple directories at the same time.
|
||||
|
||||
To do so, use the `enter` command, which will allow you create a new "shell" and enter it at the specified path. You can toggle between this new shell and the original shell with the `p` (for previous) and `n` (for next), allowing you to navigate around a ring buffer of shells. Once you're done with a shell, you can `exit` it and remove it from the ring buffer.
|
||||
|
||||
@ -193,7 +219,7 @@ Nu supports plugins that offer additional functionality to the shell and follow
|
||||
|
||||
There are a few examples in the `plugins` directory.
|
||||
|
||||
Plugins are binaries that are available in your path and follow a "nu_plugin_*" naming convention. These binaries interact with nu via a simple JSON-RPC protocol where the command identifies itself and passes along its configuration, which then makes it available for use. If the plugin is a filter, data streams to it one element at a time, and it can stream data back in return via stdin/stdout. If the plugin is a sink, it is given the full vector of final data and is given free reign over stdin/stdout to use as it pleases.
|
||||
Plugins are binaries that are available in your path and follow a `nu_plugin_*` naming convention. These binaries interact with nu via a simple JSON-RPC protocol where the command identifies itself and passes along its configuration, which then makes it available for use. If the plugin is a filter, data streams to it one element at a time, and it can stream data back in return via stdin/stdout. If the plugin is a sink, it is given the full vector of final data and is given free reign over stdin/stdout to use as it pleases.
|
||||
|
||||
# Goals
|
||||
|
||||
@ -230,6 +256,8 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat
|
||||
| version | Display Nu version |
|
||||
|
||||
## Shell commands
|
||||
| command | description |
|
||||
| ------- | ----------- |
|
||||
| exit (--now) | Exit the current shell (or all shells) |
|
||||
| enter (path) | Create a new shell and begin at this path |
|
||||
| p | Go to previous shell |
|
||||
@ -239,20 +267,29 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat
|
||||
## Filters on tables (structured data)
|
||||
| command | description |
|
||||
| ------------- | ------------- |
|
||||
| add column-or-column-path value | Add a new column to the table |
|
||||
| append row-data | Append a row to the end of the table |
|
||||
| compact ...columns | Remove rows where given columns are empty |
|
||||
| count | Show the total number of rows |
|
||||
| default column row-data | Sets a default row's column if missing |
|
||||
| edit column-or-column-path value | Edit an existing column to have a new value |
|
||||
| embed column | Creates a new table of one column with the given name, and places the current table inside of it |
|
||||
| first amount | Show only the first number of rows |
|
||||
| format pattern | Format table row data as a string following the given pattern |
|
||||
| get column-or-column-path | Open column and get data from the corresponding cells |
|
||||
| group-by column | Creates a new table with the data from the table rows grouped by the column given |
|
||||
| histogram column ...column-names | Creates a new table with a histogram based on the column name passed in, optionally give the frequency column name
|
||||
| inc (column-or-column-path) | Increment a value or version. Optionally use the column of a table |
|
||||
| insert column-or-column-path value | Insert a new column to the table |
|
||||
| last amount | Show only the last number of rows |
|
||||
| nth row-number | Return only the selected row |
|
||||
| nth ...row-numbers | Return only the selected rows |
|
||||
| pick ...columns | Down-select table to only these columns |
|
||||
| pivot --header-row <headers> | Pivot the tables, making columns into rows and vice versa |
|
||||
| prepend row-data | Prepend a row to the beginning of the table |
|
||||
| reject ...columns | Remove the given columns from the table |
|
||||
| reverse | Reverses the table. |
|
||||
| skip amount | Skip a number of rows |
|
||||
| skip-while condition | Skips rows while the condition matches. |
|
||||
| skip-while condition | Skips rows while the condition matches |
|
||||
| split-by column | Creates a new table with the data from the inner tables splitted by the column given |
|
||||
| sort-by ...columns | Sort by the given columns |
|
||||
| str (column) | Apply string function. Optionally use the column of a table |
|
||||
| sum | Sum a column of values |
|
||||
@ -275,12 +312,14 @@ Nu adheres closely to a set of goals that make up its design philosophy. As feat
|
||||
| from-ini | Parse text as .ini and create table |
|
||||
| from-json | Parse text as .json and create table |
|
||||
| from-sqlite | Parse binary data as sqlite .db and create table |
|
||||
| from-ssv --minimum-spaces <minimum number of spaces to count as a separator> | Parse text as space-separated values and create table |
|
||||
| from-toml | Parse text as .toml and create table |
|
||||
| from-tsv | Parse text as .tsv and create table |
|
||||
| from-url | Parse urlencoded string and create a table |
|
||||
| from-xml | Parse text as .xml and create a table |
|
||||
| from-yaml | Parse text as a .yaml/.yml and create a table |
|
||||
| lines | Split single string into rows, one per line |
|
||||
| parse pattern | Convert text to a table by matching the given pattern |
|
||||
| size | Gather word count statistics on the text |
|
||||
| split-column sep ...column-names | Split row contents across multiple columns via the separator, optionally give the columns names |
|
||||
| split-row sep | Split row contents over multiple rows via the separator |
|
||||
|
52
TODO.md
Normal file
52
TODO.md
Normal file
@ -0,0 +1,52 @@
|
||||
This pattern is extremely repetitive and can be abstracted:
|
||||
|
||||
```rs
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let tag = args.name_tag();
|
||||
let input = args.input;
|
||||
|
||||
let stream = async_stream! {
|
||||
let values: Vec<Value> = input.values.collect().await;
|
||||
|
||||
let mut concat_string = String::new();
|
||||
let mut latest_tag: Option<Tag> = None;
|
||||
|
||||
for value in values {
|
||||
latest_tag = Some(value_tag.clone());
|
||||
let value_span = value.tag.span;
|
||||
|
||||
match &value.value {
|
||||
UntaggedValue::Primitive(Primitive::String(s)) => {
|
||||
concat_string.push_str(&s);
|
||||
concat_string.push_str("\n");
|
||||
}
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
name_span,
|
||||
"value originates from here",
|
||||
value_span,
|
||||
)),
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
Mandatory and Optional in parse_command
|
||||
|
||||
trace_remaining?
|
||||
|
||||
select_fields and select_fields take unnecessary Tag
|
||||
|
||||
Value#value should be Value#untagged
|
||||
|
||||
Unify dictionary building, probably around a macro
|
||||
|
||||
sys plugin in own crate
|
||||
|
||||
textview in own crate
|
||||
|
||||
Combine atomic and atomic_parse in parser
|
||||
|
||||
at_end_possible_ws needs to be comment and separator sensitive
|
3
build.rs
Normal file
3
build.rs
Normal file
@ -0,0 +1,3 @@
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
nu_build::build()
|
||||
}
|
16
crates/nu-build/Cargo.toml
Normal file
16
crates/nu-build/Cargo.toml
Normal file
@ -0,0 +1,16 @@
|
||||
[package]
|
||||
name = "nu-build"
|
||||
version = "0.8.0"
|
||||
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
||||
edition = "2018"
|
||||
description = "Core build system for nushell"
|
||||
license = "MIT"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
serde = { version = "1.0.103", features = ["derive"] }
|
||||
lazy_static = "1.4.0"
|
||||
serde_json = "1.0.44"
|
||||
toml = "0.5.5"
|
80
crates/nu-build/src/lib.rs
Normal file
80
crates/nu-build/src/lib.rs
Normal file
@ -0,0 +1,80 @@
|
||||
use lazy_static::lazy_static;
|
||||
use serde::Deserialize;
|
||||
use std::collections::BTreeMap;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::HashSet;
|
||||
use std::env;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Mutex;
|
||||
|
||||
lazy_static! {
|
||||
static ref WORKSPACES: Mutex<BTreeMap<String, &'static Path>> = Mutex::new(BTreeMap::new());
|
||||
}
|
||||
|
||||
// got from https://github.com/mitsuhiko/insta/blob/b113499249584cb650150d2d01ed96ee66db6b30/src/runtime.rs#L67-L88
|
||||
|
||||
fn get_cargo_workspace(manifest_dir: &str) -> Result<Option<&Path>, Box<dyn std::error::Error>> {
|
||||
let mut workspaces = WORKSPACES.lock()?;
|
||||
if let Some(rv) = workspaces.get(manifest_dir) {
|
||||
Ok(Some(rv))
|
||||
} else {
|
||||
#[derive(Deserialize)]
|
||||
struct Manifest {
|
||||
workspace_root: String,
|
||||
}
|
||||
let output = std::process::Command::new(env!("CARGO"))
|
||||
.arg("metadata")
|
||||
.arg("--format-version=1")
|
||||
.current_dir(manifest_dir)
|
||||
.output()?;
|
||||
let manifest: Manifest = serde_json::from_slice(&output.stdout)?;
|
||||
let path = Box::leak(Box::new(PathBuf::from(manifest.workspace_root)));
|
||||
workspaces.insert(manifest_dir.to_string(), path.as_path());
|
||||
Ok(workspaces.get(manifest_dir).cloned())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct Feature {
|
||||
#[allow(unused)]
|
||||
description: String,
|
||||
enabled: bool,
|
||||
}
|
||||
|
||||
pub fn build() -> Result<(), Box<dyn std::error::Error>> {
|
||||
let input = env::var("CARGO_MANIFEST_DIR")?;
|
||||
|
||||
let all_on = env::var("NUSHELL_ENABLE_ALL_FLAGS").is_ok();
|
||||
let flags: HashSet<String> = env::var("NUSHELL_ENABLE_FLAGS")
|
||||
.map(|s| s.split(',').map(|s| s.to_string()).collect())
|
||||
.unwrap_or_else(|_| HashSet::new());
|
||||
|
||||
if all_on && !flags.is_empty() {
|
||||
println!(
|
||||
"cargo:warning=Both NUSHELL_ENABLE_ALL_FLAGS and NUSHELL_ENABLE_FLAGS were set. You don't need both."
|
||||
);
|
||||
}
|
||||
|
||||
let workspace = match get_cargo_workspace(&input)? {
|
||||
// If the crate is being downloaded from crates.io, it won't have a workspace root, and that's ok
|
||||
None => return Ok(()),
|
||||
Some(workspace) => workspace,
|
||||
};
|
||||
|
||||
let path = Path::new(&workspace).join("features.toml");
|
||||
|
||||
// If the crate is being downloaded from crates.io, it won't have a features.toml, and that's ok
|
||||
if !path.exists() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let toml: HashMap<String, Feature> = toml::from_str(&std::fs::read_to_string(path)?)?;
|
||||
|
||||
for (key, value) in toml.iter() {
|
||||
if value.enabled || all_on || flags.contains(key) {
|
||||
println!("cargo:rustc-cfg={}", key);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
32
crates/nu-errors/Cargo.toml
Normal file
32
crates/nu-errors/Cargo.toml
Normal file
@ -0,0 +1,32 @@
|
||||
[package]
|
||||
name = "nu-errors"
|
||||
version = "0.8.0"
|
||||
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
||||
edition = "2018"
|
||||
description = "Core error subsystem for Nushell"
|
||||
license = "MIT"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
nu-source = { path = "../nu-source", version = "0.8.0" }
|
||||
|
||||
ansi_term = "0.12.1"
|
||||
bigdecimal = { version = "0.1.0", features = ["serde"] }
|
||||
derive-new = "0.5.8"
|
||||
language-reporting = "0.4.0"
|
||||
num-bigint = { version = "0.2.3", features = ["serde"] }
|
||||
num-traits = "0.2.10"
|
||||
serde = { version = "1.0.103", features = ["derive"] }
|
||||
nom = "5.0.1"
|
||||
nom_locate = "1.0.0"
|
||||
|
||||
# implement conversions
|
||||
subprocess = "0.1.18"
|
||||
serde_yaml = "0.8"
|
||||
toml = "0.5.5"
|
||||
serde_json = "1.0.44"
|
||||
|
||||
[build-dependencies]
|
||||
nu-build = { version = "0.8.0", path = "../nu-build" }
|
3
crates/nu-errors/build.rs
Normal file
3
crates/nu-errors/build.rs
Normal file
@ -0,0 +1,3 @@
|
||||
fn main() -> Result<(), Box<dyn std::error::Error>> {
|
||||
nu_build::build()
|
||||
}
|
912
crates/nu-errors/src/lib.rs
Normal file
912
crates/nu-errors/src/lib.rs
Normal file
@ -0,0 +1,912 @@
|
||||
use ansi_term::Color;
|
||||
use bigdecimal::BigDecimal;
|
||||
use derive_new::new;
|
||||
use language_reporting::{Diagnostic, Label, Severity};
|
||||
use nu_source::{b, DebugDocBuilder, PrettyDebug, Span, Spanned, SpannedItem, TracableContext};
|
||||
use num_bigint::BigInt;
|
||||
use num_traits::ToPrimitive;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fmt;
|
||||
use std::ops::Range;
|
||||
|
||||
/// A structured reason for a ParseError. Note that parsing in nu is more like macro expansion in
|
||||
/// other languages, so the kinds of errors that can occur during parsing are more contextual than
|
||||
/// you might expect.
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum ParseErrorReason {
|
||||
/// The parser encountered an EOF rather than what it was expecting
|
||||
Eof { expected: &'static str, span: Span },
|
||||
/// The parser expected to see the end of a token stream (possibly the token
|
||||
/// stream from inside a delimited token node), but found something else.
|
||||
ExtraTokens { actual: Spanned<String> },
|
||||
/// The parser encountered something other than what it was expecting
|
||||
Mismatch {
|
||||
expected: &'static str,
|
||||
actual: Spanned<String>,
|
||||
},
|
||||
|
||||
/// An unexpected internal error has occurred
|
||||
InternalError { message: Spanned<String> },
|
||||
|
||||
/// The parser tried to parse an argument for a command, but it failed for
|
||||
/// some reason
|
||||
ArgumentError {
|
||||
command: Spanned<String>,
|
||||
error: ArgumentError,
|
||||
},
|
||||
}
|
||||
|
||||
/// A newtype for `ParseErrorReason`
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ParseError {
|
||||
reason: ParseErrorReason,
|
||||
}
|
||||
|
||||
impl ParseError {
    /// Construct a [ParseErrorReason::Eof](ParseErrorReason::Eof)
    pub fn unexpected_eof(expected: &'static str, span: Span) -> ParseError {
        ParseError {
            reason: ParseErrorReason::Eof { expected, span },
        }
    }

    /// Construct a [ParseErrorReason::ExtraTokens](ParseErrorReason::ExtraTokens)
    pub fn extra_tokens(actual: Spanned<impl Into<String>>) -> ParseError {
        // Destructure so the span can be re-attached after converting the item
        let Spanned { span, item } = actual;

        ParseError {
            reason: ParseErrorReason::ExtraTokens {
                actual: item.into().spanned(span),
            },
        }
    }

    /// Construct a [ParseErrorReason::Mismatch](ParseErrorReason::Mismatch)
    pub fn mismatch(expected: &'static str, actual: Spanned<impl Into<String>>) -> ParseError {
        // Destructure so the span can be re-attached after converting the item
        let Spanned { span, item } = actual;

        ParseError {
            reason: ParseErrorReason::Mismatch {
                expected,
                actual: item.into().spanned(span),
            },
        }
    }

    /// Construct a [ParseErrorReason::InternalError](ParseErrorReason::InternalError)
    pub fn internal_error(message: Spanned<impl Into<String>>) -> ParseError {
        ParseError {
            reason: ParseErrorReason::InternalError {
                message: message.item.into().spanned(message.span),
            },
        }
    }

    /// Construct a [ParseErrorReason::ArgumentError](ParseErrorReason::ArgumentError)
    pub fn argument_error(command: Spanned<impl Into<String>>, kind: ArgumentError) -> ParseError {
        ParseError {
            reason: ParseErrorReason::ArgumentError {
                command: command.item.into().spanned(command.span),
                error: kind,
            },
        }
    }
}
|
||||
|
||||
/// Convert a [ParseError](ParseError) into a [ShellError](ShellError)
impl From<ParseError> for ShellError {
    fn from(error: ParseError) -> ShellError {
        // Each parse-level reason maps onto the closest shell-level error
        match error.reason {
            ParseErrorReason::Eof { expected, span } => ShellError::unexpected_eof(expected, span),
            // Extra tokens are reported as a type error: "nothing" was expected there
            ParseErrorReason::ExtraTokens { actual } => ShellError::type_error("nothing", actual),
            ParseErrorReason::Mismatch { actual, expected } => {
                ShellError::type_error(expected, actual)
            }
            ParseErrorReason::InternalError { message } => ShellError::labeled_error(
                format!("Internal error: {}", message.item),
                &message.item,
                &message.span,
            ),
            ParseErrorReason::ArgumentError { command, error } => {
                ShellError::argument_error(command, error)
            }
        }
    }
}
|
||||
|
||||
/// ArgumentError describes various ways that the parser could fail because of unexpected arguments.
/// Nu commands are like a combination of functions and macros, and these errors correspond to
/// problems that could be identified during expansion based on the syntactic signature of a
/// command.
#[derive(Debug, Eq, PartialEq, Clone, Ord, Hash, PartialOrd, Serialize, Deserialize)]
pub enum ArgumentError {
    /// The command specified a mandatory flag, but it was missing.
    MissingMandatoryFlag(String),
    /// The command specified a mandatory positional argument, but it was missing.
    MissingMandatoryPositional(String),
    /// A flag was found, and it should have been followed by a value, but no value was found
    MissingValueForName(String),
    /// A sequence of characters was found that was not syntactically valid (but would have
    /// been valid if the command was an external command)
    InvalidExternalWord,
}
|
||||
|
||||
impl PrettyDebug for ArgumentError {
    /// Render a short human-readable description of the argument problem,
    /// used when embedding this error inside a larger pretty-printed error.
    fn pretty(&self) -> DebugDocBuilder {
        match self {
            ArgumentError::MissingMandatoryFlag(flag) => {
                b::description("missing `")
                    + b::description(flag)
                    + b::description("` as mandatory flag")
            }
            ArgumentError::MissingMandatoryPositional(pos) => {
                b::description("missing `")
                    + b::description(pos)
                    + b::description("` as mandatory positional argument")
            }
            ArgumentError::MissingValueForName(name) => {
                b::description("missing value for flag `")
                    + b::description(name)
                    + b::description("`")
            }
            ArgumentError::InvalidExternalWord => b::description("invalid word"),
        }
    }
}
|
||||
|
||||
/// A `ShellError` is a proximate error and a possible cause, which could have its own cause,
/// creating a cause chain.
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Clone, Serialize, Deserialize, Hash)]
pub struct ShellError {
    // The most immediate error
    error: ProximateShellError,
    // Optional underlying cause, forming a chain of errors
    cause: Option<Box<ShellError>>,
}
|
||||
|
||||
/// `PrettyDebug` is for internal debugging. For user-facing debugging, [into_diagnostic](ShellError::into_diagnostic)
/// is used, which prints an error, highlighting spans.
impl PrettyDebug for ShellError {
    // Each proximate error is rendered as `Error Name (field: value, ...)`.
    // Only the proximate error is shown; the cause chain is not rendered here.
    fn pretty(&self) -> DebugDocBuilder {
        match &self.error {
            ProximateShellError::SyntaxError { problem } => {
                b::error("Syntax Error")
                    + b::space()
                    + b::delimit("(", b::description(&problem.item), ")")
            }
            ProximateShellError::UnexpectedEof { .. } => b::error("Unexpected end"),
            ProximateShellError::TypeError { expected, actual } => {
                b::error("Type Error")
                    + b::space()
                    + b::delimit(
                        "(",
                        b::description("expected:")
                            + b::space()
                            + b::description(expected)
                            + b::description(",")
                            + b::space()
                            + b::description("actual:")
                            + b::space()
                            // `actual` is optional; b::option renders nothing for None
                            + b::option(actual.item.as_ref().map(b::description)),
                        ")",
                    )
            }
            ProximateShellError::MissingProperty { subpath, expr } => {
                b::error("Missing Property")
                    + b::space()
                    + b::delimit(
                        "(",
                        b::description("expr:")
                            + b::space()
                            + b::description(&expr.item)
                            + b::description(",")
                            + b::space()
                            + b::description("subpath:")
                            + b::space()
                            + b::description(&subpath.item),
                        ")",
                    )
            }
            ProximateShellError::InvalidIntegerIndex { subpath, .. } => {
                b::error("Invalid integer index")
                    + b::space()
                    + b::delimit(
                        "(",
                        b::description("subpath:") + b::space() + b::description(&subpath.item),
                        ")",
                    )
            }
            ProximateShellError::MissingValue { reason, .. } => {
                b::error("Missing Value")
                    + b::space()
                    + b::delimit(
                        "(",
                        b::description("reason:") + b::space() + b::description(reason),
                        ")",
                    )
            }
            ProximateShellError::ArgumentError { command, error } => {
                b::error("Argument Error")
                    + b::space()
                    + b::delimit(
                        "(",
                        b::description("command:")
                            + b::space()
                            + b::description(&command.item)
                            + b::description(",")
                            + b::space()
                            + b::description("error:")
                            + b::space()
                            // Delegate to ArgumentError's own pretty-printer
                            + error.pretty(),
                        ")",
                    )
            }
            ProximateShellError::RangeError {
                kind,
                actual_kind,
                operation,
            } => {
                b::error("Range Error")
                    + b::space()
                    + b::delimit(
                        "(",
                        b::description("expected:")
                            + b::space()
                            + kind.pretty()
                            + b::description(",")
                            + b::space()
                            + b::description("actual:")
                            + b::space()
                            + b::description(&actual_kind.item)
                            + b::description(",")
                            + b::space()
                            + b::description("operation:")
                            + b::space()
                            + b::description(operation),
                        ")",
                    )
            }
            // Diagnostics carry their own rich rendering; just tag them here
            ProximateShellError::Diagnostic(_) => b::error("diagnostic"),
            ProximateShellError::CoerceError { left, right } => {
                b::error("Coercion Error")
                    + b::space()
                    + b::delimit(
                        "(",
                        b::description("left:")
                            + b::space()
                            + b::description(&left.item)
                            + b::description(",")
                            + b::space()
                            + b::description("right:")
                            + b::space()
                            + b::description(&right.item),
                        ")",
                    )
            }
            ProximateShellError::UntaggedRuntimeError { reason } => {
                b::error("Unknown Error") + b::delimit("(", b::description(reason), ")")
            }
        }
    }
}
|
||||
|
||||
impl std::fmt::Display for ShellError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.pretty().display())
|
||||
}
|
||||
}
|
||||
|
||||
impl serde::de::Error for ShellError {
    /// Allow serde deserializers to surface arbitrary messages as
    /// untagged runtime shell errors.
    fn custom<T>(msg: T) -> Self
    where
        T: std::fmt::Display,
    {
        ShellError::untagged_runtime_error(msg.to_string())
    }
}
|
||||
|
||||
impl ShellError {
    /// Construct a [ProximateShellError::TypeError](ProximateShellError::TypeError).
    pub fn type_error(
        expected: impl Into<String>,
        actual: Spanned<impl Into<String>>,
    ) -> ShellError {
        ProximateShellError::TypeError {
            expected: expected.into(),
            // The variant stores an optional actual value; wrap in Some here
            actual: actual.map(|i| Some(i.into())),
        }
        .start()
    }

    /// Construct a [ProximateShellError::MissingProperty](ProximateShellError::MissingProperty).
    pub fn missing_property(
        subpath: Spanned<impl Into<String>>,
        expr: Spanned<impl Into<String>>,
    ) -> ShellError {
        ProximateShellError::MissingProperty {
            subpath: subpath.map(|s| s.into()),
            expr: expr.map(|e| e.into()),
        }
        .start()
    }

    /// Construct a [ProximateShellError::InvalidIntegerIndex](ProximateShellError::InvalidIntegerIndex).
    pub fn invalid_integer_index(
        subpath: Spanned<impl Into<String>>,
        integer: impl Into<Span>,
    ) -> ShellError {
        ProximateShellError::InvalidIntegerIndex {
            subpath: subpath.map(|s| s.into()),
            integer: integer.into(),
        }
        .start()
    }

    /// Construct an error with no span information, from a plain message.
    pub fn untagged_runtime_error(error: impl Into<String>) -> ShellError {
        ProximateShellError::UntaggedRuntimeError {
            reason: error.into(),
        }
        .start()
    }

    /// Construct a [ProximateShellError::UnexpectedEof](ProximateShellError::UnexpectedEof).
    pub fn unexpected_eof(expected: impl Into<String>, span: impl Into<Span>) -> ShellError {
        ProximateShellError::UnexpectedEof {
            expected: expected.into(),
            span: span.into(),
        }
        .start()
    }

    /// Construct a [ProximateShellError::RangeError](ProximateShellError::RangeError).
    /// The actual value is captured via its Debug representation.
    pub fn range_error(
        expected: impl Into<ExpectedRange>,
        actual: &Spanned<impl fmt::Debug>,
        operation: impl Into<String>,
    ) -> ShellError {
        ProximateShellError::RangeError {
            kind: expected.into(),
            actual_kind: format!("{:?}", actual.item).spanned(actual.span),
            operation: operation.into(),
        }
        .start()
    }

    /// Construct a [ProximateShellError::SyntaxError](ProximateShellError::SyntaxError).
    pub fn syntax_error(problem: Spanned<impl Into<String>>) -> ShellError {
        ProximateShellError::SyntaxError {
            problem: problem.map(|p| p.into()),
        }
        .start()
    }

    /// Construct a [ProximateShellError::CoerceError](ProximateShellError::CoerceError)
    /// describing two values that could not be coerced to a common type.
    pub fn coerce_error(
        left: Spanned<impl Into<String>>,
        right: Spanned<impl Into<String>>,
    ) -> ShellError {
        ProximateShellError::CoerceError {
            left: left.map(|l| l.into()),
            right: right.map(|r| r.into()),
        }
        .start()
    }

    /// Construct a [ProximateShellError::ArgumentError](ProximateShellError::ArgumentError).
    pub fn argument_error(command: Spanned<impl Into<String>>, kind: ArgumentError) -> ShellError {
        ProximateShellError::ArgumentError {
            command: command.map(|c| c.into()),
            error: kind,
        }
        .start()
    }

    /// Convert a nom parser error into a diagnostic `ShellError`.
    pub fn parse_error(
        error: nom::Err<(
            nom_locate::LocatedSpanEx<&str, TracableContext>,
            nom::error::ErrorKind,
        )>,
    ) -> ShellError {
        use language_reporting::*;

        match error {
            nom::Err::Incomplete(_) => {
                // TODO: Get span of EOF
                let diagnostic = Diagnostic::new(
                    Severity::Error,
                    "Parse Error: Unexpected end of line".to_string(),
                );

                ShellError::diagnostic(diagnostic)
            }
            // Both recoverable and unrecoverable nom errors carry a located span
            nom::Err::Failure(span) | nom::Err::Error(span) => {
                let diagnostic = Diagnostic::new(Severity::Error, "Parse Error".to_string())
                    .with_label(Label::new_primary(Span::from(span.0)));

                ShellError::diagnostic(diagnostic)
            }
        }
    }

    /// Wrap an already-built diagnostic in a `ShellError`.
    pub fn diagnostic(diagnostic: Diagnostic<Span>) -> ShellError {
        ProximateShellError::Diagnostic(ShellDiagnostic { diagnostic }).start()
    }

    /// Convert this error into a user-facing diagnostic with highlighted
    /// source spans. Consumes the error; the cause chain is not rendered.
    pub fn into_diagnostic(self) -> Diagnostic<Span> {
        match self.error {
            ProximateShellError::MissingValue { span, reason } => {
                // A missing value is an internal invariant violation, hence Severity::Bug
                let mut d = Diagnostic::new(
                    Severity::Bug,
                    format!("Internal Error (missing value) :: {}", reason),
                );

                // Only label the source when a span was recorded
                if let Some(span) = span {
                    d = d.with_label(Label::new_primary(span));
                }

                d
            }
            ProximateShellError::ArgumentError {
                command,
                error,
            } => match error {
                ArgumentError::InvalidExternalWord => Diagnostic::new(
                    Severity::Error,
                    "Invalid bare word for Nu command (did you intend to invoke an external command?)".to_string())
                .with_label(Label::new_primary(command.span)),
                ArgumentError::MissingMandatoryFlag(name) => Diagnostic::new(
                    Severity::Error,
                    // Colors highlight the command name and the missing flag
                    format!(
                        "{} requires {}{}",
                        Color::Cyan.paint(&command.item),
                        Color::Black.bold().paint("--"),
                        Color::Black.bold().paint(name)
                    ),
                )
                .with_label(Label::new_primary(command.span)),
                ArgumentError::MissingMandatoryPositional(name) => Diagnostic::new(
                    Severity::Error,
                    format!(
                        "{} requires {} parameter",
                        Color::Cyan.paint(&command.item),
                        Color::Green.bold().paint(name.clone())
                    ),
                )
                .with_label(
                    Label::new_primary(command.span).with_message(format!("requires {} parameter", name)),
                ),
                ArgumentError::MissingValueForName(name) => Diagnostic::new(
                    Severity::Error,
                    format!(
                        "{} is missing value for flag {}{}",
                        Color::Cyan.paint(&command.item),
                        Color::Black.bold().paint("--"),
                        Color::Black.bold().paint(name)
                    ),
                )
                .with_label(Label::new_primary(command.span)),
            },
            // Type error where the actual value is known: mention both sides
            ProximateShellError::TypeError {
                expected,
                actual:
                    Spanned {
                        item: Some(actual),
                        span,
                    },
            } => Diagnostic::new(Severity::Error, "Type Error").with_label(
                Label::new_primary(span)
                    .with_message(format!("Expected {}, found {}", expected, actual)),
            ),
            // Type error with no actual value: only report what was expected
            ProximateShellError::TypeError {
                expected,
                actual:
                    Spanned {
                        item: None,
                        span
                    },
            } => Diagnostic::new(Severity::Error, "Type Error")
                .with_label(Label::new_primary(span).with_message(expected)),

            ProximateShellError::UnexpectedEof {
                expected, span
            } => Diagnostic::new(Severity::Error, "Unexpected end of input".to_string())
                .with_label(Label::new_primary(span).with_message(format!("Expected {}", expected))),

            ProximateShellError::RangeError {
                kind,
                operation,
                actual_kind:
                    Spanned {
                        item,
                        span
                    },
            } => Diagnostic::new(Severity::Error, "Range Error").with_label(
                Label::new_primary(span).with_message(format!(
                    "Expected to convert {} to {} while {}, but it was out of range",
                    item,
                    kind.display(),
                    operation
                )),
            ),

            ProximateShellError::SyntaxError {
                problem:
                    Spanned {
                        span,
                        item
                    },
            } => Diagnostic::new(Severity::Error, "Syntax Error")
                .with_label(Label::new_primary(span).with_message(item)),

            ProximateShellError::MissingProperty { subpath, expr, .. } => {

                let mut diag = Diagnostic::new(Severity::Error, "Missing property");

                // Without a usable span, fold the subpath into the message itself
                if subpath.span == Span::unknown() {
                    diag.message = format!("Missing property (for {})", subpath.item);
                } else {
                    let subpath = Label::new_primary(subpath.span).with_message(subpath.item);
                    diag = diag.with_label(subpath);

                    // Also point at the originating expression when its span is known
                    if expr.span != Span::unknown() {
                        let expr = Label::new_primary(expr.span).with_message(expr.item);
                        diag = diag.with_label(expr)
                    }

                }

                diag
            }

            ProximateShellError::InvalidIntegerIndex { subpath,integer } => {
                let mut diag = Diagnostic::new(Severity::Error, "Invalid integer property");

                // Same span-availability strategy as MissingProperty above
                if subpath.span == Span::unknown() {
                    diag.message = format!("Invalid integer property (for {})", subpath.item)
                } else {
                    let label = Label::new_primary(subpath.span).with_message(subpath.item);
                    diag = diag.with_label(label)
                }

                // Secondary label marks where the integer itself appeared
                diag = diag.with_label(Label::new_secondary(integer).with_message("integer"));

                diag
            }

            // Pre-built diagnostics pass through unchanged
            ProximateShellError::Diagnostic(diag) => diag.diagnostic,
            ProximateShellError::CoerceError { left, right } => {
                Diagnostic::new(Severity::Error, "Coercion error")
                    .with_label(Label::new_primary(left.span).with_message(left.item))
                    .with_label(Label::new_secondary(right.span).with_message(right.item))
            }

            ProximateShellError::UntaggedRuntimeError { reason } => Diagnostic::new(Severity::Error, format!("Error: {}", reason))
        }
    }

    /// Convenience constructor: a diagnostic error with one primary label.
    pub fn labeled_error(
        msg: impl Into<String>,
        label: impl Into<String>,
        span: impl Into<Span>,
    ) -> ShellError {
        ShellError::diagnostic(
            Diagnostic::new(Severity::Error, msg.into())
                .with_label(Label::new_primary(span.into()).with_message(label.into())),
        )
    }

    /// Convenience constructor: a diagnostic error with a primary and a
    /// secondary labeled span.
    pub fn labeled_error_with_secondary(
        msg: impl Into<String>,
        primary_label: impl Into<String>,
        primary_span: impl Into<Span>,
        secondary_label: impl Into<String>,
        secondary_span: impl Into<Span>,
    ) -> ShellError {
        ShellError::diagnostic(
            Diagnostic::new_error(msg.into())
                .with_label(
                    Label::new_primary(primary_span.into()).with_message(primary_label.into()),
                )
                .with_label(
                    Label::new_secondary(secondary_span.into())
                        .with_message(secondary_label.into()),
                ),
        )
    }

    /// Error for features not yet implemented.
    // NOTE(review): the `&format!` borrow is unnecessary — `impl Into<String>`
    // accepts the owned String directly.
    pub fn unimplemented(title: impl Into<String>) -> ShellError {
        ShellError::untagged_runtime_error(&format!("Unimplemented: {}", title.into()))
    }

    /// Error for states that should not occur.
    pub fn unexpected(title: impl Into<String>) -> ShellError {
        ShellError::untagged_runtime_error(&format!("Unexpected: {}", title.into()))
    }
}
|
||||
|
||||
/// `ExpectedRange` describes a range of values that was expected by a command. In addition
/// to typical ranges, this enum allows an error to specify that the range of allowed values
/// corresponds to a particular numeric type (which is a dominant use-case for the
/// [RangeError](ProximateShellError::RangeError) error type).
#[derive(Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Clone, Serialize, Deserialize)]
pub enum ExpectedRange {
    I8,
    I16,
    I32,
    I64,
    I128,
    U8,
    U16,
    U32,
    U64,
    U128,
    F32,
    F64,
    Usize,
    Size,
    BigInt,
    BigDecimal,
    /// An explicit numeric interval rather than a type-implied one
    Range { start: usize, end: usize },
}
|
||||
|
||||
/// Convert a Rust range into an [ExpectedRange](ExpectedRange).
|
||||
impl From<Range<usize>> for ExpectedRange {
|
||||
fn from(range: Range<usize>) -> Self {
|
||||
ExpectedRange::Range {
|
||||
start: range.start,
|
||||
end: range.end,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebug for ExpectedRange {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
b::description(match self {
|
||||
ExpectedRange::I8 => "an 8-bit signed integer",
|
||||
ExpectedRange::I16 => "a 16-bit signed integer",
|
||||
ExpectedRange::I32 => "a 32-bit signed integer",
|
||||
ExpectedRange::I64 => "a 64-bit signed integer",
|
||||
ExpectedRange::I128 => "a 128-bit signed integer",
|
||||
ExpectedRange::U8 => "an 8-bit unsigned integer",
|
||||
ExpectedRange::U16 => "a 16-bit unsigned integer",
|
||||
ExpectedRange::U32 => "a 32-bit unsigned integer",
|
||||
ExpectedRange::U64 => "a 64-bit unsigned integer",
|
||||
ExpectedRange::U128 => "a 128-bit unsigned integer",
|
||||
ExpectedRange::F32 => "a 32-bit float",
|
||||
ExpectedRange::F64 => "a 64-bit float",
|
||||
ExpectedRange::Usize => "an list index",
|
||||
ExpectedRange::Size => "a list offset",
|
||||
ExpectedRange::BigDecimal => "a decimal",
|
||||
ExpectedRange::BigInt => "an integer",
|
||||
ExpectedRange::Range { start, end } => {
|
||||
return b::description(format!("{} to {}", start, end))
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// The immediate (proximate) cause carried by a [ShellError](ShellError).
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub enum ProximateShellError {
    /// A problem with the syntax of the source, with its span
    SyntaxError {
        problem: Spanned<String>,
    },
    /// Input ended where more was expected
    UnexpectedEof {
        expected: String,
        span: Span,
    },
    /// A value of the wrong type was encountered; the actual value may be unknown
    TypeError {
        expected: String,
        actual: Spanned<Option<String>>,
    },
    /// A column-path lookup referenced a property that does not exist
    MissingProperty {
        subpath: Spanned<String>,
        expr: Spanned<String>,
    },
    /// A column-path lookup used an integer index that is not valid
    InvalidIntegerIndex {
        subpath: Spanned<String>,
        integer: Span,
    },
    /// An expected value was absent; span is optional
    MissingValue {
        span: Option<Span>,
        reason: String,
    },
    /// A command's arguments did not match its signature
    ArgumentError {
        command: Spanned<String>,
        error: ArgumentError,
    },
    /// A numeric value fell outside the range of the target type
    RangeError {
        kind: ExpectedRange,
        actual_kind: Spanned<String>,
        operation: String,
    },
    /// A pre-built diagnostic, carried through unchanged
    Diagnostic(ShellDiagnostic),
    /// Two values could not be coerced to a common type
    CoerceError {
        left: Spanned<String>,
        right: Spanned<String>,
    },
    /// A runtime error with a message but no span information
    UntaggedRuntimeError {
        reason: String,
    },
}
|
||||
|
||||
impl ProximateShellError {
    /// Wrap this proximate error in a `ShellError` with an empty cause chain.
    fn start(self) -> ShellError {
        ShellError {
            cause: None,
            error: self,
        }
    }
}
|
||||
|
||||
/// Newtype wrapping a `language_reporting` diagnostic so that comparison
/// and hashing traits can be implemented for it locally.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ShellDiagnostic {
    pub(crate) diagnostic: Diagnostic<Span>,
}
|
||||
|
||||
impl std::hash::Hash for ShellDiagnostic {
    /// Hash the observable parts of the diagnostic: severity, code, message,
    /// and each label's span, message and style. `LabelStyle` does not
    /// implement `Hash`, so it is mapped to 0/1 manually.
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.diagnostic.severity.hash(state);
        self.diagnostic.code.hash(state);
        self.diagnostic.message.hash(state);

        for label in &self.diagnostic.labels {
            label.span.hash(state);
            label.message.hash(state);
            match label.style {
                language_reporting::LabelStyle::Primary => 0.hash(state),
                language_reporting::LabelStyle::Secondary => 1.hash(state),
            }
        }
    }
}
|
||||
|
||||
impl PartialEq for ShellDiagnostic {
    // NOTE(review): diagnostics never compare equal, even to themselves,
    // making this non-reflexive. This is inconsistent with the Hash impl
    // above and appears to exist only so containing types can derive
    // comparison traits — confirm before relying on equality semantics.
    fn eq(&self, _other: &ShellDiagnostic) -> bool {
        false
    }
}
|
||||
|
||||
// NOTE(review): Eq is claimed even though eq() always returns false (see
// PartialEq above), so the reflexivity requirement of Eq is not satisfied.
impl Eq for ShellDiagnostic {}
|
||||
|
||||
impl std::cmp::PartialOrd for ShellDiagnostic {
    // NOTE(review): always returning Less violates the PartialOrd contract
    // (a < a). Like PartialEq, this is a placeholder so that containing
    // types can derive ordering; do not sort collections of diagnostics.
    fn partial_cmp(&self, _other: &Self) -> Option<std::cmp::Ordering> {
        Some(std::cmp::Ordering::Less)
    }
}
|
||||
|
||||
impl std::cmp::Ord for ShellDiagnostic {
    // NOTE(review): always returning Less violates the total-order contract
    // (cmp(a, a) must be Equal). Placeholder — see PartialOrd above.
    fn cmp(&self, _other: &Self) -> std::cmp::Ordering {
        std::cmp::Ordering::Less
    }
}
|
||||
|
||||
/// A simple title + message error pair (constructor generated by `derive_new`).
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, new, Clone, Serialize, Deserialize)]
pub struct StringError {
    title: String,
    error: String,
}
|
||||
|
||||
// ShellError participates in the standard error ecosystem; Display and Debug
// are implemented above, so the default trait methods suffice.
impl std::error::Error for ShellError {}
|
||||
|
||||
impl std::convert::From<Box<dyn std::error::Error>> for ShellError {
|
||||
fn from(input: Box<dyn std::error::Error>) -> ShellError {
|
||||
ShellError::untagged_runtime_error(format!("{}", input))
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::From<std::io::Error> for ShellError {
|
||||
fn from(input: std::io::Error) -> ShellError {
|
||||
ShellError::untagged_runtime_error(format!("{}", input))
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::From<subprocess::PopenError> for ShellError {
    /// Adapt a subprocess spawn error via its Display output.
    fn from(input: subprocess::PopenError) -> ShellError {
        ShellError::untagged_runtime_error(format!("{}", input))
    }
}
|
||||
|
||||
impl std::convert::From<serde_yaml::Error> for ShellError {
    /// Adapt a YAML (de)serialization error. Uses the Debug representation,
    /// unlike the Display-based conversions above.
    fn from(input: serde_yaml::Error) -> ShellError {
        ShellError::untagged_runtime_error(format!("{:?}", input))
    }
}
|
||||
|
||||
impl std::convert::From<toml::ser::Error> for ShellError {
    /// Adapt a TOML serialization error (Debug representation).
    fn from(input: toml::ser::Error) -> ShellError {
        ShellError::untagged_runtime_error(format!("{:?}", input))
    }
}
|
||||
|
||||
impl std::convert::From<serde_json::Error> for ShellError {
    /// Adapt a JSON (de)serialization error (Debug representation).
    fn from(input: serde_json::Error) -> ShellError {
        ShellError::untagged_runtime_error(format!("{:?}", input))
    }
}
|
||||
|
||||
impl std::convert::From<Box<dyn std::error::Error + Send + Sync>> for ShellError {
    /// Adapt a thread-safe boxed error.
    // NOTE(review): this uses the Debug representation, while the plain
    // `Box<dyn Error>` conversion above uses Display — the inconsistency
    // produces different message styles for equivalent errors.
    fn from(input: Box<dyn std::error::Error + Send + Sync>) -> ShellError {
        ShellError::untagged_runtime_error(format!("{:?}", input))
    }
}
|
||||
|
||||
/// Fallible numeric coercion into `U`; on failure produces a
/// [ShellError::range_error](ShellError::range_error) mentioning the
/// operation that was being performed.
pub trait CoerceInto<U> {
    fn coerce_into(self, operation: impl Into<String>) -> Result<U, ShellError>;
}
|
||||
|
||||
/// Maps a primitive numeric type to the `ExpectedRange` variant describing it,
/// used by the `ranged_int!`/`ranged_decimal!` macros below to build errors.
trait ToExpectedRange {
    fn to_expected_range() -> ExpectedRange;
}
|
||||
|
||||
/// For a primitive integer type `$ty`, a `num_traits` conversion method `$op`
/// (e.g. `to_u8`) and an `ExpectedRange` variant `$variant`, generate:
/// - a `ToExpectedRange` impl for `$ty`, and
/// - `CoerceInto<$ty>` impls for `Tagged<BigInt>` and `Tagged<&BigInt>`
///   that turn out-of-range conversions into `ShellError::range_error`.
macro_rules! ranged_int {
    ($ty:tt -> $op:tt -> $variant:tt) => {
        impl ToExpectedRange for $ty {
            fn to_expected_range() -> ExpectedRange {
                ExpectedRange::$variant
            }
        }

        impl CoerceInto<$ty> for nu_source::Tagged<BigInt> {
            fn coerce_into(self, operation: impl Into<String>) -> Result<$ty, ShellError> {
                // $op returns None when the BigInt does not fit in $ty
                match self.$op() {
                    Some(v) => Ok(v),
                    None => Err(ShellError::range_error(
                        $ty::to_expected_range(),
                        &self.item.spanned(self.tag.span),
                        operation.into(),
                    )),
                }
            }
        }

        // Same impl for a borrowed BigInt, to avoid cloning at call sites
        impl CoerceInto<$ty> for nu_source::Tagged<&BigInt> {
            fn coerce_into(self, operation: impl Into<String>) -> Result<$ty, ShellError> {
                match self.$op() {
                    Some(v) => Ok(v),
                    None => Err(ShellError::range_error(
                        $ty::to_expected_range(),
                        &self.item.spanned(self.tag.span),
                        operation.into(),
                    )),
                }
            }
        }
    };
}
|
||||
|
||||
// Instantiate BigInt coercions for all fixed-width integer types.
ranged_int!(u8 -> to_u8 -> U8);
ranged_int!(u16 -> to_u16 -> U16);
ranged_int!(u32 -> to_u32 -> U32);
ranged_int!(u64 -> to_u64 -> U64);
ranged_int!(i8 -> to_i8 -> I8);
ranged_int!(i16 -> to_i16 -> I16);
ranged_int!(i32 -> to_i32 -> I32);
ranged_int!(i64 -> to_i64 -> I64);
|
||||
|
||||
/// Same scheme as `ranged_int!`, but for `BigDecimal` sources and floating
/// point targets: generates `ToExpectedRange` for `$ty` and `CoerceInto<$ty>`
/// for owned and borrowed `Tagged<BigDecimal>`.
macro_rules! ranged_decimal {
    ($ty:tt -> $op:tt -> $variant:tt) => {
        impl ToExpectedRange for $ty {
            fn to_expected_range() -> ExpectedRange {
                ExpectedRange::$variant
            }
        }

        impl CoerceInto<$ty> for nu_source::Tagged<BigDecimal> {
            fn coerce_into(self, operation: impl Into<String>) -> Result<$ty, ShellError> {
                // $op returns None when the value cannot be represented in $ty
                match self.$op() {
                    Some(v) => Ok(v),
                    None => Err(ShellError::range_error(
                        $ty::to_expected_range(),
                        &self.item.spanned(self.tag.span),
                        operation.into(),
                    )),
                }
            }
        }

        // Borrowed variant, mirroring the owned impl above
        impl CoerceInto<$ty> for nu_source::Tagged<&BigDecimal> {
            fn coerce_into(self, operation: impl Into<String>) -> Result<$ty, ShellError> {
                match self.$op() {
                    Some(v) => Ok(v),
                    None => Err(ShellError::range_error(
                        $ty::to_expected_range(),
                        &self.item.spanned(self.tag.span),
                        operation.into(),
                    )),
                }
            }
        }
    };
}
|
||||
|
||||
// Instantiate BigDecimal coercions for both float widths.
ranged_decimal!(f32 -> to_f32 -> F32);
ranged_decimal!(f64 -> to_f64 -> F64);
|
13
crates/nu-macros/Cargo.toml
Normal file
13
crates/nu-macros/Cargo.toml
Normal file
@ -0,0 +1,13 @@
|
||||
[package]
|
||||
name = "nu-macros"
|
||||
version = "0.8.0"
|
||||
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
||||
edition = "2018"
|
||||
description = "Core macros for building Nushell"
|
||||
license = "MIT"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
nu-protocol = { path = "../nu-protocol", version = "0.8.0" }
|
25
crates/nu-macros/src/lib.rs
Normal file
25
crates/nu-macros/src/lib.rs
Normal file
@ -0,0 +1,25 @@
|
||||
/// Declarative DSL for building a command `Signature`:
///
/// ```ignore
/// signature! {
///     def name {
///         "usage text"
///         arg (Shape) - "required arg description"
///         arg (optional Shape) - "optional arg description"
///     }
/// }
/// ```
///
/// Expands to `Signature::new(...)` plus one `positional!` invocation per
/// declared argument, and evaluates to the finished signature.
#[macro_export]
macro_rules! signature {
    (def $name:tt {
        $usage:tt
        $(
            $positional_name:tt $positional_ty:tt - $positional_desc:tt
        )*
    }) => {{
        let signature = Signature::new(stringify!($name)).desc($usage);
        $(
            $crate::positional! { signature, $positional_name $positional_ty - $positional_desc }
        )*
        signature
    }};
}
|
||||
|
||||
/// Helper for `signature!`: adds one positional argument to `$ident`
/// (the signature binding), shadowing it with the updated value.
/// `(optional Shape)` produces an optional positional; `(Shape)` a required one.
#[macro_export]
macro_rules! positional {
    ($ident:tt, $name:tt (optional $shape:tt) - $desc:tt) => {
        let $ident = $ident.optional(stringify!($name), SyntaxShape::$shape, $desc);
    };
    ($ident:tt, $name:tt ($shape:tt)- $desc:tt) => {
        let $ident = $ident.required(stringify!($name), SyntaxShape::$shape, $desc);
    };
}
|
48
crates/nu-parser/Cargo.toml
Normal file
48
crates/nu-parser/Cargo.toml
Normal file
@ -0,0 +1,48 @@
|
||||
[package]
|
||||
name = "nu-parser"
|
||||
version = "0.8.0"
|
||||
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
||||
edition = "2018"
|
||||
description = "Core parser used in Nushell"
|
||||
license = "MIT"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
nu-errors = { path = "../nu-errors", version = "0.8.0" }
|
||||
nu-source = { path = "../nu-source", version = "0.8.0" }
|
||||
nu-protocol = { path = "../nu-protocol", version = "0.8.0" }
|
||||
|
||||
pretty_env_logger = "0.3.1"
|
||||
pretty = "0.5.2"
|
||||
termcolor = "1.0.5"
|
||||
log = "0.4.8"
|
||||
indexmap = { version = "1.3.0", features = ["serde-1"] }
|
||||
serde = { version = "1.0.102", features = ["derive"] }
|
||||
nom = "5.0.1"
|
||||
nom_locate = "1.0.0"
|
||||
nom-tracable = "0.4.1"
|
||||
num-traits = "0.2.8"
|
||||
num-bigint = { version = "0.2.3", features = ["serde"] }
|
||||
bigdecimal = { version = "0.1.0", features = ["serde"] }
|
||||
derive-new = "0.5.8"
|
||||
getset = "0.0.9"
|
||||
cfg-if = "0.1"
|
||||
itertools = "0.8.1"
|
||||
shellexpand = "1.0.0"
|
||||
ansi_term = "0.12.1"
|
||||
ptree = {version = "0.2" }
|
||||
language-reporting = "0.4.0"
|
||||
unicode-xid = "0.2.0"
|
||||
enumflags2 = "0.6.2"
|
||||
|
||||
[dev-dependencies]
|
||||
pretty_assertions = "0.6.1"
|
||||
|
||||
[build-dependencies]
|
||||
nu-build = { version = "0.8.0", path = "../nu-build" }
|
||||
|
||||
[features]
|
||||
stable = []
|
||||
trace = ["nom-tracable/trace"]
|
3
crates/nu-parser/build.rs
Normal file
3
crates/nu-parser/build.rs
Normal file
@ -0,0 +1,3 @@
|
||||
// Cargo build script: delegates to the shared nu-build helper.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    nu_build::build()
}
|
35
crates/nu-parser/src/commands.rs
Normal file
35
crates/nu-parser/src/commands.rs
Normal file
@ -0,0 +1,35 @@
|
||||
pub mod classified;
|
||||
|
||||
use crate::commands::classified::external::{ExternalArg, ExternalArgs, ExternalCommand};
|
||||
use crate::commands::classified::ClassifiedCommand;
|
||||
use crate::hir::expand_external_tokens::ExternalTokensShape;
|
||||
use crate::hir::syntax_shape::{expand_syntax, ExpandContext};
|
||||
use crate::hir::tokens_iterator::TokensIterator;
|
||||
use nu_errors::ParseError;
|
||||
use nu_source::{Spanned, Tagged};
|
||||
|
||||
// Classify this command as an external command, which doesn't give special meaning
// to nu syntactic constructs, and passes all arguments to the external command as
// strings.
pub(crate) fn external_command(
    tokens: &mut TokensIterator,
    context: &ExpandContext,
    name: Tagged<&str>,
) -> Result<ClassifiedCommand, ParseError> {
    // Expand the remaining tokens with external-command rules; `?` propagates
    // any ParseError from expansion.
    let Spanned { item, span } = expand_syntax(&ExternalTokensShape, tokens, context)?.tokens;

    Ok(ClassifiedCommand::External(ExternalCommand {
        name: name.to_string(),
        name_tag: name.tag(),
        args: ExternalArgs {
            // Each expanded token becomes one string argument with its span
            list: item
                .iter()
                .map(|x| ExternalArg {
                    tag: x.span.into(),
                    arg: x.item.clone(),
                })
                .collect(),
            span,
        },
    }))
}
|
92
crates/nu-parser/src/commands/classified.rs
Normal file
92
crates/nu-parser/src/commands/classified.rs
Normal file
@ -0,0 +1,92 @@
|
||||
pub mod external;
|
||||
pub mod internal;
|
||||
|
||||
use crate::commands::classified::external::ExternalCommand;
|
||||
use crate::commands::classified::internal::InternalCommand;
|
||||
use crate::hir;
|
||||
use crate::parse::token_tree::TokenNode;
|
||||
use derive_new::new;
|
||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span};
|
||||
|
||||
/// A command in a classified pipeline, after parsing but before evaluation.
#[derive(Debug, Clone, Eq, PartialEq)]
pub enum ClassifiedCommand {
    /// A bare expression in command position (variant currently unused).
    #[allow(unused)]
    Expr(TokenNode),
    /// A dynamically-dispatched call (variant currently unused).
    #[allow(unused)]
    Dynamic(hir::Call),
    /// A command implemented inside nu, with its parsed call.
    Internal(InternalCommand),
    /// A command passed through to the operating system.
    External(ExternalCommand),
}

impl PrettyDebugWithSource for ClassifiedCommand {
    /// Renders a debug representation of this command against its source text.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        match self {
            ClassifiedCommand::Expr(token) => b::typed("command", token.pretty_debug(source)),
            ClassifiedCommand::Dynamic(call) => b::typed("command", call.pretty_debug(source)),
            ClassifiedCommand::Internal(internal) => internal.pretty_debug(source),
            ClassifiedCommand::External(external) => external.pretty_debug(source),
        }
    }
}

impl HasSpan for ClassifiedCommand {
    /// The source span covered by this command, delegated to the variant payload.
    fn span(&self) -> Span {
        match self {
            ClassifiedCommand::Expr(node) => node.span(),
            ClassifiedCommand::Internal(command) => command.span(),
            ClassifiedCommand::Dynamic(call) => call.span,
            ClassifiedCommand::External(command) => command.span(),
        }
    }
}
|
||||
|
||||
/// A dynamically-resolved command invocation, carrying only its argument call.
#[derive(new, Debug, Eq, PartialEq)]
pub(crate) struct DynamicCommand {
    pub(crate) args: hir::Call,
}

/// An ordered list of classified commands plus the span they collectively cover.
#[derive(Debug, Clone)]
pub struct Commands {
    pub list: Vec<ClassifiedCommand>,
    pub span: Span,
}

impl std::ops::Deref for Commands {
    type Target = [ClassifiedCommand];

    // Allow `Commands` to be used anywhere a slice of commands is expected.
    fn deref(&self) -> &Self::Target {
        &self.list
    }
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ClassifiedPipeline {
|
||||
pub commands: Commands,
|
||||
}
|
||||
|
||||
impl ClassifiedPipeline {
|
||||
pub fn commands(list: Vec<ClassifiedCommand>, span: impl Into<Span>) -> ClassifiedPipeline {
|
||||
ClassifiedPipeline {
|
||||
commands: Commands {
|
||||
list,
|
||||
span: span.into(),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for ClassifiedPipeline {
|
||||
fn span(&self) -> Span {
|
||||
self.commands.span
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for ClassifiedPipeline {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::intersperse(
|
||||
self.commands.iter().map(|c| c.pretty_debug(source)),
|
||||
b::operator(" | "),
|
||||
)
|
||||
.or(b::delimit("<", b::description("empty pipeline"), ">"))
|
||||
}
|
||||
}
|
65
crates/nu-parser/src/commands/classified/external.rs
Normal file
65
crates/nu-parser/src/commands/classified/external.rs
Normal file
@ -0,0 +1,65 @@
|
||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebug, Span, Tag};
|
||||
|
||||
/// A single argument to an external command: raw text plus its source tag.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct ExternalArg {
    pub arg: String,
    pub tag: Tag,
}

impl std::ops::Deref for ExternalArg {
    type Target = str;

    // Let an `ExternalArg` be used directly as a `&str`.
    fn deref(&self) -> &str {
        &self.arg
    }
}

/// The full argument list of an external command, with its overall span.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct ExternalArgs {
    pub list: Vec<ExternalArg>,
    pub span: Span,
}

impl ExternalArgs {
    /// Iterates over the arguments by reference.
    pub fn iter(&self) -> impl Iterator<Item = &ExternalArg> {
        self.list.iter()
    }
}

impl std::ops::Deref for ExternalArgs {
    type Target = [ExternalArg];

    // Let `ExternalArgs` be used directly as a slice of arguments.
    fn deref(&self) -> &[ExternalArg] {
        &self.list
    }
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq)]
|
||||
pub struct ExternalCommand {
|
||||
pub name: String,
|
||||
|
||||
pub name_tag: Tag,
|
||||
pub args: ExternalArgs,
|
||||
}
|
||||
|
||||
impl PrettyDebug for ExternalCommand {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
b::typed(
|
||||
"external command",
|
||||
b::description(&self.name)
|
||||
+ b::preceded(
|
||||
b::space(),
|
||||
b::intersperse(
|
||||
self.args.iter().map(|a| b::primitive(a.arg.to_string())),
|
||||
b::space(),
|
||||
),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for ExternalCommand {
|
||||
fn span(&self) -> Span {
|
||||
self.name_tag.span.until(self.args.span)
|
||||
}
|
||||
}
|
28
crates/nu-parser/src/commands/classified/internal.rs
Normal file
28
crates/nu-parser/src/commands/classified/internal.rs
Normal file
@ -0,0 +1,28 @@
|
||||
use crate::hir;
|
||||
|
||||
use derive_new::new;
|
||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Tag};
|
||||
|
||||
#[derive(new, Debug, Clone, Eq, PartialEq)]
|
||||
pub struct InternalCommand {
|
||||
pub name: String,
|
||||
pub name_tag: Tag,
|
||||
pub args: hir::Call,
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for InternalCommand {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::typed(
|
||||
"internal command",
|
||||
b::description(&self.name) + b::space() + self.args.pretty_debug(source),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for InternalCommand {
|
||||
fn span(&self) -> Span {
|
||||
let start = self.name_tag.span;
|
||||
|
||||
start.until(self.args.span)
|
||||
}
|
||||
}
|
399
crates/nu-parser/src/hir.rs
Normal file
399
crates/nu-parser/src/hir.rs
Normal file
@ -0,0 +1,399 @@
|
||||
pub(crate) mod baseline_parse;
|
||||
pub(crate) mod binary;
|
||||
pub(crate) mod expand_external_tokens;
|
||||
pub(crate) mod external_command;
|
||||
pub(crate) mod named;
|
||||
pub(crate) mod path;
|
||||
pub(crate) mod range;
|
||||
pub(crate) mod signature;
|
||||
pub mod syntax_shape;
|
||||
pub(crate) mod tokens_iterator;
|
||||
|
||||
use crate::hir::syntax_shape::Member;
|
||||
use crate::parse::operator::CompareOperator;
|
||||
use crate::parse::parser::Number;
|
||||
use crate::parse::unit::Unit;
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use nu_protocol::{PathMember, ShellTypeName};
|
||||
use nu_source::{
|
||||
b, DebugDocBuilder, HasSpan, PrettyDebug, PrettyDebugWithSource, Span, Spanned, SpannedItem,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::parse::tokens::RawNumber;
|
||||
|
||||
pub(crate) use self::binary::Binary;
|
||||
pub(crate) use self::path::Path;
|
||||
pub(crate) use self::range::Range;
|
||||
pub(crate) use self::syntax_shape::ExpandContext;
|
||||
pub(crate) use self::tokens_iterator::TokensIterator;
|
||||
|
||||
pub use self::external_command::ExternalCommand;
|
||||
pub use self::named::{NamedArguments, NamedValue};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Signature {
|
||||
unspanned: nu_protocol::Signature,
|
||||
span: Span,
|
||||
}
|
||||
|
||||
impl Signature {
|
||||
pub fn new(unspanned: nu_protocol::Signature, span: impl Into<Span>) -> Signature {
|
||||
Signature {
|
||||
unspanned,
|
||||
span: span.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for Signature {
|
||||
fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for Signature {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
self.unspanned.pretty_debug(source)
|
||||
}
|
||||
}
|
||||
|
||||
/// A parsed command invocation: the command head plus optional positional and
/// named arguments, with the span of the whole call.
#[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)]
pub struct Call {
    #[get = "pub(crate)"]
    pub head: Box<Expression>,
    #[get = "pub(crate)"]
    pub positional: Option<Vec<Expression>>,
    #[get = "pub(crate)"]
    pub named: Option<NamedArguments>,
    pub span: Span,
}

impl PrettyDebugWithSource for Call {
    /// Renders the call as `(head positional... named...)`; the positional and
    /// named sections are omitted when absent.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::delimit(
            "(",
            self.head.pretty_debug(source)
                + b::preceded_option(
                    Some(b::space()),
                    self.positional.as_ref().map(|pos| {
                        b::intersperse(pos.iter().map(|expr| expr.pretty_debug(source)), b::space())
                    }),
                )
                + b::preceded_option(
                    Some(b::space()),
                    self.named.as_ref().map(|named| named.pretty_debug(source)),
                ),
            ")",
        )
    }
}
|
||||
|
||||
/// The kinds of expressions in the HIR, without their spans (see `Expression`).
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RawExpression {
    Literal(Literal),
    ExternalWord,
    Synthetic(Synthetic),
    Variable(Variable),
    Binary(Box<Binary>),
    Range(Box<Range>),
    Block(Vec<Expression>),
    List(Vec<Expression>),
    Path(Box<Path>),

    FilePath(PathBuf),
    ExternalCommand(ExternalCommand),
    Command(Span),

    Boolean(bool),
}

impl ShellTypeName for RawExpression {
    /// A human-readable name for this expression's kind, used in diagnostics.
    fn type_name(&self) -> &'static str {
        match self {
            RawExpression::Literal(literal) => literal.type_name(),
            RawExpression::Synthetic(synthetic) => synthetic.type_name(),
            RawExpression::Command(..) => "command",
            RawExpression::ExternalWord => "external word",
            RawExpression::FilePath(..) => "file path",
            RawExpression::Variable(..) => "variable",
            RawExpression::List(..) => "list",
            RawExpression::Binary(..) => "binary",
            RawExpression::Range(..) => "range",
            RawExpression::Block(..) => "block",
            RawExpression::Path(..) => "variable path",
            RawExpression::Boolean(..) => "boolean",
            RawExpression::ExternalCommand(..) => "external",
        }
    }
}
|
||||
|
||||
/// An expression synthesized by the parser rather than written in the source;
/// it therefore has no span of its own.
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum Synthetic {
    String(String),
}

impl ShellTypeName for Synthetic {
    /// A human-readable name for this synthetic value's kind.
    fn type_name(&self) -> &'static str {
        match self {
            Synthetic::String(_) => "string",
        }
    }
}
|
||||
|
||||
impl RawExpression {
|
||||
pub fn into_expr(self, span: impl Into<Span>) -> Expression {
|
||||
Expression {
|
||||
expr: self,
|
||||
span: span.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_unspanned_expr(self) -> Expression {
|
||||
Expression {
|
||||
expr: self,
|
||||
span: Span::unknown(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A spanned HIR expression: the expression kind plus where it came from.
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub struct Expression {
    pub expr: RawExpression,
    pub span: Span,
}

impl std::ops::Deref for Expression {
    type Target = RawExpression;

    // Allow `RawExpression` methods to be called directly on an `Expression`.
    fn deref(&self) -> &RawExpression {
        &self.expr
    }
}

impl HasSpan for Expression {
    fn span(&self) -> Span {
        self.span
    }
}
|
||||
|
||||
impl PrettyDebugWithSource for Expression {
    /// Renders the expression for debugging; span-backed variants (external
    /// words, variables, commands) recover their text from `source`.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        match &self.expr {
            RawExpression::Literal(literal) => literal.spanned(self.span).pretty_debug(source),
            RawExpression::ExternalWord => {
                b::typed("external word", b::primitive(self.span.slice(source)))
            }
            RawExpression::Synthetic(s) => match s {
                Synthetic::String(s) => b::typed("synthetic", b::primitive(format!("{:?}", s))),
            },
            RawExpression::Variable(_) => b::keyword(self.span.slice(source)),
            RawExpression::Binary(binary) => binary.pretty_debug(source),
            RawExpression::Range(range) => range.pretty_debug(source),
            // Block contents are not rendered, only an opaque marker.
            RawExpression::Block(_) => b::opaque("block"),
            RawExpression::List(list) => b::delimit(
                "[",
                b::intersperse(
                    list.iter().map(|item| item.pretty_debug(source)),
                    b::space(),
                ),
                "]",
            ),
            RawExpression::Path(path) => path.pretty_debug(source),
            RawExpression::FilePath(path) => b::typed("path", b::primitive(path.display())),
            RawExpression::ExternalCommand(external) => b::typed(
                "external command",
                b::primitive(external.name.slice(source)),
            ),
            RawExpression::Command(command) => {
                b::typed("command", b::primitive(command.slice(source)))
            }
            RawExpression::Boolean(boolean) => match boolean {
                true => b::primitive("$yes"),
                false => b::primitive("$no"),
            },
        }
    }
}
|
||||
|
||||
impl Expression {
    /// A numeric literal expression.
    pub fn number(i: impl Into<Number>, span: impl Into<Span>) -> Expression {
        let span = span.into();

        RawExpression::Literal(RawLiteral::Number(i.into()).into_literal(span)).into_expr(span)
    }

    /// A size literal (number plus unit) expression.
    pub fn size(i: impl Into<Number>, unit: impl Into<Unit>, span: impl Into<Span>) -> Expression {
        let span = span.into();

        RawExpression::Literal(RawLiteral::Size(i.into(), unit.into()).into_literal(span))
            .into_expr(span)
    }

    /// A synthetic (not-in-source) string expression with an unknown span.
    pub fn synthetic_string(s: impl Into<String>) -> Expression {
        RawExpression::Synthetic(Synthetic::String(s.into())).into_unspanned_expr()
    }

    /// A string literal. `inner` is the span of the string's contents; `outer`
    /// spans the whole token (presumably including the quotes — see
    /// `inner_string_span` in the tests).
    pub fn string(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
        let outer = outer.into();

        RawExpression::Literal(RawLiteral::String(inner.into()).into_literal(outer))
            .into_expr(outer)
    }

    /// A column-path literal built from its members.
    pub fn column_path(members: Vec<Member>, span: impl Into<Span>) -> Expression {
        let span = span.into();

        RawExpression::Literal(RawLiteral::ColumnPath(members).into_literal(span)).into_expr(span)
    }

    /// A path expression: a head expression followed by member accesses.
    pub fn path(
        head: Expression,
        tail: Vec<impl Into<PathMember>>,
        span: impl Into<Span>,
    ) -> Expression {
        let tail = tail.into_iter().map(|t| t.into()).collect();
        RawExpression::Path(Box::new(Path::new(head, tail))).into_expr(span.into())
    }

    /// Extends `head` with one more member access (`head.next`), flattening
    /// into the existing path when `head` is already a path expression.
    pub fn dot_member(head: Expression, next: impl Into<PathMember>) -> Expression {
        let Expression { expr: item, span } = head;
        let next = next.into();

        // `span` is `Copy`, so `head.span` remains accessible even after the
        // partial move of `head.expr` above.
        let new_span = head.span.until(next.span);

        match item {
            RawExpression::Path(path) => {
                let (head, mut tail) = path.parts();

                tail.push(next);
                Expression::path(head, tail, new_span)
            }

            other => Expression::path(other.into_expr(span), vec![next], new_span),
        }
    }

    /// A binary comparison expression `left op right`; its span covers both
    /// operands.
    pub fn infix(
        left: Expression,
        op: Spanned<impl Into<CompareOperator>>,
        right: Expression,
    ) -> Expression {
        let new_span = left.span.until(right.span);

        RawExpression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right)))
            .into_expr(new_span)
    }

    /// A range expression; `op` is the span of the range operator itself.
    pub fn range(left: Expression, op: Span, right: Expression) -> Expression {
        let new_span = left.span.until(right.span);

        RawExpression::Range(Box::new(Range::new(left, op, right))).into_expr(new_span)
    }

    /// A file-path expression.
    pub fn file_path(path: impl Into<PathBuf>, outer: impl Into<Span>) -> Expression {
        RawExpression::FilePath(path.into()).into_expr(outer)
    }

    /// A list expression.
    pub fn list(list: Vec<Expression>, span: impl Into<Span>) -> Expression {
        RawExpression::List(list).into_expr(span)
    }

    /// A bare-word literal; its text is recovered from the span at render time.
    pub fn bare(span: impl Into<Span>) -> Expression {
        let span = span.into();

        RawExpression::Literal(RawLiteral::Bare.into_literal(span)).into_expr(span)
    }

    /// A glob-pattern literal; `inner` is the pattern text, `outer` its span.
    pub fn pattern(inner: impl Into<String>, outer: impl Into<Span>) -> Expression {
        let outer = outer.into();

        RawExpression::Literal(RawLiteral::GlobPattern(inner.into()).into_literal(outer))
            .into_expr(outer)
    }

    /// A variable reference; `inner` is the name span, `outer` the whole token.
    pub fn variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
        RawExpression::Variable(Variable::Other(inner.into())).into_expr(outer)
    }

    /// An external command reference.
    pub fn external_command(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
        RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).into_expr(outer)
    }

    /// A reference to the special `$it` variable.
    pub fn it_variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
        RawExpression::Variable(Variable::It(inner.into())).into_expr(outer)
    }
}
|
||||
|
||||
impl From<Spanned<Path>> for Expression {
|
||||
fn from(path: Spanned<Path>) -> Expression {
|
||||
RawExpression::Path(Box::new(path.item)).into_expr(path.span)
|
||||
}
|
||||
}
|
||||
|
||||
/// Literals are expressions that are:
///
/// 1. Copy
/// 2. Can be evaluated without additional context
/// 3. Evaluation cannot produce an error
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum RawLiteral {
    Number(Number),
    Size(Number, Unit),
    // The string's contents are stored as a span into the source text.
    String(Span),
    GlobPattern(String),
    ColumnPath(Vec<Member>),
    // A bare word; its text lives entirely in the enclosing literal's span.
    Bare,
}

impl RawLiteral {
    /// Attaches a span, producing a full `Literal`.
    pub fn into_literal(self, span: impl Into<Span>) -> Literal {
        Literal {
            literal: self,
            span: span.into(),
        }
    }
}

/// A `RawLiteral` together with the source span it covers.
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub struct Literal {
    pub literal: RawLiteral,
    pub span: Span,
}
|
||||
|
||||
impl ShellTypeName for Literal {
    /// A human-readable name for this literal's kind, used in diagnostics.
    fn type_name(&self) -> &'static str {
        match &self.literal {
            RawLiteral::Number(..) => "number",
            RawLiteral::Size(..) => "size",
            RawLiteral::String(..) => "string",
            RawLiteral::ColumnPath(..) => "column path",
            // Bare words are presented to users as plain strings.
            RawLiteral::Bare => "string",
            RawLiteral::GlobPattern(_) => "pattern",
        }
    }
}

impl PrettyDebugWithSource for Literal {
    /// Renders the literal; span-backed variants (strings, bare words) recover
    /// their text from `source`.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        match &self.literal {
            RawLiteral::Number(number) => number.pretty(),
            RawLiteral::Size(number, unit) => (number.pretty() + unit.pretty()).group(),
            RawLiteral::String(string) => b::primitive(format!("{:?}", string.slice(source))),
            RawLiteral::GlobPattern(pattern) => b::typed("pattern", b::primitive(pattern)),
            RawLiteral::ColumnPath(path) => b::typed(
                "column path",
                b::intersperse_with_source(path.iter(), b::space(), source),
            ),
            RawLiteral::Bare => b::primitive(self.span.slice(source)),
        }
    }
}
|
||||
|
||||
/// A variable reference, identified by the span of its name.
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum Variable {
    /// The special `$it` variable.
    It(Span),
    /// Any other variable.
    Other(Span),
}
|
2
crates/nu-parser/src/hir/baseline_parse.rs
Normal file
2
crates/nu-parser/src/hir/baseline_parse.rs
Normal file
@ -0,0 +1,2 @@
|
||||
#[cfg(test)]
|
||||
pub mod tests;
|
174
crates/nu-parser/src/hir/baseline_parse/tests.rs
Normal file
174
crates/nu-parser/src/hir/baseline_parse/tests.rs
Normal file
@ -0,0 +1,174 @@
|
||||
use crate::commands::classified::{internal::InternalCommand, ClassifiedCommand};
|
||||
use crate::hir::TokensIterator;
|
||||
use crate::hir::{self, named::NamedValue, syntax_shape::*, NamedArguments};
|
||||
use crate::parse::files::Files;
|
||||
use crate::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b};
|
||||
use crate::TokenNode;
|
||||
use derive_new::new;
|
||||
use indexmap::IndexMap;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{PathMember, Signature, SyntaxShape};
|
||||
use nu_source::{HasSpan, Span, Tag, Text};
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::fmt::Debug;
|
||||
|
||||
#[test]
fn test_parse_string() {
    // A quoted string should expand to a string expression whose inner span
    // strips the first and last character of the token (the quotes).
    parse_tokens(StringShape, vec![b::string("hello")], |tokens| {
        hir::Expression::string(inner_string_span(tokens[0].span()), tokens[0].span())
    });
}

#[test]
fn test_parse_path() {
    // `$it.cpu` — an it-variable head with a single bare member.
    parse_tokens(
        VariablePathShape,
        vec![b::var("it"), b::dot(), b::bare("cpu")],
        |tokens| {
            let (outer_var, inner_var) = tokens[0].expect_var();
            let bare = tokens[2].expect_bare();
            hir::Expression::path(
                hir::Expression::it_variable(inner_var, outer_var),
                vec![PathMember::string("cpu", bare)],
                outer_var.until(bare),
            )
        },
    );

    // `$cpu.amount."max ghz"` — bare and quoted members on an ordinary variable.
    parse_tokens(
        VariablePathShape,
        vec![
            b::var("cpu"),
            b::dot(),
            b::bare("amount"),
            b::dot(),
            b::string("max ghz"),
        ],
        |tokens| {
            let (outer_var, inner_var) = tokens[0].expect_var();
            let amount = tokens[2].expect_bare();
            let (outer_max_ghz, _) = tokens[4].expect_string();

            hir::Expression::path(
                hir::Expression::variable(inner_var, outer_var),
                vec![
                    PathMember::string("amount", amount),
                    PathMember::string("max ghz", outer_max_ghz),
                ],
                outer_var.until(outer_max_ghz),
            )
        },
    );
}

#[test]
fn test_parse_command() {
    // `ls *.txt` — should classify as the internal `ls` command (registered in
    // `with_empty_context`) with the glob as a positional argument and the
    // `full` switch recorded as absent.
    parse_tokens(
        ClassifiedCommandShape,
        vec![b::bare("ls"), b::sp(), b::pattern("*.txt")],
        |tokens| {
            let bare = tokens[0].expect_bare();
            let pat = tokens[2].expect_pattern();

            let mut map = IndexMap::new();
            map.insert("full".to_string(), NamedValue::AbsentSwitch);

            ClassifiedCommand::Internal(InternalCommand::new(
                "ls".to_string(),
                Tag {
                    span: bare,
                    anchor: None,
                },
                hir::Call {
                    head: Box::new(hir::RawExpression::Command(bare).into_expr(bare)),
                    positional: Some(vec![hir::Expression::pattern("*.txt", pat)]),
                    named: Some(NamedArguments { named: map }),
                    span: bare.until(pat),
                },
            ))
        },
    );
}
|
||||
|
||||
#[derive(new)]
|
||||
struct TestRegistry {
|
||||
#[new(default)]
|
||||
signatures: indexmap::IndexMap<String, Signature>,
|
||||
}
|
||||
|
||||
impl TestRegistry {
|
||||
fn insert(&mut self, key: &str, value: Signature) {
|
||||
self.signatures.insert(key.to_string(), value);
|
||||
}
|
||||
}
|
||||
|
||||
impl SignatureRegistry for TestRegistry {
|
||||
fn has(&self, name: &str) -> Result<bool, ShellError> {
|
||||
Ok(self.signatures.contains_key(name))
|
||||
}
|
||||
fn get(&self, name: &str) -> Result<Option<Signature>, ShellError> {
|
||||
Ok(self.signatures.get(name).cloned())
|
||||
}
|
||||
}
|
||||
|
||||
/// Builds an `ExpandContext` whose registry knows only about `ls`, then runs
/// `callback` with it.
fn with_empty_context(source: &Text, callback: impl FnOnce(ExpandContext)) {
    let mut registry = TestRegistry::new();
    registry.insert(
        "ls",
        Signature::build("ls")
            .optional(
                "path",
                SyntaxShape::Pattern,
                "a path to get the directory contents from",
            )
            .switch("full", "list all available columns for each entry"),
    );

    callback(ExpandContext::new(Box::new(registry), source, None))
}

/// Builds the given curried tokens, expands them through `shape`, and asserts
/// the result equals `expected(tokens)`. On expansion failure, prints the
/// diagnostic and panics.
fn parse_tokens<T: Eq + HasSpan + Clone + Debug + 'static>(
    shape: impl ExpandSyntax<Output = T>,
    tokens: Vec<CurriedToken>,
    expected: impl FnOnce(&[TokenNode]) -> T,
) {
    let tokens = b::token_list(tokens);
    let (tokens, source) = b::build(tokens);
    let text = Text::from(source);

    with_empty_context(&text, |context| {
        let tokens = tokens.expect_list();
        let mut iterator = TokensIterator::all(tokens.item, text.clone(), tokens.span);

        let expr = expand_syntax(&shape, &mut iterator, &context);

        let expr = match expr {
            Ok(expr) => expr,
            Err(err) => {
                // Surface the parse diagnostic before failing the test.
                print_err(err.into(), &context.source().clone());
                panic!("Parse failed");
            }
        };

        assert_eq!(expr, expected(tokens.item));
    })
}
|
||||
|
||||
fn inner_string_span(span: Span) -> Span {
|
||||
Span::new(span.start() + 1, span.end() - 1)
|
||||
}
|
||||
|
||||
/// Prints a `ShellError` as a formatted diagnostic against `source` on stderr.
pub fn print_err(err: ShellError, source: &Text) {
    let diag = err.into_diagnostic();

    let writer = termcolor::StandardStream::stderr(termcolor::ColorChoice::Auto);
    let mut source = source.to_string();
    // Pad the source with a trailing space — presumably so diagnostics pointing
    // at end-of-input have a valid position; confirm against language_reporting.
    source.push_str(" ");
    let files = Files::new(source);
    // Emission failure is deliberately ignored: this is best-effort output.
    let _ = language_reporting::emit(
        &mut writer.lock(),
        &files,
        &diag,
        &language_reporting::DefaultConfig,
    );
}
|
31
crates/nu-parser/src/hir/binary.rs
Normal file
31
crates/nu-parser/src/hir/binary.rs
Normal file
@ -0,0 +1,31 @@
|
||||
use crate::{hir::Expression, CompareOperator};
|
||||
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Spanned};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// A binary comparison expression: `left op right`.
#[derive(
    Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new,
)]
#[get = "pub"]
pub struct Binary {
    left: Expression,
    op: Spanned<CompareOperator>,
    right: Expression,
}

impl PrettyDebugWithSource for Binary {
    /// Renders as `<left op right>`, recovering the operator text from `source`.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::delimit(
            "<",
            self.left.pretty_debug(source)
                + b::space()
                + b::keyword(self.op.span.slice(source))
                + b::space()
                + self.right.pretty_debug(source),
            ">",
        )
        .group()
    }
}
|
306
crates/nu-parser/src/hir/expand_external_tokens.rs
Normal file
306
crates/nu-parser/src/hir/expand_external_tokens.rs
Normal file
@ -0,0 +1,306 @@
|
||||
use crate::{
|
||||
hir::syntax_shape::{
|
||||
color_syntax, expand_atom, expand_expr, expand_syntax, AtomicToken, ColorSyntax,
|
||||
ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, MaybeSpaceShape,
|
||||
UnspannedAtomicToken,
|
||||
},
|
||||
hir::Expression,
|
||||
TokensIterator,
|
||||
};
|
||||
use nu_errors::ParseError;
|
||||
use nu_protocol::SpannedTypeName;
|
||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebug, Span, Spanned, SpannedItem};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ExternalTokensSyntax {
|
||||
pub tokens: Spanned<Vec<Spanned<String>>>,
|
||||
}
|
||||
|
||||
impl HasSpan for ExternalTokensSyntax {
|
||||
fn span(&self) -> Span {
|
||||
self.tokens.span
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebug for ExternalTokensSyntax {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
b::intersperse(
|
||||
self.tokens
|
||||
.iter()
|
||||
.map(|token| b::primitive(format!("{:?}", token.item))),
|
||||
b::space(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// Shape that expands a run of external-command tokens into their source text.
#[derive(Debug, Copy, Clone)]
pub struct ExternalTokensShape;

impl ExpandSyntax for ExternalTokensShape {
    type Output = ExternalTokensSyntax;

    fn name(&self) -> &'static str {
        "external command"
    }

    /// Repeatedly expands external expressions until one fails or yields
    /// nothing, collecting each expression's source text as a spanned string.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let mut out: Vec<Spanned<String>> = vec![];

        let start = token_nodes.span_at_cursor();

        loop {
            match expand_syntax(&ExternalExpressionShape, token_nodes, context) {
                // Stop on error or when no further expression is available.
                Err(_) | Ok(None) => break,
                Ok(Some(span)) => out.push(span.spanned_string(context.source())),
            }
        }

        let end = token_nodes.span_at_cursor();

        Ok(ExternalTokensSyntax {
            tokens: out.spanned(start.until(end)),
        })
    }
}

impl ColorSyntax for ExternalTokensShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "ExternalTokensShape"
    }

    /// Colors external expressions (separated by optional whitespace) until
    /// end of input is reached.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Self::Info {
        loop {
            // Allow a space
            color_syntax(&MaybeSpaceShape, token_nodes, context);

            // Process an external expression. External expressions are mostly words, with a
            // few exceptions (like $variables and path expansion rules)
            match color_syntax(&ExternalExpressionShape, token_nodes, context).1 {
                ExternalExpressionResult::Eof => break,
                ExternalExpressionResult::Processed => continue,
            }
        }
    }
}
|
||||
|
||||
/// Shape for a single external expression: one leading atom plus any
/// continuations that extend it.
#[derive(Debug, Copy, Clone)]
pub struct ExternalExpressionShape;

impl ExpandSyntax for ExternalExpressionShape {
    // `Some(span)` covering the whole expression, or `None` when nothing matched.
    type Output = Option<Span>;

    fn name(&self) -> &'static str {
        "external expression"
    }

    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        expand_syntax(&MaybeSpaceShape, token_nodes, context)?;

        // The first atom anchors the start of the expression's span.
        let first = expand_atom(
            token_nodes,
            "external command",
            context,
            ExpansionRule::new().allow_external_word(),
        )?
        .span;

        let mut last = first;

        // Absorb continuations, extending the end of the span, until one fails.
        loop {
            let continuation = expand_expr(&ExternalContinuationShape, token_nodes, context);

            if let Ok(continuation) = continuation {
                last = continuation.span;
            } else {
                break;
            }
        }

        Ok(Some(first.until(last)))
    }
}
|
||||
|
||||
/// Shape for the head (first atom) of an external expression.
#[derive(Debug, Copy, Clone)]
struct ExternalHeadShape;

impl ExpandExpression for ExternalHeadShape {
    fn name(&self) -> &'static str {
        "external argument"
    }

    /// Expands one atom and converts it into an expression suitable for the
    /// head position of an external command.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Expression, ParseError> {
        let atom = expand_atom(
            token_nodes,
            "external argument",
            context,
            ExpansionRule::new()
                .allow_external_word()
                .treat_size_as_word(),
        )?;

        let span = atom.span;

        Ok(match &atom.unspanned {
            // These variants are excluded up front by the `ExpansionRule` above.
            UnspannedAtomicToken::Eof { .. } => unreachable!("ExpansionRule doesn't allow EOF"),
            UnspannedAtomicToken::Error { .. } => unreachable!("ExpansionRule doesn't allow Error"),
            UnspannedAtomicToken::Size { .. } => unreachable!("ExpansionRule treats size as word"),
            UnspannedAtomicToken::Whitespace { .. } => {
                unreachable!("ExpansionRule doesn't allow Whitespace")
            }
            UnspannedAtomicToken::Separator { .. } => {
                unreachable!("ExpansionRule doesn't allow Separator")
            }
            UnspannedAtomicToken::Comment { .. } => {
                unreachable!("ExpansionRule doesn't allow Comment")
            }
            // Flags and delimited groups cannot name an external command.
            UnspannedAtomicToken::ShorthandFlag { .. }
            | UnspannedAtomicToken::SquareDelimited { .. }
            | UnspannedAtomicToken::RoundDelimited { .. } => {
                return Err(ParseError::mismatch(
                    "external command name",
                    atom.spanned_type_name(),
                ))
            }
            UnspannedAtomicToken::ExternalCommand { command } => {
                Expression::external_command(*command, span)
            }
            UnspannedAtomicToken::Number { number } => {
                Expression::number(number.to_number(context.source()), span)
            }
            UnspannedAtomicToken::String { body } => Expression::string(*body, span),
            UnspannedAtomicToken::ItVariable { name } => Expression::it_variable(*name, span),
            UnspannedAtomicToken::Variable { name } => Expression::variable(*name, span),
            // Anything word-like names the external command itself.
            UnspannedAtomicToken::ExternalWord { .. }
            | UnspannedAtomicToken::GlobPattern { .. }
            | UnspannedAtomicToken::Word { .. }
            | UnspannedAtomicToken::Dot { .. }
            | UnspannedAtomicToken::DotDot { .. }
            | UnspannedAtomicToken::CompareOperator { .. } => {
                Expression::external_command(span, span)
            }
        })
    }
}
|
||||
|
||||
/// Shape of a single argument following an external command head.
#[derive(Debug, Copy, Clone)]
struct ExternalContinuationShape;

impl ExpandExpression for ExternalContinuationShape {
    fn name(&self) -> &'static str {
        "external argument"
    }

    /// Expand one external-command argument into an HIR expression.
    ///
    /// External words are allowed and size tokens are treated as plain
    /// words, since external arguments are passed through mostly verbatim.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Expression, ParseError> {
        let atom = expand_atom(
            token_nodes,
            "external argument",
            context,
            ExpansionRule::new()
                .allow_external_word()
                .treat_size_as_word(),
        )?;

        let span = atom.span;

        Ok(match &atom.unspanned {
            // The ExpansionRule above filters these variants out before we
            // see them; reaching one of these arms is a parser bug.
            UnspannedAtomicToken::Eof { .. } => unreachable!("ExpansionRule doesn't allow EOF"),
            UnspannedAtomicToken::Error { .. } => unreachable!("ExpansionRule doesn't allow Error"),
            UnspannedAtomicToken::Number { number } => {
                Expression::number(number.to_number(context.source()), span)
            }
            UnspannedAtomicToken::Size { .. } => unreachable!("ExpansionRule treats size as word"),
            UnspannedAtomicToken::ExternalCommand { .. } => {
                unreachable!("ExpansionRule doesn't allow ExternalCommand")
            }
            UnspannedAtomicToken::Whitespace { .. } => {
                unreachable!("ExpansionRule doesn't allow Whitespace")
            }
            UnspannedAtomicToken::Separator { .. } => {
                unreachable!("ExpansionRule doesn't allow Separator")
            }
            UnspannedAtomicToken::Comment { .. } => {
                unreachable!("ExpansionRule doesn't allow Comment")
            }
            UnspannedAtomicToken::String { body } => Expression::string(*body, span),
            UnspannedAtomicToken::ItVariable { name } => Expression::it_variable(*name, span),
            UnspannedAtomicToken::Variable { name } => Expression::variable(*name, span),
            // Word-like atoms become bare words handed to the external command.
            UnspannedAtomicToken::ExternalWord { .. }
            | UnspannedAtomicToken::GlobPattern { .. }
            | UnspannedAtomicToken::Word { .. }
            | UnspannedAtomicToken::ShorthandFlag { .. }
            | UnspannedAtomicToken::Dot { .. }
            | UnspannedAtomicToken::DotDot { .. }
            | UnspannedAtomicToken::CompareOperator { .. } => Expression::bare(span),
            // Delimited expressions are not valid external arguments.
            UnspannedAtomicToken::SquareDelimited { .. }
            | UnspannedAtomicToken::RoundDelimited { .. } => {
                return Err(ParseError::mismatch(
                    "external argument",
                    atom.spanned_type_name(),
                ))
            }
        })
    }
}
|
||||
|
||||
impl ColorSyntax for ExternalExpressionShape {
|
||||
type Info = ExternalExpressionResult;
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"ExternalExpressionShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> ExternalExpressionResult {
|
||||
let atom = match expand_atom(
|
||||
token_nodes,
|
||||
"external word",
|
||||
context,
|
||||
ExpansionRule::permissive(),
|
||||
) {
|
||||
Err(_) => unreachable!("TODO: separate infallible expand_atom"),
|
||||
Ok(AtomicToken {
|
||||
unspanned: UnspannedAtomicToken::Eof { .. },
|
||||
..
|
||||
}) => return ExternalExpressionResult::Eof,
|
||||
Ok(atom) => atom,
|
||||
};
|
||||
|
||||
token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes));
|
||||
ExternalExpressionResult::Processed
|
||||
}
|
||||
}
|
||||
|
||||
/// Outcome of coloring one step of an external expression.
///
/// `#[must_use]` so callers cannot silently drop the `Eof` signal that
/// terminates the coloring loop.
#[must_use]
pub enum ExternalExpressionResult {
    /// The iterator was at end-of-file; nothing was colored.
    Eof,
    /// An atom was consumed and its shapes were recorded.
    Processed,
}
|
12
crates/nu-parser/src/hir/external_command.rs
Normal file
12
crates/nu-parser/src/hir/external_command.rs
Normal file
@ -0,0 +1,12 @@
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use nu_source::Span;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// The parsed head of an external (non-builtin) command.
///
/// Only the `Span` of the command name is stored; the actual text is
/// recovered by slicing the original source with this span.
#[derive(
    Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new,
)]
#[get = "pub"]
pub struct ExternalCommand {
    pub(crate) name: Span,
}
|
84
crates/nu-parser/src/hir/named.rs
Normal file
84
crates/nu-parser/src/hir/named.rs
Normal file
@ -0,0 +1,84 @@
|
||||
use crate::hir::Expression;
|
||||
use crate::Flag;
|
||||
use indexmap::IndexMap;
|
||||
use log::trace;
|
||||
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Tag};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// The value bound to a named (flag) argument after parsing.
#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub enum NamedValue {
    /// A switch flag that was not present on the command line.
    AbsentSwitch,
    /// A switch flag that was present; the tag records where it appeared.
    PresentSwitch(Tag),
    /// An optional value-taking flag that was not provided.
    AbsentValue,
    /// A flag that was provided with an expression value.
    Value(Expression),
}

impl PrettyDebugWithSource for NamedValue {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        match self {
            NamedValue::AbsentSwitch => b::typed("switch", b::description("absent")),
            NamedValue::PresentSwitch(_) => b::typed("switch", b::description("present")),
            NamedValue::AbsentValue => b::description("absent"),
            NamedValue::Value(value) => value.pretty_debug(source),
        }
    }
}

/// The named arguments passed to a command, keyed by flag name.
///
/// Backed by an `IndexMap` so iteration preserves insertion order.
#[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub struct NamedArguments {
    pub named: IndexMap<String, NamedValue>,
}

impl NamedArguments {
    /// Create an empty set of named arguments.
    pub fn new() -> NamedArguments {
        Default::default()
    }

    /// Iterate over `(flag name, value)` pairs in insertion order.
    pub fn iter(&self) -> impl Iterator<Item = (&String, &NamedValue)> {
        self.named.iter()
    }
}
|
||||
|
||||
impl NamedArguments {
|
||||
pub fn insert_switch(&mut self, name: impl Into<String>, switch: Option<Flag>) {
|
||||
let name = name.into();
|
||||
trace!("Inserting switch -- {} = {:?}", name, switch);
|
||||
|
||||
match switch {
|
||||
None => self.named.insert(name, NamedValue::AbsentSwitch),
|
||||
Some(flag) => self.named.insert(
|
||||
name,
|
||||
NamedValue::PresentSwitch(Tag {
|
||||
span: *flag.name(),
|
||||
anchor: None,
|
||||
}),
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn insert_optional(&mut self, name: impl Into<String>, expr: Option<Expression>) {
|
||||
match expr {
|
||||
None => self.named.insert(name.into(), NamedValue::AbsentValue),
|
||||
Some(expr) => self.named.insert(name.into(), NamedValue::Value(expr)),
|
||||
};
|
||||
}
|
||||
|
||||
pub fn insert_mandatory(&mut self, name: impl Into<String>, expr: Expression) {
|
||||
self.named.insert(name.into(), NamedValue::Value(expr));
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for NamedArguments {
    /// Render as `(key=value key=value …)`, space-separated, in
    /// insertion order.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::delimit(
            "(",
            b::intersperse(
                self.named
                    .iter()
                    .map(|(key, value)| b::key(key) + b::equals() + value.pretty_debug(source)),
                b::space(),
            ),
            ")",
        )
    }
}
|
41
crates/nu-parser/src/hir/path.rs
Normal file
41
crates/nu-parser/src/hir/path.rs
Normal file
@ -0,0 +1,41 @@
|
||||
use crate::hir::Expression;
|
||||
use derive_new::new;
|
||||
use getset::{Getters, MutGetters};
|
||||
use nu_protocol::PathMember;
|
||||
use nu_source::{b, DebugDocBuilder, PrettyDebug, PrettyDebugWithSource};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// A path expression: a head expression followed by member accesses
/// (e.g. `$it.foo."bar"`).
#[derive(
    Debug,
    Clone,
    Eq,
    PartialEq,
    Ord,
    PartialOrd,
    Hash,
    Getters,
    MutGetters,
    Serialize,
    Deserialize,
    new,
)]
#[get = "pub"]
pub struct Path {
    /// The expression the member accesses start from.
    head: Expression,
    /// The chain of members applied to the head, in order.
    #[get_mut = "pub(crate)"]
    tail: Vec<PathMember>,
}

impl PrettyDebugWithSource for Path {
    /// Render as `head.member.member…`.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        self.head.pretty_debug(source)
            + b::operator(".")
            + b::intersperse(self.tail.iter().map(|m| m.pretty()), b::operator("."))
    }
}

impl Path {
    /// Decompose the path into its head expression and member tail,
    /// consuming `self`.
    pub(crate) fn parts(self) -> (Expression, Vec<PathMember>) {
        (self.head, self.tail)
    }
}
|
33
crates/nu-parser/src/hir/range.rs
Normal file
33
crates/nu-parser/src/hir/range.rs
Normal file
@ -0,0 +1,33 @@
|
||||
use crate::hir::Expression;
|
||||
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Span};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// A range literal `left..right`; the span of the range operator itself
/// is kept for diagnostics and pretty-printing.
#[derive(
    Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new,
)]
pub struct Range {
    #[get = "pub"]
    left: Expression,
    #[get = "pub"]
    dotdot: Span,
    #[get = "pub"]
    right: Expression,
}

impl PrettyDebugWithSource for Range {
    /// Render as `<left .. right>` using the actual source text of the
    /// range operator.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::delimit(
            "<",
            self.left.pretty_debug(source)
                + b::space()
                + b::keyword(self.dotdot.slice(source))
                + b::space()
                + self.right.pretty_debug(source),
            ">",
        )
        .group()
    }
}
|
475
crates/nu-parser/src/hir/signature.rs
Normal file
475
crates/nu-parser/src/hir/signature.rs
Normal file
@ -0,0 +1,475 @@
|
||||
use crate::hir;
|
||||
use crate::hir::syntax_shape::{
|
||||
expand_atom, expand_syntax, BareShape, ExpandContext, ExpandSyntax, ExpansionRule,
|
||||
UnspannedAtomicToken, WhitespaceShape,
|
||||
};
|
||||
use crate::hir::tokens_iterator::TokensIterator;
|
||||
use crate::parse::comment::Comment;
|
||||
use derive_new::new;
|
||||
use nu_errors::ParseError;
|
||||
use nu_protocol::{RowType, SpannedTypeName, Type};
|
||||
use nu_source::{
|
||||
b, DebugDocBuilder, HasFallibleSpan, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem,
|
||||
};
|
||||
use std::fmt::Debug;
|
||||
|
||||
// A Signature is a command without implementation.
|
||||
//
|
||||
// In Nu, a command is a function combined with macro expansion rules.
|
||||
//
|
||||
// def cd
|
||||
// # Change to a new path.
|
||||
// optional directory(Path) # the directory to change to
|
||||
// end
|
||||
|
||||
#[derive(new)]
|
||||
struct Expander<'a, 'b, 'c, 'd> {
|
||||
iterator: &'b mut TokensIterator<'a>,
|
||||
context: &'d ExpandContext<'c>,
|
||||
}
|
||||
|
||||
impl<'a, 'b, 'c, 'd> Expander<'a, 'b, 'c, 'd> {
|
||||
fn expand<O>(&mut self, syntax: impl ExpandSyntax<Output = O>) -> Result<O, ParseError>
|
||||
where
|
||||
O: HasFallibleSpan + Clone + std::fmt::Debug + 'static,
|
||||
{
|
||||
expand_syntax(&syntax, self.iterator, self.context)
|
||||
}
|
||||
|
||||
fn optional<O>(&mut self, syntax: impl ExpandSyntax<Output = O>) -> Option<O>
|
||||
where
|
||||
O: HasFallibleSpan + Clone + std::fmt::Debug + 'static,
|
||||
{
|
||||
match expand_syntax(&syntax, self.iterator, self.context) {
|
||||
Err(_) => None,
|
||||
Ok(value) => Some(value),
|
||||
}
|
||||
}
|
||||
|
||||
fn pos(&mut self) -> Span {
|
||||
self.iterator.span_at_cursor()
|
||||
}
|
||||
|
||||
fn slice_string(&mut self, span: impl Into<Span>) -> String {
|
||||
span.into().slice(self.context.source()).to_string()
|
||||
}
|
||||
}
|
||||
|
||||
/// Shape of a complete command signature header:
/// `def <name><separator><usage comment><separator>`.
#[derive(Debug, Copy, Clone)]
struct SignatureShape;

impl ExpandSyntax for SignatureShape {
    type Output = hir::Signature;

    fn name(&self) -> &'static str {
        "signature"
    }

    /// Parse a `def` header into a `Signature`. The whole parse is atomic:
    /// any failure rolls the iterator back to where it started.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        token_nodes.atomic_parse(|token_nodes| {
            let mut expander = Expander::new(token_nodes, context);
            let start = expander.pos();
            expander.expand(keyword("def"))?;
            expander.expand(WhitespaceShape)?;
            let name = expander.expand(BareShape)?;
            expander.expand(SeparatorShape)?;
            // The comment directly after the name becomes the description.
            let usage = expander.expand(CommentShape)?;
            expander.expand(SeparatorShape)?;
            let end = expander.pos();

            Ok(hir::Signature::new(
                nu_protocol::Signature::new(&name.word).desc(expander.slice_string(usage.text)),
                start.until(end),
            ))
        })
    }
}
|
||||
|
||||
fn keyword(kw: &'static str) -> KeywordShape {
|
||||
KeywordShape { keyword: kw }
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
struct KeywordShape {
|
||||
keyword: &'static str,
|
||||
}
|
||||
|
||||
impl ExpandSyntax for KeywordShape {
|
||||
type Output = Span;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"keyword"
|
||||
}
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
let atom = expand_atom(token_nodes, "keyword", context, ExpansionRule::new())?;
|
||||
|
||||
if let UnspannedAtomicToken::Word { text } = &atom.unspanned {
|
||||
let word = text.slice(context.source());
|
||||
|
||||
if word == self.keyword {
|
||||
return Ok(atom.span);
|
||||
}
|
||||
}
|
||||
|
||||
Err(ParseError::mismatch(self.keyword, atom.spanned_type_name()))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
struct SeparatorShape;
|
||||
|
||||
impl ExpandSyntax for SeparatorShape {
|
||||
type Output = Span;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"separator"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
let atom = expand_atom(token_nodes, "separator", context, ExpansionRule::new())?;
|
||||
|
||||
match &atom.unspanned {
|
||||
UnspannedAtomicToken::Separator { text } => Ok(*text),
|
||||
_ => Err(ParseError::mismatch("separator", atom.spanned_type_name())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
struct CommentShape;
|
||||
|
||||
impl ExpandSyntax for CommentShape {
|
||||
type Output = Comment;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"comment"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
let atom = expand_atom(token_nodes, "comment", context, ExpansionRule::new())?;
|
||||
|
||||
match &atom.unspanned {
|
||||
UnspannedAtomicToken::Comment { body } => Ok(Comment::line(body, atom.span)),
|
||||
_ => Err(ParseError::mismatch("separator", atom.spanned_type_name())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Shape that expands `first` then `second` in sequence, atomically.
#[derive(Debug, Copy, Clone, new)]
struct TupleShape<A, B> {
    first: A,
    second: B,
}

/// The result of expanding a `TupleShape`: both sub-results.
#[derive(Debug, Clone, new)]
struct TupleSyntax<A, B> {
    first: A,
    second: B,
}

impl<A, B> PrettyDebugWithSource for TupleSyntax<A, B>
where
    A: PrettyDebugWithSource,
    B: PrettyDebugWithSource,
{
    /// Render as `pair(first second)`.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::typed(
            "pair",
            self.first.pretty_debug(source) + b::space() + self.second.pretty_debug(source),
        )
    }
}
|
||||
|
||||
impl<A, B> HasFallibleSpan for TupleSyntax<A, B>
|
||||
where
|
||||
A: HasFallibleSpan + Debug + Clone,
|
||||
B: HasFallibleSpan + Debug + Clone,
|
||||
{
|
||||
fn maybe_span(&self) -> Option<Span> {
|
||||
match (self.first.maybe_span(), self.second.maybe_span()) {
|
||||
(Some(first), Some(second)) => Some(first.until(second)),
|
||||
(Some(first), None) => Some(first),
|
||||
(None, Some(second)) => Some(second),
|
||||
(None, None) => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<A, B, AOut, BOut> ExpandSyntax for TupleShape<A, B>
where
    A: ExpandSyntax<Output = AOut> + Debug + Copy,
    B: ExpandSyntax<Output = BOut> + Debug + Copy,
    AOut: HasFallibleSpan + Debug + Clone + 'static,
    BOut: HasFallibleSpan + Debug + Clone + 'static,
{
    type Output = TupleSyntax<AOut, BOut>;

    fn name(&self) -> &'static str {
        "pair"
    }

    /// Expand `first` then `second`; if either fails the whole parse is
    /// rolled back (`atomic_parse`).
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        token_nodes.atomic_parse(|token_nodes| {
            let first = expand_syntax(&self.first, token_nodes, context)?;
            let second = expand_syntax(&self.second, token_nodes, context)?;

            Ok(TupleSyntax { first, second })
        })
    }
}
|
||||
|
||||
/// A single positional parameter in a signature declaration.
#[derive(Debug, Clone)]
pub struct PositionalParam {
    /// Span of the `optional` keyword, when present.
    optional: Option<Span>,
    /// The parameter's name.
    name: Identifier,
    /// The declared type, with the span it was written at.
    ty: Spanned<Type>,
    /// The parameter's description text.
    desc: Spanned<String>,
    /// Span of the whole parameter declaration.
    span: Span,
}

impl HasSpan for PositionalParam {
    fn span(&self) -> Span {
        self.span
    }
}

impl PrettyDebugWithSource for PositionalParam {
    /// Render as `[optional ]<type>`.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        (match self.optional {
            Some(_) => b::description("optional") + b::space(),
            None => b::blank(),
        }) + self.ty.pretty_debug(source)
    }
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct PositionalParamShape;
|
||||
|
||||
impl ExpandSyntax for PositionalParamShape {
|
||||
type Output = PositionalParam;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"positional param"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
token_nodes.atomic_parse(|token_nodes| {
|
||||
let mut expander = Expander::new(token_nodes, context);
|
||||
|
||||
let optional = expander
|
||||
.optional(TupleShape::new(keyword("optional"), WhitespaceShape))
|
||||
.map(|s| s.first);
|
||||
|
||||
let name = expander.expand(IdentifierShape)?;
|
||||
|
||||
expander.optional(WhitespaceShape);
|
||||
|
||||
let _ty = expander.expand(TypeShape)?;
|
||||
|
||||
Ok(PositionalParam {
|
||||
optional,
|
||||
name,
|
||||
ty: Type::Nothing.spanned(Span::unknown()),
|
||||
desc: format!("").spanned(Span::unknown()),
|
||||
span: Span::unknown(),
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// An identifier (e.g. a parameter name) with its source span.
#[derive(Debug, Clone)]
struct Identifier {
    body: String,
    span: Span,
}

impl HasSpan for Identifier {
    fn span(&self) -> Span {
        self.span
    }
}

impl PrettyDebugWithSource for Identifier {
    /// Render as `id(<source text>)`.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::typed("id", b::description(self.span.slice(source)))
    }
}

/// Shape of a single identifier token.
#[derive(Debug, Copy, Clone)]
struct IdentifierShape;

impl ExpandSyntax for IdentifierShape {
    type Output = Identifier;
    fn name(&self) -> &'static str {
        "identifier"
    }

    /// Expects the next atom to be a bare word that the `ident` grammar
    /// rule fully matches.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let atom = expand_atom(token_nodes, "identifier", context, ExpansionRule::new())?;

        if let UnspannedAtomicToken::Word { text } = atom.unspanned {
            let body = text.slice(context.source());
            // Reject words that are not valid identifiers (see `is_id`).
            if is_id(body) {
                return Ok(Identifier {
                    body: body.to_string(),
                    span: text,
                });
            }
        }

        Err(ParseError::mismatch("identifier", atom.spanned_type_name()))
    }
}
|
||||
|
||||
fn is_id(input: &str) -> bool {
|
||||
let source = nu_source::nom_input(input);
|
||||
match crate::parse::parser::ident(source) {
|
||||
Err(_) => false,
|
||||
Ok((input, _)) => input.fragment.is_empty(),
|
||||
}
|
||||
}
|
||||
|
||||
/// A parsed type name together with the span it was written at.
#[derive(Debug, Clone, new)]
struct TypeSyntax {
    ty: Type,
    span: Span,
}

impl HasSpan for TypeSyntax {
    fn span(&self) -> Span {
        self.span
    }
}

impl PrettyDebugWithSource for TypeSyntax {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        self.ty.pretty_debug(source)
    }
}

/// Shape of a bare type name such as `string` or `table`.
#[derive(Debug, Copy, Clone)]
struct TypeShape;

impl ExpandSyntax for TypeShape {
    type Output = TypeSyntax;

    fn name(&self) -> &'static str {
        "type"
    }
    /// Expects the next atom to be a bare word naming one of the known
    /// protocol types.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let atom = expand_atom(token_nodes, "type", context, ExpansionRule::new())?;

        match atom.unspanned {
            UnspannedAtomicToken::Word { text } => {
                let word = text.slice(context.source());

                Ok(TypeSyntax::new(
                    // Map the surface name onto the protocol `Type`; any
                    // other word is a mismatch.
                    match word {
                        "nothing" => Type::Nothing,
                        "integer" => Type::Int,
                        "decimal" => Type::Decimal,
                        "bytesize" => Type::Bytesize,
                        "string" => Type::String,
                        "column-path" => Type::ColumnPath,
                        "pattern" => Type::Pattern,
                        "boolean" => Type::Boolean,
                        "date" => Type::Date,
                        "duration" => Type::Duration,
                        "filename" => Type::Path,
                        "binary" => Type::Binary,
                        "row" => Type::Row(RowType::new()),
                        "table" => Type::Table(vec![]),
                        "block" => Type::Block,
                        _ => return Err(ParseError::mismatch("type", atom.spanned_type_name())),
                    },
                    atom.span,
                ))
            }
            _ => Err(ParseError::mismatch("type", atom.spanned_type_name())),
        }
    }
}
|
||||
|
||||
/// Shape of a parenthesized type annotation, e.g. `(string)`.
#[derive(Debug, Copy, Clone)]
struct TypeAnnotation;

impl ExpandSyntax for TypeAnnotation {
    type Output = TypeSyntax;

    fn name(&self) -> &'static str {
        "type annotation"
    }

    /// Expects a round-delimited group containing exactly one type name;
    /// any extra tokens inside the parentheses are an error.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let atom = expand_atom(
            token_nodes,
            "type annotation",
            context,
            ExpansionRule::new(),
        )?;

        match atom.unspanned {
            UnspannedAtomicToken::RoundDelimited { nodes, .. } => {
                token_nodes.atomic_parse(|token_nodes| {
                    // Recurse into the delimited group via a child iterator
                    // scoped to the group's span.
                    token_nodes.child(
                        (&nodes[..]).spanned(atom.span),
                        context.source().clone(),
                        |token_nodes| {
                            let ty = expand_syntax(&TypeShape, token_nodes, context)?;

                            // Anything left after the type name is an error.
                            let next = token_nodes.peek_non_ws();

                            match next.node {
                                None => Ok(ty),
                                Some(node) => {
                                    Err(ParseError::extra_tokens(node.spanned_type_name()))
                                }
                            }
                        },
                    )
                })
            }

            _ => Err(ParseError::mismatch(
                "type annotation",
                atom.spanned_type_name(),
            )),
        }
    }
}
|
1259
crates/nu-parser/src/hir/syntax_shape.rs
Normal file
1259
crates/nu-parser/src/hir/syntax_shape.rs
Normal file
File diff suppressed because it is too large
Load Diff
251
crates/nu-parser/src/hir/syntax_shape/block.rs
Normal file
251
crates/nu-parser/src/hir/syntax_shape/block.rs
Normal file
@ -0,0 +1,251 @@
|
||||
use crate::{
|
||||
hir,
|
||||
hir::syntax_shape::{
|
||||
color_fallible_syntax, color_syntax_with, continue_expression, expand_expr, expand_syntax,
|
||||
DelimitedShape, ExpandContext, ExpandExpression, ExpressionContinuationShape,
|
||||
ExpressionListShape, FallibleColorSyntax, MemberShape, PathTailShape, PathTailSyntax,
|
||||
VariablePathShape,
|
||||
},
|
||||
hir::tokens_iterator::TokensIterator,
|
||||
parse::token_tree::Delimiter,
|
||||
};
|
||||
use nu_errors::{ParseError, ShellError};
|
||||
use nu_source::Span;
|
||||
|
||||
/// Shape of a block in any position: either a real `{ … }` block or a
/// shorthand block.
#[derive(Debug, Copy, Clone)]
pub struct AnyBlockShape;

impl FallibleColorSyntax for AnyBlockShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "AnyBlockShape"
    }

    /// Color either a brace-delimited block or a shorthand block.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        let block = token_nodes.peek_non_ws().not_eof("block");

        let block = match block {
            // EOF is not an error for coloring; just stop.
            Err(_) => return Ok(()),
            Ok(block) => block,
        };

        // is it just a block?
        let block = block.node.as_block();

        if let Some((children, spans)) = block {
            // Color the braces and the delimited contents via a child
            // iterator over the block's children.
            token_nodes.child(children, context.source.clone(), |token_nodes| {
                color_syntax_with(
                    &DelimitedShape,
                    &(Delimiter::Brace, spans.0, spans.1),
                    token_nodes,
                    context,
                );
            });

            return Ok(());
        }

        // Otherwise, look for a shorthand block. If none found, fail
        color_fallible_syntax(&ShorthandBlock, token_nodes, context)
    }
}

impl ExpandExpression for AnyBlockShape {
    fn name(&self) -> &'static str {
        "any block"
    }

    /// Expand a `{ … }` block into a Block expression, or fall back to
    /// expanding a shorthand block.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ParseError> {
        let block = token_nodes.peek_non_ws().not_eof("block")?;

        // is it just a block?
        let block = block.node.as_block();

        if let Some((block, _tags)) = block {
            // Parse the block's contents as an expression list in a fresh
            // iterator over the delimited children.
            let mut iterator =
                TokensIterator::new(&block.item, block.span, context.source.clone(), false);

            let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?.exprs;

            return Ok(hir::RawExpression::Block(exprs.item).into_expr(block.span));
        }

        expand_syntax(&ShorthandBlock, token_nodes, context)
    }
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ShorthandBlock;
|
||||
|
||||
impl FallibleColorSyntax for ShorthandBlock {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"ShorthandBlock"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<(), ShellError> {
|
||||
// Try to find a shorthand head. If none found, fail
|
||||
color_fallible_syntax(&ShorthandPath, token_nodes, context)?;
|
||||
|
||||
loop {
|
||||
// Check to see whether there's any continuation after the head expression
|
||||
let result = color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context);
|
||||
|
||||
match result {
|
||||
// if no continuation was found, we're done
|
||||
Err(_) => break,
|
||||
// if a continuation was found, look for another one
|
||||
Ok(_) => continue,
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpandExpression for ShorthandBlock {
    fn name(&self) -> &'static str {
        "shorthand block"
    }

    /// Expand a shorthand path plus its continuations, then wrap the result
    /// in a synthesized single-expression Block spanning the whole range.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ParseError> {
        let path = expand_expr(&ShorthandPath, token_nodes, context)?;
        let start = path.span;
        let expr = continue_expression(path, token_nodes, context);
        let end = expr.span;
        let block = hir::RawExpression::Block(vec![expr]).into_expr(start.until(end));

        Ok(block)
    }
}
|
||||
|
||||
/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block
#[derive(Debug, Copy, Clone)]
pub struct ShorthandPath;

impl FallibleColorSyntax for ShorthandPath {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "ShorthandPath"
    }

    /// Color either a full variable path, or a bare member (optionally
    /// followed by a path tail). Atomic so a failure rolls back coloring.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        token_nodes.atomic(|token_nodes| {
            let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context);

            if variable.is_ok() {
                // if it's a variable path, that's the head part
                return Ok(());
            }

            // otherwise, we'll try to find a member path

            // look for a member (`<member>` -> `$it.<member>`)
            color_fallible_syntax(&MemberShape, token_nodes, context)?;

            // Now that we've synthesized the head, of the path, proceed to expand the tail of the path
            // like any other path.
            // It's ok if there's no path tail; a single member is sufficient
            let _ = color_fallible_syntax(&PathTailShape, token_nodes, context);

            Ok(())
        })
    }
}

impl ExpandExpression for ShorthandPath {
    fn name(&self) -> &'static str {
        "shorthand path"
    }

    /// Expand a variable path, or synthesize `$it.<member>` from a bare
    /// member and then append any path tail.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ParseError> {
        // if it's a variable path, that's the head part
        let path = expand_expr(&VariablePathShape, token_nodes, context);

        if let Ok(path) = path {
            return Ok(path);
        }

        // Synthesize the head of the shorthand path (`<member>` -> `$it.<member>`)
        let mut head = expand_expr(&ShorthandHeadShape, token_nodes, context)?;

        // Now that we've synthesized the head, of the path, proceed to expand the tail of the path
        // like any other path.
        let tail = expand_syntax(&PathTailShape, token_nodes, context);

        match tail {
            Err(_) => Ok(head),
            Ok(PathTailSyntax { tail, .. }) => {
                // For each member that `PathTailShape` expanded, join it onto the existing expression
                // to form a new path
                for member in tail {
                    head = hir::Expression::dot_member(head, member);
                }

                Ok(head)
            }
        }
    }
}
|
||||
|
||||
/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block
#[derive(Debug, Copy, Clone)]
pub struct ShorthandHeadShape;

impl ExpandExpression for ShorthandHeadShape {
    fn name(&self) -> &'static str {
        "shorthand head"
    }

    /// Expand a bare member into a path rooted at a synthesized `$it`
    /// (`<member>` becomes `$it.<member>`).
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ParseError> {
        let head = expand_syntax(&MemberShape, token_nodes, context)?;
        let head = head.to_path_member(context.source);

        // Synthesize an `$it` expression
        let it = synthetic_it();
        let span = head.span;

        Ok(hir::Expression::path(it, vec![head], span))
    }
}

/// Build an `$it` variable expression with no real source location.
fn synthetic_it() -> hir::Expression {
    hir::Expression::it_variable(Span::unknown(), Span::unknown())
}
|
325
crates/nu-parser/src/hir/syntax_shape/expression.rs
Normal file
325
crates/nu-parser/src/hir/syntax_shape/expression.rs
Normal file
@ -0,0 +1,325 @@
|
||||
pub(crate) mod atom;
|
||||
pub(crate) mod delimited;
|
||||
pub(crate) mod file_path;
|
||||
pub(crate) mod list;
|
||||
pub(crate) mod number;
|
||||
pub(crate) mod pattern;
|
||||
pub(crate) mod range;
|
||||
pub(crate) mod string;
|
||||
pub(crate) mod unit;
|
||||
pub(crate) mod variable_path;
|
||||
|
||||
use crate::hir::syntax_shape::{
|
||||
color_delimited_square, color_fallible_syntax, color_fallible_syntax_with, expand_atom,
|
||||
expand_delimited_square, expand_expr, expand_syntax, BareShape, ColorableDotShape, DotShape,
|
||||
ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, ExpressionContinuation,
|
||||
ExpressionContinuationShape, FallibleColorSyntax, FlatShape, UnspannedAtomicToken,
|
||||
};
|
||||
use crate::{
|
||||
hir,
|
||||
hir::{Expression, TokensIterator},
|
||||
};
|
||||
use nu_errors::{ParseError, ShellError};
|
||||
use nu_source::{HasSpan, Span, Spanned, SpannedItem, Tag};
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// Shape of any complete expression: a start expression followed by any
/// number of continuations.
#[derive(Debug, Copy, Clone)]
pub struct AnyExpressionShape;

impl ExpandExpression for AnyExpressionShape {
    fn name(&self) -> &'static str {
        "any expression"
    }

    /// Expand the start of an expression, then fold all following
    /// continuations onto it.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ParseError> {
        // Look for an expression at the cursor
        let head = expand_expr(&AnyExpressionStartShape, token_nodes, context)?;

        Ok(continue_expression(head, token_nodes, context))
    }
}

impl FallibleColorSyntax for AnyExpressionShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "AnyExpressionShape"
    }

    /// Color the start of an expression, then color continuations; a
    /// missing continuation is not an error.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        // Look for an expression at the cursor
        color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context)?;

        match continue_coloring_expression(token_nodes, context) {
            Err(_) => {
                // it's fine for there to be no continuation
            }

            Ok(()) => {}
        }

        Ok(())
    }
}
|
||||
|
||||
pub(crate) fn continue_expression(
|
||||
mut head: hir::Expression,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> hir::Expression {
|
||||
loop {
|
||||
// Check to see whether there's any continuation after the head expression
|
||||
let continuation = expand_syntax(&ExpressionContinuationShape, token_nodes, context);
|
||||
|
||||
match continuation {
|
||||
// If there's no continuation, return the head
|
||||
Err(_) => return head,
|
||||
// Otherwise, form a new expression by combining the head with the continuation
|
||||
Ok(continuation) => match continuation {
|
||||
// If the continuation is a `.member`, form a path with the new member
|
||||
ExpressionContinuation::DotSuffix(_dot, member) => {
|
||||
head = Expression::dot_member(head, member);
|
||||
}
|
||||
|
||||
// Otherwise, if the continuation is an infix suffix, form an infix expression
|
||||
ExpressionContinuation::InfixSuffix(op, expr) => {
|
||||
head = Expression::infix(head, op, expr);
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn continue_coloring_expression(
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<(), ShellError> {
|
||||
// if there's not even one expression continuation, fail
|
||||
color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context)?;
|
||||
|
||||
loop {
|
||||
// Check to see whether there's any continuation after the head expression
|
||||
let result = color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context);
|
||||
|
||||
if result.is_err() {
|
||||
// We already saw one continuation, so just return
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Shape that matches the leading portion of an expression, before any
/// continuation (`.member` or infix suffix) is considered.
#[derive(Debug, Copy, Clone)]
pub struct AnyExpressionStartShape;
|
||||
|
||||
impl ExpandExpression for AnyExpressionStartShape {
    fn name(&self) -> &'static str {
        "any expression start"
    }

    /// Expand one atom at the cursor (with the default, strict expansion
    /// rule) into the start of an expression.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ParseError> {
        let atom = expand_atom(token_nodes, "expression", context, ExpansionRule::new())?;

        match atom.unspanned {
            // A size atom (number + unit) becomes a size expression tagged
            // with the atom's span.
            UnspannedAtomicToken::Size { number, unit } => Ok(hir::Expression::size(
                number.to_number(context.source),
                unit.item,
                Tag {
                    span: atom.span,
                    anchor: None,
                },
            )),

            // `[ ... ]` expands by recursing into the delimited children.
            UnspannedAtomicToken::SquareDelimited { nodes, .. } => {
                expand_delimited_square(&nodes, atom.span, context)
            }

            // A bare word may continue with more words/dots; consume the
            // tail and produce a bare expression covering the whole run.
            UnspannedAtomicToken::Word { .. } => {
                let end = expand_syntax(&BareTailShape, token_nodes, context)?;
                Ok(hir::Expression::bare(atom.span.until_option(end)))
            }

            // Every other atom kind converts via the atom's own `to_hir`.
            other => other
                .into_atomic_token(atom.span)
                .to_hir(context, "expression"),
        }
    }
}
|
||||
|
||||
impl FallibleColorSyntax for AnyExpressionStartShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "AnyExpressionStartShape"
    }

    /// Color the atom at the cursor. Uses the permissive expansion rule so
    /// that (nearly) every input token yields *some* atom for highlighting.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        // `spanned` records the span consumed by the closure alongside its
        // result, so a failed expansion still has a span to color.
        let atom = token_nodes.spanned(|token_nodes| {
            expand_atom(
                token_nodes,
                "expression",
                context,
                ExpansionRule::permissive(),
            )
        });

        let atom = match atom {
            // Even permissive expansion can fail; color the consumed span as
            // an error and report success so highlighting continues.
            Spanned {
                item: Err(_err),
                span,
            } => {
                token_nodes.color_shape(FlatShape::Error.spanned(span));
                return Ok(());
            }

            Spanned {
                item: Ok(value), ..
            } => value,
        };

        match atom.unspanned {
            // A size atom is one shape carrying both the number and unit
            // sub-spans.
            UnspannedAtomicToken::Size { number, unit } => token_nodes.color_shape(
                FlatShape::Size {
                    number: number.span(),
                    unit: unit.span,
                }
                .spanned(atom.span),
            ),

            // `[ ... ]` is colored by recursing into the delimited children
            // in a child iterator scoped to the delimited span.
            UnspannedAtomicToken::SquareDelimited { nodes, spans } => {
                token_nodes.child(
                    (&nodes[..]).spanned(atom.span),
                    context.source.clone(),
                    |tokens| {
                        color_delimited_square(spans, tokens, atom.span, context);
                    },
                );
            }

            // Bare words and dots are both colored as plain words here.
            UnspannedAtomicToken::Word { .. } | UnspannedAtomicToken::Dot { .. } => {
                token_nodes.color_shape(FlatShape::Word.spanned(atom.span));
            }

            // All other atoms know how to color themselves.
            _ => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)),
        }

        Ok(())
    }
}
|
||||
|
||||
/// Shape that matches the tail of a bare expression: a run of bare words
/// and dots following an initial bare word.
#[derive(Debug, Copy, Clone)]
pub struct BareTailShape;
|
||||
|
||||
impl FallibleColorSyntax for BareTailShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "BareTailShape"
    }

    /// Color a run of alternating bare words and dots (all as `Word`).
    /// Fails if no token at all was matched.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        // Record how many shapes exist before matching, so we can tell
        // afterwards whether anything was consumed.
        let len = token_nodes.state().shapes().len();

        loop {
            let word =
                color_fallible_syntax_with(&BareShape, &FlatShape::Word, token_nodes, context);

            if word.is_ok() {
                // if a word was found, continue
                continue;
            }

            // if a word wasn't found, try to find a dot

            // try to find a dot (colored as a word, matching the bare run)
            let dot = color_fallible_syntax_with(
                &ColorableDotShape,
                &FlatShape::Word,
                token_nodes,
                context,
            );

            match dot {
                // if a dot was found, try to find another word
                Ok(_) => continue,
                // otherwise, we're done
                Err(_) => break,
            }
        }

        // Success only if at least one new shape was pushed by the loop.
        if token_nodes.state().shapes().len() > len {
            Ok(())
        } else {
            Err(ShellError::syntax_error(
                "No tokens matched BareTailShape".spanned_unknown(),
            ))
        }
    }
}
|
||||
|
||||
impl ExpandSyntax for BareTailShape {
|
||||
fn name(&self) -> &'static str {
|
||||
"word continuation"
|
||||
}
|
||||
|
||||
type Output = Option<Span>;
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Option<Span>, ParseError> {
|
||||
let mut end: Option<Span> = None;
|
||||
|
||||
loop {
|
||||
match expand_syntax(&BareShape, token_nodes, context) {
|
||||
Ok(bare) => {
|
||||
end = Some(bare.span);
|
||||
continue;
|
||||
}
|
||||
|
||||
Err(_) => match expand_syntax(&DotShape, token_nodes, context) {
|
||||
Ok(dot) => {
|
||||
end = Some(dot);
|
||||
continue;
|
||||
}
|
||||
|
||||
Err(_) => break,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
Ok(end)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expand_file_path(string: &str, context: &ExpandContext) -> PathBuf {
|
||||
let expanded = shellexpand::tilde_with_context(string, || context.homedir());
|
||||
|
||||
PathBuf::from(expanded.as_ref())
|
||||
}
|
760
crates/nu-parser/src/hir/syntax_shape/expression/atom.rs
Normal file
760
crates/nu-parser/src/hir/syntax_shape/expression/atom.rs
Normal file
@ -0,0 +1,760 @@
|
||||
use crate::hir::syntax_shape::FlatShape;
|
||||
use crate::hir::syntax_shape::{
|
||||
expand_syntax, expression::expand_file_path, parse_single_node, BarePathShape,
|
||||
BarePatternShape, ExpandContext, UnitShape, UnitSyntax,
|
||||
};
|
||||
use crate::parse::operator::EvaluationOperator;
|
||||
use crate::parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
|
||||
use crate::parse::tokens::UnspannedToken;
|
||||
use crate::parse::unit::Unit;
|
||||
use crate::{
|
||||
hir,
|
||||
hir::{Expression, RawNumber, TokensIterator},
|
||||
parse::flag::{Flag, FlagKind},
|
||||
};
|
||||
use nu_errors::{ParseError, ShellError};
|
||||
use nu_protocol::{ShellTypeName, SpannedTypeName};
|
||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem};
|
||||
use std::ops::Deref;
|
||||
|
||||
/// The payload of an atomic token produced by `expand_atom`, without its
/// overall span. Pair it with a span via `into_atomic_token` to obtain an
/// `AtomicToken`.
#[derive(Debug, Clone)]
pub enum UnspannedAtomicToken<'tokens> {
    // End of the token stream.
    Eof {
        span: Span,
    },
    // An error carried through as a token (used by permissive expansion).
    Error {
        error: Spanned<ShellError>,
    },
    // A numeric literal.
    Number {
        number: RawNumber,
    },
    // A number immediately followed by a unit.
    Size {
        number: RawNumber,
        unit: Spanned<Unit>,
    },
    // A string literal; `body` spans its contents.
    String {
        body: Span,
    },
    // The special `$it` variable.
    ItVariable {
        name: Span,
    },
    // Any other variable reference.
    Variable {
        name: Span,
    },
    // An external command name.
    ExternalCommand {
        command: Span,
    },
    // A word only meaningful to external commands.
    ExternalWord {
        text: Span,
    },
    // A file glob pattern.
    GlobPattern {
        pattern: Span,
    },
    // A bare word.
    Word {
        text: Span,
    },
    // A `[ ... ]` group; `spans` are the two bracket spans.
    SquareDelimited {
        spans: (Span, Span),
        nodes: &'tokens Vec<TokenNode>,
    },
    // A `( ... )` group; `spans` are the two paren spans.
    #[allow(unused)]
    RoundDelimited {
        spans: (Span, Span),
        nodes: &'tokens Vec<TokenNode>,
    },
    // A shorthand flag; `name` spans the flag's name.
    ShorthandFlag {
        name: Span,
    },
    // A comparison operator.
    CompareOperator {
        text: Span,
    },
    // A single `.` evaluation operator.
    Dot {
        text: Span,
    },
    // A `..` evaluation operator.
    DotDot {
        text: Span,
    },
    // A run of whitespace.
    Whitespace {
        text: Span,
    },
    // A separator token (only produced when the rule allows separators).
    Separator {
        text: Span,
    },
    // A comment; `body` spans its text.
    Comment {
        body: Span,
    },
}
|
||||
|
||||
impl<'tokens> UnspannedAtomicToken<'tokens> {
|
||||
pub fn into_atomic_token(self, span: impl Into<Span>) -> AtomicToken<'tokens> {
|
||||
AtomicToken {
|
||||
unspanned: self,
|
||||
span: span.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'tokens> ShellTypeName for AtomicToken<'tokens> {
    /// Delegate the type name to the unspanned payload.
    fn type_name(&self) -> &'static str {
        self.unspanned.type_name()
    }
}
|
||||
|
||||
impl<'tokens> ShellTypeName for UnspannedAtomicToken<'tokens> {
    /// A short, user-facing name for each token kind (used in mismatch
    /// errors such as "expected X, found <type_name>").
    fn type_name(&self) -> &'static str {
        match &self {
            UnspannedAtomicToken::Eof { .. } => "eof",
            UnspannedAtomicToken::Error { .. } => "error",
            UnspannedAtomicToken::CompareOperator { .. } => "compare operator",
            UnspannedAtomicToken::ShorthandFlag { .. } => "shorthand flag",
            UnspannedAtomicToken::Whitespace { .. } => "whitespace",
            UnspannedAtomicToken::Separator { .. } => "separator",
            UnspannedAtomicToken::Comment { .. } => "comment",
            UnspannedAtomicToken::Dot { .. } => "dot",
            UnspannedAtomicToken::DotDot { .. } => "dotdot",
            UnspannedAtomicToken::Number { .. } => "number",
            UnspannedAtomicToken::Size { .. } => "size",
            UnspannedAtomicToken::String { .. } => "string",
            UnspannedAtomicToken::ItVariable { .. } => "$it",
            UnspannedAtomicToken::Variable { .. } => "variable",
            UnspannedAtomicToken::ExternalCommand { .. } => "external command",
            UnspannedAtomicToken::ExternalWord { .. } => "external word",
            UnspannedAtomicToken::GlobPattern { .. } => "file pattern",
            UnspannedAtomicToken::Word { .. } => "word",
            UnspannedAtomicToken::SquareDelimited { .. } => "array literal",
            UnspannedAtomicToken::RoundDelimited { .. } => "paren delimited",
        }
    }
}
|
||||
|
||||
/// An atomic token: an `UnspannedAtomicToken` payload together with the
/// span of source it covers.
#[derive(Debug, Clone)]
pub struct AtomicToken<'tokens> {
    // The token's payload (kind + kind-specific spans).
    pub unspanned: UnspannedAtomicToken<'tokens>,
    // The overall source span of the token.
    pub span: Span,
}
|
||||
|
||||
impl<'tokens> HasSpan for AtomicToken<'tokens> {
    /// The overall source span of this atomic token.
    fn span(&self) -> Span {
        self.span
    }
}
|
||||
|
||||
// Deref to the payload so callers can match on the token kind without
// spelling out `.unspanned`.
impl<'tokens> Deref for AtomicToken<'tokens> {
    type Target = UnspannedAtomicToken<'tokens>;

    fn deref(&self) -> &UnspannedAtomicToken<'tokens> {
        &self.unspanned
    }
}
|
||||
|
||||
impl<'tokens> AtomicToken<'tokens> {
    /// Convert this atom into a HIR expression.
    ///
    /// Returns a `ParseError::mismatch` (mentioning `expected`) for token
    /// kinds that cannot stand alone as an expression (eof, errors,
    /// operators, flags, whitespace, separators, comments, delimited
    /// groups).
    pub fn to_hir(
        &self,
        context: &ExpandContext,
        expected: &'static str,
    ) -> Result<hir::Expression, ParseError> {
        Ok(match &self.unspanned {
            UnspannedAtomicToken::Eof { .. } => {
                return Err(ParseError::mismatch(
                    expected,
                    "eof atomic token".spanned(self.span),
                ))
            }
            UnspannedAtomicToken::Error { .. } => {
                return Err(ParseError::mismatch(expected, "error".spanned(self.span)))
            }
            // Structural / punctuation atoms have no expression form.
            UnspannedAtomicToken::RoundDelimited { .. }
            | UnspannedAtomicToken::CompareOperator { .. }
            | UnspannedAtomicToken::ShorthandFlag { .. }
            | UnspannedAtomicToken::Whitespace { .. }
            | UnspannedAtomicToken::Separator { .. }
            | UnspannedAtomicToken::Comment { .. }
            | UnspannedAtomicToken::Dot { .. }
            | UnspannedAtomicToken::DotDot { .. }
            | UnspannedAtomicToken::SquareDelimited { .. } => {
                return Err(ParseError::mismatch(expected, self.spanned_type_name()));
            }
            UnspannedAtomicToken::Number { number } => {
                Expression::number(number.to_number(context.source), self.span)
            }
            UnspannedAtomicToken::Size { number, unit } => {
                Expression::size(number.to_number(context.source), **unit, self.span)
            }
            UnspannedAtomicToken::String { body } => Expression::string(*body, self.span),
            UnspannedAtomicToken::ItVariable { name } => Expression::it_variable(*name, self.span),
            UnspannedAtomicToken::Variable { name } => Expression::variable(*name, self.span),
            UnspannedAtomicToken::ExternalCommand { command } => {
                Expression::external_command(*command, self.span)
            }
            // An external word becomes a plain string expression.
            UnspannedAtomicToken::ExternalWord { text } => Expression::string(*text, self.span),
            // Glob patterns are tilde-expanded before becoming a pattern
            // expression.
            UnspannedAtomicToken::GlobPattern { pattern } => Expression::pattern(
                expand_file_path(pattern.slice(context.source), context).to_string_lossy(),
                self.span,
            ),
            // NOTE(review): unlike the other arms, this uses the word's own
            // span (`*text`) for the expression span instead of `self.span`
            // — confirm the asymmetry is intended.
            UnspannedAtomicToken::Word { text } => Expression::string(*text, *text),
        })
    }

    /// Append the syntax-highlighting shape(s) for this atom to `shapes`.
    /// Eof produces nothing; kinds with no dedicated shape fall through to
    /// `FlatShape::Error`.
    pub(crate) fn color_tokens(&self, shapes: &mut Vec<Spanned<FlatShape>>) {
        match &self.unspanned {
            UnspannedAtomicToken::Eof { .. } => {}
            UnspannedAtomicToken::Error { .. } => shapes.push(FlatShape::Error.spanned(self.span)),
            UnspannedAtomicToken::CompareOperator { .. } => {
                shapes.push(FlatShape::CompareOperator.spanned(self.span))
            }
            UnspannedAtomicToken::ShorthandFlag { .. } => {
                shapes.push(FlatShape::ShorthandFlag.spanned(self.span))
            }
            UnspannedAtomicToken::Whitespace { .. } => {
                shapes.push(FlatShape::Whitespace.spanned(self.span))
            }
            // Numbers are colored by their concrete representation.
            UnspannedAtomicToken::Number {
                number: RawNumber::Decimal(_),
            } => shapes.push(FlatShape::Decimal.spanned(self.span)),
            UnspannedAtomicToken::Number {
                number: RawNumber::Int(_),
            } => shapes.push(FlatShape::Int.spanned(self.span)),
            // A size is a single shape carrying the number and unit spans.
            UnspannedAtomicToken::Size { number, unit } => shapes.push(
                FlatShape::Size {
                    number: number.span(),
                    unit: unit.span,
                }
                .spanned(self.span),
            ),
            UnspannedAtomicToken::String { .. } => {
                shapes.push(FlatShape::String.spanned(self.span))
            }
            UnspannedAtomicToken::ItVariable { .. } => {
                shapes.push(FlatShape::ItVariable.spanned(self.span))
            }
            UnspannedAtomicToken::Variable { .. } => {
                shapes.push(FlatShape::Variable.spanned(self.span))
            }
            UnspannedAtomicToken::ExternalCommand { .. } => {
                shapes.push(FlatShape::ExternalCommand.spanned(self.span))
            }
            UnspannedAtomicToken::ExternalWord { .. } => {
                shapes.push(FlatShape::ExternalWord.spanned(self.span))
            }
            UnspannedAtomicToken::GlobPattern { .. } => {
                shapes.push(FlatShape::GlobPattern.spanned(self.span))
            }
            UnspannedAtomicToken::Word { .. } => shapes.push(FlatShape::Word.spanned(self.span)),
            // Remaining kinds (dots, separators, comments, delimited
            // groups) have no dedicated shape here.
            _ => shapes.push(FlatShape::Error.spanned(self.span)),
        }
    }
}
|
||||
|
||||
impl PrettyDebugWithSource for AtomicToken<'_> {
    /// Render this atom for debug output, e.g. `(atom …)` or
    /// `(atom[kind] …)`.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        // `(atom <value>)`
        fn atom(value: DebugDocBuilder) -> DebugDocBuilder {
            b::delimit("(", b::kind("atom") + b::space() + value.group(), ")").group()
        }

        // `(atom[<kind>] <value>)`
        fn atom_kind(kind: impl std::fmt::Display, value: DebugDocBuilder) -> DebugDocBuilder {
            b::delimit(
                "(",
                (b::kind("atom") + b::delimit("[", b::kind(kind), "]")).group()
                    + b::space()
                    + value.group(),
                ")",
            )
            .group()
        }

        atom(match &self.unspanned {
            UnspannedAtomicToken::Eof { .. } => b::description("eof"),
            UnspannedAtomicToken::Error { .. } => b::error("error"),
            UnspannedAtomicToken::Number { number } => number.pretty_debug(source),
            UnspannedAtomicToken::Size { number, unit } => {
                number.pretty_debug(source) + b::keyword(unit.span.slice(source))
            }
            UnspannedAtomicToken::String { body } => b::primitive(body.slice(source)),
            // Variables render their full source text (including the sigil).
            UnspannedAtomicToken::ItVariable { .. } | UnspannedAtomicToken::Variable { .. } => {
                b::keyword(self.span.slice(source))
            }
            UnspannedAtomicToken::ExternalCommand { .. } => b::primitive(self.span.slice(source)),
            UnspannedAtomicToken::ExternalWord { text } => {
                atom_kind("external word", b::primitive(text.slice(source)))
            }
            UnspannedAtomicToken::GlobPattern { pattern } => {
                atom_kind("pattern", b::primitive(pattern.slice(source)))
            }
            UnspannedAtomicToken::Word { text } => {
                atom_kind("word", b::primitive(text.slice(source)))
            }
            // Delimited groups render their children between the delimiters.
            UnspannedAtomicToken::SquareDelimited { nodes, .. } => b::delimit(
                "[",
                b::intersperse_with_source(nodes.iter(), b::space(), source),
                "]",
            ),
            UnspannedAtomicToken::RoundDelimited { nodes, .. } => b::delimit(
                "(",
                b::intersperse_with_source(nodes.iter(), b::space(), source),
                ")",
            ),
            UnspannedAtomicToken::ShorthandFlag { name } => {
                atom_kind("shorthand flag", b::key(name.slice(source)))
            }
            UnspannedAtomicToken::Dot { .. } => atom(b::kind("dot")),
            UnspannedAtomicToken::DotDot { .. } => atom(b::kind("dotdot")),
            UnspannedAtomicToken::CompareOperator { text } => {
                atom_kind("operator", b::keyword(text.slice(source)))
            }
            // Whitespace/separators render their text escaped (`{:?}`) so
            // the characters are visible.
            UnspannedAtomicToken::Whitespace { text } => atom_kind(
                "whitespace",
                b::description(format!("{:?}", text.slice(source))),
            ),
            UnspannedAtomicToken::Separator { text } => atom_kind(
                "separator",
                b::description(format!("{:?}", text.slice(source))),
            ),
            UnspannedAtomicToken::Comment { body } => {
                atom_kind("comment", b::description(body.slice(source)))
            }
        })
    }
}
|
||||
|
||||
/// How `expand_atom` treats a whitespace token at the cursor.
#[derive(Debug)]
pub enum WhitespaceHandling {
    // Produce a `Whitespace` atomic token.
    #[allow(unused)]
    AllowWhitespace,
    // Fail with a mismatch error on whitespace.
    RejectWhitespace,
}
|
||||
|
||||
/// Flags controlling which token kinds `expand_atom` accepts and how it
/// groups multi-token constructs. Built via `new()`/`permissive()` and the
/// builder methods below.
#[derive(Debug)]
pub struct ExpansionRule {
    // Accept external command tokens.
    pub(crate) allow_external_command: bool,
    // Accept external word tokens.
    pub(crate) allow_external_word: bool,
    // Accept comparison operators.
    pub(crate) allow_cmp_operator: bool,
    // Accept evaluation operators (`.`, `..`).
    pub(crate) allow_eval_operator: bool,
    // Produce an `Eof` atom at end of input instead of failing.
    pub(crate) allow_eof: bool,
    // Accept separator tokens.
    pub(crate) allow_separator: bool,
    // Don't combine number+unit into a `Size`; leave it as a word.
    pub(crate) treat_size_as_word: bool,
    // Split bare paths into individual word/int members.
    pub(crate) separate_members: bool,
    // Turn error nodes into `Error` atoms instead of propagating.
    pub(crate) commit_errors: bool,
    // Whether whitespace yields an atom or an error.
    pub(crate) whitespace: WhitespaceHandling,
    // Accept comment tokens.
    pub(crate) allow_comments: bool,
}
|
||||
|
||||
impl ExpansionRule {
    /// The strict default rule: everything optional is disallowed, sizes
    /// are parsed, and whitespace is rejected.
    pub fn new() -> ExpansionRule {
        ExpansionRule {
            allow_external_command: false,
            allow_external_word: false,
            allow_eval_operator: false,
            allow_cmp_operator: false,
            allow_eof: false,
            treat_size_as_word: false,
            separate_members: false,
            commit_errors: false,
            allow_separator: false,
            whitespace: WhitespaceHandling::RejectWhitespace,
            allow_comments: false,
        }
    }

    /// The intent of permissive mode is to return an atomic token for every possible
    /// input token. This is important for error-correcting parsing, such as the
    /// syntax highlighter.
    pub fn permissive() -> ExpansionRule {
        ExpansionRule {
            allow_external_command: true,
            allow_external_word: true,
            allow_cmp_operator: true,
            allow_eval_operator: true,
            allow_eof: true,
            separate_members: false,
            treat_size_as_word: false,
            commit_errors: true,
            allow_separator: true,
            allow_comments: true,
            whitespace: WhitespaceHandling::AllowWhitespace,
        }
    }

    // The methods below are chainable builder setters; each flips one flag
    // and returns the modified rule.

    #[allow(unused)]
    pub fn allow_external_command(mut self) -> ExpansionRule {
        self.allow_external_command = true;
        self
    }

    #[allow(unused)]
    pub fn allow_cmp_operator(mut self) -> ExpansionRule {
        self.allow_cmp_operator = true;
        self
    }

    #[allow(unused)]
    pub fn no_cmp_operator(mut self) -> ExpansionRule {
        self.allow_cmp_operator = false;
        self
    }

    #[allow(unused)]
    pub fn allow_eval_operator(mut self) -> ExpansionRule {
        self.allow_eval_operator = true;
        self
    }

    #[allow(unused)]
    pub fn no_operator(mut self) -> ExpansionRule {
        self.allow_eval_operator = false;
        self
    }

    #[allow(unused)]
    pub fn no_external_command(mut self) -> ExpansionRule {
        self.allow_external_command = false;
        self
    }

    #[allow(unused)]
    pub fn allow_external_word(mut self) -> ExpansionRule {
        self.allow_external_word = true;
        self
    }

    #[allow(unused)]
    pub fn no_external_word(mut self) -> ExpansionRule {
        self.allow_external_word = false;
        self
    }

    #[allow(unused)]
    pub fn treat_size_as_word(mut self) -> ExpansionRule {
        self.treat_size_as_word = true;
        self
    }

    #[allow(unused)]
    pub fn separate_members(mut self) -> ExpansionRule {
        self.separate_members = true;
        self
    }

    #[allow(unused)]
    pub fn no_separate_members(mut self) -> ExpansionRule {
        self.separate_members = false;
        self
    }

    #[allow(unused)]
    pub fn commit_errors(mut self) -> ExpansionRule {
        self.commit_errors = true;
        self
    }

    #[allow(unused)]
    pub fn allow_whitespace(mut self) -> ExpansionRule {
        self.whitespace = WhitespaceHandling::AllowWhitespace;
        self
    }

    #[allow(unused)]
    pub fn reject_whitespace(mut self) -> ExpansionRule {
        self.whitespace = WhitespaceHandling::RejectWhitespace;
        self
    }

    #[allow(unused)]
    pub fn allow_separator(mut self) -> ExpansionRule {
        self.allow_separator = true;
        self
    }

    #[allow(unused)]
    pub fn reject_separator(mut self) -> ExpansionRule {
        self.allow_separator = false;
        self
    }

    #[allow(unused)]
    pub fn allow_comments(mut self) -> ExpansionRule {
        self.allow_comments = true;
        self
    }

    #[allow(unused)]
    pub fn reject_comments(mut self) -> ExpansionRule {
        self.allow_comments = false;
        self
    }
}
|
||||
|
||||
pub fn expand_atom<'me, 'content>(
|
||||
token_nodes: &'me mut TokensIterator<'content>,
|
||||
expected: &'static str,
|
||||
context: &ExpandContext,
|
||||
rule: ExpansionRule,
|
||||
) -> Result<AtomicToken<'content>, ParseError> {
|
||||
token_nodes.with_expand_tracer(|_, tracer| tracer.start("atom"));
|
||||
|
||||
let result = expand_atom_inner(token_nodes, expected, context, rule);
|
||||
|
||||
token_nodes.with_expand_tracer(|_, tracer| match &result {
|
||||
Ok(result) => {
|
||||
tracer.add_result(result.clone());
|
||||
tracer.success();
|
||||
}
|
||||
|
||||
Err(err) => tracer.failed(err),
|
||||
});
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
/// If the caller of expand_atom throws away the returned atomic token returned, it
/// must use a checkpoint to roll it back.
///
/// Expansion proceeds through ordered stages: eof handling; multi-token
/// constructs (size, member separation, bare path, glob pattern); a match on
/// the next token node (errors, separators, comments, delimited groups,
/// flags, whitespace); and finally single-token expansion via
/// `parse_single_node`.
fn expand_atom_inner<'me, 'content>(
    token_nodes: &'me mut TokensIterator<'content>,
    expected: &'static str,
    context: &ExpandContext,
    rule: ExpansionRule,
) -> Result<AtomicToken<'content>, ParseError> {
    // At end of input, either produce an Eof atom (permissive rules) or
    // fail with an unexpected-eof error.
    if token_nodes.at_end() {
        if rule.allow_eof {
            return Ok(UnspannedAtomicToken::Eof {
                span: Span::unknown(),
            }
            .into_atomic_token(Span::unknown()));
        } else {
            return Err(ParseError::unexpected_eof("anything", Span::unknown()));
        }
    }

    // First, we'll need to handle the situation where more than one token corresponds
    // to a single atomic token

    // If treat_size_as_word, don't try to parse the head of the token stream
    // as a size.
    if !rule.treat_size_as_word {
        match expand_syntax(&UnitShape, token_nodes, context) {
            // If the head of the stream isn't a valid unit, we'll try to parse
            // it again next as a word
            Err(_) => {}

            // But if it was a valid unit, we're done here
            Ok(UnitSyntax {
                unit: (number, unit),
                span,
            }) => return Ok(UnspannedAtomicToken::Size { number, unit }.into_atomic_token(span)),
        }
    }

    // When separating members, a single word or int token is taken as-is
    // instead of being merged into a larger bare path.
    if rule.separate_members {
        let mut next = token_nodes.peek_any();

        match next.node {
            Some(token) if token.is_word() => {
                next.commit();
                return Ok(UnspannedAtomicToken::Word { text: token.span() }
                    .into_atomic_token(token.span()));
            }

            Some(token) if token.is_int() => {
                next.commit();
                return Ok(UnspannedAtomicToken::Number {
                    number: RawNumber::Int(token.span()),
                }
                .into_atomic_token(token.span()));
            }

            _ => {}
        }
    }

    // Try to parse the head of the stream as a bare path. A bare path includes
    // words as well as `.`s, connected together without whitespace.
    match expand_syntax(&BarePathShape, token_nodes, context) {
        // If we didn't find a bare path
        Err(_) => {}
        Ok(span) => {
            let next = token_nodes.peek_any();

            match next.node {
                Some(token) if token.is_pattern() => {
                    // if the very next token is a pattern, we're looking at a glob, not a
                    // word, and we should try to parse it as a glob next
                }

                _ => return Ok(UnspannedAtomicToken::Word { text: span }.into_atomic_token(span)),
            }
        }
    }

    // Try to parse the head of the stream as a pattern. A pattern includes
    // words, words with `*` as well as `.`s, connected together without whitespace.
    match expand_syntax(&BarePatternShape, token_nodes, context) {
        // If we didn't find a bare path
        Err(_) => {}
        Ok(span) => {
            return Ok(UnspannedAtomicToken::GlobPattern { pattern: span }.into_atomic_token(span))
        }
    }

    // The next token corresponds to at most one atomic token

    // We need to `peek` because `parse_single_node` doesn't cover all of the
    // cases that `expand_atom` covers. We should probably collapse the two
    // if possible.
    let peeked = token_nodes.peek_any().not_eof(expected)?;

    match peeked.node {
        TokenNode::Token(_) => {
            // handle this next (falls through to parse_single_node below)
        }

        // An error node is carried through as an Error atom.
        TokenNode::Error(error) => {
            peeked.commit();
            return Ok(UnspannedAtomicToken::Error {
                error: error.clone(),
            }
            .into_atomic_token(error.span));
        }

        TokenNode::Separator(span) if rule.allow_separator => {
            peeked.commit();
            return Ok(UnspannedAtomicToken::Separator { text: *span }.into_atomic_token(span));
        }

        TokenNode::Comment(comment) if rule.allow_comments => {
            peeked.commit();
            return Ok(UnspannedAtomicToken::Comment { body: comment.text }
                .into_atomic_token(comment.span()));
        }

        // [ ... ]
        TokenNode::Delimited(Spanned {
            item:
                DelimitedNode {
                    delimiter: Delimiter::Square,
                    spans,
                    children,
                },
            span,
        }) => {
            peeked.commit();
            let span = *span;
            return Ok(UnspannedAtomicToken::SquareDelimited {
                nodes: children,
                spans: *spans,
            }
            .into_atomic_token(span));
        }

        TokenNode::Flag(Flag {
            kind: FlagKind::Shorthand,
            name,
            span,
        }) => {
            peeked.commit();
            return Ok(UnspannedAtomicToken::ShorthandFlag { name: *name }.into_atomic_token(*span));
        }

        // NOTE(review): longhand flags also produce a `ShorthandFlag` atom —
        // there is no longhand variant in `UnspannedAtomicToken`; confirm
        // this collapsing is intentional.
        TokenNode::Flag(Flag {
            kind: FlagKind::Longhand,
            name,
            span,
        }) => {
            peeked.commit();
            return Ok(UnspannedAtomicToken::ShorthandFlag { name: *name }.into_atomic_token(*span));
        }

        // If we see whitespace, process the whitespace according to the whitespace
        // handling rules
        TokenNode::Whitespace(span) => match rule.whitespace {
            // if whitespace is allowed, return a whitespace token
            WhitespaceHandling::AllowWhitespace => {
                peeked.commit();
                return Ok(
                    UnspannedAtomicToken::Whitespace { text: *span }.into_atomic_token(*span)
                );
            }

            // if whitespace is disallowed, return an error
            WhitespaceHandling::RejectWhitespace => {
                return Err(ParseError::mismatch(expected, "whitespace".spanned(*span)))
            }
        },

        // Anything else is surfaced as an Error atom rather than a hard
        // failure, so callers (e.g. highlighting) can keep going.
        other => {
            let span = peeked.node.span();

            peeked.commit();
            return Ok(UnspannedAtomicToken::Error {
                error: ShellError::type_error("token", other.type_name().spanned(span))
                    .spanned(span),
            }
            .into_atomic_token(span));
        }
    }

    // A plain `TokenNode::Token`: expand it as a single node.
    parse_single_node(token_nodes, expected, |token, token_span, err| {
        Ok(match token {
            // First, the error cases. Each error case corresponds to a expansion rule
            // flag that can be used to allow the case

            // rule.allow_cmp_operator
            UnspannedToken::CompareOperator(_) if !rule.allow_cmp_operator => {
                return Err(err.error())
            }

            // rule.allow_eval_operator
            UnspannedToken::EvaluationOperator(_) if !rule.allow_eval_operator => {
                return Err(err.error())
            }

            // rule.allow_external_command
            UnspannedToken::ExternalCommand(_) if !rule.allow_external_command => {
                return Err(ParseError::mismatch(
                    expected,
                    token.type_name().spanned(token_span),
                ))
            }
            // rule.allow_external_word
            UnspannedToken::ExternalWord if !rule.allow_external_word => {
                return Err(ParseError::mismatch(
                    expected,
                    "external word".spanned(token_span),
                ))
            }

            // The remaining arms map each token kind 1:1 to its atom.
            UnspannedToken::Number(number) => {
                UnspannedAtomicToken::Number { number }.into_atomic_token(token_span)
            }
            UnspannedToken::CompareOperator(_) => {
                UnspannedAtomicToken::CompareOperator { text: token_span }
                    .into_atomic_token(token_span)
            }
            UnspannedToken::EvaluationOperator(EvaluationOperator::Dot) => {
                UnspannedAtomicToken::Dot { text: token_span }.into_atomic_token(token_span)
            }
            UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot) => {
                UnspannedAtomicToken::DotDot { text: token_span }.into_atomic_token(token_span)
            }
            UnspannedToken::String(body) => {
                UnspannedAtomicToken::String { body }.into_atomic_token(token_span)
            }
            // `$it` is special-cased by name; all other variables are plain.
            UnspannedToken::Variable(name) if name.slice(context.source) == "it" => {
                UnspannedAtomicToken::ItVariable { name }.into_atomic_token(token_span)
            }
            UnspannedToken::Variable(name) => {
                UnspannedAtomicToken::Variable { name }.into_atomic_token(token_span)
            }
            UnspannedToken::ExternalCommand(command) => {
                UnspannedAtomicToken::ExternalCommand { command }.into_atomic_token(token_span)
            }
            UnspannedToken::ExternalWord => UnspannedAtomicToken::ExternalWord { text: token_span }
                .into_atomic_token(token_span),
            UnspannedToken::GlobPattern => UnspannedAtomicToken::GlobPattern {
                pattern: token_span,
            }
            .into_atomic_token(token_span),
            UnspannedToken::Bare => {
                UnspannedAtomicToken::Word { text: token_span }.into_atomic_token(token_span)
            }
        })
    })
}
|
@ -0,0 +1,55 @@
|
||||
use crate::hir::syntax_shape::{
|
||||
color_syntax, expand_syntax, ColorSyntax, ExpandContext, ExpressionListShape, TokenNode,
|
||||
};
|
||||
use crate::{hir, hir::TokensIterator, Delimiter, FlatShape};
|
||||
use nu_errors::ParseError;
|
||||
use nu_source::{Span, SpannedItem, Tag};
|
||||
|
||||
pub fn expand_delimited_square(
|
||||
children: &[TokenNode],
|
||||
span: Span,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ParseError> {
|
||||
let mut tokens = TokensIterator::new(&children, span, context.source.clone(), false);
|
||||
|
||||
let list = expand_syntax(&ExpressionListShape, &mut tokens, context);
|
||||
|
||||
Ok(hir::Expression::list(
|
||||
list?.exprs.item,
|
||||
Tag { span, anchor: None },
|
||||
))
|
||||
}
|
||||
|
||||
pub fn color_delimited_square(
|
||||
(open, close): (Span, Span),
|
||||
token_nodes: &mut TokensIterator,
|
||||
_span: Span,
|
||||
context: &ExpandContext,
|
||||
) {
|
||||
token_nodes.color_shape(FlatShape::OpenDelimiter(Delimiter::Square).spanned(open));
|
||||
let _list = color_syntax(&ExpressionListShape, token_nodes, context);
|
||||
token_nodes.color_shape(FlatShape::CloseDelimiter(Delimiter::Square).spanned(close));
|
||||
}
|
||||
|
||||
/// Colors a delimited group for an arbitrary delimiter kind (input supplies
/// the delimiter plus the spans of the open and close tokens).
#[derive(Debug, Copy, Clone)]
pub struct DelimitedShape;

impl ColorSyntax for DelimitedShape {
    type Info = ();
    type Input = (Delimiter, Span, Span);

    fn name(&self) -> &'static str {
        "DelimitedShape"
    }

    /// Color the opening delimiter, the interior expression list, then the
    /// closing delimiter.
    fn color_syntax<'a, 'b>(
        &self,
        (delimiter, open, close): &(Delimiter, Span, Span),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Self::Info {
        token_nodes.color_shape(FlatShape::OpenDelimiter(*delimiter).spanned(*open));
        color_syntax(&ExpressionListShape, token_nodes, context);
        token_nodes.color_shape(FlatShape::CloseDelimiter(*delimiter).spanned(*close));
    }
}
|
@ -0,0 +1,88 @@
|
||||
use crate::hir::syntax_shape::expression::atom::{
|
||||
expand_atom, ExpansionRule, UnspannedAtomicToken,
|
||||
};
|
||||
use crate::hir::syntax_shape::{
|
||||
expression::expand_file_path, ExpandContext, ExpandExpression, FallibleColorSyntax, FlatShape,
|
||||
};
|
||||
use crate::{hir, hir::TokensIterator};
|
||||
use nu_errors::{ParseError, ShellError};
|
||||
use nu_source::SpannedItem;
|
||||
|
||||
/// Shape for a file-path argument: colors and expands the next atom as a path.
#[derive(Debug, Copy, Clone)]
pub struct FilePathShape;

impl FallibleColorSyntax for FilePathShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "FilePathShape"
    }

    /// Color the next atom as a file path.
    ///
    /// Path-like atoms (words, strings, numbers, sizes) are colored as a single
    /// `Path` shape; any other atom falls back to its own token colors. Failure
    /// to expand an atom is swallowed — this coloring never reports an error.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        let atom = expand_atom(
            token_nodes,
            "file path",
            context,
            ExpansionRule::permissive(),
        );

        // No atom could be expanded: nothing to color.
        let atom = match atom {
            Err(_) => return Ok(()),
            Ok(atom) => atom,
        };

        match atom.unspanned {
            UnspannedAtomicToken::Word { .. }
            | UnspannedAtomicToken::String { .. }
            | UnspannedAtomicToken::Number { .. }
            | UnspannedAtomicToken::Size { .. } => {
                token_nodes.color_shape(FlatShape::Path.spanned(atom.span));
            }

            // Not path-like: let the atom color its own constituent tokens.
            _ => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)),
        }

        Ok(())
    }
}

impl ExpandExpression for FilePathShape {
    fn name(&self) -> &'static str {
        "file path"
    }

    /// Expand the next atom into a file-path expression.
    ///
    /// Words, external words and strings are passed through the project's
    /// `expand_file_path` helper; numbers and sizes are taken verbatim from the
    /// source text. Anything else is converted through the atom's generic
    /// `to_hir` path.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ParseError> {
        let atom = expand_atom(
            token_nodes,
            "file path",
            context,
            ExpansionRule::new().allow_external_word(),
        )?;

        match atom.unspanned {
            UnspannedAtomicToken::Word { text: body }
            | UnspannedAtomicToken::ExternalWord { text: body }
            | UnspannedAtomicToken::String { body } => {
                let path = expand_file_path(body.slice(context.source), context);
                Ok(hir::Expression::file_path(path, atom.span))
            }

            UnspannedAtomicToken::Number { .. } | UnspannedAtomicToken::Size { .. } => {
                // Numbers/sizes are used as-is: the raw source text is the path.
                let path = atom.span.slice(context.source);
                Ok(hir::Expression::file_path(path, atom.span))
            }

            _ => atom.to_hir(context, "file path"),
        }
    }
}
|
209
crates/nu-parser/src/hir/syntax_shape/expression/list.rs
Normal file
209
crates/nu-parser/src/hir/syntax_shape/expression/list.rs
Normal file
@ -0,0 +1,209 @@
|
||||
use crate::{
|
||||
hir,
|
||||
hir::syntax_shape::{
|
||||
color_fallible_syntax, color_syntax, expand_atom, expand_expr, maybe_spaced, spaced,
|
||||
AnyExpressionShape, ColorSyntax, ExpandContext, ExpandSyntax, ExpansionRule,
|
||||
MaybeSpaceShape, SpaceShape,
|
||||
},
|
||||
hir::TokensIterator,
|
||||
};
|
||||
use nu_errors::ParseError;
|
||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem};
|
||||
|
||||
/// The result of expanding an `ExpressionListShape`: the expressions together
/// with the overall span they cover.
#[derive(Debug, Clone)]
pub struct ExpressionListSyntax {
    pub exprs: Spanned<Vec<hir::Expression>>,
}

impl HasSpan for ExpressionListSyntax {
    fn span(&self) -> Span {
        self.exprs.span
    }
}

impl PrettyDebugWithSource for ExpressionListSyntax {
    // Render the expressions separated by single spaces.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::intersperse(
            self.exprs.iter().map(|e| e.pretty_debug(source)),
            b::space(),
        )
    }
}
|
||||
|
||||
/// Expands/colors a whitespace-separated list of expressions up to the end of
/// the token stream.
#[derive(Debug, Copy, Clone)]
pub struct ExpressionListShape;

impl ExpandSyntax for ExpressionListShape {
    type Output = ExpressionListSyntax;

    fn name(&self) -> &'static str {
        "expression list"
    }

    /// Expand expressions until the stream is exhausted (trailing whitespace
    /// allowed), returning them with the span from the starting cursor to the
    /// ending cursor. The first expression may be preceded by optional
    /// whitespace; each subsequent one requires leading whitespace.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<ExpressionListSyntax, ParseError> {
        let mut exprs = vec![];

        let start = token_nodes.span_at_cursor();

        // Empty list: nothing but (possibly) whitespace remains.
        if token_nodes.at_end_possible_ws() {
            return Ok(ExpressionListSyntax {
                exprs: exprs.spanned(start),
            });
        }

        // First expression: leading whitespace optional.
        let expr = expand_expr(&maybe_spaced(AnyExpressionShape), token_nodes, context)?;

        exprs.push(expr);

        loop {
            if token_nodes.at_end_possible_ws() {
                let end = token_nodes.span_at_cursor();
                return Ok(ExpressionListSyntax {
                    exprs: exprs.spanned(start.until(end)),
                });
            }

            // Subsequent expressions: leading whitespace required.
            let expr = expand_expr(&spaced(AnyExpressionShape), token_nodes, context)?;

            exprs.push(expr);
        }
    }
}

impl ColorSyntax for ExpressionListShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "ExpressionListShape"
    }

    /// The intent of this method is to fully color an expression list shape infallibly.
    /// This means that if we can't expand a token into an expression, we fall back to
    /// a simpler coloring strategy.
    ///
    /// This would apply to something like `where x >`, which includes an incomplete
    /// binary operator. Since we will fail to process it as a binary operator, we'll
    /// fall back to a simpler coloring and move on.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) {
        // We encountered a parsing error and will continue with simpler coloring ("backoff
        // coloring mode")
        let mut backoff = false;

        // Consume any leading whitespace
        color_syntax(&MaybeSpaceShape, token_nodes, context);

        loop {
            // If we reached the very end of the token stream, we're done
            if token_nodes.at_end() {
                return;
            }

            if backoff {
                // Record the shape count so we can detect a lack of progress below.
                let len = token_nodes.state().shapes().len();

                // If we previously encountered a parsing error, use backoff coloring mode
                color_syntax(&SimplestExpression, token_nodes, context);

                if len == token_nodes.state().shapes().len() && !token_nodes.at_end() {
                    // This should never happen, but if it does, a panic is better than an infinite loop
                    panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression")
                }
            } else {
                // Try to color the head of the stream as an expression
                if color_fallible_syntax(&AnyExpressionShape, token_nodes, context).is_err() {
                    // If no expression was found, switch to backoff coloring mode

                    backoff = true;
                    continue;
                }

                // If an expression was found, consume a space
                if color_fallible_syntax(&SpaceShape, token_nodes, context).is_err() {
                    // If no space was found, we're either at the end or there's an error.
                    // Either way, switch to backoff coloring mode. If we're at the end
                    // it won't have any consequences.
                    backoff = true;
                }
                // Otherwise, move on to the next expression
            }
        }
    }
}
|
||||
|
||||
/// BackoffColoringMode consumes all of the remaining tokens in an infallible way
#[derive(Debug, Copy, Clone)]
pub struct BackoffColoringMode;

impl ColorSyntax for BackoffColoringMode {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "BackoffColoringMode"
    }

    /// Repeatedly color the head of the stream with `SimplestExpression` until
    /// the token stream is exhausted, panicking (rather than looping forever)
    /// if an iteration makes no progress.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &Self::Input,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Self::Info {
        loop {
            if token_nodes.at_end() {
                break;
            }

            // Record the shape count so a no-progress iteration can be detected.
            let len = token_nodes.state().shapes().len();
            color_syntax(&SimplestExpression, token_nodes, context);

            if len == token_nodes.state().shapes().len() && !token_nodes.at_end() {
                // This shouldn't happen, but if it does, a panic is better than an infinite loop
                panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, token_nodes.state().shapes());
            }
        }
    }
}
|
||||
|
||||
/// The point of `SimplestExpression` is to serve as an infallible base case for coloring.
|
||||
/// As a last ditch effort, if we can't find any way to parse the head of the stream as an
|
||||
/// expression, fall back to simple coloring.
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct SimplestExpression;
|
||||
|
||||
impl ColorSyntax for SimplestExpression {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"SimplestExpression"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) {
|
||||
let atom = expand_atom(
|
||||
token_nodes,
|
||||
"any token",
|
||||
context,
|
||||
ExpansionRule::permissive(),
|
||||
);
|
||||
|
||||
match atom {
|
||||
Err(_) => {}
|
||||
Ok(atom) => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)),
|
||||
}
|
||||
}
|
||||
}
|
169
crates/nu-parser/src/hir/syntax_shape/expression/number.rs
Normal file
169
crates/nu-parser/src/hir/syntax_shape/expression/number.rs
Normal file
@ -0,0 +1,169 @@
|
||||
use crate::hir::syntax_shape::{
|
||||
expand_atom, parse_single_node, ExpandContext, ExpandExpression, ExpansionRule,
|
||||
FallibleColorSyntax, FlatShape, TestSyntax,
|
||||
};
|
||||
use crate::hir::tokens_iterator::Peeked;
|
||||
use crate::parse::tokens::UnspannedToken;
|
||||
use crate::{
|
||||
hir,
|
||||
hir::{RawNumber, TokensIterator},
|
||||
};
|
||||
use nu_errors::{ParseError, ShellError};
|
||||
use nu_source::{Spanned, SpannedItem};
|
||||
|
||||
/// Shape for a numeric argument.
#[derive(Debug, Copy, Clone)]
pub struct NumberShape;

impl ExpandExpression for NumberShape {
    fn name(&self) -> &'static str {
        "number"
    }

    /// Expand a single token as a number expression.
    ///
    /// Several non-number tokens are still accepted and converted to the
    /// corresponding expression kinds (variables, external commands, bare
    /// words, strings); glob patterns, operators and external words are
    /// rejected.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ParseError> {
        parse_single_node(token_nodes, "Number", |token, token_span, err| {
            Ok(match token {
                UnspannedToken::GlobPattern
                | UnspannedToken::CompareOperator(..)
                | UnspannedToken::EvaluationOperator(..) => return Err(err.error()),
                // `$it` is special-cased as the "it" variable.
                UnspannedToken::Variable(tag) if tag.slice(context.source) == "it" => {
                    hir::Expression::it_variable(tag, token_span)
                }
                UnspannedToken::ExternalCommand(tag) => {
                    hir::Expression::external_command(tag, token_span)
                }
                UnspannedToken::ExternalWord => {
                    return Err(ParseError::mismatch(
                        "number",
                        "syntax error".spanned(token_span),
                    ))
                }
                UnspannedToken::Variable(tag) => hir::Expression::variable(tag, token_span),
                UnspannedToken::Number(number) => {
                    hir::Expression::number(number.to_number(context.source), token_span)
                }
                UnspannedToken::Bare => hir::Expression::bare(token_span),
                UnspannedToken::String(tag) => hir::Expression::string(tag, token_span),
            })
        })
    }
}

impl FallibleColorSyntax for NumberShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "NumberShape"
    }

    /// Color the next atom as a number. If the atom fails to expand, the
    /// peeked span is colored as an `Error` shape instead; otherwise the atom
    /// colors its own tokens. Never returns an error.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        let atom = token_nodes.spanned(|token_nodes| {
            expand_atom(token_nodes, "number", context, ExpansionRule::permissive())
        });

        let atom = match atom {
            Spanned { item: Err(_), span } => {
                token_nodes.color_shape(FlatShape::Error.spanned(span));
                return Ok(());
            }
            Spanned { item: Ok(atom), .. } => atom,
        };

        token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes));

        Ok(())
    }
}
|
||||
|
||||
/// Shape for an integer argument: like `NumberShape`, but decimals are rejected.
#[derive(Debug, Copy, Clone)]
pub struct IntShape;

impl ExpandExpression for IntShape {
    fn name(&self) -> &'static str {
        "integer"
    }

    /// Expand a single token as an integer expression. Mirrors
    /// `NumberShape::expand_expr` except that only `RawNumber::Int` numbers
    /// are accepted — decimals produce an error.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ParseError> {
        parse_single_node(token_nodes, "Integer", |token, token_span, err| {
            Ok(match token {
                UnspannedToken::GlobPattern
                | UnspannedToken::CompareOperator(..)
                | UnspannedToken::EvaluationOperator(..)
                | UnspannedToken::ExternalWord => return Err(err.error()),
                // `$it` is special-cased as the "it" variable.
                UnspannedToken::Variable(span) if span.slice(context.source) == "it" => {
                    hir::Expression::it_variable(span, token_span)
                }
                UnspannedToken::ExternalCommand(span) => {
                    hir::Expression::external_command(span, token_span)
                }
                UnspannedToken::Variable(span) => hir::Expression::variable(span, token_span),
                // Only integral raw numbers are accepted here.
                UnspannedToken::Number(number @ RawNumber::Int(_)) => {
                    hir::Expression::number(number.to_number(context.source), token_span)
                }
                UnspannedToken::Number(_) => return Err(err.error()),
                UnspannedToken::Bare => hir::Expression::bare(token_span),
                UnspannedToken::String(span) => hir::Expression::string(span, token_span),
            })
        })
    }
}

impl FallibleColorSyntax for IntShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "IntShape"
    }

    /// Color the next atom as an integer; on expansion failure the peeked
    /// span is colored as an `Error` shape. Never returns an error.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        let atom = token_nodes.spanned(|token_nodes| {
            expand_atom(token_nodes, "integer", context, ExpansionRule::permissive())
        });

        let atom = match atom {
            Spanned { item: Err(_), span } => {
                token_nodes.color_shape(FlatShape::Error.spanned(span));
                return Ok(());
            }
            Spanned { item: Ok(atom), .. } => atom,
        };

        token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes));

        Ok(())
    }
}
|
||||
|
||||
impl TestSyntax for NumberShape {
|
||||
fn test<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
_context: &ExpandContext,
|
||||
) -> Option<Peeked<'a, 'b>> {
|
||||
let peeked = token_nodes.peek_any();
|
||||
|
||||
match peeked.node {
|
||||
Some(token) if token.is_number() => Some(peeked),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
110
crates/nu-parser/src/hir/syntax_shape/expression/pattern.rs
Normal file
110
crates/nu-parser/src/hir/syntax_shape/expression/pattern.rs
Normal file
@ -0,0 +1,110 @@
|
||||
use crate::hir::syntax_shape::{
|
||||
expand_atom, expand_bare, expression::expand_file_path, ExpandContext, ExpandExpression,
|
||||
ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, UnspannedAtomicToken,
|
||||
};
|
||||
use crate::parse::operator::EvaluationOperator;
|
||||
use crate::parse::tokens::{Token, UnspannedToken};
|
||||
use crate::{hir, hir::TokensIterator, TokenNode};
|
||||
use nu_errors::{ParseError, ShellError};
|
||||
|
||||
use nu_protocol::ShellTypeName;
|
||||
use nu_source::{Span, SpannedItem};
|
||||
|
||||
/// Shape for a glob-pattern argument.
#[derive(Debug, Copy, Clone)]
pub struct PatternShape;

impl FallibleColorSyntax for PatternShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "PatternShape"
    }

    /// Color the next atom as a glob pattern; word atoms also qualify.
    /// Runs inside `token_nodes.atomic` — NOTE(review): presumably
    /// transactional, rolling back iterator state on error; confirm in
    /// `TokensIterator::atomic`.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        token_nodes.atomic(|token_nodes| {
            let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?;

            match &atom.unspanned {
                UnspannedAtomicToken::GlobPattern { .. } | UnspannedAtomicToken::Word { .. } => {
                    token_nodes.color_shape(FlatShape::GlobPattern.spanned(atom.span));
                    Ok(())
                }

                // Anything else is a type error against "pattern".
                other => Err(ShellError::type_error(
                    "pattern",
                    other.type_name().spanned(atom.span),
                )),
            }
        })
    }
}

impl ExpandExpression for PatternShape {
    fn name(&self) -> &'static str {
        "glob pattern"
    }

    /// Expand the next atom into a pattern expression.
    ///
    /// Words, strings, external words and glob patterns are all passed through
    /// the project's `expand_file_path` helper and become a pattern
    /// expression; any other atom is converted via its generic `to_hir` path.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ParseError> {
        let atom = expand_atom(
            token_nodes,
            "pattern",
            context,
            ExpansionRule::new().allow_external_word(),
        )?;

        match atom.unspanned {
            UnspannedAtomicToken::Word { text: body }
            | UnspannedAtomicToken::String { body }
            | UnspannedAtomicToken::ExternalWord { text: body }
            | UnspannedAtomicToken::GlobPattern { pattern: body } => {
                let path = expand_file_path(body.slice(context.source), context);
                Ok(hir::Expression::pattern(path.to_string_lossy(), atom.span))
            }
            _ => atom.to_hir(context, "pattern"),
        }
    }
}
|
||||
|
||||
/// Matches the textual extent of a bare pattern such as `foo*.txt`: a run of
/// bare tokens, `.` operators and glob-pattern tokens.
#[derive(Debug, Copy, Clone)]
pub struct BarePatternShape;

impl ExpandSyntax for BarePatternShape {
    type Output = Span;

    fn name(&self) -> &'static str {
        "bare pattern"
    }

    /// Expand via `expand_bare`, accepting only bare words, `.` evaluation
    /// operators, and glob-pattern tokens; returns the covered span.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Span, ParseError> {
        expand_bare(token_nodes, context, |token| match token {
            TokenNode::Token(Token {
                unspanned: UnspannedToken::Bare,
                ..
            })
            | TokenNode::Token(Token {
                unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot),
                ..
            })
            | TokenNode::Token(Token {
                unspanned: UnspannedToken::GlobPattern,
                ..
            }) => true,

            _ => false,
        })
    }
}
|
103
crates/nu-parser/src/hir/syntax_shape/expression/range.rs
Normal file
103
crates/nu-parser/src/hir/syntax_shape/expression/range.rs
Normal file
@ -0,0 +1,103 @@
|
||||
use crate::hir::syntax_shape::expression::UnspannedAtomicToken;
|
||||
use crate::hir::syntax_shape::{
|
||||
color_fallible_syntax, expand_atom, expand_expr, AnyExpressionShape, ExpandContext,
|
||||
ExpandExpression, ExpansionRule, FallibleColorSyntax, FlatShape,
|
||||
};
|
||||
use crate::parse::operator::EvaluationOperator;
|
||||
use crate::parse::token_tree::TokenNode;
|
||||
use crate::parse::tokens::{Token, UnspannedToken};
|
||||
use crate::{hir, hir::TokensIterator};
|
||||
use nu_errors::{ParseError, ShellError};
|
||||
use nu_protocol::SpannedTypeName;
|
||||
use nu_source::SpannedItem;
|
||||
|
||||
/// Shape for a range literal: `<expr> .. <expr>`.
#[derive(Debug, Copy, Clone)]
pub struct RangeShape;

impl ExpandExpression for RangeShape {
    fn name(&self) -> &'static str {
        "range"
    }

    /// Expand `<expr> .. <expr>` into a range expression. Parsed inside
    /// `atomic_parse` — NOTE(review): presumably transactional so a partial
    /// match consumes no tokens; confirm in `TokensIterator::atomic_parse`.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ParseError> {
        token_nodes.atomic_parse(|token_nodes| {
            // Left endpoint.
            let left = expand_expr(&AnyExpressionShape, token_nodes, context)?;

            // The `..` operator, expanded as an atom with eval operators allowed.
            let atom = expand_atom(
                token_nodes,
                "..",
                context,
                ExpansionRule::new().allow_eval_operator(),
            )?;

            // Only a DotDot atom is acceptable between the endpoints.
            let span = match atom.unspanned {
                UnspannedAtomicToken::DotDot { text } => text,
                _ => return Err(ParseError::mismatch("..", atom.spanned_type_name())),
            };

            // Right endpoint.
            let right = expand_expr(&AnyExpressionShape, token_nodes, context)?;

            Ok(hir::Expression::range(left, span, right))
        })
    }
}

impl FallibleColorSyntax for RangeShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "RangeShape"
    }

    /// Color `<expr> .. <expr>` atomically: left endpoint, the `..` operator,
    /// then the right endpoint.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        token_nodes.atomic_parse(|token_nodes| {
            color_fallible_syntax(&AnyExpressionShape, token_nodes, context)?;
            color_fallible_syntax(&DotDotShape, token_nodes, context)?;
            color_fallible_syntax(&AnyExpressionShape, token_nodes, context)
        })?;

        Ok(())
    }
}
|
||||
|
||||
/// Colors a single `..` operator token (private helper for `RangeShape`).
#[derive(Debug, Copy, Clone)]
struct DotDotShape;

impl FallibleColorSyntax for DotDotShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        ".."
    }

    /// Peek the next token; if it is the `..` evaluation operator, commit the
    /// peek and color it as a `DotDot` shape, otherwise return a type error
    /// without consuming anything.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &Self::Input,
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
    ) -> Result<Self::Info, ShellError> {
        let peeked = token_nodes.peek_any().not_eof("..")?;
        match &peeked.node {
            TokenNode::Token(Token {
                unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot),
                span,
            }) => {
                peeked.commit();
                token_nodes.color_shape(FlatShape::DotDot.spanned(span));
                Ok(())
            }
            token => Err(ShellError::type_error("..", token.spanned_type_name())),
        }
    }
}
|
90
crates/nu-parser/src/hir/syntax_shape/expression/string.rs
Normal file
90
crates/nu-parser/src/hir/syntax_shape/expression/string.rs
Normal file
@ -0,0 +1,90 @@
|
||||
use crate::hir::syntax_shape::{
|
||||
expand_atom, expand_variable, parse_single_node, AtomicToken, ExpandContext, ExpandExpression,
|
||||
ExpansionRule, FallibleColorSyntax, FlatShape, TestSyntax, UnspannedAtomicToken,
|
||||
};
|
||||
use crate::hir::tokens_iterator::Peeked;
|
||||
use crate::parse::tokens::UnspannedToken;
|
||||
use crate::{hir, hir::TokensIterator};
|
||||
use nu_errors::{ParseError, ShellError};
|
||||
use nu_source::SpannedItem;
|
||||
|
||||
/// Shape for a string argument. For coloring, the caller supplies the
/// `FlatShape` to apply (its `Input` type).
#[derive(Debug, Copy, Clone)]
pub struct StringShape;

impl FallibleColorSyntax for StringShape {
    type Info = ();
    type Input = FlatShape;

    fn name(&self) -> &'static str {
        "StringShape"
    }

    /// Color the next atom with the caller-provided shape when it is a string
    /// atom; other atoms fall back to their own token colors. Expansion
    /// failure is swallowed — this never returns an error.
    fn color_syntax<'a, 'b>(
        &self,
        input: &FlatShape,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive());

        let atom = match atom {
            Err(_) => return Ok(()),
            Ok(atom) => atom,
        };

        match atom {
            AtomicToken {
                unspanned: UnspannedAtomicToken::String { .. },
                span,
            } => token_nodes.color_shape((*input).spanned(span)),
            atom => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)),
        }

        Ok(())
    }
}

impl ExpandExpression for StringShape {
    fn name(&self) -> &'static str {
        "string"
    }

    /// Expand a single token as a string expression.
    ///
    /// Variables, external commands, numbers and bare words are accepted and
    /// converted to the corresponding expression kinds; operators, glob
    /// patterns and external words are rejected.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ParseError> {
        parse_single_node(token_nodes, "String", |token, token_span, err| {
            Ok(match token {
                UnspannedToken::GlobPattern
                | UnspannedToken::CompareOperator(..)
                | UnspannedToken::EvaluationOperator(..)
                | UnspannedToken::ExternalWord => return Err(err.error()),
                UnspannedToken::Variable(span) => {
                    expand_variable(span, token_span, &context.source)
                }
                UnspannedToken::ExternalCommand(span) => {
                    hir::Expression::external_command(span, token_span)
                }
                // A number in string position is treated as a bare word.
                UnspannedToken::Number(_) => hir::Expression::bare(token_span),
                UnspannedToken::Bare => hir::Expression::bare(token_span),
                UnspannedToken::String(span) => hir::Expression::string(span, token_span),
            })
        })
    }
}

impl TestSyntax for StringShape {
    /// Peek the next token and report whether it is a string token, without
    /// consuming anything.
    fn test<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
    ) -> Option<Peeked<'a, 'b>> {
        let peeked = token_nodes.peek_any();

        match peeked.node {
            Some(token) if token.is_string() => Some(peeked),
            _ => None,
        }
    }
}
|
124
crates/nu-parser/src/hir/syntax_shape/expression/unit.rs
Normal file
124
crates/nu-parser/src/hir/syntax_shape/expression/unit.rs
Normal file
@ -0,0 +1,124 @@
|
||||
use crate::hir::syntax_shape::{ExpandContext, ExpandSyntax};
|
||||
use crate::parse::tokens::RawNumber;
|
||||
use crate::parse::tokens::Token;
|
||||
use crate::parse::tokens::UnspannedToken;
|
||||
use crate::parse::unit::Unit;
|
||||
use crate::{hir::TokensIterator, TokenNode};
|
||||
use nom::branch::alt;
|
||||
use nom::bytes::complete::tag;
|
||||
use nom::character::complete::digit1;
|
||||
use nom::combinator::{all_consuming, opt, value};
|
||||
use nom::IResult;
|
||||
use nu_errors::ParseError;
|
||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem};
|
||||
|
||||
/// A parsed unit literal such as `10kb`: the numeric part and the unit,
/// together with the span of the whole token.
#[derive(Debug, Clone)]
pub struct UnitSyntax {
    // (number, unit) — e.g. the `10` and the `kb` of `10kb`
    pub unit: (RawNumber, Spanned<Unit>),
    pub span: Span,
}

impl PrettyDebugWithSource for UnitSyntax {
    // Render as `unit(<number> <unit>)`.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::typed(
            "unit",
            self.unit.0.pretty_debug(source) + b::space() + self.unit.1.pretty_debug(source),
        )
    }
}

impl HasSpan for UnitSyntax {
    fn span(&self) -> Span {
        self.span
    }
}
|
||||
|
||||
/// Shape for a unit literal (`10kb`, `1.5s`, ...).
#[derive(Debug, Copy, Clone)]
pub struct UnitShape;

impl ExpandSyntax for UnitShape {
    type Output = UnitSyntax;

    fn name(&self) -> &'static str {
        "unit"
    }

    /// Expand a bare token into a `UnitSyntax` by re-parsing its source text
    /// with the `unit_size` nom parser. The peek is committed only after the
    /// text successfully parses as a number + unit.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<UnitSyntax, ParseError> {
        let peeked = token_nodes.peek_any().not_eof("unit")?;

        // Only a bare token can be a unit literal.
        let span = match peeked.node {
            TokenNode::Token(Token {
                unspanned: UnspannedToken::Bare,
                span,
            }) => *span,
            _ => return Err(peeked.type_error("unit")),
        };

        let unit = unit_size(span.slice(context.source), span);

        // unit_size returns (remaining_input, (number, unit)); the remaining
        // input is discarded (all_consuming guarantees it is empty on success).
        let (_, (number, unit)) = match unit {
            Err(_) => return Err(ParseError::mismatch("unit", "word".spanned(span))),
            Ok((number, unit)) => (number, unit),
        };

        peeked.commit();
        Ok(UnitSyntax {
            unit: (number, unit),
            span,
        })
    }
}
|
||||
|
||||
/// Parse the text of a bare token as `<digits>[.<digits>]<unit>`, e.g. `10kb`
/// or `1.5s`.
///
/// `bare_span` is the span of the whole token in the original source; the
/// returned `RawNumber` and `Spanned<Unit>` carry sub-spans computed by
/// offsetting `bare_span.start()` with the lengths of the matched text.
fn unit_size(input: &str, bare_span: Span) -> IResult<&str, (RawNumber, Spanned<Unit>)> {
    // Integer part (required).
    let (input, digits) = digit1(input)?;

    // Optional fractional part: a `.` followed by more digits.
    let (input, dot) = opt(tag("."))(input)?;

    let (input, number) = match dot {
        Some(dot) => {
            let (input, rest) = digit1(input)?;
            (
                input,
                // Decimal span covers digits + dot + fractional digits.
                RawNumber::decimal(Span::new(
                    bare_span.start(),
                    bare_span.start() + digits.len() + dot.len() + rest.len(),
                )),
            )
        }

        None => (
            input,
            RawNumber::int(Span::new(
                bare_span.start(),
                bare_span.start() + digits.len(),
            )),
        ),
    };

    // The unit suffix must consume the remainder of the token (all_consuming).
    // Note the case-sensitive distinction: `m` is minutes, `M` is months.
    let (input, unit) = all_consuming(alt((
        value(Unit::Byte, alt((tag("B"), tag("b")))),
        value(Unit::Kilobyte, alt((tag("KB"), tag("kb"), tag("Kb")))),
        value(Unit::Megabyte, alt((tag("MB"), tag("mb"), tag("Mb")))),
        value(Unit::Gigabyte, alt((tag("GB"), tag("gb"), tag("Gb")))),
        value(Unit::Terabyte, alt((tag("TB"), tag("tb"), tag("Tb")))),
        value(Unit::Petabyte, alt((tag("PB"), tag("pb"), tag("Pb")))),
        value(Unit::Second, tag("s")),
        value(Unit::Minute, tag("m")),
        value(Unit::Hour, tag("h")),
        value(Unit::Day, tag("d")),
        value(Unit::Week, tag("w")),
        value(Unit::Month, tag("M")),
        value(Unit::Year, tag("y")),
    )))(input)?;

    // The unit's span starts where the number's span ends.
    let start_span = number.span().end();

    Ok((
        input,
        (number, unit.spanned(Span::new(start_span, bare_span.end()))),
    ))
}
|
@ -0,0 +1,980 @@
|
||||
use crate::hir::syntax_shape::{
|
||||
color_fallible_syntax, color_fallible_syntax_with, expand_atom, expand_expr, expand_syntax,
|
||||
parse_single_node, AnyExpressionShape, BareShape, ExpandContext, ExpandExpression,
|
||||
ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, ParseError, Peeked, SkipSyntax,
|
||||
StringShape, TestSyntax, UnspannedAtomicToken, WhitespaceShape,
|
||||
};
|
||||
use crate::parse::tokens::{RawNumber, UnspannedToken};
|
||||
use crate::{hir, hir::Expression, hir::TokensIterator, CompareOperator, EvaluationOperator};
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{PathMember, ShellTypeName};
|
||||
use nu_source::{
|
||||
b, DebugDocBuilder, HasSpan, PrettyDebug, PrettyDebugWithSource, Span, Spanned, SpannedItem,
|
||||
Tag, Tagged, TaggedItem, Text,
|
||||
};
|
||||
use num_bigint::BigInt;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::str::FromStr;
|
||||
|
||||
/// Shape for a variable optionally followed by a dotted path, e.g. `$it.foo.bar`.
#[derive(Debug, Copy, Clone)]
pub struct VariablePathShape;
|
||||
|
||||
impl ExpandExpression for VariablePathShape {
|
||||
fn name(&self) -> &'static str {
|
||||
"variable path"
|
||||
}
|
||||
|
||||
fn expand_expr<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ParseError> {
|
||||
// 1. let the head be the first token, expecting a variable
|
||||
// 2. let the tail be an empty list of members
|
||||
// 2. while the next token (excluding ws) is a dot:
|
||||
// 1. consume the dot
|
||||
// 2. consume the next token as a member and push it onto tail
|
||||
|
||||
let head = expand_expr(&VariableShape, token_nodes, context)?;
|
||||
let start = head.span;
|
||||
let mut end = start;
|
||||
let mut tail: Vec<PathMember> = vec![];
|
||||
|
||||
loop {
|
||||
if DotShape.skip(token_nodes, context).is_err() {
|
||||
break;
|
||||
}
|
||||
|
||||
let member = expand_syntax(&MemberShape, token_nodes, context)?;
|
||||
let member = member.to_path_member(context.source);
|
||||
|
||||
end = member.span;
|
||||
tail.push(member);
|
||||
}
|
||||
|
||||
Ok(hir::Expression::path(head, tail, start.until(end)))
|
||||
}
|
||||
}
|
||||
|
||||
impl FallibleColorSyntax for VariablePathShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "VariablePathShape"
    }

    /// Syntax-highlighting twin of `expand_expr`: colors a variable head and
    /// any `.member` continuations. Runs inside `atomic` so a failure rolls
    /// back any partial coloring.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        token_nodes.atomic(|token_nodes| {
            // If the head of the token stream is not a variable, fail
            color_fallible_syntax(&VariableShape, token_nodes, context)?;

            loop {
                // look for a dot at the head of a stream
                if color_fallible_syntax_with(
                    &ColorableDotShape,
                    &FlatShape::Dot,
                    token_nodes,
                    context,
                )
                .is_err()
                {
                    // if there's no dot, we're done
                    break;
                }

                // otherwise, look for a member, and if you don't find one, fail
                color_fallible_syntax(&MemberShape, token_nodes, context)?;
            }

            Ok(())
        })
    }
}
|
||||
|
||||
/// Shape for the `.member(.member)*` tail that can follow a path head.
#[derive(Debug, Copy, Clone)]
pub struct PathTailShape;
|
||||
|
||||
/// The failure mode of `PathTailShape` is a dot followed by a non-member
impl FallibleColorSyntax for PathTailShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "PathTailShape"
    }

    /// Color alternating `.member` pairs until no dot is found. The walk is
    /// atomic, so a dot without a following member rolls everything back.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        token_nodes.atomic(|token_nodes| loop {
            let result = color_fallible_syntax_with(
                &ColorableDotShape,
                &FlatShape::Dot,
                token_nodes,
                context,
            );

            // No dot means the tail is finished — succeed.
            if result.is_err() {
                return Ok(());
            }

            // If we've seen a dot but not a member, fail
            color_fallible_syntax(&MemberShape, token_nodes, context)?;
        })
    }
}
|
||||
|
||||
/// The parsed result of `PathTailShape`: the members plus a covering span.
#[derive(Debug, Clone)]
pub struct PathTailSyntax {
    pub tail: Vec<PathMember>,
    pub span: Span,
}
|
||||
|
||||
impl HasSpan for PathTailSyntax {
    // Returns the span stored at construction time.
    fn span(&self) -> Span {
        self.span
    }
}
|
||||
|
||||
impl PrettyDebug for PathTailSyntax {
    /// Render as `tail(<m1> <m2> …)`.
    fn pretty(&self) -> DebugDocBuilder {
        b::typed("tail", b::intersperse(self.tail.iter(), b::space()))
    }
}
|
||||
|
||||
impl ExpandSyntax for PathTailShape {
|
||||
type Output = PathTailSyntax;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"path continuation"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
let mut end: Option<Span> = None;
|
||||
let mut tail: Vec<PathMember> = vec![];
|
||||
|
||||
loop {
|
||||
if DotShape.skip(token_nodes, context).is_err() {
|
||||
break;
|
||||
}
|
||||
|
||||
let member = expand_syntax(&MemberShape, token_nodes, context)?;
|
||||
let member = member.to_path_member(context.source);
|
||||
end = Some(member.span);
|
||||
tail.push(member);
|
||||
}
|
||||
|
||||
match end {
|
||||
None => Err(ParseError::mismatch(
|
||||
"path tail",
|
||||
token_nodes.typed_span_at_cursor(),
|
||||
)),
|
||||
|
||||
Some(end) => Ok(PathTailSyntax { tail, span: end }),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A suffix that can extend an expression: either a `.member` path access or
/// an infix comparison operator followed by a right-hand expression.
#[derive(Debug, Clone)]
pub enum ExpressionContinuation {
    // span of the dot, plus the member being accessed
    DotSuffix(Span, PathMember),
    // the comparison operator and its right-hand operand
    InfixSuffix(Spanned<CompareOperator>, Expression),
}
|
||||
|
||||
impl PrettyDebugWithSource for ExpressionContinuation {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
match self {
|
||||
ExpressionContinuation::DotSuffix(_, suffix) => {
|
||||
b::operator(".") + suffix.pretty_debug(source)
|
||||
}
|
||||
ExpressionContinuation::InfixSuffix(op, expr) => {
|
||||
op.pretty_debug(source) + b::space() + expr.pretty_debug(source)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for ExpressionContinuation {
|
||||
fn span(&self) -> Span {
|
||||
match self {
|
||||
ExpressionContinuation::DotSuffix(dot, column) => dot.until(column.span),
|
||||
ExpressionContinuation::InfixSuffix(operator, expression) => {
|
||||
operator.span.until(expression.span)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An expression continuation
///
/// Expands to either a `.member` dot-suffix or an `<op> <expr>` infix-suffix.
#[derive(Debug, Copy, Clone)]
pub struct ExpressionContinuationShape;
|
||||
|
||||
impl ExpandSyntax for ExpressionContinuationShape {
|
||||
type Output = ExpressionContinuation;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"expression continuation"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<ExpressionContinuation, ParseError> {
|
||||
// Try to expand a `.`
|
||||
let dot = expand_syntax(&DotShape, token_nodes, context);
|
||||
|
||||
match dot {
|
||||
// If a `.` was matched, it's a `Path`, and we expect a `Member` next
|
||||
Ok(dot) => {
|
||||
let syntax = expand_syntax(&MemberShape, token_nodes, context)?;
|
||||
let member = syntax.to_path_member(context.source);
|
||||
|
||||
Ok(ExpressionContinuation::DotSuffix(dot, member))
|
||||
}
|
||||
|
||||
// Otherwise, we expect an infix operator and an expression next
|
||||
Err(_) => {
|
||||
let (_, op, _) = expand_syntax(&InfixShape, token_nodes, context)?.infix.item;
|
||||
let next = expand_expr(&AnyExpressionShape, token_nodes, context)?;
|
||||
|
||||
Ok(ExpressionContinuation::InfixSuffix(op.operator, next))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Which kind of continuation `ExpressionContinuationShape` colored.
pub enum ContinuationInfo {
    Dot,
    Infix,
}
|
||||
|
||||
impl FallibleColorSyntax for ExpressionContinuationShape {
    type Info = ContinuationInfo;
    type Input = ();

    fn name(&self) -> &'static str {
        "ExpressionContinuationShape"
    }

    /// Coloring twin of `expand_syntax`: a continuation is either `.member`
    /// or `<infix> <expression>`. Reports which kind was found; the whole
    /// attempt is atomic so a failed branch leaves no partial coloring.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<ContinuationInfo, ShellError> {
        token_nodes.atomic(|token_nodes| {
            // Try to expand a `.`
            let dot = color_fallible_syntax_with(
                &ColorableDotShape,
                &FlatShape::Dot,
                token_nodes,
                context,
            );

            match dot {
                Ok(_) => {
                    // we found a dot, so let's keep looking for a member; if no member was found, fail
                    color_fallible_syntax(&MemberShape, token_nodes, context)?;

                    Ok(ContinuationInfo::Dot)
                }
                Err(_) => {
                    // The infix branch gets its own nested atomic so a
                    // partially-colored infix also rolls back.
                    let result = token_nodes.atomic(|token_nodes| {
                        // we didn't find a dot, so let's see if we're looking at an infix. If not found, fail
                        color_fallible_syntax(&InfixShape, token_nodes, context)?;

                        // now that we've seen an infix shape, look for any expression. If not found, fail
                        color_fallible_syntax(&AnyExpressionShape, token_nodes, context)?;

                        Ok(ContinuationInfo::Infix)
                    })?;

                    Ok(result)
                }
            }
        })
    }
}
|
||||
|
||||
/// Shape for a single variable token: `$name`, or the special `$it`.
#[derive(Debug, Copy, Clone)]
pub struct VariableShape;
|
||||
|
||||
impl ExpandExpression for VariableShape {
|
||||
fn name(&self) -> &'static str {
|
||||
"variable"
|
||||
}
|
||||
|
||||
fn expand_expr<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ParseError> {
|
||||
parse_single_node(token_nodes, "variable", |token, token_tag, err| {
|
||||
Ok(match token {
|
||||
UnspannedToken::Variable(tag) => {
|
||||
if tag.slice(context.source) == "it" {
|
||||
hir::Expression::it_variable(tag, token_tag)
|
||||
} else {
|
||||
hir::Expression::variable(tag, token_tag)
|
||||
}
|
||||
}
|
||||
_ => return Err(err.error()),
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl FallibleColorSyntax for VariableShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "VariableShape"
    }

    /// Color the next atom as a variable (`$name`) or the it-variable
    /// (`$it`); any other atom kind is a mismatch.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        // Permissive expansion so coloring can proceed on partial input.
        let atom = expand_atom(
            token_nodes,
            "variable",
            context,
            ExpansionRule::permissive(),
        );

        let atom = match atom {
            Err(err) => return Err(err.into()),
            Ok(atom) => atom,
        };

        match &atom.unspanned {
            UnspannedAtomicToken::Variable { .. } => {
                token_nodes.color_shape(FlatShape::Variable.spanned(atom.span));
                Ok(())
            }
            UnspannedAtomicToken::ItVariable { .. } => {
                token_nodes.color_shape(FlatShape::ItVariable.spanned(atom.span));
                Ok(())
            }
            _ => Err(ParseError::mismatch("variable", atom.type_name().spanned(atom.span)).into()),
        }
    }
}
|
||||
|
||||
/// A single column-path member as written in the source: a quoted string
/// (outer span includes the quotes, inner span is the contents), an integer
/// index, or a bare word.
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum Member {
    String(/* outer */ Span, /* inner */ Span),
    Int(BigInt, Span),
    Bare(Span),
}
|
||||
|
||||
impl ShellTypeName for Member {
|
||||
fn type_name(&self) -> &'static str {
|
||||
match self {
|
||||
Member::String(_, _) => "string",
|
||||
Member::Int(_, _) => "integer",
|
||||
Member::Bare(_) => "word",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Member {
|
||||
pub fn to_path_member(&self, source: &Text) -> PathMember {
|
||||
match self {
|
||||
Member::String(outer, inner) => PathMember::string(inner.slice(source), *outer),
|
||||
Member::Int(int, span) => PathMember::int(int.clone(), *span),
|
||||
Member::Bare(span) => PathMember::string(span.slice(source), *span),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for Member {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
match self {
|
||||
Member::String(outer, _) => b::value(outer.slice(source)),
|
||||
Member::Int(int, _) => b::value(format!("{}", int)),
|
||||
Member::Bare(span) => b::value(span.slice(source)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for Member {
|
||||
fn span(&self) -> Span {
|
||||
match self {
|
||||
Member::String(outer, ..) => *outer,
|
||||
Member::Int(_, int) => *int,
|
||||
Member::Bare(name) => *name,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Member {
|
||||
pub fn to_expr(&self) -> hir::Expression {
|
||||
match self {
|
||||
Member::String(outer, inner) => hir::Expression::string(*inner, *outer),
|
||||
Member::Int(number, span) => hir::Expression::number(number.clone(), *span),
|
||||
Member::Bare(span) => hir::Expression::string(*span, *span),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn span(&self) -> Span {
|
||||
match self {
|
||||
Member::String(outer, _inner) => *outer,
|
||||
Member::Int(_, span) => *span,
|
||||
Member::Bare(span) => *span,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// State machine for parsing a column path: members separated by dots, with
/// an optional leading dot.
enum ColumnPathState {
    // nothing consumed yet
    Initial,
    // saw a leading dot (span of the dot)
    LeadingDot(Span),
    // saw members then a trailing dot: (path span so far, members, dot span)
    Dot(Span, Vec<Member>, Span),
    // saw members, currently expecting a dot next: (path span, members)
    Member(Span, Vec<Member>),
    // an invalid transition occurred; carried to the end and reported there
    Error(ParseError),
}
|
||||
|
||||
impl ColumnPathState {
|
||||
pub fn dot(self, dot: Span) -> ColumnPathState {
|
||||
match self {
|
||||
ColumnPathState::Initial => ColumnPathState::LeadingDot(dot),
|
||||
ColumnPathState::LeadingDot(_) => {
|
||||
ColumnPathState::Error(ParseError::mismatch("column", "dot".spanned(dot)))
|
||||
}
|
||||
ColumnPathState::Dot(..) => {
|
||||
ColumnPathState::Error(ParseError::mismatch("column", "dot".spanned(dot)))
|
||||
}
|
||||
ColumnPathState::Member(tag, members) => ColumnPathState::Dot(tag, members, dot),
|
||||
ColumnPathState::Error(err) => ColumnPathState::Error(err),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn member(self, member: Member) -> ColumnPathState {
|
||||
match self {
|
||||
ColumnPathState::Initial => ColumnPathState::Member(member.span(), vec![member]),
|
||||
ColumnPathState::LeadingDot(tag) => {
|
||||
ColumnPathState::Member(tag.until(member.span()), vec![member])
|
||||
}
|
||||
|
||||
ColumnPathState::Dot(tag, mut tags, _) => {
|
||||
ColumnPathState::Member(tag.until(member.span()), {
|
||||
tags.push(member);
|
||||
tags
|
||||
})
|
||||
}
|
||||
ColumnPathState::Member(..) => ColumnPathState::Error(ParseError::mismatch(
|
||||
"column",
|
||||
member.type_name().spanned(member.span()),
|
||||
)),
|
||||
ColumnPathState::Error(err) => ColumnPathState::Error(err),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_path(self, next: Peeked) -> Result<Tagged<Vec<Member>>, ParseError> {
|
||||
match self {
|
||||
ColumnPathState::Initial => Err(next.type_error("column path")),
|
||||
ColumnPathState::LeadingDot(dot) => {
|
||||
Err(ParseError::mismatch("column", "dot".spanned(dot)))
|
||||
}
|
||||
ColumnPathState::Dot(_tag, _members, dot) => {
|
||||
Err(ParseError::mismatch("column", "dot".spanned(dot)))
|
||||
}
|
||||
ColumnPathState::Member(tag, tags) => Ok(tags.tagged(tag)),
|
||||
ColumnPathState::Error(err) => Err(err),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expand_column_path<'a, 'b>(
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<ColumnPathSyntax, ParseError> {
|
||||
let mut state = ColumnPathState::Initial;
|
||||
|
||||
loop {
|
||||
let member = expand_syntax(&MemberShape, token_nodes, context);
|
||||
|
||||
match member {
|
||||
Err(_) => break,
|
||||
Ok(member) => state = state.member(member),
|
||||
}
|
||||
|
||||
let dot = expand_syntax(&DotShape, token_nodes, context);
|
||||
|
||||
match dot {
|
||||
Err(_) => break,
|
||||
Ok(dot) => state = state.dot(dot),
|
||||
}
|
||||
}
|
||||
|
||||
let path = state.into_path(token_nodes.peek_non_ws())?;
|
||||
|
||||
Ok(ColumnPathSyntax {
|
||||
path: path.item,
|
||||
tag: path.tag,
|
||||
})
|
||||
}
|
||||
|
||||
/// Shape for a full column path (`member(.member)*`).
#[derive(Debug, Copy, Clone)]
pub struct ColumnPathShape;
|
||||
|
||||
impl FallibleColorSyntax for ColumnPathShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "ColumnPathShape"
    }

    /// Color `member(.member)*`. At least one member is required. A trailing
    /// dot without a member is left uncolored: its checkpoint is dropped
    /// rather than committed.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        // If there's not even one member shape, fail
        color_fallible_syntax(&MemberShape, token_nodes, context)?;

        loop {
            // Each `.member` pair is speculative until both halves match.
            let checkpoint = token_nodes.checkpoint();

            match color_fallible_syntax_with(
                &ColorableDotShape,
                &FlatShape::Dot,
                checkpoint.iterator,
                context,
            ) {
                Err(_) => {
                    // we already saw at least one member shape, so return successfully
                    return Ok(());
                }

                Ok(_) => {
                    match color_fallible_syntax(&MemberShape, checkpoint.iterator, context) {
                        Err(_) => {
                            // we saw a dot but not a member (but we saw at least one member),
                            // so don't commit the dot but return successfully
                            return Ok(());
                        }

                        Ok(_) => {
                            // we saw a dot and a member, so commit it and continue on
                            checkpoint.commit();
                        }
                    }
                }
            }
        }
    }
}
|
||||
|
||||
/// The parsed result of a column path: its members and the covering tag.
#[derive(Debug, Clone)]
pub struct ColumnPathSyntax {
    pub path: Vec<Member>,
    pub tag: Tag,
}
|
||||
|
||||
impl HasSpan for ColumnPathSyntax {
    // The span is the span component of the stored tag.
    fn span(&self) -> Span {
        self.tag.span
    }
}
|
||||
|
||||
impl PrettyDebugWithSource for ColumnPathSyntax {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::typed(
|
||||
"column path",
|
||||
b::intersperse(
|
||||
self.path.iter().map(|member| member.pretty_debug(source)),
|
||||
b::space(),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpandSyntax for ColumnPathShape {
    type Output = ColumnPathSyntax;

    fn name(&self) -> &'static str {
        "column path"
    }

    // Pure delegation to the free function `expand_column_path`.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        expand_column_path(token_nodes, context)
    }
}
|
||||
|
||||
/// Shape for a single column-path member (integer, bare word, or string).
#[derive(Debug, Copy, Clone)]
pub struct MemberShape;
|
||||
|
||||
impl FallibleColorSyntax for MemberShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"MemberShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<(), ShellError> {
|
||||
let bare =
|
||||
color_fallible_syntax_with(&BareShape, &FlatShape::BareMember, token_nodes, context);
|
||||
|
||||
if bare.is_ok() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
// If we don't have a bare word, we'll look for a string
|
||||
|
||||
// Look for a string token. If we don't find one, fail
|
||||
color_fallible_syntax_with(&StringShape, &FlatShape::StringMember, token_nodes, context)
|
||||
}
|
||||
}
|
||||
|
||||
/// Shape for an integer column-path member such as the `0` in `.0`.
#[derive(Debug, Copy, Clone)]
struct IntMemberShape;
|
||||
|
||||
impl ExpandSyntax for IntMemberShape {
|
||||
type Output = Member;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"integer member"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
token_nodes.atomic_parse(|token_nodes| {
|
||||
let next = expand_atom(
|
||||
token_nodes,
|
||||
"integer member",
|
||||
context,
|
||||
ExpansionRule::new().separate_members(),
|
||||
)?;
|
||||
|
||||
match next.unspanned {
|
||||
UnspannedAtomicToken::Number {
|
||||
number: RawNumber::Int(int),
|
||||
} => Ok(Member::Int(
|
||||
BigInt::from_str(int.slice(context.source)).map_err(|_| {
|
||||
ParseError::internal_error(
|
||||
"can't convert from string to big int".spanned(int),
|
||||
)
|
||||
})?,
|
||||
int,
|
||||
)),
|
||||
|
||||
UnspannedAtomicToken::Word { text } => {
|
||||
let int = BigInt::from_str(text.slice(context.source));
|
||||
|
||||
match int {
|
||||
Ok(int) => Ok(Member::Int(int, text)),
|
||||
Err(_) => Err(ParseError::mismatch("integer member", "word".spanned(text))),
|
||||
}
|
||||
}
|
||||
|
||||
other => Err(ParseError::mismatch(
|
||||
"integer member",
|
||||
other.type_name().spanned(next.span),
|
||||
)),
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpandSyntax for MemberShape {
|
||||
type Output = Member;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"column"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Member, ParseError> {
|
||||
if let Ok(int) = expand_syntax(&IntMemberShape, token_nodes, context) {
|
||||
return Ok(int);
|
||||
}
|
||||
|
||||
let bare = BareShape.test(token_nodes, context);
|
||||
if let Some(peeked) = bare {
|
||||
let node = peeked.not_eof("column")?.commit();
|
||||
return Ok(Member::Bare(node.span()));
|
||||
}
|
||||
|
||||
/* KATZ */
|
||||
/* let number = NumberShape.test(token_nodes, context);
|
||||
|
||||
if let Some(peeked) = number {
|
||||
let node = peeked.not_eof("column")?.commit();
|
||||
let (n, span) = node.as_number().ok_or_else(|| {
|
||||
ParseError::internal_error("can't convert node to number".spanned(node.span()))
|
||||
})?;
|
||||
|
||||
return Ok(Member::Number(n, span))
|
||||
}*/
|
||||
|
||||
let string = StringShape.test(token_nodes, context);
|
||||
|
||||
if let Some(peeked) = string {
|
||||
let node = peeked.not_eof("column")?.commit();
|
||||
let (outer, inner) = node.as_string().ok_or_else(|| {
|
||||
ParseError::internal_error("can't convert node to string".spanned(node.span()))
|
||||
})?;
|
||||
|
||||
return Ok(Member::String(outer, inner));
|
||||
}
|
||||
|
||||
Err(token_nodes.peek_any().type_error("column"))
|
||||
}
|
||||
}
|
||||
|
||||
/// Shape that consumes a `.` token during expansion.
#[derive(Debug, Copy, Clone)]
pub struct DotShape;
|
||||
|
||||
/// Shape that colors a `.` token with a caller-supplied `FlatShape`.
#[derive(Debug, Copy, Clone)]
pub struct ColorableDotShape;
|
||||
|
||||
impl FallibleColorSyntax for ColorableDotShape {
|
||||
type Info = ();
|
||||
type Input = FlatShape;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"ColorableDotShape"
|
||||
}
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
input: &FlatShape,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
_context: &ExpandContext,
|
||||
) -> Result<(), ShellError> {
|
||||
let peeked = token_nodes.peek_any().not_eof("dot")?;
|
||||
|
||||
match peeked.node {
|
||||
node if node.is_dot() => {
|
||||
peeked.commit();
|
||||
token_nodes.color_shape((*input).spanned(node.span()));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
other => Err(ShellError::type_error(
|
||||
"dot",
|
||||
other.type_name().spanned(other.span()),
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl SkipSyntax for DotShape {
|
||||
fn skip<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<(), ShellError> {
|
||||
expand_syntax(self, token_nodes, context)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpandSyntax for DotShape {
|
||||
type Output = Span;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"dot"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
_context: &ExpandContext,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
parse_single_node(token_nodes, "dot", |token, token_span, _| {
|
||||
Ok(match token {
|
||||
UnspannedToken::EvaluationOperator(EvaluationOperator::Dot) => token_span,
|
||||
_ => {
|
||||
return Err(ParseError::mismatch(
|
||||
"dot",
|
||||
token.type_name().spanned(token_span),
|
||||
))
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Shape for an infix comparison operator surrounded by whitespace.
#[derive(Debug, Copy, Clone)]
pub struct InfixShape;
|
||||
|
||||
impl FallibleColorSyntax for InfixShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "InfixShape"
    }

    /// Color `<ws> <compare-op> <ws>`. All three parts must be present; the
    /// checkpoint is committed (and the operator colored) only on a full
    /// match.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        let checkpoint = token_nodes.checkpoint();

        // An infix operator must be prefixed by whitespace. If no whitespace was found, fail
        color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context)?;

        // Parse the next TokenNode after the whitespace
        let operator_span = parse_single_node(
            checkpoint.iterator,
            "infix operator",
            |token, token_span, _| {
                match token {
                    // If it's an operator (and not `.`), it's a match
                    UnspannedToken::CompareOperator(_operator) => Ok(token_span),

                    // Otherwise, it's not a match
                    _ => Err(ParseError::mismatch(
                        "infix operator",
                        token.type_name().spanned(token_span),
                    )),
                }
            },
        )?;

        checkpoint
            .iterator
            .color_shape(FlatShape::CompareOperator.spanned(operator_span));

        // An infix operator must be followed by whitespace. If no whitespace was found, fail
        color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context)?;

        checkpoint.commit();
        Ok(())
    }
}
|
||||
|
||||
/// A parsed infix operator with the whitespace spans that surround it:
/// (leading whitespace, operator, trailing whitespace).
#[derive(Debug, Clone)]
pub struct InfixSyntax {
    infix: Spanned<(Span, InfixInnerSyntax, Span)>,
}
|
||||
|
||||
impl HasSpan for InfixSyntax {
    // Covers the leading whitespace through the trailing whitespace.
    fn span(&self) -> Span {
        self.infix.span
    }
}
|
||||
|
||||
impl PrettyDebugWithSource for InfixSyntax {
    // Only the operator itself is rendered; the whitespace spans are omitted.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        self.infix.1.pretty_debug(source)
    }
}
|
||||
|
||||
impl ExpandSyntax for InfixShape {
|
||||
type Output = InfixSyntax;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"infix operator"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
let mut checkpoint = token_nodes.checkpoint();
|
||||
|
||||
// An infix operator must be prefixed by whitespace
|
||||
let start = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?;
|
||||
|
||||
// Parse the next TokenNode after the whitespace
|
||||
let operator = expand_syntax(&InfixInnerShape, &mut checkpoint.iterator, context)?;
|
||||
|
||||
// An infix operator must be followed by whitespace
|
||||
let end = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?;
|
||||
|
||||
checkpoint.commit();
|
||||
|
||||
Ok(InfixSyntax {
|
||||
infix: (start, operator, end).spanned(start.until(end)),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// The operator parsed by `InfixInnerShape`, together with its span.
#[derive(Debug, Clone)]
pub struct InfixInnerSyntax {
    pub operator: Spanned<CompareOperator>,
}
|
||||
|
||||
impl HasSpan for InfixInnerSyntax {
    // The span of the operator token.
    fn span(&self) -> Span {
        self.operator.span
    }
}
|
||||
|
||||
impl PrettyDebug for InfixInnerSyntax {
    // Delegates to the operator's own pretty representation.
    fn pretty(&self) -> DebugDocBuilder {
        self.operator.pretty()
    }
}
|
||||
|
||||
/// Shape for the operator token itself, without surrounding whitespace.
#[derive(Debug, Copy, Clone)]
pub struct InfixInnerShape;
|
||||
|
||||
impl ExpandSyntax for InfixInnerShape {
|
||||
type Output = InfixInnerSyntax;
|
||||
|
||||
fn name(&self) -> &'static str {
|
||||
"infix inner"
|
||||
}
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
_context: &ExpandContext,
|
||||
) -> Result<Self::Output, ParseError> {
|
||||
parse_single_node(token_nodes, "infix operator", |token, token_span, err| {
|
||||
Ok(match token {
|
||||
// If it's a comparison operator, it's a match
|
||||
UnspannedToken::CompareOperator(operator) => InfixInnerSyntax {
|
||||
operator: operator.spanned(token_span),
|
||||
},
|
||||
|
||||
// Otherwise, it's not a match
|
||||
_ => return Err(err.error()),
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
108
crates/nu-parser/src/hir/syntax_shape/flat_shape.rs
Normal file
108
crates/nu-parser/src/hir/syntax_shape/flat_shape.rs
Normal file
@ -0,0 +1,108 @@
|
||||
use crate::parse::flag::{Flag, FlagKind};
|
||||
use crate::parse::operator::EvaluationOperator;
|
||||
use crate::parse::token_tree::{Delimiter, TokenNode};
|
||||
use crate::parse::tokens::{RawNumber, UnspannedToken};
|
||||
use nu_source::{HasSpan, Span, Spanned, SpannedItem, Text};
|
||||
|
||||
/// The flat (non-nested) classification of a token, used for syntax
/// highlighting.
#[derive(Debug, Copy, Clone)]
pub enum FlatShape {
    OpenDelimiter(Delimiter),
    CloseDelimiter(Delimiter),
    ItVariable,
    Variable,
    CompareOperator,
    Dot,
    DotDot,
    InternalCommand,
    ExternalCommand,
    ExternalWord,
    BareMember,
    StringMember,
    String,
    Path,
    Word,
    Pipe,
    GlobPattern,
    Flag,
    ShorthandFlag,
    Int,
    Decimal,
    Whitespace,
    Separator,
    Error,
    Comment,
    // A size literal, split into its number and unit sub-spans.
    Size { number: Span, unit: Span },
}
|
||||
|
||||
impl FlatShape {
    /// Flatten `token` (recursively, for node-containing variants) into
    /// `shapes`, pushing one `Spanned<FlatShape>` per highlightable region.
    ///
    /// `source` is the original input text; it is consulted only to
    /// distinguish the special `it` variable from ordinary variables.
    pub fn from(token: &TokenNode, source: &Text, shapes: &mut Vec<Spanned<FlatShape>>) {
        match token {
            TokenNode::Token(token) => match token.unspanned {
                UnspannedToken::Number(RawNumber::Int(_)) => {
                    shapes.push(FlatShape::Int.spanned(token.span))
                }
                UnspannedToken::Number(RawNumber::Decimal(_)) => {
                    shapes.push(FlatShape::Decimal.spanned(token.span))
                }
                UnspannedToken::EvaluationOperator(EvaluationOperator::Dot) => {
                    shapes.push(FlatShape::Dot.spanned(token.span))
                }
                UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot) => {
                    shapes.push(FlatShape::DotDot.spanned(token.span))
                }
                UnspannedToken::CompareOperator(_) => {
                    shapes.push(FlatShape::CompareOperator.spanned(token.span))
                }
                UnspannedToken::String(_) => shapes.push(FlatShape::String.spanned(token.span)),
                // The `it` variable gets its own shape so it can be
                // highlighted differently from user-defined variables.
                UnspannedToken::Variable(v) if v.slice(source) == "it" => {
                    shapes.push(FlatShape::ItVariable.spanned(token.span))
                }
                UnspannedToken::Variable(_) => shapes.push(FlatShape::Variable.spanned(token.span)),
                UnspannedToken::ExternalCommand(_) => {
                    shapes.push(FlatShape::ExternalCommand.spanned(token.span))
                }
                UnspannedToken::ExternalWord => {
                    shapes.push(FlatShape::ExternalWord.spanned(token.span))
                }
                UnspannedToken::GlobPattern => {
                    shapes.push(FlatShape::GlobPattern.spanned(token.span))
                }
                UnspannedToken::Bare => shapes.push(FlatShape::Word.spanned(token.span)),
            },
            // NOTE(review): Call nodes are not flattened yet — reaching this
            // arm panics. Presumably calls are flattened elsewhere or never
            // reach this function; confirm before relying on it.
            TokenNode::Call(_) => unimplemented!(),
            TokenNode::Nodes(nodes) => {
                for node in &nodes.item {
                    FlatShape::from(node, source, shapes);
                }
            }
            // Delimited nodes contribute open/close delimiter shapes around
            // the recursively flattened children.
            TokenNode::Delimited(v) => {
                shapes.push(FlatShape::OpenDelimiter(v.item.delimiter).spanned(v.item.spans.0));
                for token in &v.item.children {
                    FlatShape::from(token, source, shapes);
                }
                shapes.push(FlatShape::CloseDelimiter(v.item.delimiter).spanned(v.item.spans.1));
            }
            // Only the pipe symbols of a pipeline are shaped here; part
            // contents are handled by whoever flattens the parts.
            TokenNode::Pipeline(pipeline) => {
                for part in &pipeline.parts {
                    if part.pipe.is_some() {
                        shapes.push(FlatShape::Pipe.spanned(part.span()));
                    }
                }
            }
            TokenNode::Flag(Flag {
                kind: FlagKind::Longhand,
                span,
                ..
            }) => shapes.push(FlatShape::Flag.spanned(*span)),
            TokenNode::Flag(Flag {
                kind: FlagKind::Shorthand,
                span,
                ..
            }) => shapes.push(FlatShape::ShorthandFlag.spanned(*span)),
            TokenNode::Whitespace(_) => shapes.push(FlatShape::Whitespace.spanned(token.span())),
            TokenNode::Separator(_) => shapes.push(FlatShape::Separator.spanned(token.span())),
            TokenNode::Comment(_) => shapes.push(FlatShape::Comment.spanned(token.span())),
            TokenNode::Error(v) => shapes.push(FlatShape::Error.spanned(v.span)),
        }
    }
}
|
774
crates/nu-parser/src/hir/tokens_iterator.rs
Normal file
774
crates/nu-parser/src/hir/tokens_iterator.rs
Normal file
@ -0,0 +1,774 @@
|
||||
pub(crate) mod debug;
|
||||
|
||||
use self::debug::{ColorTracer, ExpandTracer};
|
||||
|
||||
use crate::hir::syntax_shape::FlatShape;
|
||||
use crate::hir::Expression;
|
||||
use crate::TokenNode;
|
||||
use getset::{Getters, MutGetters};
|
||||
use nu_errors::{ParseError, ShellError};
|
||||
use nu_protocol::SpannedTypeName;
|
||||
use nu_source::{HasFallibleSpan, HasSpan, Span, Spanned, SpannedItem, Tag, Text};
|
||||
|
||||
/// The mutable cursor state of a `TokensIterator`, kept separate from the
/// tracers so checkpoints can snapshot/restore it as a unit.
#[derive(Getters, Debug)]
pub struct TokensIteratorState<'content> {
    // The token nodes being walked (borrowed from the parse tree).
    tokens: &'content [TokenNode],
    // Span covering the whole token slice; used to synthesize EOF spans.
    span: Span,
    // Whether iteration transparently skips whitespace tokens.
    skip_ws: bool,
    // Current cursor position into `tokens`.
    index: usize,
    // Positions already consumed; consumption is recorded here rather than
    // by removing tokens, so positions can be skipped out of order.
    seen: indexmap::IndexSet<usize>,
    // Syntax-highlighting shapes accumulated during expansion.
    #[get = "pub"]
    shapes: Vec<Spanned<FlatShape>>,
}
|
||||
|
||||
/// A resumable cursor over a slice of `TokenNode`s, with support for
/// checkpoints (rollback), shape accumulation, and trace recording for
/// both coloring and expansion.
#[derive(Getters, MutGetters, Debug)]
pub struct TokensIterator<'content> {
    // Cursor state; snapshotted/restored by `Checkpoint`.
    #[get = "pub"]
    #[get_mut = "pub"]
    state: TokensIteratorState<'content>,
    // Trace of syntax-coloring frames, for debugging.
    #[get = "pub"]
    #[get_mut = "pub"]
    color_tracer: ColorTracer,
    // Trace of expansion frames, for debugging.
    #[get = "pub"]
    #[get_mut = "pub"]
    expand_tracer: ExpandTracer,
}
|
||||
|
||||
/// A snapshot of a `TokensIterator`'s cursor state. Unless `commit` is
/// called, dropping the checkpoint rolls the iterator back to the
/// snapshot (index, seen set, and shape list length).
#[derive(Debug)]
pub struct Checkpoint<'content, 'me> {
    pub(crate) iterator: &'me mut TokensIterator<'content>,
    // Cursor index at snapshot time.
    index: usize,
    // Seen set at snapshot time (cloned back on rollback).
    seen: indexmap::IndexSet<usize>,

    // Shape list length at snapshot time; rollback truncates to this.
    shape_start: usize,
    committed: bool,
}
|
||||
|
||||
impl<'content, 'me> Checkpoint<'content, 'me> {
    /// Accept the progress made since the snapshot; the subsequent drop
    /// becomes a no-op instead of a rollback.
    pub(crate) fn commit(mut self) {
        self.committed = true;
    }
}
|
||||
|
||||
impl<'content, 'me> std::ops::Drop for Checkpoint<'content, 'me> {
    /// Roll the iterator back to the snapshot unless `commit` was called:
    /// restore the cursor and seen set, and drop any shapes pushed since.
    fn drop(&mut self) {
        if !self.committed {
            let state = &mut self.iterator.state;

            state.index = self.index;
            state.seen = self.seen.clone();

            state.shapes.truncate(self.shape_start);
        }
    }
}
|
||||
|
||||
/// The result of peeking at the next token (possibly EOF). Holds the
/// `from..to` cursor range so the peek can later be committed, consuming
/// the peeked-over positions.
#[derive(Debug)]
pub struct Peeked<'content, 'me> {
    // The peeked token, or `None` at end of input.
    pub(crate) node: Option<&'content TokenNode>,
    iterator: &'me mut TokensIterator<'content>,
    pub from: usize,
    pub to: usize,
}
|
||||
|
||||
impl<'content, 'me> Peeked<'content, 'me> {
    /// Consume the peeked range on the underlying iterator and return the
    /// node; returns `None` (consuming nothing) if the peek hit EOF.
    pub fn commit(&mut self) -> Option<&'content TokenNode> {
        let Peeked {
            node,
            iterator,
            from,
            to,
        } = self;

        let node = (*node)?;
        iterator.commit(*from, *to);
        Some(node)
    }

    /// Upgrade to a `PeekedNode` (guaranteed non-EOF), or produce an
    /// unexpected-EOF parse error mentioning `expected`.
    pub fn not_eof(self, expected: &'static str) -> Result<PeekedNode<'content, 'me>, ParseError> {
        match self.node {
            None => Err(ParseError::unexpected_eof(
                expected,
                self.iterator.eof_span(),
            )),
            Some(node) => Ok(PeekedNode {
                node,
                iterator: self.iterator,
                from: self.from,
                to: self.to,
            }),
        }
    }

    /// Build a mismatch (or EOF) error describing what was found versus
    /// what was `expected`.
    pub fn type_error(&self, expected: &'static str) -> ParseError {
        peek_error(self.node, self.iterator.eof_span(), expected)
    }
}
|
||||
|
||||
/// Like `Peeked`, but guaranteed to hold an actual token (not EOF).
/// Produced by `Peeked::not_eof`.
#[derive(Debug)]
pub struct PeekedNode<'content, 'me> {
    pub(crate) node: &'content TokenNode,
    iterator: &'me mut TokensIterator<'content>,
    from: usize,
    to: usize,
}
|
||||
|
||||
impl<'content, 'me> PeekedNode<'content, 'me> {
    /// Consume the peeked range on the underlying iterator and return the
    /// node.
    pub fn commit(self) -> &'content TokenNode {
        let PeekedNode {
            node,
            iterator,
            from,
            to,
        } = self;

        iterator.commit(from, to);
        node
    }

    /// Discard the peek without consuming anything. (Dropping has the
    /// same effect; this just makes the intent explicit at call sites.)
    pub fn rollback(self) {}

    /// Build a mismatch error describing this node versus what was
    /// `expected`.
    pub fn type_error(&self, expected: &'static str) -> ParseError {
        peek_error(Some(self.node), self.iterator.eof_span(), expected)
    }
}
|
||||
|
||||
pub fn peek_error(node: Option<&TokenNode>, eof_span: Span, expected: &'static str) -> ParseError {
|
||||
match node {
|
||||
None => ParseError::unexpected_eof(expected, eof_span),
|
||||
Some(node) => ParseError::mismatch(expected, node.spanned_type_name()),
|
||||
}
|
||||
}
|
||||
|
||||
impl<'content> TokensIterator<'content> {
|
||||
pub fn new(
|
||||
items: &'content [TokenNode],
|
||||
span: Span,
|
||||
source: Text,
|
||||
skip_ws: bool,
|
||||
) -> TokensIterator<'content> {
|
||||
TokensIterator {
|
||||
state: TokensIteratorState {
|
||||
tokens: items,
|
||||
span,
|
||||
skip_ws,
|
||||
index: 0,
|
||||
seen: indexmap::IndexSet::new(),
|
||||
shapes: vec![],
|
||||
},
|
||||
color_tracer: ColorTracer::new(source.clone()),
|
||||
expand_tracer: ExpandTracer::new(source),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn all(
|
||||
tokens: &'content [TokenNode],
|
||||
source: Text,
|
||||
span: Span,
|
||||
) -> TokensIterator<'content> {
|
||||
TokensIterator::new(tokens, span, source, false)
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.state.tokens.len()
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.len() == 0
|
||||
}
|
||||
|
||||
pub fn spanned<T>(
|
||||
&mut self,
|
||||
block: impl FnOnce(&mut TokensIterator<'content>) -> T,
|
||||
) -> Spanned<T> {
|
||||
let start = self.span_at_cursor();
|
||||
|
||||
let result = block(self);
|
||||
|
||||
let end = self.span_at_cursor();
|
||||
|
||||
result.spanned(start.until(end))
|
||||
}
|
||||
|
||||
pub fn color_shape(&mut self, shape: Spanned<FlatShape>) {
|
||||
self.with_color_tracer(|_, tracer| tracer.add_shape(shape));
|
||||
self.state.shapes.push(shape);
|
||||
}
|
||||
|
||||
pub fn mutate_shapes(&mut self, block: impl FnOnce(&mut Vec<Spanned<FlatShape>>)) {
|
||||
let new_shapes: Vec<Spanned<FlatShape>> = {
|
||||
let shapes = &mut self.state.shapes;
|
||||
let len = shapes.len();
|
||||
block(shapes);
|
||||
(len..(shapes.len())).map(|i| shapes[i]).collect()
|
||||
};
|
||||
|
||||
self.with_color_tracer(|_, tracer| {
|
||||
for shape in new_shapes {
|
||||
tracer.add_shape(shape)
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
pub fn silently_mutate_shapes(&mut self, block: impl FnOnce(&mut Vec<Spanned<FlatShape>>)) {
|
||||
let shapes = &mut self.state.shapes;
|
||||
block(shapes);
|
||||
}
|
||||
|
||||
pub fn sort_shapes(&mut self) {
|
||||
// This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring
|
||||
// this solution.
|
||||
|
||||
self.state
|
||||
.shapes
|
||||
.sort_by(|a, b| a.span.start().cmp(&b.span.start()));
|
||||
}
|
||||
|
||||
pub fn child<'me, T>(
|
||||
&'me mut self,
|
||||
tokens: Spanned<&'me [TokenNode]>,
|
||||
source: Text,
|
||||
block: impl FnOnce(&mut TokensIterator<'me>) -> T,
|
||||
) -> T {
|
||||
let mut shapes = vec![];
|
||||
std::mem::swap(&mut shapes, &mut self.state.shapes);
|
||||
|
||||
let mut color_tracer = ColorTracer::new(source.clone());
|
||||
std::mem::swap(&mut color_tracer, &mut self.color_tracer);
|
||||
|
||||
let mut expand_tracer = ExpandTracer::new(source);
|
||||
std::mem::swap(&mut expand_tracer, &mut self.expand_tracer);
|
||||
|
||||
let mut iterator = TokensIterator {
|
||||
state: TokensIteratorState {
|
||||
tokens: tokens.item,
|
||||
span: tokens.span,
|
||||
skip_ws: false,
|
||||
index: 0,
|
||||
seen: indexmap::IndexSet::new(),
|
||||
shapes,
|
||||
},
|
||||
color_tracer,
|
||||
expand_tracer,
|
||||
};
|
||||
|
||||
let result = block(&mut iterator);
|
||||
|
||||
std::mem::swap(&mut iterator.state.shapes, &mut self.state.shapes);
|
||||
std::mem::swap(&mut iterator.color_tracer, &mut self.color_tracer);
|
||||
std::mem::swap(&mut iterator.expand_tracer, &mut self.expand_tracer);
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
pub fn with_color_tracer(
|
||||
&mut self,
|
||||
block: impl FnOnce(&mut TokensIteratorState, &mut ColorTracer),
|
||||
) {
|
||||
let state = &mut self.state;
|
||||
let color_tracer = &mut self.color_tracer;
|
||||
|
||||
block(state, color_tracer)
|
||||
}
|
||||
|
||||
pub fn with_expand_tracer(
|
||||
&mut self,
|
||||
block: impl FnOnce(&mut TokensIteratorState, &mut ExpandTracer),
|
||||
) {
|
||||
let state = &mut self.state;
|
||||
let tracer = &mut self.expand_tracer;
|
||||
|
||||
block(state, tracer)
|
||||
}
|
||||
|
||||
pub fn color_frame<T>(
|
||||
&mut self,
|
||||
desc: &'static str,
|
||||
block: impl FnOnce(&mut TokensIterator) -> T,
|
||||
) -> T {
|
||||
self.with_color_tracer(|_, tracer| tracer.start(desc));
|
||||
|
||||
let result = block(self);
|
||||
|
||||
self.with_color_tracer(|_, tracer| {
|
||||
tracer.success();
|
||||
});
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
pub fn expand_frame<T>(
|
||||
&mut self,
|
||||
desc: &'static str,
|
||||
block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, ParseError>,
|
||||
) -> Result<T, ParseError>
|
||||
where
|
||||
T: std::fmt::Debug + Clone + HasFallibleSpan + 'static,
|
||||
{
|
||||
self.with_expand_tracer(|_, tracer| tracer.start(desc));
|
||||
|
||||
let result = block(self);
|
||||
|
||||
self.with_expand_tracer(|_, tracer| match &result {
|
||||
Ok(result) => {
|
||||
tracer.add_result(result.clone());
|
||||
tracer.success();
|
||||
}
|
||||
|
||||
Err(err) => tracer.failed(err),
|
||||
});
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
pub fn expand_expr_frame(
|
||||
&mut self,
|
||||
desc: &'static str,
|
||||
block: impl FnOnce(&mut TokensIterator) -> Result<Expression, ParseError>,
|
||||
) -> Result<Expression, ParseError> {
|
||||
self.with_expand_tracer(|_, tracer| tracer.start(desc));
|
||||
|
||||
let result = block(self);
|
||||
|
||||
self.with_expand_tracer(|_, tracer| match &result {
|
||||
Ok(expr) => {
|
||||
tracer.add_expr(expr.clone());
|
||||
tracer.success()
|
||||
}
|
||||
|
||||
Err(err) => tracer.failed(err),
|
||||
});
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
pub fn color_fallible_frame<T>(
|
||||
&mut self,
|
||||
desc: &'static str,
|
||||
block: impl FnOnce(&mut TokensIterator) -> Result<T, ShellError>,
|
||||
) -> Result<T, ShellError> {
|
||||
self.with_color_tracer(|_, tracer| tracer.start(desc));
|
||||
|
||||
if self.at_end() {
|
||||
self.with_color_tracer(|_, tracer| tracer.eof_frame());
|
||||
return Err(ShellError::unexpected_eof("coloring", Tag::unknown()));
|
||||
}
|
||||
|
||||
let result = block(self);
|
||||
|
||||
self.with_color_tracer(|_, tracer| match &result {
|
||||
Ok(_) => {
|
||||
tracer.success();
|
||||
}
|
||||
|
||||
Err(err) => tracer.failed(err),
|
||||
});
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
/// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
|
||||
/// that you'll succeed.
|
||||
pub fn checkpoint<'me>(&'me mut self) -> Checkpoint<'content, 'me> {
|
||||
let state = &mut self.state;
|
||||
|
||||
let index = state.index;
|
||||
|
||||
let shape_start = state.shapes.len();
|
||||
let seen = state.seen.clone();
|
||||
|
||||
Checkpoint {
|
||||
iterator: self,
|
||||
index,
|
||||
seen,
|
||||
committed: false,
|
||||
|
||||
shape_start,
|
||||
}
|
||||
}
|
||||
|
||||
/// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
|
||||
/// that you'll succeed.
|
||||
pub fn atomic<'me, T>(
|
||||
&'me mut self,
|
||||
block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, ShellError>,
|
||||
) -> Result<T, ShellError> {
|
||||
let state = &mut self.state;
|
||||
|
||||
let index = state.index;
|
||||
|
||||
let shape_start = state.shapes.len();
|
||||
let seen = state.seen.clone();
|
||||
|
||||
let checkpoint = Checkpoint {
|
||||
iterator: self,
|
||||
index,
|
||||
seen,
|
||||
committed: false,
|
||||
|
||||
shape_start,
|
||||
};
|
||||
|
||||
let value = block(checkpoint.iterator)?;
|
||||
|
||||
checkpoint.commit();
|
||||
Ok(value)
|
||||
}
|
||||
|
||||
/// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
|
||||
/// that you'll succeed.
|
||||
pub fn atomic_parse<'me, T, E>(
|
||||
&'me mut self,
|
||||
block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, E>,
|
||||
) -> Result<T, E> {
|
||||
let state = &mut self.state;
|
||||
|
||||
let index = state.index;
|
||||
|
||||
let shape_start = state.shapes.len();
|
||||
let seen = state.seen.clone();
|
||||
|
||||
let checkpoint = Checkpoint {
|
||||
iterator: self,
|
||||
index,
|
||||
seen,
|
||||
committed: false,
|
||||
|
||||
shape_start,
|
||||
};
|
||||
|
||||
let value = block(checkpoint.iterator)?;
|
||||
|
||||
checkpoint.commit();
|
||||
Ok(value)
|
||||
}
|
||||
|
||||
/// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
|
||||
/// that you'll succeed.
|
||||
pub fn atomic_returning_shapes<'me, T>(
|
||||
&'me mut self,
|
||||
block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, ShellError>,
|
||||
) -> (Result<T, ShellError>, Vec<Spanned<FlatShape>>) {
|
||||
let index = self.state.index;
|
||||
let mut shapes = vec![];
|
||||
|
||||
let seen = self.state.seen.clone();
|
||||
std::mem::swap(&mut self.state.shapes, &mut shapes);
|
||||
|
||||
let checkpoint = Checkpoint {
|
||||
iterator: self,
|
||||
index,
|
||||
seen,
|
||||
committed: false,
|
||||
shape_start: 0,
|
||||
};
|
||||
|
||||
let value = block(checkpoint.iterator);
|
||||
|
||||
let value = match value {
|
||||
Err(err) => {
|
||||
drop(checkpoint);
|
||||
std::mem::swap(&mut self.state.shapes, &mut shapes);
|
||||
return (Err(err), vec![]);
|
||||
}
|
||||
|
||||
Ok(value) => value,
|
||||
};
|
||||
|
||||
checkpoint.commit();
|
||||
std::mem::swap(&mut self.state.shapes, &mut shapes);
|
||||
(Ok(value), shapes)
|
||||
}
|
||||
|
||||
fn eof_span(&self) -> Span {
|
||||
Span::new(self.state.span.end(), self.state.span.end())
|
||||
}
|
||||
|
||||
pub fn typed_span_at_cursor(&mut self) -> Spanned<&'static str> {
|
||||
let next = self.peek_any();
|
||||
|
||||
match next.node {
|
||||
None => "end".spanned(self.eof_span()),
|
||||
Some(node) => node.spanned_type_name(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn span_at_cursor(&mut self) -> Span {
|
||||
let next = self.peek_any();
|
||||
|
||||
match next.node {
|
||||
None => self.eof_span(),
|
||||
Some(node) => node.span(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn remove(&mut self, position: usize) {
|
||||
self.state.seen.insert(position);
|
||||
}
|
||||
|
||||
pub fn at_end(&self) -> bool {
|
||||
peek(self, self.state.skip_ws).is_none()
|
||||
}
|
||||
|
||||
pub fn at_end_possible_ws(&self) -> bool {
|
||||
peek(self, true).is_none()
|
||||
}
|
||||
|
||||
pub fn advance(&mut self) {
|
||||
self.state.seen.insert(self.state.index);
|
||||
self.state.index += 1;
|
||||
}
|
||||
|
||||
pub fn extract<T>(&mut self, f: impl Fn(&TokenNode) -> Option<T>) -> Option<(usize, T)> {
|
||||
let state = &mut self.state;
|
||||
|
||||
for (i, item) in state.tokens.iter().enumerate() {
|
||||
if state.seen.contains(&i) {
|
||||
continue;
|
||||
}
|
||||
|
||||
match f(item) {
|
||||
None => {
|
||||
continue;
|
||||
}
|
||||
Some(value) => {
|
||||
state.seen.insert(i);
|
||||
return Some((i, value));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn move_to(&mut self, pos: usize) {
|
||||
self.state.index = pos;
|
||||
}
|
||||
|
||||
pub fn restart(&mut self) {
|
||||
self.state.index = 0;
|
||||
}
|
||||
|
||||
// pub fn clone(&self) -> TokensIterator<'content> {
|
||||
// let state = &self.state;
|
||||
// TokensIterator {
|
||||
// state: TokensIteratorState {
|
||||
// tokens: state.tokens,
|
||||
// span: state.span,
|
||||
// index: state.index,
|
||||
// seen: state.seen.clone(),
|
||||
// skip_ws: state.skip_ws,
|
||||
//
|
||||
// shapes: state.shapes.clone(),
|
||||
// },
|
||||
// color_tracer: self.color_tracer.clone(),
|
||||
// expand_tracer: self.expand_tracer.clone(),
|
||||
// }
|
||||
// }
|
||||
|
||||
// Peek the next token, not including whitespace
|
||||
pub fn peek_non_ws<'me>(&'me mut self) -> Peeked<'content, 'me> {
|
||||
start_next(self, true)
|
||||
}
|
||||
|
||||
// Peek the next token, including whitespace
|
||||
pub fn peek_any<'me>(&'me mut self) -> Peeked<'content, 'me> {
|
||||
start_next(self, false)
|
||||
}
|
||||
|
||||
// Peek the next token, including whitespace, but not EOF
|
||||
pub fn peek_any_token<'me, T>(
|
||||
&'me mut self,
|
||||
expected: &'static str,
|
||||
block: impl FnOnce(&'content TokenNode) -> Result<T, ParseError>,
|
||||
) -> Result<T, ParseError> {
|
||||
let peeked = start_next(self, false);
|
||||
let peeked = peeked.not_eof(expected);
|
||||
|
||||
match peeked {
|
||||
Err(err) => Err(err),
|
||||
Ok(peeked) => match block(peeked.node) {
|
||||
Err(err) => Err(err),
|
||||
Ok(val) => {
|
||||
peeked.commit();
|
||||
Ok(val)
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn commit(&mut self, from: usize, to: usize) {
|
||||
for index in from..to {
|
||||
self.state.seen.insert(index);
|
||||
}
|
||||
|
||||
self.state.index = to;
|
||||
}
|
||||
|
||||
pub fn pos(&self, skip_ws: bool) -> Option<usize> {
|
||||
peek_pos(self, skip_ws)
|
||||
}
|
||||
|
||||
pub fn debug_remaining(&self) -> Vec<TokenNode> {
|
||||
// TODO: TODO: TODO: Clean up
|
||||
vec![]
|
||||
// let mut tokens = self.clone();
|
||||
// tokens.restart();
|
||||
// tokens.cloned().collect()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'content> Iterator for TokensIterator<'content> {
    type Item = &'content TokenNode;

    /// Yield the next unseen token, honoring this iterator's
    /// whitespace-skipping setting; each yielded token is consumed.
    fn next(&mut self) -> Option<Self::Item> {
        next(self, self.state.skip_ws)
    }
}
|
||||
|
||||
fn peek<'content, 'me>(
|
||||
iterator: &'me TokensIterator<'content>,
|
||||
skip_ws: bool,
|
||||
) -> Option<&'me TokenNode> {
|
||||
let state = iterator.state();
|
||||
|
||||
let mut to = state.index;
|
||||
|
||||
loop {
|
||||
if to >= state.tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
if state.seen.contains(&to) {
|
||||
to += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if to >= state.tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let node = &state.tokens[to];
|
||||
|
||||
match node {
|
||||
TokenNode::Whitespace(_) if skip_ws => {
|
||||
to += 1;
|
||||
}
|
||||
_ => {
|
||||
return Some(node);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn peek_pos(iterator: &TokensIterator<'_>, skip_ws: bool) -> Option<usize> {
|
||||
let state = iterator.state();
|
||||
|
||||
let mut to = state.index;
|
||||
|
||||
loop {
|
||||
if to >= state.tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
if state.seen.contains(&to) {
|
||||
to += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if to >= state.tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let node = &state.tokens[to];
|
||||
|
||||
match node {
|
||||
TokenNode::Whitespace(_) if skip_ws => {
|
||||
to += 1;
|
||||
}
|
||||
_ => return Some(to),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn start_next<'content, 'me>(
|
||||
iterator: &'me mut TokensIterator<'content>,
|
||||
skip_ws: bool,
|
||||
) -> Peeked<'content, 'me> {
|
||||
let state = iterator.state();
|
||||
|
||||
let from = state.index;
|
||||
let mut to = state.index;
|
||||
|
||||
loop {
|
||||
if to >= state.tokens.len() {
|
||||
return Peeked {
|
||||
node: None,
|
||||
iterator,
|
||||
from,
|
||||
to,
|
||||
};
|
||||
}
|
||||
|
||||
if state.seen.contains(&to) {
|
||||
to += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if to >= state.tokens.len() {
|
||||
return Peeked {
|
||||
node: None,
|
||||
iterator,
|
||||
from,
|
||||
to,
|
||||
};
|
||||
}
|
||||
|
||||
let node = &state.tokens[to];
|
||||
|
||||
match node {
|
||||
TokenNode::Whitespace(_) if skip_ws => {
|
||||
to += 1;
|
||||
}
|
||||
_ => {
|
||||
to += 1;
|
||||
return Peeked {
|
||||
node: Some(node),
|
||||
iterator,
|
||||
from,
|
||||
to,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn next<'me, 'content>(
|
||||
iterator: &'me mut TokensIterator<'content>,
|
||||
skip_ws: bool,
|
||||
) -> Option<&'content TokenNode> {
|
||||
loop {
|
||||
if iterator.state().index >= iterator.state().tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
if iterator.state().seen.contains(&iterator.state().index) {
|
||||
iterator.advance();
|
||||
continue;
|
||||
}
|
||||
|
||||
if iterator.state().index >= iterator.state().tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
match &iterator.state().tokens[iterator.state().index] {
|
||||
TokenNode::Whitespace(_) if skip_ws => {
|
||||
iterator.advance();
|
||||
}
|
||||
other => {
|
||||
iterator.advance();
|
||||
return Some(other);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
36
crates/nu-parser/src/hir/tokens_iterator/debug.rs
Normal file
36
crates/nu-parser/src/hir/tokens_iterator/debug.rs
Normal file
@ -0,0 +1,36 @@
|
||||
#![allow(unused)]
|
||||
|
||||
pub(crate) mod color_trace;
|
||||
pub(crate) mod expand_trace;
|
||||
|
||||
pub(crate) use self::color_trace::*;
|
||||
pub(crate) use self::expand_trace::*;
|
||||
|
||||
use crate::hir::tokens_iterator::TokensIteratorState;
|
||||
use nu_source::{PrettyDebug, PrettyDebugWithSource, Text};
|
||||
|
||||
/// One entry in a debug rendering of a `TokensIterator`: a token already
/// consumed (`Seen`), one not yet consumed (`Unseen`), or the cursor
/// marker itself.
#[derive(Debug)]
pub(crate) enum DebugIteratorToken {
    Seen(String),
    Unseen(String),
    Cursor,
}
|
||||
|
||||
pub(crate) fn debug_tokens(state: &TokensIteratorState, source: &str) -> Vec<DebugIteratorToken> {
|
||||
let mut out = vec![];
|
||||
|
||||
for (i, token) in state.tokens.iter().enumerate() {
|
||||
if state.index == i {
|
||||
out.push(DebugIteratorToken::Cursor);
|
||||
}
|
||||
|
||||
let msg = token.debug(source).to_string();
|
||||
if state.seen.contains(&i) {
|
||||
out.push(DebugIteratorToken::Seen(msg));
|
||||
} else {
|
||||
out.push(DebugIteratorToken::Unseen(msg));
|
||||
}
|
||||
}
|
||||
|
||||
out
|
||||
}
|
345
crates/nu-parser/src/hir/tokens_iterator/debug/color_trace.rs
Normal file
345
crates/nu-parser/src/hir/tokens_iterator/debug/color_trace.rs
Normal file
@ -0,0 +1,345 @@
|
||||
use crate::hir::syntax_shape::FlatShape;
|
||||
use ansi_term::Color;
|
||||
use log::trace;
|
||||
use nu_errors::ShellError;
|
||||
use nu_source::{Spanned, Text};
|
||||
use ptree::*;
|
||||
use std::borrow::Cow;
|
||||
use std::io;
|
||||
|
||||
/// A child of a color-trace frame: either a shape that was emitted inside
/// the frame, or a nested frame.
#[derive(Debug, Clone)]
pub enum FrameChild {
    #[allow(unused)]
    Shape(Spanned<FlatShape>),
    Frame(ColorFrame),
}
|
||||
|
||||
impl FrameChild {
    /// Write a one-line colored description: shapes render as the shape
    /// name on a green badge plus the source text they cover; frames
    /// delegate to their own leaf description.
    fn colored_leaf_description(&self, text: &Text, f: &mut impl io::Write) -> io::Result<()> {
        match self {
            FrameChild::Shape(shape) => write!(
                f,
                "{} {:?}",
                Color::White
                    .bold()
                    .on(Color::Green)
                    .paint(format!("{:?}", shape.item)),
                shape.span.slice(text)
            ),

            FrameChild::Frame(frame) => frame.colored_leaf_description(f),
        }
    }

    /// Convert into the `ptree`-renderable `TreeChild`, attaching the
    /// source text each node needs to slice its spans.
    fn into_tree_child(self, text: &Text) -> TreeChild {
        match self {
            FrameChild::Shape(shape) => TreeChild::Shape(shape, text.clone()),
            FrameChild::Frame(frame) => TreeChild::Frame(frame, text.clone()),
        }
    }
}
|
||||
|
||||
/// One frame of the syntax-coloring trace: a named scope with the shapes
/// and sub-frames produced inside it, plus the error it failed with, if
/// any.
#[derive(Debug, Clone)]
pub struct ColorFrame {
    description: &'static str,
    children: Vec<FrameChild>,
    error: Option<ShellError>,
}
|
||||
|
||||
impl ColorFrame {
    /// Write this frame's description, color-coded by outcome: white on
    /// red for a childless error, plain red when every descendent errored,
    /// green when some descendent produced a shape, yellow otherwise.
    fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
        if self.has_only_error_descendents() {
            if self.children.is_empty() {
                write!(
                    f,
                    "{}",
                    Color::White.bold().on(Color::Red).paint(self.description)
                )
            } else {
                write!(f, "{}", Color::Red.normal().paint(self.description))
            }
        } else if self.has_descendent_shapes() {
            write!(f, "{}", Color::Green.normal().paint(self.description))
        } else {
            write!(f, "{}", Color::Yellow.bold().paint(self.description))
        }
    }

    /// Write this frame's description; a frame with exactly one child is
    /// collapsed into `parent -> child` on one line.
    fn colored_description(&self, text: &Text, f: &mut impl io::Write) -> io::Result<()> {
        if self.children.len() == 1 {
            let child = &self.children[0];

            self.colored_leaf_description(f)?;
            write!(f, " -> ")?;
            child.colored_leaf_description(text, f)
        } else {
            self.colored_leaf_description(f)
        }
    }

    /// Children to display under this frame. Mirrors the collapsing in
    /// `colored_description`: a single child was already printed inline,
    /// so show its children (or nothing, for a shape) instead.
    fn children_for_formatting(&self, text: &Text) -> Vec<TreeChild> {
        if self.children.len() == 1 {
            let child = &self.children[0];

            match child {
                FrameChild::Shape(_) => vec![],
                FrameChild::Frame(frame) => frame.tree_children(text),
            }
        } else {
            self.tree_children(text)
        }
    }

    /// All children converted into renderable `TreeChild`s.
    fn tree_children(&self, text: &Text) -> Vec<TreeChild> {
        self.children
            .clone()
            .into_iter()
            .map(|c| c.into_tree_child(text))
            .collect()
    }

    #[allow(unused)]
    fn add_shape(&mut self, shape: Spanned<FlatShape>) {
        self.children.push(FrameChild::Shape(shape))
    }

    /// True if any *direct* child is a shape.
    fn has_child_shapes(&self) -> bool {
        self.any_child_shape(|_| true)
    }

    /// True if any direct child shape satisfies `predicate`.
    fn any_child_shape(&self, predicate: impl Fn(Spanned<FlatShape>) -> bool) -> bool {
        for item in &self.children {
            if let FrameChild::Shape(shape) = item {
                if predicate(*shape) {
                    return true;
                }
            }
        }

        false
    }

    /// True if any direct child frame satisfies `predicate`.
    fn any_child_frame(&self, predicate: impl Fn(&ColorFrame) -> bool) -> bool {
        for item in &self.children {
            if let FrameChild::Frame(frame) = item {
                if predicate(frame) {
                    return true;
                }
            }
        }

        false
    }

    /// True if this frame, or any frame beneath it, contains a shape.
    fn has_descendent_shapes(&self) -> bool {
        if self.has_child_shapes() {
            true
        } else {
            self.any_child_frame(|frame| frame.has_descendent_shapes())
        }
    }

    /// True if every path beneath this frame terminates in an error (and
    /// at least one error exists).
    fn has_only_error_descendents(&self) -> bool {
        if self.children.is_empty() {
            // if this frame has no children at all, it has only error descendents if this frame
            // is an error
            self.error.is_some()
        } else {
            // otherwise, it has only error descendents if all of its children terminate in an
            // error (transitively)

            let mut seen_error = false;

            for child in &self.children {
                match child {
                    // if this frame has at least one child shape, this frame has non-error descendents
                    FrameChild::Shape(_) => return false,
                    FrameChild::Frame(frame) => {
                        // if the child frame has only error descendents, note it;
                        // a child with any non-error descendent disqualifies this frame
                        if frame.has_only_error_descendents() {
                            seen_error = true;
                        } else {
                            return false;
                        }
                    }
                }
            }

            seen_error
        }
    }
}
|
||||
|
||||
/// A `ptree`-renderable node: a `FrameChild` paired with the source text
/// needed to slice shape spans when printing.
#[derive(Debug, Clone)]
pub enum TreeChild {
    Shape(Spanned<FlatShape>, Text),
    Frame(ColorFrame, Text),
}
|
||||
|
||||
impl TreeChild {
    /// Write a one-line colored description: shapes render as the shape
    /// name on a green badge plus the source text they cover; frames
    /// delegate to `ColorFrame::colored_leaf_description`.
    fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
        match self {
            TreeChild::Shape(shape, text) => write!(
                f,
                "{} {:?}",
                Color::White
                    .bold()
                    .on(Color::Green)
                    .paint(format!("{:?}", shape.item)),
                shape.span.slice(text)
            ),

            TreeChild::Frame(frame, _) => frame.colored_leaf_description(f),
        }
    }
}
|
||||
|
||||
impl TreeItem for TreeChild {
    type Child = TreeChild;

    /// Print this node's own line (ignoring `ptree`'s style in favor of
    /// our own outcome-based coloring).
    fn write_self<W: io::Write>(&self, f: &mut W, _style: &Style) -> io::Result<()> {
        match self {
            shape @ TreeChild::Shape(..) => shape.colored_leaf_description(f),

            TreeChild::Frame(frame, text) => frame.colored_description(text, f),
        }
    }

    /// Shapes are leaves; frames expose their (possibly collapsed)
    /// children.
    fn children(&self) -> Cow<[Self::Child]> {
        match self {
            TreeChild::Shape(..) => Cow::Borrowed(&[]),
            TreeChild::Frame(frame, text) => Cow::Owned(frame.children_for_formatting(text)),
        }
    }
}
|
||||
|
||||
/// Records a tree of coloring frames as syntax coloring proceeds. The
/// stack always contains at least the root "Trace" frame; `start` pushes
/// a frame and `success`/`failed` pop it into its parent.
#[derive(Debug, Clone)]
pub struct ColorTracer {
    frame_stack: Vec<ColorFrame>,
    source: Text,
}
|
||||
|
||||
impl ColorTracer {
    /// Wrap this tracer for tree-printing against `source`.
    pub fn print(self, source: Text) -> PrintTracer {
        PrintTracer {
            tracer: self,
            source,
        }
    }

    /// Create a tracer whose stack holds only the root "Trace" frame.
    pub fn new(source: Text) -> ColorTracer {
        let root = ColorFrame {
            description: "Trace",
            children: vec![],
            error: None,
        };

        ColorTracer {
            frame_stack: vec![root],
            source,
        }
    }

    /// The innermost (most recently started) frame.
    fn current_frame(&mut self) -> &mut ColorFrame {
        let frames = &mut self.frame_stack;
        let last = frames.len() - 1;
        &mut frames[last]
    }

    /// Pop the innermost frame.
    ///
    /// Panics if this would pop the root frame — popping below the root
    /// indicates unbalanced start/success/failed calls.
    fn pop_frame(&mut self) -> ColorFrame {
        trace!(target: "nu::color_syntax", "Popping {:#?}", self);

        let result = self.frame_stack.pop().expect("Can't pop root tracer frame");

        if self.frame_stack.is_empty() {
            panic!("Can't pop root tracer frame {:#?}", self);
        }

        self.debug();

        result
    }

    /// Open a new frame named `description`.
    pub fn start(&mut self, description: &'static str) {
        let frame = ColorFrame {
            description,
            children: vec![],
            error: None,
        };

        self.frame_stack.push(frame);
        self.debug();
    }

    /// Close the current frame after hitting end-of-input, attaching it
    /// to its parent. (Same mechanics as `success`; the distinction is
    /// for the caller's intent.)
    pub fn eof_frame(&mut self) {
        let current = self.pop_frame();
        self.current_frame()
            .children
            .push(FrameChild::Frame(current));
    }

    /// Close every open frame down to the root.
    #[allow(unused)]
    pub fn finish(&mut self) {
        loop {
            if self.frame_stack.len() == 1 {
                break;
            }

            let frame = self.pop_frame();
            self.current_frame().children.push(FrameChild::Frame(frame));
        }
    }

    /// Record a shape inside the current frame.
    #[allow(unused)]
    pub fn add_shape(&mut self, shape: Spanned<FlatShape>) {
        self.current_frame().add_shape(shape);
    }

    /// Close the current frame successfully, attaching it to its parent.
    pub fn success(&mut self) {
        let current = self.pop_frame();
        self.current_frame()
            .children
            .push(FrameChild::Frame(current));
    }

    /// Close the current frame with `error`, attaching it to its parent.
    pub fn failed(&mut self, error: &ShellError) {
        let mut current = self.pop_frame();
        current.error = Some(error.clone());
        self.current_frame()
            .children
            .push(FrameChild::Frame(current));
    }

    /// Emit the current stack (names, then full debug) to the
    /// `nu::color_syntax` trace target.
    fn debug(&self) {
        trace!(target: "nu::color_syntax",
            "frames = {:?}",
            self.frame_stack
                .iter()
                .map(|f| f.description)
                .collect::<Vec<_>>()
        );

        trace!(target: "nu::color_syntax", "{:#?}", self);
    }
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct PrintTracer {
|
||||
tracer: ColorTracer,
|
||||
source: Text,
|
||||
}
|
||||
|
||||
impl TreeItem for PrintTracer {
|
||||
type Child = TreeChild;
|
||||
|
||||
fn write_self<W: io::Write>(&self, f: &mut W, style: &Style) -> io::Result<()> {
|
||||
write!(f, "{}", style.paint("Color Trace"))
|
||||
}
|
||||
|
||||
fn children(&self) -> Cow<[Self::Child]> {
|
||||
Cow::Owned(vec![TreeChild::Frame(
|
||||
self.tracer.frame_stack[0].clone(),
|
||||
self.source.clone(),
|
||||
)])
|
||||
}
|
||||
}
|
359
crates/nu-parser/src/hir/tokens_iterator/debug/expand_trace.rs
Normal file
359
crates/nu-parser/src/hir/tokens_iterator/debug/expand_trace.rs
Normal file
@ -0,0 +1,359 @@
|
||||
use crate::hir::Expression;
|
||||
use ansi_term::Color;
|
||||
use log::trace;
|
||||
use nu_errors::ParseError;
|
||||
use nu_protocol::ShellTypeName;
|
||||
use nu_source::{DebugDoc, PrettyDebug, PrettyDebugWithSource, Text};
|
||||
use ptree::*;
|
||||
use std::borrow::Cow;
|
||||
use std::io;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum FrameChild {
|
||||
Expr(Expression),
|
||||
Frame(ExprFrame),
|
||||
Result(DebugDoc),
|
||||
}
|
||||
|
||||
impl FrameChild {
|
||||
fn get_error_leaf(&self) -> Option<&'static str> {
|
||||
match self {
|
||||
FrameChild::Frame(frame) if frame.error.is_some() => {
|
||||
if frame.children.is_empty() {
|
||||
Some(frame.description)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn to_tree_child(&self, text: &Text) -> TreeChild {
|
||||
match self {
|
||||
FrameChild::Expr(expr) => TreeChild::OkExpr(expr.clone(), text.clone()),
|
||||
FrameChild::Result(result) => {
|
||||
let result = result.display();
|
||||
TreeChild::OkNonExpr(result)
|
||||
}
|
||||
FrameChild::Frame(frame) => {
|
||||
if frame.error.is_some() {
|
||||
if frame.children.is_empty() {
|
||||
TreeChild::ErrorLeaf(vec![frame.description])
|
||||
} else {
|
||||
TreeChild::ErrorFrame(frame.to_tree_frame(text), text.clone())
|
||||
}
|
||||
} else {
|
||||
TreeChild::OkFrame(frame.to_tree_frame(text), text.clone())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ExprFrame {
|
||||
description: &'static str,
|
||||
children: Vec<FrameChild>,
|
||||
error: Option<ParseError>,
|
||||
}
|
||||
|
||||
impl ExprFrame {
|
||||
fn to_tree_frame(&self, text: &Text) -> TreeFrame {
|
||||
let mut children = vec![];
|
||||
let mut errors = vec![];
|
||||
|
||||
for child in &self.children {
|
||||
if let Some(error_leaf) = child.get_error_leaf() {
|
||||
errors.push(error_leaf);
|
||||
continue;
|
||||
} else if !errors.is_empty() {
|
||||
children.push(TreeChild::ErrorLeaf(errors));
|
||||
errors = vec![];
|
||||
}
|
||||
|
||||
children.push(child.to_tree_child(text));
|
||||
}
|
||||
|
||||
if !errors.is_empty() {
|
||||
children.push(TreeChild::ErrorLeaf(errors));
|
||||
}
|
||||
|
||||
TreeFrame {
|
||||
description: self.description,
|
||||
children,
|
||||
error: self.error.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn add_expr(&mut self, expr: Expression) {
|
||||
self.children.push(FrameChild::Expr(expr))
|
||||
}
|
||||
|
||||
fn add_result(&mut self, result: impl PrettyDebug) {
|
||||
self.children.push(FrameChild::Result(result.to_doc()))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct TreeFrame {
|
||||
description: &'static str,
|
||||
children: Vec<TreeChild>,
|
||||
error: Option<ParseError>,
|
||||
}
|
||||
|
||||
impl TreeFrame {
|
||||
fn leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
|
||||
if self.children.len() == 1 {
|
||||
if self.error.is_some() {
|
||||
write!(f, "{}", Color::Red.normal().paint(self.description))?;
|
||||
} else if self.has_descendent_green() {
|
||||
write!(f, "{}", Color::Green.normal().paint(self.description))?;
|
||||
} else {
|
||||
write!(f, "{}", Color::Yellow.bold().paint(self.description))?;
|
||||
}
|
||||
|
||||
write!(f, " -> ")?;
|
||||
self.children[0].leaf_description(f)
|
||||
} else if self.error.is_some() {
|
||||
if self.children.is_empty() {
|
||||
write!(
|
||||
f,
|
||||
"{}",
|
||||
Color::White.bold().on(Color::Red).paint(self.description)
|
||||
)
|
||||
} else {
|
||||
write!(f, "{}", Color::Red.normal().paint(self.description))
|
||||
}
|
||||
} else if self.has_descendent_green() {
|
||||
write!(f, "{}", Color::Green.normal().paint(self.description))
|
||||
} else {
|
||||
write!(f, "{}", Color::Yellow.bold().paint(self.description))
|
||||
}
|
||||
}
|
||||
|
||||
fn has_child_green(&self) -> bool {
|
||||
self.children.iter().any(|item| match item {
|
||||
TreeChild::OkFrame(..) | TreeChild::ErrorFrame(..) | TreeChild::ErrorLeaf(..) => false,
|
||||
TreeChild::OkExpr(..) | TreeChild::OkNonExpr(..) => true,
|
||||
})
|
||||
}
|
||||
|
||||
fn any_child_frame(&self, predicate: impl Fn(&TreeFrame) -> bool) -> bool {
|
||||
for item in &self.children {
|
||||
if let TreeChild::OkFrame(frame, ..) = item {
|
||||
if predicate(frame) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
fn has_descendent_green(&self) -> bool {
|
||||
if self.has_child_green() {
|
||||
true
|
||||
} else {
|
||||
self.any_child_frame(|frame| frame.has_child_green())
|
||||
}
|
||||
}
|
||||
|
||||
fn children_for_formatting(&self, text: &Text) -> Vec<TreeChild> {
|
||||
if self.children.len() == 1 {
|
||||
let child: &TreeChild = &self.children[0];
|
||||
match child {
|
||||
TreeChild::OkExpr(..) | TreeChild::OkNonExpr(..) | TreeChild::ErrorLeaf(..) => {
|
||||
vec![]
|
||||
}
|
||||
TreeChild::OkFrame(frame, _) | TreeChild::ErrorFrame(frame, _) => {
|
||||
frame.children_for_formatting(text)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
self.children.clone()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum TreeChild {
|
||||
OkNonExpr(String),
|
||||
OkExpr(Expression, Text),
|
||||
OkFrame(TreeFrame, Text),
|
||||
ErrorFrame(TreeFrame, Text),
|
||||
ErrorLeaf(Vec<&'static str>),
|
||||
}
|
||||
|
||||
impl TreeChild {
|
||||
fn leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
|
||||
match self {
|
||||
TreeChild::OkExpr(expr, text) => write!(
|
||||
f,
|
||||
"{} {} {}",
|
||||
Color::Cyan.normal().paint("returns"),
|
||||
Color::White.bold().on(Color::Green).paint(expr.type_name()),
|
||||
expr.span.slice(text)
|
||||
),
|
||||
|
||||
TreeChild::OkNonExpr(result) => write!(
|
||||
f,
|
||||
"{} {}",
|
||||
Color::Cyan.normal().paint("returns"),
|
||||
Color::White
|
||||
.bold()
|
||||
.on(Color::Green)
|
||||
.paint(result.to_string())
|
||||
),
|
||||
|
||||
TreeChild::ErrorLeaf(desc) => {
|
||||
let last = desc.len() - 1;
|
||||
|
||||
for (i, item) in desc.iter().enumerate() {
|
||||
write!(f, "{}", Color::White.bold().on(Color::Red).paint(*item))?;
|
||||
|
||||
if i != last {
|
||||
write!(f, "{}", Color::White.normal().paint(", "))?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
TreeChild::ErrorFrame(frame, _) | TreeChild::OkFrame(frame, _) => {
|
||||
frame.leaf_description(f)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TreeItem for TreeChild {
|
||||
type Child = TreeChild;
|
||||
|
||||
fn write_self<W: io::Write>(&self, f: &mut W, _style: &Style) -> io::Result<()> {
|
||||
self.leaf_description(f)
|
||||
}
|
||||
|
||||
fn children(&self) -> Cow<[Self::Child]> {
|
||||
match self {
|
||||
TreeChild::OkExpr(..) | TreeChild::OkNonExpr(..) | TreeChild::ErrorLeaf(..) => {
|
||||
Cow::Borrowed(&[])
|
||||
}
|
||||
TreeChild::OkFrame(frame, text) | TreeChild::ErrorFrame(frame, text) => {
|
||||
Cow::Owned(frame.children_for_formatting(text))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ExpandTracer {
|
||||
frame_stack: Vec<ExprFrame>,
|
||||
source: Text,
|
||||
}
|
||||
|
||||
impl ExpandTracer {
|
||||
pub fn print(&self, source: Text) -> PrintTracer {
|
||||
let root = self.frame_stack[0].to_tree_frame(&source);
|
||||
|
||||
PrintTracer { root, source }
|
||||
}
|
||||
|
||||
pub fn new(source: Text) -> ExpandTracer {
|
||||
let root = ExprFrame {
|
||||
description: "Trace",
|
||||
children: vec![],
|
||||
error: None,
|
||||
};
|
||||
|
||||
ExpandTracer {
|
||||
frame_stack: vec![root],
|
||||
source,
|
||||
}
|
||||
}
|
||||
|
||||
fn current_frame(&mut self) -> &mut ExprFrame {
|
||||
let frames = &mut self.frame_stack;
|
||||
let last = frames.len() - 1;
|
||||
&mut frames[last]
|
||||
}
|
||||
|
||||
fn pop_frame(&mut self) -> ExprFrame {
|
||||
let result = self.frame_stack.pop().expect("Can't pop root tracer frame");
|
||||
|
||||
if self.frame_stack.is_empty() {
|
||||
panic!("Can't pop root tracer frame");
|
||||
}
|
||||
|
||||
self.debug();
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
pub fn start(&mut self, description: &'static str) {
|
||||
let frame = ExprFrame {
|
||||
description,
|
||||
children: vec![],
|
||||
error: None,
|
||||
};
|
||||
|
||||
self.frame_stack.push(frame);
|
||||
self.debug();
|
||||
}
|
||||
|
||||
pub fn add_expr(&mut self, shape: Expression) {
|
||||
self.current_frame().add_expr(shape);
|
||||
}
|
||||
|
||||
pub fn add_result(&mut self, result: impl PrettyDebugWithSource) {
|
||||
let source = self.source.clone();
|
||||
self.current_frame().add_result(result.debuggable(source));
|
||||
}
|
||||
|
||||
pub fn success(&mut self) {
|
||||
trace!(target: "parser::expand_syntax", "success {:#?}", self);
|
||||
|
||||
let current = self.pop_frame();
|
||||
self.current_frame()
|
||||
.children
|
||||
.push(FrameChild::Frame(current));
|
||||
}
|
||||
|
||||
pub fn failed(&mut self, error: &ParseError) {
|
||||
let mut current = self.pop_frame();
|
||||
current.error = Some(error.clone());
|
||||
self.current_frame()
|
||||
.children
|
||||
.push(FrameChild::Frame(current));
|
||||
}
|
||||
|
||||
fn debug(&self) {
|
||||
trace!(target: "nu::parser::expand",
|
||||
"frames = {:?}",
|
||||
self.frame_stack
|
||||
.iter()
|
||||
.map(|f| f.description)
|
||||
.collect::<Vec<_>>()
|
||||
);
|
||||
|
||||
trace!(target: "nu::parser::expand", "{:#?}", self);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct PrintTracer {
|
||||
root: TreeFrame,
|
||||
source: Text,
|
||||
}
|
||||
|
||||
impl TreeItem for PrintTracer {
|
||||
type Child = TreeChild;
|
||||
|
||||
fn write_self<W: io::Write>(&self, f: &mut W, style: &Style) -> io::Result<()> {
|
||||
write!(f, "{}", style.paint("Expansion Trace"))
|
||||
}
|
||||
|
||||
fn children(&self) -> Cow<[Self::Child]> {
|
||||
Cow::Borrowed(&self.root.children)
|
||||
}
|
||||
}
|
16
crates/nu-parser/src/hir/tokens_iterator/tests.rs
Normal file
16
crates/nu-parser/src/hir/tokens_iterator/tests.rs
Normal file
@ -0,0 +1,16 @@
|
||||
use crate::hir::TokensIterator;
|
||||
use crate::parse::token_tree_builder::TokenTreeBuilder as b;
|
||||
use crate::Span;
|
||||
|
||||
#[test]
fn supplies_tokens() -> Result<(), Box<dyn std::error::Error>> {
    // Build `$it.cpu` as a token list and check the iterator yields the
    // tokens in order: variable, dot operator, bare word.
    let tokens = b::token_list(vec![b::var("it"), b::op("."), b::bare("cpu")]);
    let (tokens, _) = b::build(tokens);

    let tokens = tokens.expect_list();
    let mut iterator = TokensIterator::all(tokens, Span::unknown());

    iterator.next()?.expect_var();
    iterator.next()?.expect_dot();
    iterator.next()?.expect_bare();

    // BUGFIX: the function returns `Result` but previously ended on a
    // unit statement, so it did not compile; return Ok explicitly.
    Ok(())
}
|
43
crates/nu-parser/src/lib.rs
Normal file
43
crates/nu-parser/src/lib.rs
Normal file
@ -0,0 +1,43 @@
|
||||
#![allow(clippy::large_enum_variant, clippy::type_complexity)]
|
||||
|
||||
pub mod commands;
|
||||
pub mod hir;
|
||||
pub mod parse;
|
||||
pub mod parse_command;
|
||||
|
||||
pub use crate::commands::classified::{
|
||||
external::ExternalCommand, internal::InternalCommand, ClassifiedCommand, ClassifiedPipeline,
|
||||
};
|
||||
pub use crate::hir::syntax_shape::flat_shape::FlatShape;
|
||||
pub use crate::hir::syntax_shape::{
|
||||
expand_syntax, ExpandContext, ExpandSyntax, PipelineShape, SignatureRegistry,
|
||||
};
|
||||
pub use crate::hir::tokens_iterator::TokensIterator;
|
||||
pub use crate::parse::files::Files;
|
||||
pub use crate::parse::flag::Flag;
|
||||
pub use crate::parse::operator::{CompareOperator, EvaluationOperator};
|
||||
pub use crate::parse::parser::Number;
|
||||
pub use crate::parse::parser::{module, pipeline};
|
||||
pub use crate::parse::token_tree::{Delimiter, TokenNode};
|
||||
pub use crate::parse::token_tree_builder::TokenTreeBuilder;
|
||||
|
||||
use nu_errors::ShellError;
|
||||
use nu_source::nom_input;
|
||||
|
||||
pub fn parse(input: &str) -> Result<TokenNode, ShellError> {
|
||||
let _ = pretty_env_logger::try_init();
|
||||
|
||||
match pipeline(nom_input(input)) {
|
||||
Ok((_rest, val)) => Ok(val),
|
||||
Err(err) => Err(ShellError::parse_error(err)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_script(input: &str) -> Result<TokenNode, ShellError> {
|
||||
let _ = pretty_env_logger::try_init();
|
||||
|
||||
match module(nom_input(input)) {
|
||||
Ok((_rest, val)) => Ok(val),
|
||||
Err(err) => Err(ShellError::parse_error(err)),
|
||||
}
|
||||
}
|
@ -1,10 +1,10 @@
|
||||
pub(crate) mod call_node;
|
||||
pub(crate) mod comment;
|
||||
pub(crate) mod files;
|
||||
pub(crate) mod flag;
|
||||
pub(crate) mod operator;
|
||||
pub(crate) mod parser;
|
||||
pub(crate) mod pipeline;
|
||||
pub(crate) mod text;
|
||||
pub(crate) mod token_tree;
|
||||
pub(crate) mod token_tree_builder;
|
||||
pub(crate) mod tokens;
|
45
crates/nu-parser/src/parse/call_node.rs
Normal file
45
crates/nu-parser/src/parse/call_node.rs
Normal file
@ -0,0 +1,45 @@
|
||||
use crate::TokenNode;
|
||||
use getset::Getters;
|
||||
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource};
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters)]
|
||||
pub struct CallNode {
|
||||
#[get = "pub(crate)"]
|
||||
head: Box<TokenNode>,
|
||||
#[get = "pub(crate)"]
|
||||
children: Option<Vec<TokenNode>>,
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for CallNode {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::typed(
|
||||
"call",
|
||||
self.head.pretty_debug(source)
|
||||
+ b::preceded(
|
||||
b::space(),
|
||||
b::intersperse(
|
||||
self.children.iter().flat_map(|children| {
|
||||
children.iter().map(|child| child.pretty_debug(source))
|
||||
}),
|
||||
b::space(),
|
||||
),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl CallNode {
|
||||
pub fn new(head: Box<TokenNode>, children: Vec<TokenNode>) -> CallNode {
|
||||
if children.is_empty() {
|
||||
CallNode {
|
||||
head,
|
||||
children: None,
|
||||
}
|
||||
} else {
|
||||
CallNode {
|
||||
head,
|
||||
children: Some(children),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
42
crates/nu-parser/src/parse/comment.rs
Normal file
42
crates/nu-parser/src/parse/comment.rs
Normal file
@ -0,0 +1,42 @@
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
|
||||
pub enum CommentKind {
|
||||
Line,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Getters, new)]
|
||||
pub struct Comment {
|
||||
pub(crate) kind: CommentKind,
|
||||
pub(crate) text: Span,
|
||||
pub(crate) span: Span,
|
||||
}
|
||||
|
||||
impl Comment {
|
||||
pub fn line(text: impl Into<Span>, outer: impl Into<Span>) -> Comment {
|
||||
Comment {
|
||||
kind: CommentKind::Line,
|
||||
text: text.into(),
|
||||
span: outer.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for Comment {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
let prefix = match self.kind {
|
||||
CommentKind::Line => b::description("#"),
|
||||
};
|
||||
|
||||
prefix + b::description(self.text.slice(source))
|
||||
}
|
||||
}
|
||||
|
||||
impl HasSpan for Comment {
|
||||
fn span(&self) -> Span {
|
||||
self.span
|
||||
}
|
||||
}
|
151
crates/nu-parser/src/parse/files.rs
Normal file
151
crates/nu-parser/src/parse/files.rs
Normal file
@ -0,0 +1,151 @@
|
||||
use derive_new::new;
|
||||
use language_reporting::{FileName, Location};
|
||||
use log::trace;
|
||||
use nu_source::Span;
|
||||
|
||||
#[derive(new, Debug, Clone)]
|
||||
pub struct Files {
|
||||
snippet: String,
|
||||
}
|
||||
|
||||
impl language_reporting::ReportingFiles for Files {
|
||||
type Span = Span;
|
||||
type FileId = usize;
|
||||
|
||||
fn byte_span(
|
||||
&self,
|
||||
_file: Self::FileId,
|
||||
from_index: usize,
|
||||
to_index: usize,
|
||||
) -> Option<Self::Span> {
|
||||
Some(Span::new(from_index, to_index))
|
||||
}
|
||||
|
||||
fn file_id(&self, _tag: Self::Span) -> Self::FileId {
|
||||
0
|
||||
}
|
||||
|
||||
fn file_name(&self, _file: Self::FileId) -> FileName {
|
||||
FileName::Verbatim("shell".to_string())
|
||||
}
|
||||
|
||||
fn byte_index(&self, _file: Self::FileId, _line: usize, _column: usize) -> Option<usize> {
|
||||
unimplemented!("byte_index")
|
||||
}
|
||||
|
||||
fn location(&self, _file: Self::FileId, byte_index: usize) -> Option<Location> {
|
||||
trace!("finding location for {}", byte_index);
|
||||
|
||||
let source = &self.snippet;
|
||||
let mut seen_lines = 0;
|
||||
let mut seen_bytes = 0;
|
||||
|
||||
for (pos, slice) in source.match_indices('\n') {
|
||||
trace!(
|
||||
"searching byte_index={} seen_bytes={} pos={} slice={:?} slice.len={} source={:?}",
|
||||
byte_index,
|
||||
seen_bytes,
|
||||
pos,
|
||||
slice,
|
||||
source.len(),
|
||||
source
|
||||
);
|
||||
|
||||
if pos >= byte_index {
|
||||
trace!(
|
||||
"returning {}:{} seen_lines={} byte_index={} pos={} seen_bytes={}",
|
||||
seen_lines,
|
||||
byte_index,
|
||||
pos,
|
||||
seen_lines,
|
||||
byte_index,
|
||||
seen_bytes
|
||||
);
|
||||
|
||||
return Some(language_reporting::Location::new(
|
||||
seen_lines,
|
||||
byte_index - pos,
|
||||
));
|
||||
} else {
|
||||
seen_lines += 1;
|
||||
seen_bytes = pos;
|
||||
}
|
||||
}
|
||||
|
||||
if seen_lines == 0 {
|
||||
trace!("seen_lines=0 end={}", source.len() - 1);
|
||||
|
||||
// if we got here, there were no newlines in the source
|
||||
Some(language_reporting::Location::new(0, source.len() - 1))
|
||||
} else {
|
||||
trace!(
|
||||
"last line seen_lines={} end={}",
|
||||
seen_lines,
|
||||
source.len() - 1 - byte_index
|
||||
);
|
||||
|
||||
// if we got here and we didn't return, it should mean that we're talking about
|
||||
// the last line
|
||||
Some(language_reporting::Location::new(
|
||||
seen_lines,
|
||||
source.len() - 1 - byte_index,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
fn line_span(&self, _file: Self::FileId, lineno: usize) -> Option<Self::Span> {
|
||||
trace!("finding line_span for {}", lineno);
|
||||
|
||||
let source = &self.snippet;
|
||||
let mut seen_lines = 0;
|
||||
let mut seen_bytes = 0;
|
||||
|
||||
for (pos, _) in source.match_indices('\n') {
|
||||
trace!(
|
||||
"lineno={} seen_lines={} seen_bytes={} pos={}",
|
||||
lineno,
|
||||
seen_lines,
|
||||
seen_bytes,
|
||||
pos
|
||||
);
|
||||
|
||||
if seen_lines == lineno {
|
||||
trace!("returning start={} end={}", seen_bytes, pos);
|
||||
// If the number of seen lines is the lineno, seen_bytes is the start of the
|
||||
// line and pos is the end of the line
|
||||
return Some(Span::new(seen_bytes, pos));
|
||||
} else {
|
||||
// If it's not, increment seen_lines, and move seen_bytes to the beginning of
|
||||
// the next line
|
||||
seen_lines += 1;
|
||||
seen_bytes = pos + 1;
|
||||
}
|
||||
}
|
||||
|
||||
if seen_lines == 0 {
|
||||
trace!("returning start={} end={}", 0, self.snippet.len() - 1);
|
||||
|
||||
// if we got here, there were no newlines in the source
|
||||
Some(Span::new(0, self.snippet.len() - 1))
|
||||
} else {
|
||||
trace!(
|
||||
"returning start={} end={}",
|
||||
seen_bytes,
|
||||
self.snippet.len() - 1
|
||||
);
|
||||
|
||||
// if we got here and we didn't return, it should mean that we're talking about
|
||||
// the last line
|
||||
Some(Span::new(seen_bytes, self.snippet.len() - 1))
|
||||
}
|
||||
}
|
||||
|
||||
fn source(&self, span: Self::Span) -> Option<String> {
|
||||
trace!("source(tag={:?}) snippet={:?}", span, self.snippet);
|
||||
|
||||
if span.start() > span.end() || span.end() > self.snippet.len() {
|
||||
return None;
|
||||
}
|
||||
Some(span.slice(&self.snippet).to_string())
|
||||
}
|
||||
}
|
39
crates/nu-parser/src/parse/flag.rs
Normal file
39
crates/nu-parser/src/parse/flag.rs
Normal file
@ -0,0 +1,39 @@
|
||||
use crate::hir::syntax_shape::flat_shape::FlatShape;
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Span, Spanned, SpannedItem};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
|
||||
pub enum FlagKind {
|
||||
Shorthand,
|
||||
Longhand,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Getters, new)]
|
||||
#[get = "pub(crate)"]
|
||||
pub struct Flag {
|
||||
pub(crate) kind: FlagKind,
|
||||
pub(crate) name: Span,
|
||||
pub(crate) span: Span,
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for Flag {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
let prefix = match self.kind {
|
||||
FlagKind::Longhand => b::description("--"),
|
||||
FlagKind::Shorthand => b::description("-"),
|
||||
};
|
||||
|
||||
prefix + b::description(self.name.slice(source))
|
||||
}
|
||||
}
|
||||
|
||||
impl Flag {
|
||||
pub fn color(&self) -> Spanned<FlatShape> {
|
||||
match self.kind {
|
||||
FlagKind::Longhand => FlatShape::Flag.spanned(self.span),
|
||||
FlagKind::Shorthand => FlatShape::ShorthandFlag.spanned(self.span),
|
||||
}
|
||||
}
|
||||
}
|
114
crates/nu-parser/src/parse/operator.rs
Normal file
114
crates/nu-parser/src/parse/operator.rs
Normal file
@ -0,0 +1,114 @@
|
||||
use nu_source::{b, DebugDocBuilder, PrettyDebug};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use std::str::FromStr;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
|
||||
pub enum CompareOperator {
|
||||
Equal,
|
||||
NotEqual,
|
||||
LessThan,
|
||||
GreaterThan,
|
||||
LessThanOrEqual,
|
||||
GreaterThanOrEqual,
|
||||
Contains,
|
||||
NotContains,
|
||||
}
|
||||
|
||||
impl PrettyDebug for CompareOperator {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
b::operator(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl CompareOperator {
|
||||
pub fn print(self) -> String {
|
||||
self.as_str().to_string()
|
||||
}
|
||||
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
CompareOperator::Equal => "==",
|
||||
CompareOperator::NotEqual => "!=",
|
||||
CompareOperator::LessThan => "<",
|
||||
CompareOperator::GreaterThan => ">",
|
||||
CompareOperator::LessThanOrEqual => "<=",
|
||||
CompareOperator::GreaterThanOrEqual => ">=",
|
||||
CompareOperator::Contains => "=~",
|
||||
CompareOperator::NotContains => "!~",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for CompareOperator {
|
||||
fn from(input: &str) -> CompareOperator {
|
||||
if let Ok(output) = CompareOperator::from_str(input) {
|
||||
output
|
||||
} else {
|
||||
unreachable!("Internal error: CompareOperator from failed")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for CompareOperator {
|
||||
type Err = ();
|
||||
fn from_str(input: &str) -> Result<Self, <Self as std::str::FromStr>::Err> {
|
||||
match input {
|
||||
"==" => Ok(CompareOperator::Equal),
|
||||
"!=" => Ok(CompareOperator::NotEqual),
|
||||
"<" => Ok(CompareOperator::LessThan),
|
||||
">" => Ok(CompareOperator::GreaterThan),
|
||||
"<=" => Ok(CompareOperator::LessThanOrEqual),
|
||||
">=" => Ok(CompareOperator::GreaterThanOrEqual),
|
||||
"=~" => Ok(CompareOperator::Contains),
|
||||
"!~" => Ok(CompareOperator::NotContains),
|
||||
_ => Err(()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
|
||||
pub enum EvaluationOperator {
|
||||
Dot,
|
||||
DotDot,
|
||||
}
|
||||
|
||||
impl PrettyDebug for EvaluationOperator {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
b::operator(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl EvaluationOperator {
|
||||
pub fn print(self) -> String {
|
||||
self.as_str().to_string()
|
||||
}
|
||||
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
EvaluationOperator::Dot => ".",
|
||||
EvaluationOperator::DotDot => "..",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for EvaluationOperator {
|
||||
fn from(input: &str) -> EvaluationOperator {
|
||||
if let Ok(output) = EvaluationOperator::from_str(input) {
|
||||
output
|
||||
} else {
|
||||
unreachable!("Internal error: EvaluationOperator 'from' failed")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for EvaluationOperator {
|
||||
type Err = ();
|
||||
fn from_str(input: &str) -> Result<Self, <Self as std::str::FromStr>::Err> {
|
||||
match input {
|
||||
"." => Ok(EvaluationOperator::Dot),
|
||||
".." => Ok(EvaluationOperator::DotDot),
|
||||
_ => Err(()),
|
||||
}
|
||||
}
|
||||
}
|
1720
crates/nu-parser/src/parse/parser.rs
Normal file
1720
crates/nu-parser/src/parse/parser.rs
Normal file
File diff suppressed because it is too large
Load Diff
75
crates/nu-parser/src/parse/pipeline.rs
Normal file
75
crates/nu-parser/src/parse/pipeline.rs
Normal file
@ -0,0 +1,75 @@
|
||||
use crate::TokenNode;
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned};
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Getters, new)]
|
||||
pub struct Pipeline {
|
||||
#[get = "pub"]
|
||||
pub(crate) parts: Vec<PipelineElement>,
|
||||
pub(crate) span: Span,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
|
||||
pub struct Tokens {
|
||||
pub(crate) tokens: Vec<TokenNode>,
|
||||
pub(crate) span: Span,
|
||||
}
|
||||
|
||||
impl Tokens {
|
||||
pub fn iter(&self) -> impl Iterator<Item = &TokenNode> {
|
||||
self.tokens.iter()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters)]
|
||||
pub struct PipelineElement {
|
||||
pub pipe: Option<Span>,
|
||||
pub tokens: Tokens,
|
||||
}
|
||||
|
||||
impl HasSpan for PipelineElement {
|
||||
fn span(&self) -> Span {
|
||||
match self.pipe {
|
||||
Option::None => self.tokens.span,
|
||||
Option::Some(pipe) => pipe.until(self.tokens.span),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PipelineElement {
|
||||
pub fn new(pipe: Option<Span>, tokens: Spanned<Vec<TokenNode>>) -> PipelineElement {
|
||||
PipelineElement {
|
||||
pipe,
|
||||
tokens: Tokens {
|
||||
tokens: tokens.item,
|
||||
span: tokens.span,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn tokens(&self) -> &[TokenNode] {
|
||||
&self.tokens.tokens
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for Pipeline {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::intersperse(
|
||||
self.parts.iter().map(|token| token.pretty_debug(source)),
|
||||
b::operator(" | "),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for PipelineElement {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
b::intersperse(
|
||||
self.tokens.iter().map(|token| match token {
|
||||
TokenNode::Whitespace(_) => b::blank(),
|
||||
token => token.pretty_debug(source),
|
||||
}),
|
||||
b::space(),
|
||||
)
|
||||
}
|
||||
}
|
447
crates/nu-parser/src/parse/token_tree.rs
Normal file
447
crates/nu-parser/src/parse/token_tree.rs
Normal file
@ -0,0 +1,447 @@
|
||||
use crate::parse::{call_node::*, comment::*, flag::*, operator::*, pipeline::*, tokens::*};
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use nu_errors::{ParseError, ShellError};
|
||||
use nu_protocol::ShellTypeName;
|
||||
use nu_source::{
|
||||
b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem, Tagged,
|
||||
TaggedItem, Text,
|
||||
};
|
||||
use std::fmt;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)]
|
||||
pub enum TokenNode {
|
||||
Token(Token),
|
||||
|
||||
Call(Spanned<CallNode>),
|
||||
Nodes(Spanned<Vec<TokenNode>>),
|
||||
Delimited(Spanned<DelimitedNode>),
|
||||
Pipeline(Pipeline),
|
||||
Flag(Flag),
|
||||
Comment(Comment),
|
||||
Whitespace(Span),
|
||||
Separator(Span),
|
||||
|
||||
Error(Spanned<ShellError>),
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for TokenNode {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
match self {
|
||||
TokenNode::Token(token) => token.pretty_debug(source),
|
||||
TokenNode::Call(call) => call.pretty_debug(source),
|
||||
TokenNode::Nodes(nodes) => b::intersperse(
|
||||
nodes.iter().map(|node| node.pretty_debug(source)),
|
||||
b::space(),
|
||||
),
|
||||
TokenNode::Delimited(delimited) => delimited.pretty_debug(source),
|
||||
TokenNode::Pipeline(pipeline) => pipeline.pretty_debug(source),
|
||||
TokenNode::Flag(flag) => flag.pretty_debug(source),
|
||||
TokenNode::Whitespace(space) => b::typed(
|
||||
"whitespace",
|
||||
b::description(format!("{:?}", space.slice(source))),
|
||||
),
|
||||
TokenNode::Separator(span) => b::typed(
|
||||
"separator",
|
||||
b::description(format!("{:?}", span.slice(source))),
|
||||
),
|
||||
TokenNode::Comment(comment) => {
|
||||
b::typed("comment", b::description(comment.text.slice(source)))
|
||||
}
|
||||
TokenNode::Error(_) => b::error("error"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ShellTypeName for TokenNode {
|
||||
fn type_name(&self) -> &'static str {
|
||||
match self {
|
||||
TokenNode::Token(t) => t.type_name(),
|
||||
TokenNode::Nodes(_) => "nodes",
|
||||
TokenNode::Call(_) => "command",
|
||||
TokenNode::Delimited(d) => d.type_name(),
|
||||
TokenNode::Pipeline(_) => "pipeline",
|
||||
TokenNode::Flag(_) => "flag",
|
||||
TokenNode::Whitespace(_) => "whitespace",
|
||||
TokenNode::Separator(_) => "separator",
|
||||
TokenNode::Comment(_) => "comment",
|
||||
TokenNode::Error(_) => "error",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DebugTokenNode<'a> {
|
||||
node: &'a TokenNode,
|
||||
source: &'a Text,
|
||||
}
|
||||
|
||||
impl fmt::Debug for DebugTokenNode<'_> {
    /// Legacy debug rendering: prints the node roughly as source text,
    /// with calls parenthesized and errors replaced by `<error>`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self.node {
            TokenNode::Token(t) => write!(f, "{:?}", t.debug(self.source)),
            // Calls print as `(head child child ...)`.
            TokenNode::Call(s) => {
                write!(f, "(")?;

                write!(f, "{}", s.head().debug(self.source))?;

                if let Some(children) = s.children() {
                    for child in children {
                        write!(f, "{}", child.debug(self.source))?;
                    }
                }

                write!(f, ")")
            }

            // Delimited groups print their opening delimiter, each child
            // (recursively via `old_debug`), then the closing delimiter.
            TokenNode::Delimited(d) => {
                write!(
                    f,
                    "{}",
                    match d.delimiter {
                        Delimiter::Brace => "{",
                        Delimiter::Paren => "(",
                        Delimiter::Square => "[",
                    }
                )?;

                for child in d.children() {
                    write!(f, "{:?}", child.old_debug(self.source))?;
                }

                write!(
                    f,
                    "{}",
                    match d.delimiter {
                        Delimiter::Brace => "}",
                        Delimiter::Paren => ")",
                        Delimiter::Square => "]",
                    }
                )
            }
            TokenNode::Pipeline(pipeline) => write!(f, "{}", pipeline.debug(self.source)),
            TokenNode::Error(_) => write!(f, "<error>"),
            // Everything else falls back to its raw source slice.
            rest => write!(f, "{}", rest.span().slice(self.source)),
        }
    }
}
|
||||
|
||||
impl From<&TokenNode> for Span {
    /// A node converts to the span it covers.
    fn from(token: &TokenNode) -> Span {
        token.span()
    }
}
|
||||
|
||||
impl HasSpan for TokenNode {
    /// The source span covered by this node, extracted from whichever
    /// payload the variant carries.
    fn span(&self) -> Span {
        match self {
            TokenNode::Token(t) => t.span,
            TokenNode::Nodes(t) => t.span,
            TokenNode::Call(s) => s.span,
            TokenNode::Delimited(s) => s.span,
            TokenNode::Pipeline(s) => s.span,
            TokenNode::Flag(s) => s.span,
            // Whitespace and separators carry the span directly.
            TokenNode::Whitespace(s) => *s,
            TokenNode::Separator(s) => *s,
            TokenNode::Comment(c) => c.span(),
            TokenNode::Error(s) => s.span,
        }
    }
}
|
||||
|
||||
impl TokenNode {
|
||||
pub fn tagged_type_name(&self) -> Tagged<&'static str> {
|
||||
self.type_name().tagged(self.span())
|
||||
}
|
||||
|
||||
pub fn old_debug<'a>(&'a self, source: &'a Text) -> DebugTokenNode<'a> {
|
||||
DebugTokenNode { node: self, source }
|
||||
}
|
||||
|
||||
pub fn as_external_arg(&self, source: &Text) -> String {
|
||||
self.span().slice(source).to_string()
|
||||
}
|
||||
|
||||
pub fn source<'a>(&self, source: &'a Text) -> &'a str {
|
||||
self.span().slice(source)
|
||||
}
|
||||
|
||||
pub fn get_variable(&self) -> Result<(Span, Span), ShellError> {
|
||||
match self {
|
||||
TokenNode::Token(Token {
|
||||
unspanned: UnspannedToken::Variable(inner_span),
|
||||
span: outer_span,
|
||||
}) => Ok((*outer_span, *inner_span)),
|
||||
_ => Err(ShellError::type_error(
|
||||
"variable",
|
||||
self.type_name().spanned(self.span()),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_bare(&self) -> bool {
|
||||
match self {
|
||||
TokenNode::Token(Token {
|
||||
unspanned: UnspannedToken::Bare,
|
||||
..
|
||||
}) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_string(&self) -> bool {
|
||||
match self {
|
||||
TokenNode::Token(Token {
|
||||
unspanned: UnspannedToken::String(_),
|
||||
..
|
||||
}) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_number(&self) -> bool {
|
||||
match self {
|
||||
TokenNode::Token(Token {
|
||||
unspanned: UnspannedToken::Number(_),
|
||||
..
|
||||
}) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_string(&self) -> Option<(Span, Span)> {
|
||||
match self {
|
||||
TokenNode::Token(Token {
|
||||
unspanned: UnspannedToken::String(inner_span),
|
||||
span: outer_span,
|
||||
}) => Some((*outer_span, *inner_span)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_pattern(&self) -> bool {
|
||||
match self {
|
||||
TokenNode::Token(Token {
|
||||
unspanned: UnspannedToken::GlobPattern,
|
||||
..
|
||||
}) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_word(&self) -> bool {
|
||||
match self {
|
||||
TokenNode::Token(Token {
|
||||
unspanned: UnspannedToken::Bare,
|
||||
..
|
||||
}) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_int(&self) -> bool {
|
||||
match self {
|
||||
TokenNode::Token(Token {
|
||||
unspanned: UnspannedToken::Number(RawNumber::Int(_)),
|
||||
..
|
||||
}) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_dot(&self) -> bool {
|
||||
match self {
|
||||
TokenNode::Token(Token {
|
||||
unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot),
|
||||
..
|
||||
}) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_block(&self) -> Option<(Spanned<&[TokenNode]>, (Span, Span))> {
|
||||
match self {
|
||||
TokenNode::Delimited(Spanned {
|
||||
item:
|
||||
DelimitedNode {
|
||||
delimiter,
|
||||
children,
|
||||
spans,
|
||||
},
|
||||
span,
|
||||
}) if *delimiter == Delimiter::Brace => Some(((&children[..]).spanned(*span), *spans)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_external(&self) -> bool {
|
||||
match self {
|
||||
TokenNode::Token(Token {
|
||||
unspanned: UnspannedToken::ExternalCommand(..),
|
||||
..
|
||||
}) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option<Flag> {
|
||||
match self {
|
||||
TokenNode::Flag(flag @ Flag { .. }) if value == flag.name().slice(source) => {
|
||||
Some(*flag)
|
||||
}
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_pipeline(&self) -> Result<Pipeline, ParseError> {
|
||||
match self {
|
||||
TokenNode::Pipeline(pipeline) => Ok(pipeline.clone()),
|
||||
other => Err(ParseError::mismatch(
|
||||
"pipeline",
|
||||
other.type_name().spanned(other.span()),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_whitespace(&self) -> bool {
|
||||
match self {
|
||||
TokenNode::Whitespace(_) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A delimited region of the token tree: `{ ... }`, `( ... )`, or `[ ... ]`.
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
#[get = "pub(crate)"]
pub struct DelimitedNode {
    // Which delimiter pair encloses the children.
    pub(crate) delimiter: Delimiter,
    // Spans of the opening and closing delimiter tokens, respectively.
    pub(crate) spans: (Span, Span),
    // The nodes between the delimiters.
    pub(crate) children: Vec<TokenNode>,
}
|
||||
|
||||
impl PrettyDebugWithSource for DelimitedNode {
    /// Pretty-prints the group as `open children... close`, with the
    /// children space-separated.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::delimit(
            self.delimiter.open(),
            b::intersperse(
                self.children.iter().map(|child| child.pretty_debug(source)),
                b::space(),
            ),
            self.delimiter.close(),
        )
    }
}
|
||||
|
||||
impl DelimitedNode {
    /// A user-facing description of what this delimiter kind denotes,
    /// for error messages.
    pub fn type_name(&self) -> &'static str {
        match self.delimiter {
            Delimiter::Brace => "braced expression",
            Delimiter::Paren => "parenthesized expression",
            Delimiter::Square => "array literal or index operator",
        }
    }
}
|
||||
|
||||
/// The three delimiter pairs recognized by the parser.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub enum Delimiter {
    Paren,
    Brace,
    Square,
}
|
||||
|
||||
impl Delimiter {
    /// The opening character for this delimiter kind.
    pub(crate) fn open(self) -> &'static str {
        match self {
            Delimiter::Paren => "(",
            Delimiter::Brace => "{",
            Delimiter::Square => "[",
        }
    }

    /// The closing character for this delimiter kind.
    pub(crate) fn close(self) -> &'static str {
        match self {
            Delimiter::Paren => ")",
            Delimiter::Brace => "}",
            Delimiter::Square => "]",
        }
    }
}
|
||||
|
||||
/// A path expression: a head node followed by the remaining components.
// NOTE(review): no constructor callers are visible in this file — confirm
// whether `tail` elements are member accesses before documenting further.
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
#[get = "pub(crate)"]
pub struct PathNode {
    head: Box<TokenNode>,
    tail: Vec<TokenNode>,
}
|
||||
|
||||
// Test-only unchecked accessors: each `expect_*` returns the relevant
// span(s) for the expected token kind and panics otherwise.
#[cfg(test)]
impl TokenNode {
    /// Inner span of an external command token; panics otherwise.
    pub fn expect_external(&self) -> Span {
        match self {
            TokenNode::Token(Token {
                unspanned: UnspannedToken::ExternalCommand(span),
                ..
            }) => *span,
            other => panic!(
                "Only call expect_external if you checked is_external first, found {:?}",
                other
            ),
        }
    }

    /// `(outer, inner)` spans of a string token; panics otherwise.
    pub fn expect_string(&self) -> (Span, Span) {
        match self {
            TokenNode::Token(Token {
                unspanned: UnspannedToken::String(inner_span),
                span: outer_span,
            }) => (*outer_span, *inner_span),
            other => panic!("Expected string, found {:?}", other),
        }
    }

    /// Children of a `Nodes` list, spanned with the list's span; panics otherwise.
    pub fn expect_list(&self) -> Spanned<&[TokenNode]> {
        match self {
            TokenNode::Nodes(token_nodes) => token_nodes[..].spanned(token_nodes.span),
            other => panic!("Expected list, found {:?}", other),
        }
    }

    /// Span of a glob pattern token; panics otherwise.
    pub fn expect_pattern(&self) -> Span {
        match self {
            TokenNode::Token(Token {
                unspanned: UnspannedToken::GlobPattern,
                span: outer_span,
            }) => *outer_span,
            other => panic!("Expected pattern, found {:?}", other),
        }
    }

    /// `(outer, inner)` spans of a variable token; panics otherwise.
    pub fn expect_var(&self) -> (Span, Span) {
        match self {
            TokenNode::Token(Token {
                unspanned: UnspannedToken::Variable(inner_span),
                span: outer_span,
            }) => (*outer_span, *inner_span),
            other => panic!("Expected var, found {:?}", other),
        }
    }

    /// Span of a `.` evaluation operator token; panics otherwise.
    pub fn expect_dot(&self) -> Span {
        match self {
            TokenNode::Token(Token {
                unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot),
                span,
            }) => *span,
            other => panic!("Expected dot, found {:?}", other),
        }
    }

    /// Span of a bare word token; panics otherwise.
    pub fn expect_bare(&self) -> Span {
        match self {
            TokenNode::Token(Token {
                unspanned: UnspannedToken::Bare,
                span,
            }) => *span,
            other => panic!("Expected bare, found {:?}", other),
        }
    }
}
|
473
crates/nu-parser/src/parse/token_tree_builder.rs
Normal file
473
crates/nu-parser/src/parse/token_tree_builder.rs
Normal file
@ -0,0 +1,473 @@
|
||||
use crate::parse::call_node::CallNode;
|
||||
use crate::parse::comment::Comment;
|
||||
use crate::parse::flag::{Flag, FlagKind};
|
||||
use crate::parse::operator::{CompareOperator, EvaluationOperator};
|
||||
use crate::parse::pipeline::{Pipeline, PipelineElement};
|
||||
use crate::parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
|
||||
use crate::parse::tokens::{RawNumber, UnspannedToken};
|
||||
use bigdecimal::BigDecimal;
|
||||
use nu_source::{Span, Spanned, SpannedItem};
|
||||
use num_bigint::BigInt;
|
||||
|
||||
/// Builder that constructs a token tree while synthesizing the matching
/// source text, tracking the byte position so every node gets an accurate
/// span into the generated string.
#[derive(Default)]
pub struct TokenTreeBuilder {
    // Current byte offset into the synthesized source.
    pos: usize,
    // The source text generated so far.
    output: String,
}
|
||||
|
||||
impl TokenTreeBuilder {
|
||||
pub fn new() -> Self {
|
||||
Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
// A deferred token constructor: invoked with the builder, it consumes
// synthesized source text (advancing the builder's position) and yields
// the resulting node.
pub type CurriedToken = Box<dyn FnOnce(&mut TokenTreeBuilder) -> TokenNode + 'static>;
// Like `CurriedToken`, but produces a spanned call node (head + arguments).
pub type CurriedCall = Box<dyn FnOnce(&mut TokenTreeBuilder) -> Spanned<CallNode> + 'static>;
|
||||
|
||||
impl TokenTreeBuilder {
|
||||
pub fn build(block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) {
|
||||
let mut builder = TokenTreeBuilder::new();
|
||||
let node = block(&mut builder);
|
||||
(node, builder.output)
|
||||
}
|
||||
|
||||
fn build_spanned<T>(
|
||||
&mut self,
|
||||
callback: impl FnOnce(&mut TokenTreeBuilder) -> T,
|
||||
) -> Spanned<T> {
|
||||
let start = self.pos;
|
||||
let ret = callback(self);
|
||||
let end = self.pos;
|
||||
|
||||
ret.spanned(Span::new(start, end))
|
||||
}
|
||||
|
||||
pub fn pipeline(input: Vec<Vec<CurriedToken>>) -> CurriedToken {
|
||||
Box::new(move |b| {
|
||||
let start = b.pos;
|
||||
|
||||
let mut out: Vec<PipelineElement> = vec![];
|
||||
|
||||
let mut input = input.into_iter().peekable();
|
||||
let head = input
|
||||
.next()
|
||||
.expect("A pipeline must contain at least one element");
|
||||
|
||||
let pipe = None;
|
||||
let head = b.build_spanned(|b| head.into_iter().map(|node| node(b)).collect());
|
||||
|
||||
out.push(PipelineElement::new(pipe, head));
|
||||
|
||||
loop {
|
||||
match input.next() {
|
||||
None => break,
|
||||
Some(node) => {
|
||||
let pipe = Some(b.consume_span("|"));
|
||||
let node =
|
||||
b.build_spanned(|b| node.into_iter().map(|node| node(b)).collect());
|
||||
|
||||
out.push(PipelineElement::new(pipe, node));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let end = b.pos;
|
||||
|
||||
TokenTreeBuilder::spanned_pipeline(out, Span::new(start, end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_pipeline(input: Vec<PipelineElement>, span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Pipeline(Pipeline::new(input, span.into()))
|
||||
}
|
||||
|
||||
pub fn token_list(input: Vec<CurriedToken>) -> CurriedToken {
|
||||
Box::new(move |b| {
|
||||
let start = b.pos;
|
||||
let tokens = input.into_iter().map(|i| i(b)).collect();
|
||||
let end = b.pos;
|
||||
|
||||
TokenTreeBuilder::spanned_token_list(tokens, Span::new(start, end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_token_list(input: Vec<TokenNode>, span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Nodes(input.spanned(span.into()))
|
||||
}
|
||||
|
||||
pub fn op(input: impl Into<CompareOperator>) -> CurriedToken {
|
||||
let input = input.into();
|
||||
|
||||
Box::new(move |b| {
|
||||
let (start, end) = b.consume(input.as_str());
|
||||
|
||||
b.pos = end;
|
||||
|
||||
TokenTreeBuilder::spanned_cmp_op(input, Span::new(start, end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_cmp_op(input: impl Into<CompareOperator>, span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Token(UnspannedToken::CompareOperator(input.into()).into_token(span))
|
||||
}
|
||||
|
||||
pub fn dot() -> CurriedToken {
|
||||
Box::new(move |b| {
|
||||
let (start, end) = b.consume(".");
|
||||
|
||||
b.pos = end;
|
||||
|
||||
TokenTreeBuilder::spanned_eval_op(".", Span::new(start, end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn dotdot() -> CurriedToken {
|
||||
Box::new(move |b| {
|
||||
let (start, end) = b.consume("..");
|
||||
|
||||
b.pos = end;
|
||||
|
||||
TokenTreeBuilder::spanned_eval_op("..", Span::new(start, end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_eval_op(
|
||||
input: impl Into<EvaluationOperator>,
|
||||
span: impl Into<Span>,
|
||||
) -> TokenNode {
|
||||
TokenNode::Token(UnspannedToken::EvaluationOperator(input.into()).into_token(span))
|
||||
}
|
||||
|
||||
pub fn string(input: impl Into<String>) -> CurriedToken {
|
||||
let input = input.into();
|
||||
|
||||
Box::new(move |b| {
|
||||
let (start, _) = b.consume("\"");
|
||||
let (inner_start, inner_end) = b.consume(&input);
|
||||
let (_, end) = b.consume("\"");
|
||||
b.pos = end;
|
||||
|
||||
TokenTreeBuilder::spanned_string(
|
||||
Span::new(inner_start, inner_end),
|
||||
Span::new(start, end),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_string(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Token(UnspannedToken::String(input.into()).into_token(span))
|
||||
}
|
||||
|
||||
pub fn bare(input: impl Into<String>) -> CurriedToken {
|
||||
let input = input.into();
|
||||
|
||||
Box::new(move |b| {
|
||||
let (start, end) = b.consume(&input);
|
||||
b.pos = end;
|
||||
|
||||
TokenTreeBuilder::spanned_bare(Span::new(start, end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_bare(span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Token(UnspannedToken::Bare.into_token(span))
|
||||
}
|
||||
|
||||
pub fn pattern(input: impl Into<String>) -> CurriedToken {
|
||||
let input = input.into();
|
||||
|
||||
Box::new(move |b| {
|
||||
let (start, end) = b.consume(&input);
|
||||
b.pos = end;
|
||||
|
||||
TokenTreeBuilder::spanned_pattern(Span::new(start, end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_pattern(input: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Token(UnspannedToken::GlobPattern.into_token(input))
|
||||
}
|
||||
|
||||
pub fn external_word(input: impl Into<String>) -> CurriedToken {
|
||||
let input = input.into();
|
||||
|
||||
Box::new(move |b| {
|
||||
let (start, end) = b.consume(&input);
|
||||
b.pos = end;
|
||||
|
||||
TokenTreeBuilder::spanned_external_word(Span::new(start, end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_external_word(input: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Token(UnspannedToken::ExternalWord.into_token(input))
|
||||
}
|
||||
|
||||
pub fn external_command(input: impl Into<String>) -> CurriedToken {
|
||||
let input = input.into();
|
||||
|
||||
Box::new(move |b| {
|
||||
let (outer_start, _) = b.consume("^");
|
||||
let (inner_start, end) = b.consume(&input);
|
||||
b.pos = end;
|
||||
|
||||
TokenTreeBuilder::spanned_external_command(
|
||||
Span::new(inner_start, end),
|
||||
Span::new(outer_start, end),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_external_command(inner: impl Into<Span>, outer: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Token(UnspannedToken::ExternalCommand(inner.into()).into_token(outer))
|
||||
}
|
||||
|
||||
pub fn int(input: impl Into<BigInt>) -> CurriedToken {
|
||||
let int = input.into();
|
||||
|
||||
Box::new(move |b| {
|
||||
let (start, end) = b.consume(&int.to_string());
|
||||
b.pos = end;
|
||||
|
||||
TokenTreeBuilder::spanned_number(
|
||||
RawNumber::Int(Span::new(start, end)),
|
||||
Span::new(start, end),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn decimal(input: impl Into<BigDecimal>) -> CurriedToken {
|
||||
let decimal = input.into();
|
||||
|
||||
Box::new(move |b| {
|
||||
let (start, end) = b.consume(&decimal.to_string());
|
||||
b.pos = end;
|
||||
|
||||
TokenTreeBuilder::spanned_number(
|
||||
RawNumber::Decimal(Span::new(start, end)),
|
||||
Span::new(start, end),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_number(input: impl Into<RawNumber>, span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Token(UnspannedToken::Number(input.into()).into_token(span))
|
||||
}
|
||||
|
||||
pub fn var(input: impl Into<String>) -> CurriedToken {
|
||||
let input = input.into();
|
||||
|
||||
Box::new(move |b| {
|
||||
let (start, _) = b.consume("$");
|
||||
let (inner_start, end) = b.consume(&input);
|
||||
|
||||
TokenTreeBuilder::spanned_var(Span::new(inner_start, end), Span::new(start, end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_var(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Token(UnspannedToken::Variable(input.into()).into_token(span))
|
||||
}
|
||||
|
||||
pub fn flag(input: impl Into<String>) -> CurriedToken {
|
||||
let input = input.into();
|
||||
|
||||
Box::new(move |b| {
|
||||
let (start, _) = b.consume("--");
|
||||
let (inner_start, end) = b.consume(&input);
|
||||
|
||||
TokenTreeBuilder::spanned_flag(Span::new(inner_start, end), Span::new(start, end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_flag(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Flag(Flag::new(FlagKind::Longhand, input.into(), span.into()))
|
||||
}
|
||||
|
||||
pub fn shorthand(input: impl Into<String>) -> CurriedToken {
|
||||
let input = input.into();
|
||||
|
||||
Box::new(move |b| {
|
||||
let (start, _) = b.consume("-");
|
||||
let (inner_start, end) = b.consume(&input);
|
||||
|
||||
TokenTreeBuilder::spanned_shorthand((inner_start, end), (start, end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_shorthand(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into(), span.into()))
|
||||
}
|
||||
|
||||
pub fn call(head: CurriedToken, input: Vec<CurriedToken>) -> CurriedCall {
|
||||
Box::new(move |b| {
|
||||
let start = b.pos;
|
||||
|
||||
let head_node = head(b);
|
||||
|
||||
let mut nodes = vec![head_node];
|
||||
for item in input {
|
||||
nodes.push(item(b));
|
||||
}
|
||||
|
||||
let end = b.pos;
|
||||
|
||||
TokenTreeBuilder::spanned_call(nodes, Span::new(start, end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_call(input: Vec<TokenNode>, span: impl Into<Span>) -> Spanned<CallNode> {
|
||||
if input.is_empty() {
|
||||
panic!("BUG: spanned call (TODO)")
|
||||
}
|
||||
|
||||
let mut input = input.into_iter();
|
||||
|
||||
if let Some(head) = input.next() {
|
||||
let tail = input.collect();
|
||||
|
||||
CallNode::new(Box::new(head), tail).spanned(span.into())
|
||||
} else {
|
||||
unreachable!("Internal error: spanned_call failed")
|
||||
}
|
||||
}
|
||||
|
||||
fn consume_delimiter(
|
||||
&mut self,
|
||||
input: Vec<CurriedToken>,
|
||||
_open: &str,
|
||||
_close: &str,
|
||||
) -> (Span, Span, Span, Vec<TokenNode>) {
|
||||
let (start_open_paren, end_open_paren) = self.consume("(");
|
||||
let mut output = vec![];
|
||||
for item in input {
|
||||
output.push(item(self));
|
||||
}
|
||||
|
||||
let (start_close_paren, end_close_paren) = self.consume(")");
|
||||
|
||||
let open = Span::new(start_open_paren, end_open_paren);
|
||||
let close = Span::new(start_close_paren, end_close_paren);
|
||||
let whole = Span::new(start_open_paren, end_close_paren);
|
||||
|
||||
(open, close, whole, output)
|
||||
}
|
||||
|
||||
pub fn parens(input: Vec<CurriedToken>) -> CurriedToken {
|
||||
Box::new(move |b| {
|
||||
let (open, close, whole, output) = b.consume_delimiter(input, "(", ")");
|
||||
|
||||
TokenTreeBuilder::spanned_parens(output, (open, close), whole)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_parens(
|
||||
input: impl Into<Vec<TokenNode>>,
|
||||
spans: (Span, Span),
|
||||
span: impl Into<Span>,
|
||||
) -> TokenNode {
|
||||
TokenNode::Delimited(
|
||||
DelimitedNode::new(Delimiter::Paren, spans, input.into()).spanned(span.into()),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn square(input: Vec<CurriedToken>) -> CurriedToken {
|
||||
Box::new(move |b| {
|
||||
let (open, close, whole, tokens) = b.consume_delimiter(input, "[", "]");
|
||||
|
||||
TokenTreeBuilder::spanned_square(tokens, (open, close), whole)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_square(
|
||||
input: impl Into<Vec<TokenNode>>,
|
||||
spans: (Span, Span),
|
||||
span: impl Into<Span>,
|
||||
) -> TokenNode {
|
||||
TokenNode::Delimited(
|
||||
DelimitedNode::new(Delimiter::Square, spans, input.into()).spanned(span.into()),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn braced(input: Vec<CurriedToken>) -> CurriedToken {
|
||||
Box::new(move |b| {
|
||||
let (open, close, whole, tokens) = b.consume_delimiter(input, "{", "}");
|
||||
|
||||
TokenTreeBuilder::spanned_brace(tokens, (open, close), whole)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_brace(
|
||||
input: impl Into<Vec<TokenNode>>,
|
||||
spans: (Span, Span),
|
||||
span: impl Into<Span>,
|
||||
) -> TokenNode {
|
||||
TokenNode::Delimited(
|
||||
DelimitedNode::new(Delimiter::Brace, spans, input.into()).spanned(span.into()),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn sp() -> CurriedToken {
|
||||
Box::new(|b| {
|
||||
let (start, end) = b.consume(" ");
|
||||
TokenNode::Whitespace(Span::new(start, end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn ws(input: impl Into<String>) -> CurriedToken {
|
||||
let input = input.into();
|
||||
|
||||
Box::new(move |b| {
|
||||
let (start, end) = b.consume(&input);
|
||||
TokenTreeBuilder::spanned_ws(Span::new(start, end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_ws(span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Whitespace(span.into())
|
||||
}
|
||||
|
||||
pub fn sep(input: impl Into<String>) -> CurriedToken {
|
||||
let input = input.into();
|
||||
|
||||
Box::new(move |b| {
|
||||
let (start, end) = b.consume(&input);
|
||||
TokenTreeBuilder::spanned_sep(Span::new(start, end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_sep(span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Separator(span.into())
|
||||
}
|
||||
|
||||
pub fn comment(input: impl Into<String>) -> CurriedToken {
|
||||
let input = input.into();
|
||||
|
||||
Box::new(move |b| {
|
||||
let outer_start = b.pos;
|
||||
b.consume("#");
|
||||
let (start, end) = b.consume(&input);
|
||||
let outer_end = b.pos;
|
||||
|
||||
TokenTreeBuilder::spanned_comment((start, end), (outer_start, outer_end))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_comment(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
|
||||
TokenNode::Comment(Comment::line(input, span))
|
||||
}
|
||||
|
||||
/// Appends `input` to the synthesized source and advances the position,
/// returning the `(start, end)` byte offsets of the appended text.
fn consume(&mut self, input: &str) -> (usize, usize) {
    let start = self.pos;
    self.pos += input.len();
    self.output.push_str(input);
    (start, self.pos)
}
|
||||
|
||||
fn consume_span(&mut self, input: &str) -> Span {
|
||||
let start = self.pos;
|
||||
self.pos += input.len();
|
||||
self.output.push_str(input);
|
||||
Span::new(start, self.pos)
|
||||
}
|
||||
}
|
231
crates/nu-parser/src/parse/tokens.rs
Normal file
231
crates/nu-parser/src/parse/tokens.rs
Normal file
@ -0,0 +1,231 @@
|
||||
use crate::parse::parser::Number;
|
||||
use crate::{CompareOperator, EvaluationOperator};
|
||||
use bigdecimal::BigDecimal;
|
||||
use nu_protocol::ShellTypeName;
|
||||
use nu_source::{
|
||||
b, DebugDocBuilder, HasSpan, PrettyDebug, PrettyDebugWithSource, Span, Spanned, SpannedItem,
|
||||
Text,
|
||||
};
|
||||
use num_bigint::BigInt;
|
||||
use std::fmt;
|
||||
use std::str::FromStr;
|
||||
|
||||
/// A lexed token kind without its location; pair it with a `Span` via
/// `into_token` to form a complete `Token`.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum UnspannedToken {
    // Numeric literal; the RawNumber records the digits' span and kind.
    Number(RawNumber),
    // Comparison operator token.
    CompareOperator(CompareOperator),
    // `.` or `..`.
    EvaluationOperator(EvaluationOperator),
    // Quoted string; the inner span excludes the quotes.
    String(Span),
    // Variable reference; the inner span excludes the `$` sigil.
    Variable(Span),
    // External command; the inner span excludes the `^` sigil.
    ExternalCommand(Span),
    ExternalWord,
    GlobPattern,
    Bare,
}
|
||||
|
||||
impl UnspannedToken {
    /// Attaches a span to this token kind, producing a complete `Token`.
    pub fn into_token(self, span: impl Into<Span>) -> Token {
        Token {
            unspanned: self,
            span: span.into(),
        }
    }
}
|
||||
|
||||
impl ShellTypeName for UnspannedToken {
    /// A user-facing name for this token kind, used in error messages.
    fn type_name(&self) -> &'static str {
        match self {
            UnspannedToken::Number(_) => "number",
            UnspannedToken::CompareOperator(..) => "comparison operator",
            UnspannedToken::EvaluationOperator(EvaluationOperator::Dot) => "dot",
            UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot) => "dotdot",
            UnspannedToken::String(_) => "string",
            UnspannedToken::Variable(_) => "variable",
            // External tokens deliberately present as syntax errors when a
            // type name is requested.
            UnspannedToken::ExternalCommand(_) => "syntax error",
            UnspannedToken::ExternalWord => "syntax error",
            UnspannedToken::GlobPattern => "glob pattern",
            // Bare words present as strings to the user.
            UnspannedToken::Bare => "string",
        }
    }
}
|
||||
|
||||
/// The span of a numeric literal, tagged with whether it is an integer or
/// a decimal; the digits themselves live in the source text.
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum RawNumber {
    Int(Span),
    Decimal(Span),
}
|
||||
|
||||
impl HasSpan for RawNumber {
|
||||
fn span(&self) -> Span {
|
||||
match self {
|
||||
RawNumber::Int(span) => *span,
|
||||
RawNumber::Decimal(span) => *span,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebugWithSource for RawNumber {
|
||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||
match self {
|
||||
RawNumber::Int(span) => b::primitive(span.slice(source)),
|
||||
RawNumber::Decimal(span) => b::primitive(span.slice(source)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl RawNumber {
|
||||
pub fn int(span: impl Into<Span>) -> RawNumber {
|
||||
let span = span.into();
|
||||
|
||||
RawNumber::Int(span)
|
||||
}
|
||||
|
||||
pub fn decimal(span: impl Into<Span>) -> RawNumber {
|
||||
let span = span.into();
|
||||
|
||||
RawNumber::Decimal(span)
|
||||
}
|
||||
|
||||
pub(crate) fn to_number(self, source: &Text) -> Number {
|
||||
match self {
|
||||
RawNumber::Int(tag) => {
|
||||
if let Ok(int) = BigInt::from_str(tag.slice(source)) {
|
||||
Number::Int(int)
|
||||
} else {
|
||||
unreachable!("Internal error: to_number failed")
|
||||
}
|
||||
}
|
||||
RawNumber::Decimal(tag) => {
|
||||
if let Ok(decimal) = BigDecimal::from_str(tag.slice(source)) {
|
||||
Number::Decimal(decimal)
|
||||
} else {
|
||||
unreachable!("Internal error: to_number failed")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A lexed token: what was seen (`unspanned`) plus where it was seen (`span`).
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub struct Token {
    pub unspanned: UnspannedToken,
    pub span: Span,
}
|
||||
|
||||
// Deref lets a `Token` be used directly wherever its kind
// (`UnspannedToken`) is needed, e.g. calling `type_name()` on a `Token`.
impl std::ops::Deref for Token {
    type Target = UnspannedToken;

    fn deref(&self) -> &UnspannedToken {
        &self.unspanned
    }
}
|
||||
|
||||
impl PrettyDebugWithSource for Token {
    /// Pretty-prints the token for debug output, slicing its text from
    /// `source`; operators render themselves without needing the source.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        match self.unspanned {
            UnspannedToken::Number(number) => number.pretty_debug(source),
            UnspannedToken::CompareOperator(operator) => operator.pretty(),
            UnspannedToken::EvaluationOperator(operator) => operator.pretty(),
            UnspannedToken::String(_) => b::primitive(self.span.slice(source)),
            UnspannedToken::Variable(_) => b::var(self.span.slice(source)),
            UnspannedToken::ExternalCommand(_) => b::primitive(self.span.slice(source)),
            UnspannedToken::ExternalWord => {
                b::typed("external", b::description(self.span.slice(source)))
            }
            UnspannedToken::GlobPattern => {
                b::typed("pattern", b::description(self.span.slice(source)))
            }
            UnspannedToken::Bare => b::primitive(self.span.slice(source)),
        }
    }
}
|
||||
|
||||
// Checked accessors: each `extract_*` returns `Some` with the relevant
// span(s)/payload when the token is of the matching kind, else `None`.
impl Token {
    /// Legacy `fmt::Debug` adapter pairing this token with its source text.
    pub fn debug<'a>(&self, source: &'a Text) -> DebugToken<'a> {
        DebugToken {
            node: *self,
            source,
        }
    }

    /// The raw number, if this token is a numeric literal.
    pub fn extract_number(&self) -> Option<RawNumber> {
        match self.unspanned {
            UnspannedToken::Number(number) => Some(number),
            _ => None,
        }
    }

    /// `(digits, whole-token)` spans, if this is an integer literal.
    pub fn extract_int(&self) -> Option<(Span, Span)> {
        match self.unspanned {
            UnspannedToken::Number(RawNumber::Int(int)) => Some((int, self.span)),
            _ => None,
        }
    }

    /// `(digits, whole-token)` spans, if this is a decimal literal.
    pub fn extract_decimal(&self) -> Option<(Span, Span)> {
        match self.unspanned {
            UnspannedToken::Number(RawNumber::Decimal(decimal)) => Some((decimal, self.span)),
            _ => None,
        }
    }

    /// The spanned operator, if this is a comparison operator.
    pub fn extract_operator(&self) -> Option<Spanned<CompareOperator>> {
        match self.unspanned {
            UnspannedToken::CompareOperator(operator) => Some(operator.spanned(self.span)),
            _ => None,
        }
    }

    /// `(inner, outer)` spans, if this is a string literal.
    pub fn extract_string(&self) -> Option<(Span, Span)> {
        match self.unspanned {
            UnspannedToken::String(span) => Some((span, self.span)),
            _ => None,
        }
    }

    /// `(inner, outer)` spans, if this is a variable reference.
    pub fn extract_variable(&self) -> Option<(Span, Span)> {
        match self.unspanned {
            UnspannedToken::Variable(span) => Some((span, self.span)),
            _ => None,
        }
    }

    /// `(inner, outer)` spans, if this is an external command.
    pub fn extract_external_command(&self) -> Option<(Span, Span)> {
        match self.unspanned {
            UnspannedToken::ExternalCommand(span) => Some((span, self.span)),
            _ => None,
        }
    }

    /// The token's span, if this is an external word.
    pub fn extract_external_word(&self) -> Option<Span> {
        match self.unspanned {
            UnspannedToken::ExternalWord => Some(self.span),
            _ => None,
        }
    }

    /// The token's span, if this is a glob pattern.
    pub fn extract_glob_pattern(&self) -> Option<Span> {
        match self.unspanned {
            UnspannedToken::GlobPattern => Some(self.span),
            _ => None,
        }
    }

    /// The token's span, if this is a bare word.
    pub fn extract_bare(&self) -> Option<Span> {
        match self.unspanned {
            UnspannedToken::Bare => Some(self.span),
            _ => None,
        }
    }
}
|
||||
|
||||
/// Pairs a token with its source text for the `fmt::Debug` impl below.
pub struct DebugToken<'a> {
    node: Token,
    source: &'a Text,
}
|
||||
|
||||
impl fmt::Debug for DebugToken<'_> {
    /// Prints the token's raw source slice.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.node.span.slice(self.source))
    }
}
|
127
crates/nu-parser/src/parse/unit.rs
Normal file
127
crates/nu-parser/src/parse/unit.rs
Normal file
@ -0,0 +1,127 @@
|
||||
use crate::parse::parser::Number;
|
||||
use nu_protocol::{Primitive, UntaggedValue};
|
||||
use nu_source::{b, DebugDocBuilder, PrettyDebug};
|
||||
use num_traits::ToPrimitive;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use std::str::FromStr;
|
||||
|
||||
/// A unit suffix that can follow a numeric literal (e.g. `10KB`, `3h`).
///
/// Units fall into two families: filesize units, which scale by powers of
/// 1024, and duration units, which convert to whole seconds (see `compute`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub enum Unit {
    // Filesize units
    Byte,
    Kilobyte,
    Megabyte,
    Gigabyte,
    Terabyte,
    Petabyte,

    // Duration units
    Second,
    Minute,
    Hour,
    Day,
    Week,
    Month,
    Year,
}
|
||||
|
||||
impl PrettyDebug for Unit {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
b::keyword(self.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
fn convert_number_to_u64(number: &Number) -> u64 {
|
||||
match number {
|
||||
Number::Int(big_int) => {
|
||||
if let Some(x) = big_int.to_u64() {
|
||||
x
|
||||
} else {
|
||||
unreachable!("Internal error: convert_number_to_u64 given incompatible number")
|
||||
}
|
||||
}
|
||||
Number::Decimal(big_decimal) => {
|
||||
if let Some(x) = big_decimal.to_u64() {
|
||||
x
|
||||
} else {
|
||||
unreachable!("Internal error: convert_number_to_u64 given incompatible number")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Unit {
    /// The canonical text suffix for this unit, as used in pretty-printing.
    pub fn as_str(self) -> &'static str {
        match self {
            Unit::Byte => "B",
            Unit::Kilobyte => "KB",
            Unit::Megabyte => "MB",
            Unit::Gigabyte => "GB",
            Unit::Terabyte => "TB",
            Unit::Petabyte => "PB",
            Unit::Second => "s",
            // NOTE: lowercase "m" is minutes; uppercase "M" (below) is months.
            Unit::Minute => "m",
            Unit::Hour => "h",
            Unit::Day => "d",
            Unit::Week => "w",
            Unit::Month => "M",
            Unit::Year => "y",
        }
    }

    /// Scale `size` by this unit and produce an untagged value.
    ///
    /// Filesize units multiply by powers of 1024 and keep the numeric type;
    /// duration units convert to a whole number of seconds. Months and years
    /// are approximated as 30 and 365 days respectively.
    pub fn compute(self, size: &Number) -> UntaggedValue {
        let size = size.clone();

        match self {
            Unit::Byte => number(size),
            Unit::Kilobyte => number(size * 1024),
            Unit::Megabyte => number(size * 1024 * 1024),
            Unit::Gigabyte => number(size * 1024 * 1024 * 1024),
            Unit::Terabyte => number(size * 1024 * 1024 * 1024 * 1024),
            Unit::Petabyte => number(size * 1024 * 1024 * 1024 * 1024 * 1024),
            Unit::Second => duration(convert_number_to_u64(&size)),
            Unit::Minute => duration(60 * convert_number_to_u64(&size)),
            Unit::Hour => duration(60 * 60 * convert_number_to_u64(&size)),
            Unit::Day => duration(24 * 60 * 60 * convert_number_to_u64(&size)),
            Unit::Week => duration(7 * 24 * 60 * 60 * convert_number_to_u64(&size)),
            Unit::Month => duration(30 * 24 * 60 * 60 * convert_number_to_u64(&size)),
            Unit::Year => duration(365 * 24 * 60 * 60 * convert_number_to_u64(&size)),
        }
    }
}
|
||||
|
||||
fn number(number: impl Into<Number>) -> UntaggedValue {
|
||||
let number = number.into();
|
||||
|
||||
match number {
|
||||
Number::Int(int) => UntaggedValue::Primitive(Primitive::Int(int)),
|
||||
Number::Decimal(decimal) => UntaggedValue::Primitive(Primitive::Decimal(decimal)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn duration(secs: u64) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Duration(secs))
|
||||
}
|
||||
|
||||
impl FromStr for Unit {
    type Err = ();
    /// Parse a unit suffix from its textual form.
    ///
    /// Filesize units accept several capitalizations (e.g. "KB", "kb", "Kb").
    /// Duration units are case-sensitive single letters — note "m" is minute
    /// while "M" is month, so case-folding the input here would be a bug.
    fn from_str(input: &str) -> Result<Self, <Self as std::str::FromStr>::Err> {
        match input {
            "B" | "b" => Ok(Unit::Byte),
            "KB" | "kb" | "Kb" | "K" | "k" => Ok(Unit::Kilobyte),
            "MB" | "mb" | "Mb" => Ok(Unit::Megabyte),
            "GB" | "gb" | "Gb" => Ok(Unit::Gigabyte),
            "TB" | "tb" | "Tb" => Ok(Unit::Terabyte),
            "PB" | "pb" | "Pb" => Ok(Unit::Petabyte),
            "s" => Ok(Unit::Second),
            "m" => Ok(Unit::Minute),
            "h" => Ok(Unit::Hour),
            "d" => Ok(Unit::Day),
            "w" => Ok(Unit::Week),
            "M" => Ok(Unit::Month),
            "y" => Ok(Unit::Year),
            _ => Err(()),
        }
    }
}
|
424
crates/nu-parser/src/parse_command.rs
Normal file
424
crates/nu-parser/src/parse_command.rs
Normal file
@ -0,0 +1,424 @@
|
||||
use crate::hir::syntax_shape::{
|
||||
color_fallible_syntax, color_syntax, expand_expr, flat_shape::FlatShape, spaced,
|
||||
BackoffColoringMode, ColorSyntax, MaybeSpaceShape,
|
||||
};
|
||||
use crate::TokensIterator;
|
||||
use crate::{
|
||||
hir::{self, ExpandContext, NamedArguments},
|
||||
Flag,
|
||||
};
|
||||
use log::trace;
|
||||
use nu_source::{PrettyDebugWithSource, Span, Spanned, SpannedItem, Text};
|
||||
|
||||
use nu_errors::{ArgumentError, ParseError};
|
||||
use nu_protocol::{NamedType, PositionalType, Signature};
|
||||
|
||||
/// Parse the arguments that follow a command name against its `Signature`.
///
/// Phases, in order:
/// 1. Named flags: each declared flag is searched for anywhere in the token
///    stream and removed; its value (if any) is expanded in place.
/// 2. Positional arguments, in declaration order.
/// 3. Rest positionals, greedily, if the signature declares them.
///
/// Returns `Ok(Some((positional, named)))` where either side is `None` when
/// empty.
///
/// # Errors
/// Returns a `ParseError` when a mandatory flag or positional is missing, or
/// when a flag is present but its value is not.
pub fn parse_command_tail(
    config: &Signature,
    context: &ExpandContext,
    tail: &mut TokensIterator,
    command_span: Span,
) -> Result<Option<(Option<Vec<hir::Expression>>, Option<NamedArguments>)>, ParseError> {
    let mut named = NamedArguments::new();
    trace_remaining("nodes", &tail, context.source());

    for (name, kind) in &config.named {
        trace!(target: "nu::parse", "looking for {} : {:?}", name, kind);

        match &kind.0 {
            NamedType::Switch => {
                // A switch has no value; presence alone is recorded.
                let flag = extract_switch(name, tail, context.source());

                named.insert_switch(name, flag);
            }
            NamedType::Mandatory(syntax_type) => {
                match extract_mandatory(config, name, tail, context.source(), command_span) {
                    Err(err) => return Err(err), // produce a correct diagnostic
                    Ok((pos, flag)) => {
                        // Jump to where the flag was so its value is expanded
                        // from the right position.
                        tail.move_to(pos);

                        if tail.at_end() {
                            return Err(ParseError::argument_error(
                                config.name.clone().spanned(flag.span),
                                ArgumentError::MissingValueForName(name.to_string()),
                            ));
                        }

                        let expr = expand_expr(&spaced(*syntax_type), tail, context)?;

                        // Reset the cursor for the next flag search.
                        tail.restart();
                        named.insert_mandatory(name, expr);
                    }
                }
            }
            NamedType::Optional(syntax_type) => {
                match extract_optional(name, tail, context.source()) {
                    Err(err) => return Err(err), // produce a correct diagnostic
                    Ok(Some((pos, flag))) => {
                        tail.move_to(pos);

                        // The flag itself was present, so a missing value is
                        // still an error even though the flag is optional.
                        if tail.at_end() {
                            return Err(ParseError::argument_error(
                                config.name.clone().spanned(flag.span),
                                ArgumentError::MissingValueForName(name.to_string()),
                            ));
                        }

                        let expr = expand_expr(&spaced(*syntax_type), tail, context);

                        // A value that fails to expand degrades to "flag with
                        // no value" rather than aborting the parse.
                        match expr {
                            Err(_) => named.insert_optional(name, None),
                            Ok(expr) => named.insert_optional(name, Some(expr)),
                        }

                        tail.restart();
                    }

                    Ok(None) => {
                        tail.restart();
                        named.insert_optional(name, None);
                    }
                }
            }
        };
    }

    trace_remaining("after named", &tail, context.source());

    let mut positional = vec![];

    for arg in &config.positional {
        trace!(target: "nu::parse", "Processing positional {:?}", arg);

        match &arg.0 {
            PositionalType::Mandatory(..) => {
                // Running out of tokens before a mandatory positional is an error.
                if tail.at_end_possible_ws() {
                    return Err(ParseError::argument_error(
                        config.name.clone().spanned(command_span),
                        ArgumentError::MissingMandatoryPositional(arg.0.name().to_string()),
                    ));
                }
            }

            PositionalType::Optional(..) => {
                // Optional positionals simply stop consuming when tokens run out.
                if tail.at_end_possible_ws() {
                    break;
                }
            }
        }

        let result = expand_expr(&spaced(arg.0.syntax_type()), tail, context)?;

        positional.push(result);
    }

    trace_remaining("after positional", &tail, context.source());

    if let Some((syntax_type, _)) = config.rest_positional {
        let mut out = vec![];

        // Greedily expand the rest-positional shape until tokens are exhausted.
        loop {
            if tail.at_end_possible_ws() {
                break;
            }

            let next = expand_expr(&spaced(syntax_type), tail, context)?;

            out.push(next);
        }

        positional.extend(out);
    }

    trace_remaining("after rest", &tail, context.source());

    trace!(target: "nu::parse", "Constructed positional={:?} named={:?}", positional, named);

    // Normalize empty collections to None so callers can cheaply test presence.
    let positional = if positional.is_empty() {
        None
    } else {
        Some(positional)
    };

    // TODO: Error if extra unconsumed positional arguments

    let named = if named.named.is_empty() {
        None
    } else {
        Some(named)
    };

    trace!(target: "nu::parse", "Normalized positional={:?} named={:?}", positional, named);

    Ok(Some((positional, named)))
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct ColoringArgs {
|
||||
vec: Vec<Option<Vec<Spanned<FlatShape>>>>,
|
||||
}
|
||||
|
||||
impl ColoringArgs {
|
||||
fn new(len: usize) -> ColoringArgs {
|
||||
let vec = vec![None; len];
|
||||
ColoringArgs { vec }
|
||||
}
|
||||
|
||||
fn insert(&mut self, pos: usize, shapes: Vec<Spanned<FlatShape>>) {
|
||||
self.vec[pos] = Some(shapes);
|
||||
}
|
||||
|
||||
fn spread_shapes(self, shapes: &mut Vec<Spanned<FlatShape>>) {
|
||||
for item in self.vec {
|
||||
match item {
|
||||
None => {}
|
||||
Some(vec) => {
|
||||
shapes.extend(vec);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Syntax-coloring counterpart of `parse_command_tail`: walks a command's
/// arguments against its signature and records a shape for every token
/// instead of building HIR.
#[derive(Debug, Copy, Clone)]
pub struct CommandTailShape;

impl ColorSyntax for CommandTailShape {
    type Info = ();
    type Input = Signature;

    fn name(&self) -> &'static str {
        "CommandTailShape"
    }

    fn color_syntax<'a, 'b>(
        &self,
        signature: &Signature,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Self::Info {
        use nu_protocol::SyntaxShape;

        // Shapes are collected per token position first and spread into the
        // final list in token order at the very end (flags can be extracted
        // out of order).
        let mut args = ColoringArgs::new(token_nodes.len());
        trace_remaining("nodes", &token_nodes, context.source());

        // Color a flag and, best-effort, the value that follows it.
        fn insert_flag(
            token_nodes: &mut TokensIterator,
            syntax_type: SyntaxShape,
            args: &mut ColoringArgs,
            flag: Flag,
            pos: usize,
            context: &ExpandContext,
        ) {
            let (_, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| {
                token_nodes.color_shape(flag.color());
                token_nodes.move_to(pos);

                if token_nodes.at_end() {
                    return Ok(());
                }

                // We still want to color the flag even if the following tokens don't match, so don't
                // propagate the error to the parent atomic block if it fails
                let _ = token_nodes.atomic(|token_nodes| {
                    // We can live with unmatched syntax after a mandatory flag
                    color_syntax(&MaybeSpaceShape, token_nodes, context);

                    // If the part after a mandatory flag isn't present, that's ok, but we
                    // should roll back any whitespace we chomped
                    color_fallible_syntax(&syntax_type, token_nodes, context)?;

                    Ok(())
                });

                Ok(())
            });

            args.insert(pos, shapes);
            token_nodes.restart();
        }

        for (name, kind) in &signature.named {
            trace!(target: "nu::color_syntax", "looking for {} : {:?}", name, kind);

            match &kind.0 {
                NamedType::Switch => {
                    // A switch carries no value: only the flag token is colored.
                    if let Some((pos, flag)) =
                        token_nodes.extract(|t| t.as_flag(name, context.source()))
                    {
                        args.insert(pos, vec![flag.color()])
                    }
                }
                NamedType::Mandatory(syntax_type) => {
                    match extract_mandatory(
                        signature,
                        name,
                        token_nodes,
                        context.source(),
                        Span::unknown(),
                    ) {
                        Err(_) => {
                            // The mandatory flag didn't exist at all, so there's nothing to color
                        }
                        Ok((pos, flag)) => {
                            insert_flag(token_nodes, *syntax_type, &mut args, flag, pos, context)
                        }
                    }
                }
                NamedType::Optional(syntax_type) => {
                    match extract_optional(name, token_nodes, context.source()) {
                        Err(_) => {
                            // The optional flag didn't exist at all, so there's nothing to color
                        }
                        Ok(Some((pos, flag))) => {
                            insert_flag(token_nodes, *syntax_type, &mut args, flag, pos, context)
                        }

                        Ok(None) => {
                            token_nodes.restart();
                        }
                    }
                }
            };
        }

        trace_remaining("after named", &token_nodes, context.source());

        for arg in &signature.positional {
            trace!("Processing positional {:?}", arg);

            // Unlike parsing, a missing mandatory positional is not an error
            // here — coloring just stops consuming.
            match &arg.0 {
                PositionalType::Mandatory(..) => {
                    if token_nodes.at_end() {
                        break;
                    }
                }

                PositionalType::Optional(..) => {
                    if token_nodes.at_end() {
                        break;
                    }
                }
            }

            let pos = token_nodes.pos(false);

            match pos {
                None => break,
                Some(pos) => {
                    // We can live with an unmatched positional argument. Hopefully it will be
                    // matched by a future token
                    let (_, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| {
                        color_syntax(&MaybeSpaceShape, token_nodes, context);

                        // If no match, we should roll back any whitespace we chomped
                        color_fallible_syntax(&arg.0.syntax_type(), token_nodes, context)?;

                        Ok(())
                    });

                    args.insert(pos, shapes);
                }
            }
        }

        trace_remaining("after positional", &token_nodes, context.source());

        if let Some((syntax_type, _)) = signature.rest_positional {
            loop {
                if token_nodes.at_end_possible_ws() {
                    break;
                }

                let pos = token_nodes.pos(false);

                match pos {
                    None => break,
                    Some(pos) => {
                        // If any arguments don't match, we'll fall back to backoff coloring mode
                        let (result, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| {
                            color_syntax(&MaybeSpaceShape, token_nodes, context);

                            // If no match, we should roll back any whitespace we chomped
                            color_fallible_syntax(&syntax_type, token_nodes, context)?;

                            Ok(())
                        });

                        args.insert(pos, shapes);

                        match result {
                            Err(_) => break,
                            Ok(_) => continue,
                        }
                    }
                }
            }
        }

        // Flush per-position shapes into the shared shape list, in token order.
        token_nodes.silently_mutate_shapes(|shapes| args.spread_shapes(shapes));

        // Consume any remaining tokens with backoff coloring mode
        color_syntax(&BackoffColoringMode, token_nodes, context);

        // This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring
        // this solution.
        token_nodes.sort_shapes()
    }
}
|
||||
|
||||
/// Find and remove the switch flag `name`, returning it if it was present.
fn extract_switch(name: &str, tokens: &mut hir::TokensIterator<'_>, source: &Text) -> Option<Flag> {
    let found = tokens.extract(|token| token.as_flag(name, source));
    found.map(|(_pos, flag)| flag)
}
|
||||
|
||||
fn extract_mandatory(
|
||||
config: &Signature,
|
||||
name: &str,
|
||||
tokens: &mut hir::TokensIterator<'_>,
|
||||
source: &Text,
|
||||
span: Span,
|
||||
) -> Result<(usize, Flag), ParseError> {
|
||||
let flag = tokens.extract(|t| t.as_flag(name, source));
|
||||
|
||||
match flag {
|
||||
None => Err(ParseError::argument_error(
|
||||
config.name.clone().spanned(span),
|
||||
ArgumentError::MissingMandatoryFlag(name.to_string()),
|
||||
)),
|
||||
|
||||
Some((pos, flag)) => {
|
||||
tokens.remove(pos);
|
||||
Ok((pos, flag))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn extract_optional(
|
||||
name: &str,
|
||||
tokens: &mut hir::TokensIterator<'_>,
|
||||
source: &Text,
|
||||
) -> Result<Option<(usize, Flag)>, ParseError> {
|
||||
let flag = tokens.extract(|t| t.as_flag(name, source));
|
||||
|
||||
match flag {
|
||||
None => Ok(None),
|
||||
Some((pos, flag)) => {
|
||||
tokens.remove(pos);
|
||||
Ok(Some((pos, flag)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Log the tokens still unconsumed in `tail`, each wrapped in `%...%`,
/// under the `nu::parse` trace target.
///
/// The join is built inside the `trace!` invocation so the work is only done
/// when that log target is actually enabled.
pub fn trace_remaining(desc: &'static str, tail: &hir::TokensIterator<'_>, source: &Text) {
    trace!(
        target: "nu::parse",
        "{} = {:?}",
        desc,
        itertools::join(
            tail.debug_remaining()
                .iter()
                .map(|i| format!("%{}%", i.debug(source))),
            " "
        )
    );
}
|
24
crates/nu-plugin/Cargo.toml
Normal file
24
crates/nu-plugin/Cargo.toml
Normal file
@ -0,0 +1,24 @@
|
||||
[package]
|
||||
name = "nu-plugin"
|
||||
version = "0.8.0"
|
||||
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
||||
edition = "2018"
|
||||
description = "Nushell Plugin"
|
||||
license = "MIT"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
nu-protocol = { path = "../nu-protocol", version = "0.8.0" }
|
||||
nu-source = { path = "../nu-source", version = "0.8.0" }
|
||||
nu-errors = { path = "../nu-errors", version = "0.8.0" }
|
||||
nu-value-ext = { path = "../nu-value-ext", version = "0.8.0" }
|
||||
|
||||
indexmap = { version = "1.3.0", features = ["serde-1"] }
|
||||
serde = { version = "1.0.103", features = ["derive"] }
|
||||
num-bigint = { version = "0.2.3", features = ["serde"] }
|
||||
serde_json = "1.0.44"
|
||||
|
||||
[build-dependencies]
|
||||
nu-build = { version = "0.8.0", path = "../nu-build" }
|
3
crates/nu-plugin/build.rs
Normal file
3
crates/nu-plugin/build.rs
Normal file
@ -0,0 +1,3 @@
|
||||
// Build script: delegates to the shared nu-build helper so every crate in the
// workspace runs the same pre-build steps.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    nu_build::build()
}
|
4
crates/nu-plugin/src/lib.rs
Normal file
4
crates/nu-plugin/src/lib.rs
Normal file
@ -0,0 +1,4 @@
|
||||
mod plugin;
|
||||
pub mod test_helpers;
|
||||
|
||||
pub use crate::plugin::{serve_plugin, Plugin};
|
@ -1,6 +1,5 @@
|
||||
use crate::Signature;
|
||||
use crate::Tagged;
|
||||
use crate::{CallInfo, ReturnValue, ShellError, Value};
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{outln, CallInfo, ReturnValue, Signature, Value};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::io;
|
||||
|
||||
@ -11,7 +10,7 @@ pub trait Plugin {
|
||||
Ok(vec![])
|
||||
}
|
||||
|
||||
fn filter(&mut self, _input: Tagged<Value>) -> Result<Vec<ReturnValue>, ShellError> {
|
||||
fn filter(&mut self, _input: Value) -> Result<Vec<ReturnValue>, ShellError> {
|
||||
Ok(vec![])
|
||||
}
|
||||
|
||||
@ -19,20 +18,20 @@ pub trait Plugin {
|
||||
Ok(vec![])
|
||||
}
|
||||
|
||||
fn sink(&mut self, _call_info: CallInfo, _input: Vec<Tagged<Value>>) {}
|
||||
fn sink(&mut self, _call_info: CallInfo, _input: Vec<Value>) {}
|
||||
|
||||
fn quit(&mut self) {}
|
||||
}
|
||||
|
||||
pub fn serve_plugin(plugin: &mut dyn Plugin) {
|
||||
let args = std::env::args();
|
||||
let mut args = std::env::args();
|
||||
if args.len() > 1 {
|
||||
let input = args.skip(1).next();
|
||||
let input = args.nth(1);
|
||||
|
||||
let input = match input {
|
||||
Some(arg) => std::fs::read_to_string(arg),
|
||||
None => {
|
||||
send_response(ShellError::string(format!("No input given.")));
|
||||
send_response(ShellError::untagged_runtime_error("No input given."));
|
||||
return;
|
||||
}
|
||||
};
|
||||
@ -64,7 +63,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) {
|
||||
return;
|
||||
}
|
||||
e => {
|
||||
send_response(ShellError::string(format!(
|
||||
send_response(ShellError::untagged_runtime_error(format!(
|
||||
"Could not handle plugin message: {} {:?}",
|
||||
input, e
|
||||
)));
|
||||
@ -102,7 +101,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) {
|
||||
break;
|
||||
}
|
||||
e => {
|
||||
send_response(ShellError::string(format!(
|
||||
send_response(ShellError::untagged_runtime_error(format!(
|
||||
"Could not handle plugin message: {} {:?}",
|
||||
input, e
|
||||
)));
|
||||
@ -111,7 +110,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) {
|
||||
}
|
||||
}
|
||||
e => {
|
||||
send_response(ShellError::string(format!(
|
||||
send_response(ShellError::untagged_runtime_error(format!(
|
||||
"Could not handle plugin message: {:?}",
|
||||
e,
|
||||
)));
|
||||
@ -143,8 +142,8 @@ fn send_response<T: Serialize>(result: T) {
|
||||
let response_raw = serde_json::to_string(&response);
|
||||
|
||||
match response_raw {
|
||||
Ok(response) => println!("{}", response),
|
||||
Err(err) => println!("{}", err),
|
||||
Ok(response) => outln!("{}", response),
|
||||
Err(err) => outln!("{}", err),
|
||||
}
|
||||
}
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
@ -152,15 +151,9 @@ fn send_response<T: Serialize>(result: T) {
|
||||
#[allow(non_camel_case_types)]
|
||||
pub enum NuCommand {
|
||||
config,
|
||||
begin_filter {
|
||||
params: CallInfo,
|
||||
},
|
||||
filter {
|
||||
params: Tagged<Value>,
|
||||
},
|
||||
begin_filter { params: CallInfo },
|
||||
filter { params: Value },
|
||||
end_filter,
|
||||
sink {
|
||||
params: (CallInfo, Vec<Tagged<Value>>),
|
||||
},
|
||||
sink { params: (CallInfo, Vec<Value>) },
|
||||
quit,
|
||||
}
|
213
crates/nu-plugin/src/test_helpers.rs
Normal file
213
crates/nu-plugin/src/test_helpers.rs
Normal file
@ -0,0 +1,213 @@
|
||||
use crate::Plugin;
|
||||
use indexmap::IndexMap;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{CallInfo, EvaluatedArgs, ReturnSuccess, ReturnValue, UntaggedValue, Value};
|
||||
use nu_source::Tag;
|
||||
|
||||
/// Test harness that drives a `Plugin` implementation through the same
/// begin_filter / filter / end_filter / quit lifecycle the real host uses.
pub struct PluginTest<'a, T: Plugin> {
    // The plugin under test; mutated across lifecycle calls.
    plugin: &'a mut T,
    // Call info handed to begin_filter; defaults to an empty stub.
    call_info: CallInfo,
    // Value fed to filter; defaults to `nothing`.
    input: Value,
}
|
||||
|
||||
impl<'a, T: Plugin> PluginTest<'a, T> {
|
||||
pub fn for_plugin(plugin: &'a mut T) -> Self {
|
||||
PluginTest {
|
||||
plugin,
|
||||
call_info: CallStub::new().create(),
|
||||
input: UntaggedValue::nothing().into_value(Tag::unknown()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn args(&mut self, call_info: CallInfo) -> &mut PluginTest<'a, T> {
|
||||
self.call_info = call_info;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn configure(&mut self, callback: impl FnOnce(Vec<String>)) -> &mut PluginTest<'a, T> {
|
||||
let signature = self
|
||||
.plugin
|
||||
.config()
|
||||
.expect("There was a problem configuring the plugin.");
|
||||
callback(signature.named.keys().map(String::from).collect());
|
||||
self
|
||||
}
|
||||
|
||||
pub fn input(&mut self, value: Value) -> &mut PluginTest<'a, T> {
|
||||
self.input = value;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn test(&mut self) -> Result<Vec<ReturnValue>, ShellError> {
|
||||
let return_values = self.plugin.filter(self.input.clone());
|
||||
|
||||
let mut return_values = match return_values {
|
||||
Ok(filtered) => filtered,
|
||||
Err(reason) => return Err(reason),
|
||||
};
|
||||
|
||||
let end = self.plugin.end_filter();
|
||||
|
||||
match end {
|
||||
Ok(filter_ended) => return_values.extend(filter_ended),
|
||||
Err(reason) => return Err(reason),
|
||||
}
|
||||
|
||||
self.plugin.quit();
|
||||
Ok(return_values)
|
||||
}
|
||||
|
||||
pub fn setup(
|
||||
&mut self,
|
||||
callback: impl FnOnce(&mut T, Result<Vec<ReturnValue>, ShellError>),
|
||||
) -> &mut PluginTest<'a, T> {
|
||||
let call_stub = self.call_info.clone();
|
||||
|
||||
self.configure(|flags_configured| {
|
||||
let flags_registered = &call_stub.args.named;
|
||||
|
||||
let flag_passed = match flags_registered {
|
||||
Some(names) => Some(names.keys().map(String::from).collect::<Vec<String>>()),
|
||||
None => None,
|
||||
};
|
||||
|
||||
if let Some(flags) = flag_passed {
|
||||
for flag in flags {
|
||||
assert!(
|
||||
flags_configured.iter().any(|f| *f == flag),
|
||||
format!(
|
||||
"The flag you passed ({}) is not configured in the plugin.",
|
||||
flag
|
||||
)
|
||||
);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let began = self.plugin.begin_filter(call_stub);
|
||||
|
||||
let return_values = match began {
|
||||
Ok(values) => Ok(values),
|
||||
Err(reason) => Err(reason),
|
||||
};
|
||||
|
||||
callback(self.plugin, return_values);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// Entry point for plugin tests: wrap `plugin` in a harness with default
/// call info and input.
pub fn plugin<T: Plugin>(plugin: &mut T) -> PluginTest<T> {
    PluginTest::for_plugin(plugin)
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct CallStub {
|
||||
positionals: Vec<Value>,
|
||||
flags: IndexMap<String, Value>,
|
||||
}
|
||||
|
||||
impl CallStub {
|
||||
pub fn new() -> Self {
|
||||
Default::default()
|
||||
}
|
||||
|
||||
pub fn with_named_parameter(&mut self, name: &str, value: Value) -> &mut Self {
|
||||
self.flags.insert(name.to_string(), value);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_long_flag(&mut self, name: &str) -> &mut Self {
|
||||
self.flags.insert(
|
||||
name.to_string(),
|
||||
UntaggedValue::boolean(true).into_value(Tag::unknown()),
|
||||
);
|
||||
self
|
||||
}
|
||||
|
||||
pub fn with_parameter(&mut self, name: &str) -> Result<&mut Self, ShellError> {
|
||||
let fields: Vec<Value> = name
|
||||
.split('.')
|
||||
.map(|s| UntaggedValue::string(s.to_string()).into_value(Tag::unknown()))
|
||||
.collect();
|
||||
|
||||
self.positionals.push(value::column_path(&fields)?);
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
pub fn create(&self) -> CallInfo {
|
||||
CallInfo {
|
||||
args: EvaluatedArgs::new(Some(self.positionals.clone()), Some(self.flags.clone())),
|
||||
name_tag: Tag::unknown(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect_return_value_at(
|
||||
for_results: Result<Vec<Result<ReturnSuccess, ShellError>>, ShellError>,
|
||||
at: usize,
|
||||
) -> Value {
|
||||
let return_values = for_results
|
||||
.expect("Failed! This seems to be an error getting back the results from the plugin.");
|
||||
|
||||
for (idx, item) in return_values.iter().enumerate() {
|
||||
let item = match item {
|
||||
Ok(return_value) => return_value,
|
||||
Err(reason) => panic!(format!("{}", reason)),
|
||||
};
|
||||
|
||||
if idx == at {
|
||||
if let Some(value) = item.raw_value() {
|
||||
return value;
|
||||
} else {
|
||||
panic!("Internal error: could not get raw value in expect_return_value_at")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
panic!(format!(
|
||||
"Couldn't get return value from stream at {}. (There are {} items)",
|
||||
at,
|
||||
return_values.len() - 1
|
||||
))
|
||||
}
|
||||
|
||||
/// Convenience constructors for `Value`s used when writing plugin tests.
pub mod value {
    use nu_errors::ShellError;
    use nu_protocol::{Primitive, TaggedDictBuilder, UntaggedValue, Value};
    use nu_source::Tag;
    use nu_value_ext::ValueExt;
    use num_bigint::BigInt;

    /// Look up `key` in `for_value` and return a clone of the entry.
    pub fn get_data(for_value: Value, key: &str) -> Value {
        for_value.get_data(&key.to_string()).borrow().clone()
    }

    /// An untagged integer value.
    pub fn int(i: impl Into<BigInt>) -> Value {
        UntaggedValue::Primitive(Primitive::Int(i.into())).into_untagged_value()
    }

    /// An untagged string value.
    pub fn string(input: impl Into<String>) -> Value {
        UntaggedValue::string(input.into()).into_untagged_value()
    }

    /// A one-row record with a single `key: value` string column.
    pub fn structured_sample_record(key: &str, value: &str) -> Value {
        let mut record = TaggedDictBuilder::new(Tag::unknown());
        record.insert_untagged(key, UntaggedValue::string(value));
        record.into_value()
    }

    /// A bare string value (no record structure).
    pub fn unstructured_sample_record(value: &str) -> Value {
        UntaggedValue::string(value).into_value(Tag::unknown())
    }

    /// An untagged table built from the given rows.
    pub fn table(list: &[Value]) -> Value {
        UntaggedValue::table(list).into_untagged_value()
    }

    /// A column-path primitive built from path-segment values.
    ///
    /// # Errors
    /// Returns an error when the segments cannot be converted to a column path.
    pub fn column_path(paths: &[Value]) -> Result<Value, ShellError> {
        Ok(UntaggedValue::Primitive(Primitive::ColumnPath(
            table(&paths.to_vec()).as_column_path()?.item,
        ))
        .into_untagged_value())
    }
}
|
43
crates/nu-protocol/Cargo.toml
Normal file
43
crates/nu-protocol/Cargo.toml
Normal file
@ -0,0 +1,43 @@
|
||||
[package]
|
||||
name = "nu-protocol"
|
||||
version = "0.8.0"
|
||||
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
||||
edition = "2018"
|
||||
description = "Core values and protocols for Nushell"
|
||||
license = "MIT"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
nu-source = { path = "../nu-source", version = "0.8.0" }
|
||||
nu-errors = { path = "../nu-errors", version = "0.8.0" }
|
||||
|
||||
serde = { version = "1.0.103", features = ["derive"] }
|
||||
indexmap = { version = "1.3.0", features = ["serde-1"] }
|
||||
num-bigint = { version = "0.2.3", features = ["serde"] }
|
||||
bigdecimal = { version = "0.1.0", features = ["serde"] }
|
||||
chrono = { version = "0.4.10", features = ["serde"] }
|
||||
num-traits = "0.2.8"
|
||||
serde_bytes = "0.11.3"
|
||||
getset = "0.0.9"
|
||||
derive-new = "0.5.8"
|
||||
ansi_term = "0.12.1"
|
||||
language-reporting = "0.4.0"
|
||||
nom = "5.0.1"
|
||||
nom_locate = "1.0.0"
|
||||
nom-tracable = "0.4.1"
|
||||
typetag = "0.1.4"
|
||||
query_interface = "0.3.5"
|
||||
byte-unit = "3.0.3"
|
||||
chrono-humanize = "0.0.11"
|
||||
natural = "0.3.0"
|
||||
|
||||
# implement conversions
|
||||
subprocess = "0.1.18"
|
||||
serde_yaml = "0.8"
|
||||
toml = "0.5.5"
|
||||
serde_json = "1.0.44"
|
||||
|
||||
[build-dependencies]
|
||||
nu-build = { version = "0.8.0", path = "../nu-build" }
|
3
crates/nu-protocol/build.rs
Normal file
3
crates/nu-protocol/build.rs
Normal file
@ -0,0 +1,3 @@
|
||||
// Build script: delegates to the shared nu-build helper so every crate in the
// workspace runs the same pre-build steps.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    nu_build::build()
}
|
97
crates/nu-protocol/src/call_info.rs
Normal file
97
crates/nu-protocol/src/call_info.rs
Normal file
@ -0,0 +1,97 @@
|
||||
use crate::value::Value;
|
||||
use derive_new::new;
|
||||
use indexmap::IndexMap;
|
||||
use nu_errors::ShellError;
|
||||
use nu_source::Tag;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// A command invocation as seen by a command or plugin: the evaluated
/// arguments plus the source tag of the command name (for error reporting).
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct CallInfo {
    // Positional and named arguments, already evaluated to `Value`s.
    pub args: EvaluatedArgs,
    // Tag covering the command name in the source.
    pub name_tag: Tag,
}
|
||||
|
||||
#[derive(Debug, Default, new, Serialize, Deserialize, Clone)]
|
||||
pub struct EvaluatedArgs {
|
||||
pub positional: Option<Vec<Value>>,
|
||||
pub named: Option<IndexMap<String, Value>>,
|
||||
}
|
||||
|
||||
impl EvaluatedArgs {
|
||||
pub fn slice_from(&self, from: usize) -> Vec<Value> {
|
||||
let positional = &self.positional;
|
||||
|
||||
match positional {
|
||||
None => vec![],
|
||||
Some(list) => list[from..].to_vec(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn nth(&self, pos: usize) -> Option<&Value> {
|
||||
match &self.positional {
|
||||
None => None,
|
||||
Some(array) => array.get(pos),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect_nth(&self, pos: usize) -> Result<&Value, ShellError> {
|
||||
match &self.positional {
|
||||
None => Err(ShellError::unimplemented("Better error: expect_nth")),
|
||||
Some(array) => match array.get(pos) {
|
||||
None => Err(ShellError::unimplemented("Better error: expect_nth")),
|
||||
Some(item) => Ok(item),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
match &self.positional {
|
||||
None => 0,
|
||||
Some(array) => array.len(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.len() == 0
|
||||
}
|
||||
|
||||
pub fn has(&self, name: &str) -> bool {
|
||||
match &self.named {
|
||||
None => false,
|
||||
Some(named) => named.contains_key(name),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get(&self, name: &str) -> Option<&Value> {
|
||||
match &self.named {
|
||||
None => None,
|
||||
Some(named) => named.get(name),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn positional_iter(&self) -> PositionalIter<'_> {
|
||||
match &self.positional {
|
||||
None => PositionalIter::Empty,
|
||||
Some(v) => {
|
||||
let iter = v.iter();
|
||||
PositionalIter::Array(iter)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub enum PositionalIter<'a> {
|
||||
Empty,
|
||||
Array(std::slice::Iter<'a, Value>),
|
||||
}
|
||||
|
||||
impl<'a> Iterator for PositionalIter<'a> {
|
||||
type Item = &'a Value;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
match self {
|
||||
PositionalIter::Empty => None,
|
||||
PositionalIter::Array(iter) => iter.next(),
|
||||
}
|
||||
}
|
||||
}
|
26
crates/nu-protocol/src/lib.rs
Normal file
26
crates/nu-protocol/src/lib.rs
Normal file
@ -0,0 +1,26 @@
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
|
||||
mod call_info;
|
||||
mod maybe_owned;
|
||||
mod return_value;
|
||||
mod signature;
|
||||
mod syntax_shape;
|
||||
mod type_name;
|
||||
mod type_shape;
|
||||
mod value;
|
||||
|
||||
pub use crate::call_info::{CallInfo, EvaluatedArgs};
|
||||
pub use crate::maybe_owned::MaybeOwned;
|
||||
pub use crate::return_value::{CommandAction, ReturnSuccess, ReturnValue};
|
||||
pub use crate::signature::{NamedType, PositionalType, Signature};
|
||||
pub use crate::syntax_shape::SyntaxShape;
|
||||
pub use crate::type_name::{PrettyType, ShellTypeName, SpannedTypeName};
|
||||
pub use crate::type_shape::{Row as RowType, Type};
|
||||
pub use crate::value::column_path::{did_you_mean, ColumnPath, PathMember, UnspannedPathMember};
|
||||
pub use crate::value::dict::{Dictionary, TaggedDictBuilder};
|
||||
pub use crate::value::evaluate::{Evaluate, EvaluateTrait, Scope};
|
||||
pub use crate::value::primitive::format_primitive;
|
||||
pub use crate::value::primitive::Primitive;
|
||||
pub use crate::value::range::{Range, RangeInclusion};
|
||||
pub use crate::value::{UntaggedValue, Value};
|
12
crates/nu-protocol/src/macros.rs
Normal file
12
crates/nu-protocol/src/macros.rs
Normal file
@ -0,0 +1,12 @@
|
||||
// These macros exist to differentiate between intentional writing to stdout
// and stray printlns left by accident

/// Intentional output to stdout; forwards all arguments to `println!`.
/// Use this (not `println!`) so leftover debug prints are easy to grep for.
#[macro_export]
macro_rules! outln {
    ($($tokens:tt)*) => { println!($($tokens)*) }
}

/// Intentional output to stderr; forwards all arguments to `eprintln!`.
#[macro_export]
macro_rules! errln {
    ($($tokens:tt)*) => { eprintln!($($tokens)*) }
}
|
16
crates/nu-protocol/src/maybe_owned.rs
Normal file
16
crates/nu-protocol/src/maybe_owned.rs
Normal file
@ -0,0 +1,16 @@
|
||||
#![allow(clippy::should_implement_trait)]
|
||||
|
||||
/// A value that is either owned outright or borrowed for lifetime `'a`.
/// Lets callers hand back data without forcing a clone.
#[derive(Debug)]
pub enum MaybeOwned<'a, T> {
    Owned(T),
    Borrowed(&'a T),
}

impl<T> MaybeOwned<'_, T> {
    /// Returns a shared reference to the inner value, whichever variant
    /// holds it. (The clippy `should_implement_trait` allow at the top of
    /// this file exists because this inherent `borrow` shadows
    /// `std::borrow::Borrow::borrow`.)
    pub fn borrow(&self) -> &T {
        match self {
            Self::Owned(inner) => inner,
            Self::Borrowed(inner) => inner,
        }
    }
}
|
88
crates/nu-protocol/src/return_value.rs
Normal file
88
crates/nu-protocol/src/return_value.rs
Normal file
@ -0,0 +1,88 @@
|
||||
use crate::value::Value;
|
||||
use nu_errors::ShellError;
|
||||
use nu_source::{b, DebugDocBuilder, PrettyDebug};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Side-effecting actions a command can ask the host shell to perform,
/// beyond producing values.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum CommandAction {
    /// Change the current working directory to the given path.
    ChangePath(String),
    /// Exit the shell.
    Exit,
    /// Surface an error to the host.
    Error(ShellError),
    /// Enter a new shell at the given location.
    EnterShell(String),
    /// Convert the value using the converter registered for the given
    /// extension (e.g. "json"), then continue.
    AutoConvert(Value, String),
    /// Enter a shell that browses the given value.
    EnterValueShell(Value),
    /// Enter the help browser seeded with the given value.
    EnterHelpShell(Value),
    /// Switch to the previous shell on the shell ring.
    PreviousShell,
    /// Switch to the next shell on the shell ring.
    NextShell,
    /// Leave the current shell.
    LeaveShell,
}

impl PrettyDebug for CommandAction {
    /// Renders the action as a debug document for diagnostic output.
    fn pretty(&self) -> DebugDocBuilder {
        match self {
            CommandAction::ChangePath(path) => b::typed("change path", b::description(path)),
            CommandAction::Exit => b::description("exit"),
            CommandAction::Error(_) => b::error("error"),
            CommandAction::AutoConvert(_, extension) => {
                b::typed("auto convert", b::description(extension))
            }
            CommandAction::EnterShell(s) => b::typed("enter shell", b::description(s)),
            CommandAction::EnterValueShell(v) => b::typed("enter value shell", v.pretty()),
            CommandAction::EnterHelpShell(v) => b::typed("enter help shell", v.pretty()),
            CommandAction::PreviousShell => b::description("previous shell"),
            CommandAction::NextShell => b::description("next shell"),
            CommandAction::LeaveShell => b::description("leave shell"),
        }
    }
}
|
||||
|
||||
/// The successful output of one command step.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ReturnSuccess {
    /// A value to pass downstream or display.
    Value(Value),
    /// A value to render using its debug representation.
    DebugValue(Value),
    /// An action for the host shell to carry out (see `CommandAction`).
    Action(CommandAction),
}

impl PrettyDebug for ReturnSuccess {
    fn pretty(&self) -> DebugDocBuilder {
        match self {
            ReturnSuccess::Value(value) => b::typed("value", value.pretty()),
            ReturnSuccess::DebugValue(value) => b::typed("debug value", value.pretty()),
            ReturnSuccess::Action(action) => b::typed("action", action.pretty()),
        }
    }
}

/// Result of a command step: a success or a shell error.
pub type ReturnValue = Result<ReturnSuccess, ShellError>;

// NOTE(review): `Into` is implemented directly here rather than the usual
// `From`, presumably because `ReturnValue` aliases std's `Result` (a foreign
// type) and a `From` impl runs into coherence/orphan-rule territory —
// confirm before converting this to `From`.
impl Into<ReturnValue> for Value {
    fn into(self) -> ReturnValue {
        Ok(ReturnSuccess::Value(self))
    }
}
|
||||
|
||||
impl ReturnSuccess {
|
||||
pub fn raw_value(&self) -> Option<Value> {
|
||||
match self {
|
||||
ReturnSuccess::Value(raw) => Some(raw.clone()),
|
||||
ReturnSuccess::DebugValue(raw) => Some(raw.clone()),
|
||||
ReturnSuccess::Action(_) => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn change_cwd(path: String) -> ReturnValue {
|
||||
Ok(ReturnSuccess::Action(CommandAction::ChangePath(path)))
|
||||
}
|
||||
|
||||
pub fn value(input: impl Into<Value>) -> ReturnValue {
|
||||
Ok(ReturnSuccess::Value(input.into()))
|
||||
}
|
||||
|
||||
pub fn debug_value(input: impl Into<Value>) -> ReturnValue {
|
||||
Ok(ReturnSuccess::DebugValue(input.into()))
|
||||
}
|
||||
|
||||
pub fn action(input: CommandAction) -> ReturnValue {
|
||||
Ok(ReturnSuccess::Action(input))
|
||||
}
|
||||
}
|
204
crates/nu-protocol/src/signature.rs
Normal file
204
crates/nu-protocol/src/signature.rs
Normal file
@ -0,0 +1,204 @@
|
||||
use crate::syntax_shape::SyntaxShape;
|
||||
use crate::type_shape::Type;
|
||||
use indexmap::IndexMap;
|
||||
use nu_source::{b, DebugDocBuilder, PrettyDebug, PrettyDebugWithSource};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// How a named argument (flag) may be supplied.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub enum NamedType {
    /// A boolean flag with no value (present/absent).
    Switch,
    /// A flag that must be supplied, with a value of the given shape.
    Mandatory(SyntaxShape),
    /// A flag that may be supplied, with a value of the given shape.
    Optional(SyntaxShape),
}

/// How a positional argument may be supplied: (display name, expected shape).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum PositionalType {
    Mandatory(String, SyntaxShape),
    Optional(String, SyntaxShape),
}

impl PrettyDebug for PositionalType {
    /// Renders as `name(shape)`, with a trailing `?` marker for optionals.
    fn pretty(&self) -> DebugDocBuilder {
        match self {
            PositionalType::Mandatory(string, shape) => {
                b::description(string) + b::delimit("(", shape.pretty(), ")").into_kind().group()
            }
            PositionalType::Optional(string, shape) => {
                b::description(string)
                    + b::operator("?")
                    + b::delimit("(", shape.pretty(), ")").into_kind().group()
            }
        }
    }
}
|
||||
|
||||
impl PositionalType {
|
||||
pub fn mandatory(name: &str, ty: SyntaxShape) -> PositionalType {
|
||||
PositionalType::Mandatory(name.to_string(), ty)
|
||||
}
|
||||
|
||||
pub fn mandatory_any(name: &str) -> PositionalType {
|
||||
PositionalType::Mandatory(name.to_string(), SyntaxShape::Any)
|
||||
}
|
||||
|
||||
pub fn mandatory_block(name: &str) -> PositionalType {
|
||||
PositionalType::Mandatory(name.to_string(), SyntaxShape::Block)
|
||||
}
|
||||
|
||||
pub fn optional(name: &str, ty: SyntaxShape) -> PositionalType {
|
||||
PositionalType::Optional(name.to_string(), ty)
|
||||
}
|
||||
|
||||
pub fn optional_any(name: &str) -> PositionalType {
|
||||
PositionalType::Optional(name.to_string(), SyntaxShape::Any)
|
||||
}
|
||||
|
||||
pub fn name(&self) -> &str {
|
||||
match self {
|
||||
PositionalType::Mandatory(s, _) => s,
|
||||
PositionalType::Optional(s, _) => s,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn syntax_type(&self) -> SyntaxShape {
|
||||
match *self {
|
||||
PositionalType::Mandatory(_, t) => t,
|
||||
PositionalType::Optional(_, t) => t,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Human-readable help text attached to each argument/flag.
type Description = String;

/// Describes a command's interface: its name, arguments, flags, and
/// (optionally) the stream types it consumes and produces.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Signature {
    /// The command name as typed by the user.
    pub name: String,
    /// One-line usage/help text.
    pub usage: String,
    /// Positional arguments, in declaration order.
    pub positional: Vec<(PositionalType, Description)>,
    /// Shape for trailing "rest" arguments, when the command accepts them.
    pub rest_positional: Option<(SyntaxShape, Description)>,
    /// Named arguments (flags), keyed by flag name; IndexMap preserves
    /// declaration order for help output.
    pub named: IndexMap<String, (NamedType, Description)>,
    /// Output stream type, when declared.
    pub yields: Option<Type>,
    /// Input stream type, when declared.
    pub input: Option<Type>,
    /// Whether the command behaves as a filter over the input stream.
    pub is_filter: bool,
}

impl PrettyDebugWithSource for Signature {
    /// Renders as `signature(name positional…)` for diagnostics.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::typed(
            "signature",
            b::description(&self.name)
                + b::preceded(
                    b::space(),
                    b::intersperse(
                        self.positional
                            .iter()
                            .map(|(ty, _)| ty.pretty_debug(source)),
                        b::space(),
                    ),
                ),
        )
    }
}
|
||||
|
||||
impl Signature {
    /// Creates an empty signature for the command `name`; populate it with
    /// the builder methods below.
    pub fn new(name: impl Into<String>) -> Signature {
        Signature {
            name: name.into(),
            usage: String::new(),
            positional: vec![],
            rest_positional: None,
            named: IndexMap::new(),
            is_filter: false,
            yields: None,
            input: None,
        }
    }

    /// Alias for `new`; reads better at builder-chain call sites.
    pub fn build(name: impl Into<String>) -> Signature {
        Signature::new(name.into())
    }

    /// Sets the usage (help) text.
    pub fn desc(mut self, usage: impl Into<String>) -> Signature {
        self.usage = usage.into();
        self
    }

    /// Appends a required positional argument.
    pub fn required(
        mut self,
        name: impl Into<String>,
        ty: impl Into<SyntaxShape>,
        desc: impl Into<String>,
    ) -> Signature {
        self.positional.push((
            PositionalType::Mandatory(name.into(), ty.into()),
            desc.into(),
        ));

        self
    }

    /// Appends an optional positional argument.
    pub fn optional(
        mut self,
        name: impl Into<String>,
        ty: impl Into<SyntaxShape>,
        desc: impl Into<String>,
    ) -> Signature {
        self.positional.push((
            PositionalType::Optional(name.into(), ty.into()),
            desc.into(),
        ));

        self
    }

    /// Adds an optional named flag that takes a value of shape `ty`.
    pub fn named(
        mut self,
        name: impl Into<String>,
        ty: impl Into<SyntaxShape>,
        desc: impl Into<String>,
    ) -> Signature {
        self.named
            .insert(name.into(), (NamedType::Optional(ty.into()), desc.into()));

        self
    }

    /// Adds a required named flag that takes a value of shape `ty`.
    pub fn required_named(
        mut self,
        name: impl Into<String>,
        ty: impl Into<SyntaxShape>,
        desc: impl Into<String>,
    ) -> Signature {
        self.named
            .insert(name.into(), (NamedType::Mandatory(ty.into()), desc.into()));

        self
    }

    /// Adds a boolean switch flag (present/absent, no value).
    pub fn switch(mut self, name: impl Into<String>, desc: impl Into<String>) -> Signature {
        self.named
            .insert(name.into(), (NamedType::Switch, desc.into()));

        self
    }

    /// Marks the command as a filter over the input stream.
    pub fn filter(mut self) -> Signature {
        self.is_filter = true;
        self
    }

    /// Declares trailing "rest" arguments of shape `ty`.
    pub fn rest(mut self, ty: SyntaxShape, desc: impl Into<String>) -> Signature {
        self.rest_positional = Some((ty, desc.into()));
        self
    }

    /// Declares the command's output stream type.
    pub fn yields(mut self, ty: Type) -> Signature {
        self.yields = Some(ty);
        self
    }

    /// Declares the command's input stream type.
    pub fn input(mut self, ty: Type) -> Signature {
        self.input = Some(ty);
        self
    }
}
|
33
crates/nu-protocol/src/syntax_shape.rs
Normal file
33
crates/nu-protocol/src/syntax_shape.rs
Normal file
@ -0,0 +1,33 @@
|
||||
use nu_source::{b, DebugDocBuilder, PrettyDebug};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// The syntactic shape the parser should expect for an argument.
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub enum SyntaxShape {
    /// Any shape is accepted.
    Any,
    String,
    /// A column name / member reference.
    Member,
    /// A dotted path into row columns.
    ColumnPath,
    Number,
    Range,
    Int,
    /// A filesystem path.
    Path,
    /// A glob pattern.
    Pattern,
    /// A block of code (e.g. `{ ... }`).
    Block,
}

impl PrettyDebug for SyntaxShape {
    /// Renders the shape's human-readable kind name.
    fn pretty(&self) -> DebugDocBuilder {
        b::kind(match self {
            SyntaxShape::Any => "any shape",
            SyntaxShape::String => "string shape",
            SyntaxShape::Member => "member shape",
            SyntaxShape::ColumnPath => "column path shape",
            SyntaxShape::Number => "number shape",
            SyntaxShape::Range => "range shape",
            SyntaxShape::Int => "integer shape",
            SyntaxShape::Path => "file path shape",
            SyntaxShape::Pattern => "pattern shape",
            SyntaxShape::Block => "block shape",
        })
    }
}
|
37
crates/nu-protocol/src/type_name.rs
Normal file
37
crates/nu-protocol/src/type_name.rs
Normal file
@ -0,0 +1,37 @@
|
||||
use nu_source::{DebugDocBuilder, HasSpan, Spanned, SpannedItem, Tagged};
|
||||
|
||||
/// A static, user-facing name for a value's type (e.g. "string", "table").
pub trait ShellTypeName {
    fn type_name(&self) -> &'static str;
}

// A spanned wrapper has the same type name as its item.
impl<T: ShellTypeName> ShellTypeName for Spanned<T> {
    fn type_name(&self) -> &'static str {
        self.item.type_name()
    }
}

// References delegate to the referent.
impl<T: ShellTypeName> ShellTypeName for &T {
    fn type_name(&self) -> &'static str {
        (*self).type_name()
    }
}

/// A type name paired with the source span it came from, for error reporting.
pub trait SpannedTypeName {
    fn spanned_type_name(&self) -> Spanned<&'static str>;
}

// Anything that knows both its type name and its span gets this for free.
impl<T: ShellTypeName + HasSpan> SpannedTypeName for T {
    fn spanned_type_name(&self) -> Spanned<&'static str> {
        self.type_name().spanned(self.span())
    }
}

// Tagged values use their tag's span (a Tagged item may not impl HasSpan).
impl<T: ShellTypeName> SpannedTypeName for Tagged<T> {
    fn spanned_type_name(&self) -> Spanned<&'static str> {
        self.item.type_name().spanned(self.tag.span)
    }
}

/// Renders a value's *type* (not its contents) as a debug document.
pub trait PrettyType {
    fn pretty_type(&self) -> DebugDocBuilder;
}
|
382
crates/nu-protocol/src/type_shape.rs
Normal file
382
crates/nu-protocol/src/type_shape.rs
Normal file
@ -0,0 +1,382 @@
|
||||
use crate::value::dict::Dictionary;
|
||||
use crate::value::primitive::Primitive;
|
||||
use crate::value::range::RangeInclusion;
|
||||
use crate::value::{UntaggedValue, Value};
|
||||
use derive_new::new;
|
||||
use nu_source::{b, DebugDoc, DebugDocBuilder, PrettyDebug};
|
||||
use serde::{Deserialize, Deserializer, Serialize};
|
||||
use std::collections::BTreeMap;
|
||||
use std::fmt::Debug;
|
||||
use std::hash::Hash;
|
||||
|
||||
/**
|
||||
This file describes the structural types of the nushell system.
|
||||
|
||||
Its primary purpose today is to identify "equivalent" values for the purpose
|
||||
of merging rows into a single table or identify rows in a table that have the
|
||||
same shape for reflection.
|
||||
*/
|
||||
|
||||
/// The structural type of a range: (element type, inclusion) for each bound.
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize, new)]
pub struct RangeType {
    from: (Type, RangeInclusion),
    to: (Type, RangeInclusion),
}

/// The structural type of a nushell value. Used to decide whether rows are
/// "equivalent" for table merging and for reflection.
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum Type {
    Nothing,
    Int,
    // Boxed to keep the enum small (RangeType is comparatively large).
    Range(Box<RangeType>),
    Decimal,
    Bytesize,
    String,
    Line,
    ColumnPath,
    Pattern,
    Boolean,
    Date,
    Duration,
    Path,
    Binary,

    /// A row: a shape of named, typed columns.
    Row(Row),
    /// A table: the per-row types, in order.
    Table(Vec<Type>),

    // TODO: Block arguments
    Block,
    // TODO: Error type
    Error,

    // Stream markers (used as bookend markers rather than actual values)
    BeginningOfStream,
    EndOfStream,
}
|
||||
|
||||
/// The shape of a row: column name -> column type. BTreeMap keeps columns in
/// a canonical order so structurally equal rows compare (and hash) equal.
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, new)]
pub struct Row {
    #[new(default)]
    map: BTreeMap<Column, Type>,
}

// Serialized as a plain map (the derive would emit a struct wrapper).
impl Serialize for Row {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.collect_map(self.map.iter())
    }
}

// Mirror of the Serialize impl: read a map back into the BTreeMap.
impl<'de> Deserialize<'de> for Row {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct RowVisitor;

        impl<'de> serde::de::Visitor<'de> for RowVisitor {
            type Value = Row;
            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                write!(formatter, "a row")
            }

            fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
            where
                A: serde::de::MapAccess<'de>,
            {
                let mut new_map = BTreeMap::new();

                // Drain entries until the map reports exhaustion.
                loop {
                    let entry = map.next_entry()?;

                    match entry {
                        None => return Ok(Row { map: new_map }),
                        Some((key, value)) => {
                            new_map.insert(key, value);
                        }
                    }
                }
            }
        }
        deserializer.deserialize_map(RowVisitor)
    }
}
|
||||
|
||||
impl Type {
    /// Maps a primitive value onto its structural type.
    pub fn from_primitive(primitive: &Primitive) -> Type {
        match primitive {
            Primitive::Nothing => Type::Nothing,
            Primitive::Int(_) => Type::Int,
            Primitive::Range(range) => {
                // A range's type records the element type and inclusion of
                // each bound, computed recursively.
                let (left_value, left_inclusion) = &range.from;
                let (right_value, right_inclusion) = &range.to;

                let left_type = (Type::from_primitive(left_value), *left_inclusion);
                let right_type = (Type::from_primitive(right_value), *right_inclusion);

                let range = RangeType::new(left_type, right_type);
                Type::Range(Box::new(range))
            }
            Primitive::Decimal(_) => Type::Decimal,
            Primitive::Bytes(_) => Type::Bytesize,
            Primitive::String(_) => Type::String,
            Primitive::Line(_) => Type::Line,
            Primitive::ColumnPath(_) => Type::ColumnPath,
            Primitive::Pattern(_) => Type::Pattern,
            Primitive::Boolean(_) => Type::Boolean,
            Primitive::Date(_) => Type::Date,
            Primitive::Duration(_) => Type::Duration,
            Primitive::Path(_) => Type::Path,
            Primitive::Binary(_) => Type::Binary,
            Primitive::BeginningOfStream => Type::BeginningOfStream,
            Primitive::EndOfStream => Type::EndOfStream,
        }
    }

    /// Computes the row shape of a dictionary: one typed column per entry.
    pub fn from_dictionary(dictionary: &Dictionary) -> Type {
        let mut map = BTreeMap::new();

        for (key, value) in dictionary.entries.iter() {
            let column = Column::String(key.clone());
            map.insert(column, Type::from_value(value));
        }

        Type::Row(Row { map })
    }

    /// Computes the type of each row of a table, in order.
    pub fn from_table<'a>(table: impl IntoIterator<Item = &'a Value>) -> Type {
        let mut vec = vec![];

        for item in table.into_iter() {
            vec.push(Type::from_value(item))
        }

        Type::Table(vec)
    }

    /// Computes the structural type of any value (tagged or untagged — both
    /// convert to `&UntaggedValue`).
    pub fn from_value<'a>(value: impl Into<&'a UntaggedValue>) -> Type {
        match value.into() {
            UntaggedValue::Primitive(p) => Type::from_primitive(p),
            UntaggedValue::Row(row) => Type::from_dictionary(row),
            UntaggedValue::Table(table) => Type::from_table(table.iter()),
            UntaggedValue::Error(_) => Type::Error,
            UntaggedValue::Block(_) => Type::Block,
        }
    }
}
|
||||
|
||||
impl PrettyDebug for Type {
    /// Renders the type as a debug document for diagnostics.
    fn pretty(&self) -> DebugDocBuilder {
        match self {
            Type::Nothing => ty("nothing"),
            Type::Int => ty("integer"),
            Type::Range(range) => {
                let (left, left_inclusion) = &range.from;
                let (right, right_inclusion) = &range.to;

                // Interval notation: '[' / ']' for inclusive bounds,
                // '(' / ')' for exclusive bounds.
                let left_bracket = b::delimiter(match left_inclusion {
                    RangeInclusion::Exclusive => "(",
                    RangeInclusion::Inclusive => "[",
                });

                let right_bracket = b::delimiter(match right_inclusion {
                    RangeInclusion::Exclusive => ")",
                    RangeInclusion::Inclusive => "]",
                });

                b::typed(
                    "range",
                    (left_bracket
                        + left.pretty()
                        + b::operator(",")
                        + b::space()
                        + right.pretty()
                        + right_bracket)
                        .group(),
                )
            }
            Type::Decimal => ty("decimal"),
            Type::Bytesize => ty("bytesize"),
            Type::String => ty("string"),
            Type::Line => ty("line"),
            Type::ColumnPath => ty("column-path"),
            Type::Pattern => ty("pattern"),
            Type::Boolean => ty("boolean"),
            Type::Date => ty("date"),
            Type::Duration => ty("duration"),
            Type::Path => ty("path"),
            Type::Binary => ty("binary"),
            Type::Error => b::error("error"),
            Type::BeginningOfStream => b::keyword("beginning-of-stream"),
            Type::EndOfStream => b::keyword("end-of-stream"),
            // A row renders as `row key(type) key(type) …`.
            Type::Row(row) => (b::kind("row")
                + b::space()
                + b::intersperse(
                    row.map.iter().map(|(key, ty)| {
                        (b::key(match key {
                            Column::String(string) => string.clone(),
                            Column::Value => "<value>".to_string(),
                        }) + b::delimit("(", ty.pretty(), ")").into_kind())
                        .nest()
                    }),
                    b::space(),
                )
                .nest())
            .nest(),

            Type::Table(table) => {
                // Group identical row types together, remembering which
                // (consecutive) row indices each distinct type covers.
                let mut group: Group<DebugDoc, Vec<(usize, usize)>> = Group::new();

                for (i, item) in table.iter().enumerate() {
                    group.add(item.to_doc(), i);
                }

                (b::kind("table") + b::space() + b::keyword("of")).group()
                    + b::space()
                    + (if group.len() == 1 {
                        // Homogeneous table: print the single row type.
                        let (doc, _) = group.into_iter().collect::<Vec<_>>()[0].clone();
                        DebugDocBuilder::from_doc(doc)
                    } else {
                        // Heterogeneous table: print each distinct row type
                        // prefixed by the index ranges it covers
                        // (`3 to 7: …`).
                        b::intersperse(
                            group.into_iter().map(|(doc, rows)| {
                                (b::intersperse(
                                    rows.iter().map(|(from, to)| {
                                        if from == to {
                                            b::description(from)
                                        } else {
                                            (b::description(from)
                                                + b::space()
                                                + b::keyword("to")
                                                + b::space()
                                                + b::description(to))
                                            .group()
                                        }
                                    }),
                                    b::description(", "),
                                ) + b::description(":")
                                    + b::space()
                                    + DebugDocBuilder::from_doc(doc))
                                .nest()
                            }),
                            b::space(),
                        )
                    })
            }
            Type::Block => ty("block"),
        }
    }
}
|
||||
|
||||
// NOTE(review): no use of DebugEntry is visible in this chunk — it may be
// dead code or used elsewhere in the crate; confirm before removing.
/// A single (column, type) pair for debug rendering.
#[derive(Debug, new)]
struct DebugEntry<'a> {
    key: &'a Column,
    value: &'a Type,
}

impl<'a> PrettyDebug for DebugEntry<'a> {
    /// Renders as `key(type)`.
    fn pretty(&self) -> DebugDocBuilder {
        (b::key(match self.key {
            Column::String(string) => string.clone(),
            Column::Value => "<value>".to_string(),
        }) + b::delimit("(", self.value.pretty(), ")").into_kind())
    }
}

// Shorthand: render a type-kind label.
fn ty(name: impl std::fmt::Display) -> DebugDocBuilder {
    b::kind(format!("{}", name))
}
|
||||
|
||||
/// An accumulator that folds items into a grouped representation
/// (used by the table pretty-printer to coalesce row indices).
pub trait GroupedValue: Debug + Clone {
    type Item;

    /// Creates an empty accumulator.
    fn new() -> Self;
    /// Folds one item into the accumulator.
    fn merge(&mut self, value: Self::Item);
}

/// Groups indices into inclusive `(from, to)` runs: consecutive indices
/// extend the current run, anything else starts a new one.
impl GroupedValue for Vec<(usize, usize)> {
    type Item = usize;

    fn new() -> Vec<(usize, usize)> {
        vec![]
    }

    fn merge(&mut self, new_value: usize) {
        match self.last_mut() {
            // Extend the current run when `new_value` is its immediate
            // successor. Written as `value.1 + 1 == new_value` rather than
            // the original `value.1 == new_value - 1`, which underflows
            // (and panics in debug builds) when `new_value` is 0 and a run
            // already exists.
            Some(value) if value.1 + 1 == new_value => {
                value.1 = new_value;
            }

            // Otherwise start a new single-element run.
            _ => self.push((new_value, new_value)),
        }
    }
}
|
||||
|
||||
/// Groups values by key, folding each value into a per-key accumulator.
/// IndexMap preserves first-seen key order.
#[derive(Debug)]
pub struct Group<K: Debug + Eq + Hash, V: GroupedValue> {
    values: indexmap::IndexMap<K, V>,
}

impl<K, G> Group<K, G>
where
    K: Debug + Eq + Hash,
    G: GroupedValue,
{
    /// Creates an empty group.
    pub fn new() -> Group<K, G> {
        Group {
            values: indexmap::IndexMap::default(),
        }
    }

    /// Number of distinct keys seen so far.
    pub fn len(&self) -> usize {
        self.values.len()
    }

    /// Consumes the group, yielding (key, accumulator) pairs in insertion
    /// order.
    pub fn into_iter(self) -> impl Iterator<Item = (K, G)> {
        self.values.into_iter()
    }

    /// Folds `value` into the accumulator for `key`, creating a fresh
    /// accumulator on first sight of the key.
    pub fn add(&mut self, key: impl Into<K>, value: impl Into<G::Item>) {
        let key = key.into();
        let value = value.into();

        let group = self.values.get_mut(&key);

        match group {
            None => {
                self.values.insert(key, {
                    let mut group = G::new();
                    group.merge(value);
                    group
                });
            }
            Some(group) => {
                group.merge(value);
            }
        }
    }
}
|
||||
|
||||
/// A column identifier in a row shape: either a named column or the
/// placeholder for an unnamed value column.
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub enum Column {
    String(String),
    Value,
}
|
||||
|
||||
impl Into<Column> for String {
|
||||
fn into(self) -> Column {
|
||||
Column::String(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl Into<Column> for &String {
|
||||
fn into(self) -> Column {
|
||||
Column::String(self.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl Into<Column> for &str {
|
||||
fn into(self) -> Column {
|
||||
Column::String(self.to_string())
|
||||
}
|
||||
}
|
300
crates/nu-protocol/src/value.rs
Normal file
300
crates/nu-protocol/src/value.rs
Normal file
@ -0,0 +1,300 @@
|
||||
pub mod column_path;
|
||||
mod convert;
|
||||
mod debug;
|
||||
pub mod dict;
|
||||
pub mod evaluate;
|
||||
pub mod primitive;
|
||||
pub mod range;
|
||||
mod serde_bigdecimal;
|
||||
mod serde_bigint;
|
||||
|
||||
use crate::type_name::{ShellTypeName, SpannedTypeName};
|
||||
use crate::value::dict::Dictionary;
|
||||
use crate::value::evaluate::Evaluate;
|
||||
use crate::value::primitive::Primitive;
|
||||
use crate::value::range::{Range, RangeInclusion};
|
||||
use crate::{ColumnPath, PathMember};
|
||||
use bigdecimal::BigDecimal;
|
||||
use indexmap::IndexMap;
|
||||
use nu_errors::ShellError;
|
||||
use nu_source::{AnchorLocation, HasSpan, Span, Spanned, Tag};
|
||||
use num_bigint::BigInt;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
use std::time::SystemTime;
|
||||
|
||||
/// A nushell value without source metadata; `Value` pairs this with a `Tag`.
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
pub enum UntaggedValue {
    /// A scalar (string, number, path, …).
    Primitive(Primitive),
    /// A row of named columns.
    Row(Dictionary),
    /// An ordered collection of values.
    Table(Vec<Value>),

    // Errors are a type of value too
    Error(ShellError),

    /// A block of code plus its captured scope.
    Block(Evaluate),
}
|
||||
|
||||
impl UntaggedValue {
|
||||
pub fn retag(self, tag: impl Into<Tag>) -> Value {
|
||||
Value {
|
||||
value: self,
|
||||
tag: tag.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn data_descriptors(&self) -> Vec<String> {
|
||||
match self {
|
||||
UntaggedValue::Primitive(_) => vec![],
|
||||
UntaggedValue::Row(columns) => columns.entries.keys().map(|x| x.to_string()).collect(),
|
||||
UntaggedValue::Block(_) => vec![],
|
||||
UntaggedValue::Table(_) => vec![],
|
||||
UntaggedValue::Error(_) => vec![],
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_value(self, tag: impl Into<Tag>) -> Value {
|
||||
Value {
|
||||
value: self,
|
||||
tag: tag.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_untagged_value(self) -> Value {
|
||||
Value {
|
||||
value: self,
|
||||
tag: Tag::unknown(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_true(&self) -> bool {
|
||||
match self {
|
||||
UntaggedValue::Primitive(Primitive::Boolean(true)) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_some(&self) -> bool {
|
||||
!self.is_none()
|
||||
}
|
||||
|
||||
pub fn is_none(&self) -> bool {
|
||||
match self {
|
||||
UntaggedValue::Primitive(Primitive::Nothing) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_error(&self) -> bool {
|
||||
match self {
|
||||
UntaggedValue::Error(_err) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect_error(&self) -> ShellError {
|
||||
match self {
|
||||
UntaggedValue::Error(err) => err.clone(),
|
||||
_ => panic!("Don't call expect_error without first calling is_error"),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect_string(&self) -> &str {
|
||||
match self {
|
||||
UntaggedValue::Primitive(Primitive::String(string)) => &string[..],
|
||||
_ => panic!("expect_string assumes that the value must be a string"),
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
pub fn row(entries: IndexMap<String, Value>) -> UntaggedValue {
|
||||
UntaggedValue::Row(entries.into())
|
||||
}
|
||||
|
||||
pub fn table(list: &[Value]) -> UntaggedValue {
|
||||
UntaggedValue::Table(list.to_vec())
|
||||
}
|
||||
|
||||
pub fn string(s: impl Into<String>) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::String(s.into()))
|
||||
}
|
||||
|
||||
pub fn line(s: impl Into<String>) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Line(s.into()))
|
||||
}
|
||||
|
||||
pub fn column_path(s: Vec<impl Into<PathMember>>) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::ColumnPath(ColumnPath::new(
|
||||
s.into_iter().map(|p| p.into()).collect(),
|
||||
)))
|
||||
}
|
||||
|
||||
pub fn int(i: impl Into<BigInt>) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Int(i.into()))
|
||||
}
|
||||
|
||||
pub fn pattern(s: impl Into<String>) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::String(s.into()))
|
||||
}
|
||||
|
||||
pub fn path(s: impl Into<PathBuf>) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Path(s.into()))
|
||||
}
|
||||
|
||||
pub fn bytes(s: impl Into<u64>) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Bytes(s.into()))
|
||||
}
|
||||
|
||||
pub fn decimal(s: impl Into<BigDecimal>) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Decimal(s.into()))
|
||||
}
|
||||
|
||||
pub fn binary(binary: Vec<u8>) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Binary(binary))
|
||||
}
|
||||
|
||||
pub fn range(
|
||||
left: (Spanned<Primitive>, RangeInclusion),
|
||||
right: (Spanned<Primitive>, RangeInclusion),
|
||||
) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Range(Box::new(Range::new(left, right))))
|
||||
}
|
||||
|
||||
pub fn boolean(s: impl Into<bool>) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Boolean(s.into()))
|
||||
}
|
||||
|
||||
pub fn duration(secs: u64) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Duration(secs))
|
||||
}
|
||||
|
||||
pub fn system_date(s: SystemTime) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Date(s.into()))
|
||||
}
|
||||
|
||||
pub fn nothing() -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::Nothing)
|
||||
}
|
||||
}
|
||||
|
||||
/// A nushell value paired with the `Tag` (span + anchor) describing where it
/// came from.
#[derive(Debug, Clone, PartialOrd, PartialEq, Ord, Eq, Hash, Serialize, Deserialize)]
pub struct Value {
    pub value: UntaggedValue,
    pub tag: Tag,
}

// Deref to the untagged value so `UntaggedValue` methods can be called on a
// `Value` directly.
impl std::ops::Deref for Value {
    type Target = UntaggedValue;

    fn deref(&self) -> &Self::Target {
        &self.value
    }
}
|
||||
|
||||
impl Value {
    /// The location (file/url) this value was loaded from, if known.
    pub fn anchor(&self) -> Option<AnchorLocation> {
        self.tag.anchor()
    }

    /// A display name for the anchor location, if known.
    pub fn anchor_name(&self) -> Option<String> {
        self.tag.anchor_name()
    }

    /// A clone of this value's tag.
    pub fn tag(&self) -> Tag {
        self.tag.clone()
    }

    /// Converts to an owned `String`.
    ///
    /// # Errors
    /// Type error unless the value is a string or line primitive. Note:
    /// a `Line` gets a trailing newline appended.
    pub fn as_string(&self) -> Result<String, ShellError> {
        match &self.value {
            UntaggedValue::Primitive(Primitive::String(string)) => Ok(string.clone()),
            UntaggedValue::Primitive(Primitive::Line(line)) => Ok(line.clone() + "\n"),
            _ => Err(ShellError::type_error("string", self.spanned_type_name())),
        }
    }

    /// Borrows the string contents; unlike `as_string` this accepts only
    /// `String` (not `Line`) and does not allocate.
    pub fn as_forgiving_string(&self) -> Result<&str, ShellError> {
        match &self.value {
            UntaggedValue::Primitive(Primitive::String(string)) => Ok(&string[..]),
            _ => Err(ShellError::type_error("string", self.spanned_type_name())),
        }
    }

    /// Converts to a `PathBuf`; string primitives are accepted as paths too.
    pub fn as_path(&self) -> Result<PathBuf, ShellError> {
        match &self.value {
            UntaggedValue::Primitive(Primitive::Path(path)) => Ok(path.clone()),
            UntaggedValue::Primitive(Primitive::String(path_str)) => Ok(PathBuf::from(&path_str)),
            _ => Err(ShellError::type_error("Path", self.spanned_type_name())),
        }
    }

    /// Clones out the primitive, failing for rows/tables/blocks/errors.
    pub fn as_primitive(&self) -> Result<Primitive, ShellError> {
        match &self.value {
            UntaggedValue::Primitive(primitive) => Ok(primitive.clone()),
            _ => Err(ShellError::type_error(
                "Primitive",
                self.spanned_type_name(),
            )),
        }
    }

    /// Converts a numeric primitive to `u64`; errors (with this value's
    /// span) for non-primitives or primitives that don't fit.
    pub fn as_u64(&self) -> Result<u64, ShellError> {
        match &self.value {
            UntaggedValue::Primitive(primitive) => primitive.as_u64(self.tag.span),
            _ => Err(ShellError::type_error("integer", self.spanned_type_name())),
        }
    }
}
|
||||
|
||||
impl Into<UntaggedValue> for &str {
|
||||
fn into(self) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::String(self.to_string()))
|
||||
}
|
||||
}
|
||||
|
||||
// Idiom fix (clippy `from_over_into`): implement `From` instead of `Into`;
// callers using `.into()` keep working through the blanket impl.
impl From<Value> for UntaggedValue {
    /// Drops the tag, keeping only the bare value.
    fn from(input: Value) -> UntaggedValue {
        input.value
    }
}
|
||||
|
||||
// Idiom fix (clippy `from_over_into`): implement `From` instead of `Into`;
// the blanket impl preserves the existing `Into` conversion for callers.
impl<'a> From<&'a Value> for &'a UntaggedValue {
    /// Borrows the untagged payload of a tagged value.
    fn from(input: &'a Value) -> &'a UntaggedValue {
        &input.value
    }
}
|
||||
|
||||
impl HasSpan for Value {
|
||||
fn span(&self) -> Span {
|
||||
self.tag.span
|
||||
}
|
||||
}
|
||||
|
||||
impl ShellTypeName for Value {
|
||||
fn type_name(&self) -> &'static str {
|
||||
ShellTypeName::type_name(&self.value)
|
||||
}
|
||||
}
|
||||
|
||||
impl ShellTypeName for UntaggedValue {
|
||||
fn type_name(&self) -> &'static str {
|
||||
match &self {
|
||||
UntaggedValue::Primitive(p) => p.type_name(),
|
||||
UntaggedValue::Row(_) => "row",
|
||||
UntaggedValue::Table(_) => "table",
|
||||
UntaggedValue::Error(_) => "error",
|
||||
UntaggedValue::Block(_) => "block",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Primitive> for UntaggedValue {
    /// Wraps a bare primitive in the `Primitive` variant.
    fn from(input: Primitive) -> UntaggedValue {
        Self::Primitive(input)
    }
}
|
||||
|
||||
impl From<String> for UntaggedValue {
|
||||
fn from(input: String) -> UntaggedValue {
|
||||
UntaggedValue::Primitive(Primitive::String(input))
|
||||
}
|
||||
}
|
114
crates/nu-protocol/src/value/column_path.rs
Normal file
114
crates/nu-protocol/src/value/column_path.rs
Normal file
@ -0,0 +1,114 @@
|
||||
use crate::Value;
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use nu_source::{b, span_for_spanned_list, DebugDocBuilder, HasFallibleSpan, PrettyDebug, Span};
|
||||
use num_bigint::BigInt;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// One step of a column path without location information: either a column
/// name or a numeric (row/index) position.
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum UnspannedPathMember {
    /// A named column, e.g. the `name` in `$it.name`.
    String(String),
    /// A numeric index, e.g. the `3` in `$it.3`.
    Int(BigInt),
}
|
||||
|
||||
impl UnspannedPathMember {
|
||||
pub fn into_path_member(self, span: impl Into<Span>) -> PathMember {
|
||||
PathMember {
|
||||
unspanned: self,
|
||||
span: span.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A column-path step together with the span of the source text it came from.
#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub struct PathMember {
    /// The member itself (name or index).
    pub unspanned: UnspannedPathMember,
    /// Where this member appeared in the source.
    pub span: Span,
}
|
||||
|
||||
impl PrettyDebug for &PathMember {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
match &self.unspanned {
|
||||
UnspannedPathMember::String(string) => b::primitive(format!("{:?}", string)),
|
||||
UnspannedPathMember::Int(int) => b::primitive(format!("{}", int)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A sequence of path members describing how to drill into a value,
/// e.g. `a.b.3` becomes three members.
#[derive(
    Debug, Hash, Serialize, Deserialize, Ord, PartialOrd, Eq, PartialEq, Getters, Clone, new,
)]
pub struct ColumnPath {
    /// The members in traversal order (head first).
    #[get = "pub"]
    members: Vec<PathMember>,
}
|
||||
|
||||
impl ColumnPath {
|
||||
pub fn iter(&self) -> impl Iterator<Item = &PathMember> {
|
||||
self.members.iter()
|
||||
}
|
||||
|
||||
pub fn split_last(&self) -> Option<(&PathMember, &[PathMember])> {
|
||||
self.members.split_last()
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebug for ColumnPath {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
let members: Vec<DebugDocBuilder> =
|
||||
self.members.iter().map(|member| member.pretty()).collect();
|
||||
|
||||
b::delimit(
|
||||
"(",
|
||||
b::description("path") + b::equals() + b::intersperse(members, b::space()),
|
||||
")",
|
||||
)
|
||||
.nest()
|
||||
}
|
||||
}
|
||||
|
||||
impl HasFallibleSpan for ColumnPath {
|
||||
fn maybe_span(&self) -> Option<Span> {
|
||||
if self.members.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(span_for_spanned_list(self.members.iter().map(|m| m.span)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PathMember {
|
||||
pub fn string(string: impl Into<String>, span: impl Into<Span>) -> PathMember {
|
||||
UnspannedPathMember::String(string.into()).into_path_member(span)
|
||||
}
|
||||
|
||||
pub fn int(int: impl Into<BigInt>, span: impl Into<Span>) -> PathMember {
|
||||
UnspannedPathMember::Int(int.into()).into_path_member(span)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn did_you_mean(obj_source: &Value, field_tried: &PathMember) -> Option<Vec<(usize, String)>> {
|
||||
let field_tried = match &field_tried.unspanned {
|
||||
UnspannedPathMember::String(string) => string.clone(),
|
||||
UnspannedPathMember::Int(int) => format!("{}", int),
|
||||
};
|
||||
|
||||
let possibilities = obj_source.data_descriptors();
|
||||
|
||||
let mut possible_matches: Vec<_> = possibilities
|
||||
.into_iter()
|
||||
.map(|x| {
|
||||
let word = x;
|
||||
let distance = natural::distance::levenshtein_distance(&word, &field_tried);
|
||||
|
||||
(distance, word)
|
||||
})
|
||||
.collect();
|
||||
|
||||
if !possible_matches.is_empty() {
|
||||
possible_matches.sort();
|
||||
Some(possible_matches)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
55
crates/nu-protocol/src/value/convert.rs
Normal file
55
crates/nu-protocol/src/value/convert.rs
Normal file
@ -0,0 +1,55 @@
|
||||
use crate::type_name::SpannedTypeName;
|
||||
use crate::value::dict::Dictionary;
|
||||
use crate::value::primitive::Primitive;
|
||||
use crate::value::{UntaggedValue, Value};
|
||||
use nu_errors::{CoerceInto, ShellError};
|
||||
use nu_source::TaggedItem;
|
||||
|
||||
impl std::convert::TryFrom<&Value> for i64 {
|
||||
type Error = ShellError;
|
||||
|
||||
fn try_from(value: &Value) -> Result<i64, ShellError> {
|
||||
match &value.value {
|
||||
UntaggedValue::Primitive(Primitive::Int(int)) => {
|
||||
int.tagged(&value.tag).coerce_into("converting to i64")
|
||||
}
|
||||
_ => Err(ShellError::type_error("Integer", value.spanned_type_name())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::TryFrom<&Value> for String {
|
||||
type Error = ShellError;
|
||||
|
||||
fn try_from(value: &Value) -> Result<String, ShellError> {
|
||||
match &value.value {
|
||||
UntaggedValue::Primitive(Primitive::String(s)) => Ok(s.clone()),
|
||||
_ => Err(ShellError::type_error("String", value.spanned_type_name())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::TryFrom<&Value> for Vec<u8> {
|
||||
type Error = ShellError;
|
||||
|
||||
fn try_from(value: &Value) -> Result<Vec<u8>, ShellError> {
|
||||
match &value.value {
|
||||
UntaggedValue::Primitive(Primitive::Binary(b)) => Ok(b.clone()),
|
||||
_ => Err(ShellError::type_error("Binary", value.spanned_type_name())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> std::convert::TryFrom<&'a Value> for &'a Dictionary {
|
||||
type Error = ShellError;
|
||||
|
||||
fn try_from(value: &'a Value) -> Result<&'a Dictionary, ShellError> {
|
||||
match &value.value {
|
||||
UntaggedValue::Row(d) => Ok(d),
|
||||
_ => Err(ShellError::type_error(
|
||||
"Dictionary",
|
||||
value.spanned_type_name(),
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
99
crates/nu-protocol/src/value/debug.rs
Normal file
99
crates/nu-protocol/src/value/debug.rs
Normal file
@ -0,0 +1,99 @@
|
||||
use crate::type_name::PrettyType;
|
||||
use crate::value::primitive::Primitive;
|
||||
use crate::value::{UntaggedValue, Value};
|
||||
use nu_source::{b, DebugDocBuilder, PrettyDebug};
|
||||
|
||||
impl PrettyDebug for &Value {
    // Forwards to the `PrettyDebug` impl for the owned `Value`. The UFCS
    // call with `*self` pins `Self = Value`; a plain method call here could
    // re-select this `&Value` impl and recurse.
    fn pretty(&self) -> DebugDocBuilder {
        PrettyDebug::pretty(*self)
    }
}
|
||||
|
||||
impl PrettyDebug for Value {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
match &self.value {
|
||||
UntaggedValue::Primitive(p) => p.pretty(),
|
||||
UntaggedValue::Row(row) => row.pretty_builder().nest(1).group().into(),
|
||||
UntaggedValue::Table(table) => {
|
||||
b::delimit("[", b::intersperse(table, b::space()), "]").nest()
|
||||
}
|
||||
UntaggedValue::Error(_) => b::error("error"),
|
||||
UntaggedValue::Block(_) => b::opaque("block"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyType for Primitive {
|
||||
fn pretty_type(&self) -> DebugDocBuilder {
|
||||
match self {
|
||||
Primitive::Nothing => ty("nothing"),
|
||||
Primitive::Int(_) => ty("integer"),
|
||||
Primitive::Range(_) => ty("range"),
|
||||
Primitive::Decimal(_) => ty("decimal"),
|
||||
Primitive::Bytes(_) => ty("bytesize"),
|
||||
Primitive::String(_) => ty("string"),
|
||||
Primitive::Line(_) => ty("line"),
|
||||
Primitive::ColumnPath(_) => ty("column-path"),
|
||||
Primitive::Pattern(_) => ty("pattern"),
|
||||
Primitive::Boolean(_) => ty("boolean"),
|
||||
Primitive::Date(_) => ty("date"),
|
||||
Primitive::Duration(_) => ty("duration"),
|
||||
Primitive::Path(_) => ty("path"),
|
||||
Primitive::Binary(_) => ty("binary"),
|
||||
Primitive::BeginningOfStream => b::keyword("beginning-of-stream"),
|
||||
Primitive::EndOfStream => b::keyword("end-of-stream"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebug for Primitive {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
match self {
|
||||
Primitive::Nothing => b::primitive("nothing"),
|
||||
Primitive::Int(int) => prim(format_args!("{}", int)),
|
||||
Primitive::Decimal(decimal) => prim(format_args!("{}", decimal)),
|
||||
Primitive::Range(range) => {
|
||||
let (left, left_inclusion) = &range.from;
|
||||
let (right, right_inclusion) = &range.to;
|
||||
|
||||
b::typed(
|
||||
"range",
|
||||
(left_inclusion.debug_left_bracket()
|
||||
+ left.pretty()
|
||||
+ b::operator(",")
|
||||
+ b::space()
|
||||
+ right.pretty()
|
||||
+ right_inclusion.debug_right_bracket())
|
||||
.group(),
|
||||
)
|
||||
}
|
||||
Primitive::Bytes(bytes) => primitive_doc(bytes, "bytesize"),
|
||||
Primitive::String(string) => prim(string),
|
||||
Primitive::Line(string) => prim(string),
|
||||
Primitive::ColumnPath(path) => path.pretty(),
|
||||
Primitive::Pattern(pattern) => primitive_doc(pattern, "pattern"),
|
||||
Primitive::Boolean(boolean) => match boolean {
|
||||
true => b::primitive("$yes"),
|
||||
false => b::primitive("$no"),
|
||||
},
|
||||
Primitive::Date(date) => primitive_doc(date, "date"),
|
||||
Primitive::Duration(duration) => primitive_doc(duration, "seconds"),
|
||||
Primitive::Path(path) => primitive_doc(path, "path"),
|
||||
Primitive::Binary(_) => b::opaque("binary"),
|
||||
Primitive::BeginningOfStream => b::keyword("beginning-of-stream"),
|
||||
Primitive::EndOfStream => b::keyword("end-of-stream"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn prim(name: impl std::fmt::Debug) -> DebugDocBuilder {
|
||||
b::primitive(format!("{:?}", name))
|
||||
}
|
||||
|
||||
fn primitive_doc(name: impl std::fmt::Debug, ty: impl Into<String>) -> DebugDocBuilder {
|
||||
b::primitive(format!("{:?}", name)) + b::delimit("(", b::kind(ty.into()), ")")
|
||||
}
|
||||
|
||||
fn ty(name: impl std::fmt::Debug) -> DebugDocBuilder {
|
||||
b::kind(format!("{:?}", name))
|
||||
}
|
207
crates/nu-protocol/src/value/dict.rs
Normal file
207
crates/nu-protocol/src/value/dict.rs
Normal file
@ -0,0 +1,207 @@
|
||||
use crate::maybe_owned::MaybeOwned;
|
||||
use crate::value::primitive::Primitive;
|
||||
use crate::value::{UntaggedValue, Value};
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use indexmap::IndexMap;
|
||||
use nu_source::{b, DebugDocBuilder, PrettyDebug, Spanned, Tag};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::cmp::{Ord, Ordering, PartialOrd};
|
||||
use std::hash::{Hash, Hasher};
|
||||
|
||||
/// An ordered map of column name to value — the payload of a `Row`.
/// Entries preserve insertion order (`IndexMap`).
#[derive(Debug, Default, Serialize, Deserialize, PartialEq, Eq, Clone, Getters, new)]
pub struct Dictionary {
    /// The row's columns, in insertion order.
    #[get = "pub"]
    pub entries: IndexMap<String, Value>,
}
|
||||
|
||||
#[allow(clippy::derive_hash_xor_eq)]
|
||||
impl Hash for Dictionary {
|
||||
fn hash<H: Hasher>(&self, state: &mut H) {
|
||||
let mut entries = self.entries.clone();
|
||||
entries.sort_keys();
|
||||
entries.keys().collect::<Vec<&String>>().hash(state);
|
||||
entries.values().collect::<Vec<&Value>>().hash(state);
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for Dictionary {
|
||||
fn partial_cmp(&self, other: &Dictionary) -> Option<Ordering> {
|
||||
let this: Vec<&String> = self.entries.keys().collect();
|
||||
let that: Vec<&String> = other.entries.keys().collect();
|
||||
|
||||
if this != that {
|
||||
return this.partial_cmp(&that);
|
||||
}
|
||||
|
||||
let this: Vec<&Value> = self.entries.values().collect();
|
||||
let that: Vec<&Value> = self.entries.values().collect();
|
||||
|
||||
this.partial_cmp(&that)
|
||||
}
|
||||
}
|
||||
|
||||
impl Ord for Dictionary {
|
||||
fn cmp(&self, other: &Dictionary) -> Ordering {
|
||||
let this: Vec<&String> = self.entries.keys().collect();
|
||||
let that: Vec<&String> = other.entries.keys().collect();
|
||||
|
||||
if this != that {
|
||||
return this.cmp(&that);
|
||||
}
|
||||
|
||||
let this: Vec<&Value> = self.entries.values().collect();
|
||||
let that: Vec<&Value> = self.entries.values().collect();
|
||||
|
||||
this.cmp(&that)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<Value> for Dictionary {
|
||||
fn eq(&self, other: &Value) -> bool {
|
||||
match &other.value {
|
||||
UntaggedValue::Row(d) => self == d,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A borrowed key/value pair, used only to pretty-print one dictionary entry.
#[derive(Debug, new)]
struct DebugEntry<'a> {
    key: &'a str,
    value: &'a Value,
}
|
||||
|
||||
impl<'a> PrettyDebug for DebugEntry<'a> {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
(b::key(self.key.to_string()) + b::equals() + self.value.pretty().into_value()).group()
|
||||
}
|
||||
}
|
||||
|
||||
impl PrettyDebug for Dictionary {
|
||||
fn pretty(&self) -> DebugDocBuilder {
|
||||
b::delimit(
|
||||
"(",
|
||||
b::intersperse(
|
||||
self.entries()
|
||||
.iter()
|
||||
.map(|(key, value)| DebugEntry::new(key, value)),
|
||||
b::space(),
|
||||
),
|
||||
")",
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<IndexMap<String, Value>> for Dictionary {
|
||||
fn from(input: IndexMap<String, Value>) -> Dictionary {
|
||||
let mut out = IndexMap::default();
|
||||
|
||||
for (key, value) in input {
|
||||
out.insert(key, value);
|
||||
}
|
||||
|
||||
Dictionary::new(out)
|
||||
}
|
||||
}
|
||||
|
||||
impl Dictionary {
|
||||
pub fn get_data(&self, desc: &str) -> MaybeOwned<'_, Value> {
|
||||
match self.entries.get(desc) {
|
||||
Some(v) => MaybeOwned::Borrowed(v),
|
||||
None => MaybeOwned::Owned(
|
||||
UntaggedValue::Primitive(Primitive::Nothing).into_untagged_value(),
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn keys(&self) -> impl Iterator<Item = &String> {
|
||||
self.entries.keys()
|
||||
}
|
||||
|
||||
pub fn get_data_by_key(&self, name: Spanned<&str>) -> Option<Value> {
|
||||
let result = self
|
||||
.entries
|
||||
.iter()
|
||||
.find(|(desc_name, _)| *desc_name == name.item)?
|
||||
.1;
|
||||
|
||||
Some(
|
||||
result
|
||||
.value
|
||||
.clone()
|
||||
.into_value(Tag::new(result.tag.anchor(), name.span)),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn get_mut_data_by_key(&mut self, name: &str) -> Option<&mut Value> {
|
||||
match self
|
||||
.entries
|
||||
.iter_mut()
|
||||
.find(|(desc_name, _)| *desc_name == name)
|
||||
{
|
||||
Some((_, v)) => Some(v),
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn insert_data_at_key(&mut self, name: &str, value: Value) {
|
||||
self.entries.insert(name.to_string(), value);
|
||||
}
|
||||
}
|
||||
|
||||
/// Incrementally builds a tagged row: entries are collected into a map and
/// every inserted untagged value receives the builder's tag.
#[derive(Debug)]
pub struct TaggedDictBuilder {
    /// The tag the finished row (and untagged insertions) will carry.
    tag: Tag,
    /// Accumulated entries, in insertion order.
    dict: IndexMap<String, Value>,
}
|
||||
|
||||
impl TaggedDictBuilder {
|
||||
pub fn new(tag: impl Into<Tag>) -> TaggedDictBuilder {
|
||||
TaggedDictBuilder {
|
||||
tag: tag.into(),
|
||||
dict: IndexMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn build(tag: impl Into<Tag>, block: impl FnOnce(&mut TaggedDictBuilder)) -> Value {
|
||||
let mut builder = TaggedDictBuilder::new(tag);
|
||||
block(&mut builder);
|
||||
builder.into_value()
|
||||
}
|
||||
|
||||
pub fn with_capacity(tag: impl Into<Tag>, n: usize) -> TaggedDictBuilder {
|
||||
TaggedDictBuilder {
|
||||
tag: tag.into(),
|
||||
dict: IndexMap::with_capacity(n),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn insert_untagged(&mut self, key: impl Into<String>, value: impl Into<UntaggedValue>) {
|
||||
self.dict
|
||||
.insert(key.into(), value.into().into_value(&self.tag));
|
||||
}
|
||||
|
||||
pub fn insert_value(&mut self, key: impl Into<String>, value: impl Into<Value>) {
|
||||
self.dict.insert(key.into(), value.into());
|
||||
}
|
||||
|
||||
pub fn into_value(self) -> Value {
|
||||
let tag = self.tag.clone();
|
||||
self.into_untagged_value().into_value(tag)
|
||||
}
|
||||
|
||||
pub fn into_untagged_value(self) -> UntaggedValue {
|
||||
UntaggedValue::Row(Dictionary { entries: self.dict })
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.dict.is_empty()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<TaggedDictBuilder> for Value {
|
||||
fn from(input: TaggedDictBuilder) -> Value {
|
||||
input.into_value()
|
||||
}
|
||||
}
|
102
crates/nu-protocol/src/value/evaluate.rs
Normal file
102
crates/nu-protocol/src/value/evaluate.rs
Normal file
@ -0,0 +1,102 @@
|
||||
use crate::value::{Primitive, UntaggedValue, Value};
|
||||
use indexmap::IndexMap;
|
||||
use nu_errors::ShellError;
|
||||
use query_interface::{interfaces, vtable_for, Object, ObjectHash};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::cmp::{Ord, Ordering, PartialOrd};
|
||||
use std::fmt::Debug;
|
||||
|
||||
/// The evaluation context for an invocation: the current `$it` value plus
/// any named variables in scope.
#[derive(Debug)]
pub struct Scope {
    /// The implicit "current item" value.
    pub it: Value,
    /// Named variables, in insertion order.
    pub vars: IndexMap<String, Value>,
}
|
||||
|
||||
impl Scope {
|
||||
pub fn new(it: Value) -> Scope {
|
||||
Scope {
|
||||
it,
|
||||
vars: IndexMap::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Scope {
|
||||
pub fn empty() -> Scope {
|
||||
Scope {
|
||||
it: UntaggedValue::Primitive(Primitive::Nothing).into_untagged_value(),
|
||||
vars: IndexMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn it_value(value: Value) -> Scope {
|
||||
Scope {
|
||||
it: value,
|
||||
vars: IndexMap::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A serializable computation that can be invoked against a `Scope`.
/// `typetag` lets concrete implementations round-trip through serde behind
/// the trait object.
#[typetag::serde(tag = "type")]
pub trait EvaluateTrait: Debug + Send + Sync + Object + ObjectHash + 'static {
    /// Runs the computation in the given scope.
    fn invoke(&self, scope: &Scope) -> Result<Value, ShellError>;
    /// Clones the implementation into a fresh boxed `Evaluate` handle
    /// (needed because `Box<dyn …>` cannot derive `Clone`).
    fn clone_box(&self) -> Evaluate;
}
|
||||
|
||||
// query_interface registration so an `Evaluate` can be queried as
// `dyn ObjectHash` at runtime — the `Hash` impl below relies on `obj_hash`.
interfaces!(Evaluate: dyn ObjectHash);
|
||||
|
||||
// The handle itself implements the trait by forwarding to its boxed inner
// expression, so an `Evaluate` can be used wherever the trait is expected.
#[typetag::serde]
impl EvaluateTrait for Evaluate {
    fn invoke(&self, scope: &Scope) -> Result<Value, ShellError> {
        self.expr.invoke(scope)
    }

    fn clone_box(&self) -> Evaluate {
        self.expr.clone_box()
    }
}
|
||||
|
||||
/// A cloneable, serializable handle around a boxed `EvaluateTrait` object.
#[derive(Debug, Serialize, Deserialize)]
pub struct Evaluate {
    /// The type-erased computation.
    expr: Box<dyn EvaluateTrait>,
}
|
||||
|
||||
impl Evaluate {
|
||||
pub fn new(evaluate: impl EvaluateTrait) -> Evaluate {
|
||||
Evaluate {
|
||||
expr: Box::new(evaluate),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::hash::Hash for Evaluate {
    // Delegates to `ObjectHash::obj_hash` so the dynamically-typed boxed
    // expression can participate in hashing.
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        self.expr.obj_hash(state)
    }
}
|
||||
|
||||
impl Clone for Evaluate {
    // Manual impl: `Box<dyn EvaluateTrait>` cannot derive `Clone`, so the
    // trait's `clone_box` does the work.
    fn clone(&self) -> Evaluate {
        self.expr.clone_box()
    }
}
|
||||
|
||||
impl Ord for Evaluate {
    // All `Evaluate` handles deliberately compare as equal; an ordering
    // exists only so containing types can implement `Ord`.
    fn cmp(&self, _: &Self) -> Ordering {
        Ordering::Equal
    }
}
|
||||
|
||||
impl PartialOrd for Evaluate {
|
||||
fn partial_cmp(&self, _: &Evaluate) -> Option<Ordering> {
|
||||
Some(Ordering::Equal)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for Evaluate {
    // All `Evaluate` handles deliberately compare as equal, matching the
    // always-`Equal` `Ord` impl above.
    fn eq(&self, _: &Evaluate) -> bool {
        true
    }
}
|
||||
|
||||
// `eq` is total (always true), so the reflexivity `Eq` requires holds.
impl Eq for Evaluate {}
|
173
crates/nu-protocol/src/value/primitive.rs
Normal file
173
crates/nu-protocol/src/value/primitive.rs
Normal file
@ -0,0 +1,173 @@
|
||||
use crate::type_name::ShellTypeName;
|
||||
use crate::value::column_path::ColumnPath;
|
||||
use crate::value::range::Range;
|
||||
use crate::value::{serde_bigdecimal, serde_bigint};
|
||||
use bigdecimal::BigDecimal;
|
||||
use chrono::{DateTime, Utc};
|
||||
use chrono_humanize::Humanize;
|
||||
use nu_errors::{ExpectedRange, ShellError};
|
||||
use nu_source::{PrettyDebug, Span, SpannedItem};
|
||||
use num_bigint::BigInt;
|
||||
use num_traits::cast::{FromPrimitive, ToPrimitive};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// The scalar (non-structured) values of the protocol. Bignum variants go
/// through custom serde shims since `BigInt`/`BigDecimal` are not directly
/// serde-friendly here.
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Deserialize, Serialize)]
pub enum Primitive {
    /// The absence of a value.
    Nothing,
    /// An arbitrary-precision integer.
    #[serde(with = "serde_bigint")]
    Int(BigInt),
    /// An arbitrary-precision decimal.
    #[serde(with = "serde_bigdecimal")]
    Decimal(BigDecimal),
    /// A size in bytes.
    Bytes(u64),
    /// A plain string.
    String(String),
    /// A line of text (rendered with a trailing newline).
    Line(String),
    /// A parsed column path such as `a.b.3`.
    ColumnPath(ColumnPath),
    /// A glob-style pattern.
    Pattern(String),
    /// A boolean.
    Boolean(bool),
    /// A UTC timestamp.
    Date(DateTime<Utc>),
    Duration(u64), // Duration in seconds
    /// A bounded range of primitives (boxed to keep this enum small).
    Range(Box<Range>),
    /// A filesystem path.
    Path(PathBuf),
    /// Raw bytes.
    #[serde(with = "serde_bytes")]
    Binary(Vec<u8>),

    // Stream markers (used as bookend markers rather than actual values)
    BeginningOfStream,
    EndOfStream,
}
|
||||
|
||||
impl Primitive {
|
||||
pub fn as_u64(&self, span: Span) -> Result<u64, ShellError> {
|
||||
match self {
|
||||
Primitive::Int(int) => match int.to_u64() {
|
||||
None => Err(ShellError::range_error(
|
||||
ExpectedRange::U64,
|
||||
&format!("{}", int).spanned(span),
|
||||
"converting an integer into a 64-bit integer",
|
||||
)),
|
||||
Some(num) => Ok(num),
|
||||
},
|
||||
other => Err(ShellError::type_error(
|
||||
"integer",
|
||||
other.type_name().spanned(span),
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<BigDecimal> for Primitive {
    /// Wraps a bignum decimal directly.
    fn from(decimal: BigDecimal) -> Primitive {
        Self::Decimal(decimal)
    }
}
|
||||
|
||||
impl From<f64> for Primitive {
|
||||
fn from(float: f64) -> Primitive {
|
||||
if let Some(f) = BigDecimal::from_f64(float) {
|
||||
Primitive::Decimal(f)
|
||||
} else {
|
||||
unreachable!("Internal error: protocol did not use f64-compatible decimal")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ShellTypeName for Primitive {
    /// The user-facing name of this primitive's type.
    fn type_name(&self) -> &'static str {
        match self {
            Self::Nothing => "nothing",
            Self::Int(_) => "integer",
            Self::Range(_) => "range",
            Self::Decimal(_) => "decimal",
            Self::Bytes(_) => "bytes",
            Self::String(_) => "string",
            Self::Line(_) => "line",
            Self::ColumnPath(_) => "column path",
            Self::Pattern(_) => "pattern",
            Self::Boolean(_) => "boolean",
            Self::Date(_) => "date",
            Self::Duration(_) => "duration",
            Self::Path(_) => "file path",
            Self::Binary(_) => "binary",
            Self::BeginningOfStream => "marker<beginning of stream>",
            Self::EndOfStream => "marker<end of stream>",
        }
    }
}
|
||||
|
||||
/// Formats a primitive for table/plain display. `field_name` only influences
/// booleans: a `true` under a non-empty column name renders as that name,
/// and a `false` renders as empty.
pub fn format_primitive(primitive: &Primitive, field_name: Option<&String>) -> String {
    match primitive {
        // Nothing and the stream markers render as empty cells.
        Primitive::Nothing => String::new(),
        Primitive::BeginningOfStream => String::new(),
        Primitive::EndOfStream => String::new(),
        Primitive::Path(p) => format!("{}", p.display()),
        Primitive::Bytes(b) => {
            let byte = byte_unit::Byte::from_bytes(*b as u128);

            // Zero bytes display as an em dash instead of "0 B".
            if byte.get_bytes() == 0u128 {
                return "—".to_string();
            }

            let byte = byte.get_appropriate_unit(false);

            match byte.get_unit() {
                // Plain bytes keep a trailing space so columns line up with
                // the multi-character units below.
                byte_unit::ByteUnit::B => format!("{} B ", byte.get_value()),
                _ => byte.format(1),
            }
        }
        Primitive::Duration(sec) => format_duration(*sec),
        Primitive::Int(i) => i.to_string(),
        // Decimals are shown with four digits of precision.
        Primitive::Decimal(decimal) => format!("{:.4}", decimal),
        Primitive::Range(range) => format!(
            "{}..{}",
            format_primitive(&range.from.0.item, None),
            format_primitive(&range.to.0.item, None)
        ),
        Primitive::Pattern(s) => s.to_string(),
        Primitive::String(s) => s.to_owned(),
        Primitive::Line(s) => s.to_owned(),
        // Column paths render dot-separated: `a.b.3`.
        Primitive::ColumnPath(p) => {
            let mut members = p.iter();
            let mut f = String::new();

            f.push_str(
                &members
                    .next()
                    .expect("BUG: column path with zero members")
                    .display(),
            );

            for member in members {
                f.push_str(".");
                f.push_str(&member.display())
            }

            f
        }
        Primitive::Boolean(b) => match (b, field_name) {
            (true, None) => "Yes",
            (false, None) => "No",
            // Under a named column, truth shows the column name itself and
            // falsehood shows nothing.
            (true, Some(s)) if !s.is_empty() => s,
            (false, Some(s)) if !s.is_empty() => "",
            (true, Some(_)) => "Yes",
            (false, Some(_)) => "No",
        }
        .to_owned(),
        Primitive::Binary(_) => "<binary>".to_owned(),
        // Dates render relative to now (e.g. "2 hours ago").
        Primitive::Date(d) => d.humanize(),
    }
}
|
||||
|
||||
/// Formats a duration in seconds as the most compact clock-style string:
/// `"1 sec"` / `"{n} secs"` under a minute, then `m:ss`, `h:mm:ss`, and
/// `d:hh:mm:ss` as the magnitude grows.
pub fn format_duration(sec: u64) -> String {
    let seconds = sec % 60;
    let total_minutes = sec / 60;
    let minutes = total_minutes % 60;
    let total_hours = total_minutes / 60;
    let hours = total_hours % 24;
    let days = total_hours / 24;

    if days > 0 {
        format!("{}:{:02}:{:02}:{:02}", days, hours, minutes, seconds)
    } else if hours > 0 {
        format!("{}:{:02}:{:02}", hours, minutes, seconds)
    } else if total_minutes > 0 {
        format!("{}:{:02}", minutes, seconds)
    } else if seconds == 1 {
        "1 sec".to_owned()
    } else {
        format!("{} secs", seconds)
    }
}
|
32
crates/nu-protocol/src/value/range.rs
Normal file
32
crates/nu-protocol/src/value/range.rs
Normal file
@ -0,0 +1,32 @@
|
||||
use crate::value::Primitive;
|
||||
use derive_new::new;
|
||||
use nu_source::{b, DebugDocBuilder, Spanned};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Whether a range endpoint includes its bound.
#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize)]
pub enum RangeInclusion {
    /// The bound itself is part of the range.
    Inclusive,
    /// The bound is excluded from the range.
    Exclusive,
}
|
||||
|
||||
impl RangeInclusion {
|
||||
pub fn debug_left_bracket(self) -> DebugDocBuilder {
|
||||
b::delimiter(match self {
|
||||
RangeInclusion::Exclusive => "(",
|
||||
RangeInclusion::Inclusive => "[",
|
||||
})
|
||||
}
|
||||
|
||||
pub fn debug_right_bracket(self) -> DebugDocBuilder {
|
||||
b::delimiter(match self {
|
||||
RangeInclusion::Exclusive => ")",
|
||||
RangeInclusion::Inclusive => "]",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// A range of primitives: each endpoint carries its source span and whether
/// it is inclusive or exclusive.
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Serialize, Deserialize, new)]
pub struct Range {
    /// The lower endpoint.
    pub from: (Spanned<Primitive>, RangeInclusion),
    /// The upper endpoint.
    pub to: (Spanned<Primitive>, RangeInclusion),
}
|
24
crates/nu-protocol/src/value/serde_bigdecimal.rs
Normal file
24
crates/nu-protocol/src/value/serde_bigdecimal.rs
Normal file
@ -0,0 +1,24 @@
|
||||
use bigdecimal::BigDecimal;
|
||||
use num_traits::cast::FromPrimitive;
|
||||
use num_traits::cast::ToPrimitive;
|
||||
|
||||
pub fn serialize<S>(big_decimal: &BigDecimal, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
serde::Serialize::serialize(
|
||||
&big_decimal
|
||||
.to_f64()
|
||||
.ok_or_else(|| serde::ser::Error::custom("expected a f64-sized bignum"))?,
|
||||
serializer,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn deserialize<'de, D>(deserializer: D) -> Result<BigDecimal, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
let x: f64 = serde::Deserialize::deserialize(deserializer)?;
|
||||
Ok(BigDecimal::from_f64(x)
|
||||
.ok_or_else(|| serde::de::Error::custom("expected a f64-sized bigdecimal"))?)
|
||||
}
|
24
crates/nu-protocol/src/value/serde_bigint.rs
Normal file
24
crates/nu-protocol/src/value/serde_bigint.rs
Normal file
@ -0,0 +1,24 @@
|
||||
use num_bigint::BigInt;
|
||||
use num_traits::cast::FromPrimitive;
|
||||
use num_traits::cast::ToPrimitive;
|
||||
|
||||
pub fn serialize<S>(big_int: &BigInt, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: serde::Serializer,
|
||||
{
|
||||
serde::Serialize::serialize(
|
||||
&big_int
|
||||
.to_i64()
|
||||
.ok_or_else(|| serde::ser::Error::custom("expected a i64-sized bignum"))?,
|
||||
serializer,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn deserialize<'de, D>(deserializer: D) -> Result<BigInt, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
let x: i64 = serde::Deserialize::deserialize(deserializer)?;
|
||||
Ok(BigInt::from_i64(x)
|
||||
.ok_or_else(|| serde::de::Error::custom("expected a i64-sized bignum"))?)
|
||||
}
|
23
crates/nu-source/Cargo.toml
Normal file
23
crates/nu-source/Cargo.toml
Normal file
@ -0,0 +1,23 @@
|
||||
[package]
|
||||
name = "nu-source"
|
||||
version = "0.8.0"
|
||||
authors = ["Yehuda Katz <wycats@gmail.com>", "Jonathan Turner <jonathan.d.turner@gmail.com>", "Andrés N. Robalino <andres@androbtech.com>"]
|
||||
edition = "2018"
|
||||
description = "A source string characterizer for Nushell"
|
||||
license = "MIT"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
serde = { version = "1.0.103", features = ["derive"] }
|
||||
derive-new = "0.5.8"
|
||||
getset = "0.0.9"
|
||||
nom_locate = "1.0.0"
|
||||
nom-tracable = "0.4.1"
|
||||
language-reporting = "0.4.0"
|
||||
termcolor = "1.0.5"
|
||||
pretty = "0.5.2"
|
||||
|
||||
[build-dependencies]
|
||||
nu-build = { version = "0.8.0", path = "../nu-build" }
|
3
crates/nu-source/build.rs
Normal file
3
crates/nu-source/build.rs
Normal file
@ -0,0 +1,3 @@
|
||||
/// Build script: delegates to the shared `nu-build` helper crate.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    nu_build::build()
}
|
16
crates/nu-source/src/lib.rs
Normal file
16
crates/nu-source/src/lib.rs
Normal file
@ -0,0 +1,16 @@
|
||||
mod meta;
|
||||
mod pretty;
|
||||
mod term_colored;
|
||||
mod text;
|
||||
mod tracable;
|
||||
|
||||
pub use self::meta::{
|
||||
span_for_spanned_list, tag_for_tagged_list, AnchorLocation, HasFallibleSpan, HasSpan, HasTag,
|
||||
Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem,
|
||||
};
|
||||
pub use self::pretty::{
|
||||
b, DebugDoc, DebugDocBuilder, PrettyDebug, PrettyDebugWithSource, ShellAnnotation,
|
||||
};
|
||||
pub use self::term_colored::TermColored;
|
||||
pub use self::text::Text;
|
||||
pub use self::tracable::{nom_input, NomSpan, TracableContext};
|
654
crates/nu-source/src/meta.rs
Normal file
654
crates/nu-source/src/meta.rs
Normal file
@ -0,0 +1,654 @@
|
||||
use crate::pretty::{b, DebugDocBuilder, PrettyDebugWithSource};
|
||||
use crate::text::Text;
|
||||
use crate::tracable::TracableContext;
|
||||
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
/// Where a piece of source text originally came from.
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum AnchorLocation {
    /// Fetched from a URL.
    Url(String),
    /// Loaded from a file path.
    File(String),
    /// Inline source text.
    Source(Text),
}
||||
|
||||
/// Types that carry a `Tag` (span plus optional anchor).
pub trait HasTag {
    /// A copy of the value's tag.
    fn tag(&self) -> Tag;
}
||||
|
||||
/// A value paired with the source span it came from.
#[derive(new, Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub struct Spanned<T> {
    /// Where the item appeared in the source.
    pub span: Span,
    /// The wrapped value.
    pub item: T,
}
||||
|
||||
impl<T> Spanned<T> {
|
||||
pub fn map<U>(self, input: impl FnOnce(T) -> U) -> Spanned<U> {
|
||||
let span = self.span;
|
||||
|
||||
let mapped = input(self.item);
|
||||
mapped.spanned(span)
|
||||
}
|
||||
}
|
||||
|
||||
impl Spanned<String> {
|
||||
pub fn items<'a, U>(
|
||||
items: impl Iterator<Item = &'a Spanned<String>>,
|
||||
) -> impl Iterator<Item = &'a str> {
|
||||
items.map(|item| &item.item[..])
|
||||
}
|
||||
}
|
||||
|
||||
impl Spanned<String> {
|
||||
pub fn borrow_spanned(&self) -> Spanned<&str> {
|
||||
let span = self.span;
|
||||
self.item[..].spanned(span)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait SpannedItem: Sized {
|
||||
fn spanned(self, span: impl Into<Span>) -> Spanned<Self> {
|
||||
Spanned {
|
||||
item: self,
|
||||
span: span.into(),
|
||||
}
|
||||
}
|
||||
|
||||
fn spanned_unknown(self) -> Spanned<Self> {
|
||||
Spanned {
|
||||
item: self,
|
||||
span: Span::unknown(),
|
||||
}
|
||||
}
|
||||
}
|
||||
impl<T> SpannedItem for T {}
|
||||
|
||||
// `Spanned<T>` transparently dereferences to the wrapped item, so spanned
// values can be used where a plain `&T` is expected.
impl<T> std::ops::Deref for Spanned<T> {
    type Target = T;

    fn deref(&self) -> &T {
        &self.item
    }
}
|
||||
|
||||
/// A value paired with the `Tag` (span + optional anchor) it was parsed from.
#[derive(new, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub struct Tagged<T> {
    /// Where the item came from in the source text.
    pub tag: Tag,
    /// The wrapped value.
    pub item: T,
}
|
||||
|
||||
impl Tagged<String> {
|
||||
pub fn borrow_spanned(&self) -> Spanned<&str> {
|
||||
let span = self.tag.span;
|
||||
self.item[..].spanned(span)
|
||||
}
|
||||
|
||||
pub fn borrow_tagged(&self) -> Tagged<&str> {
|
||||
self.item[..].tagged(self.tag.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Tagged<Vec<T>> {
    /// Iterates over the elements of the tagged vector by reference.
    pub fn items(&self) -> impl Iterator<Item = &T> {
        self.item.iter()
    }
}
|
||||
|
||||
/// A `Tagged<T>` trivially has a tag: a clone of its own.
impl<T> HasTag for Tagged<T> {
    fn tag(&self) -> Tag {
        self.tag.clone()
    }
}

/// Lets a `Tagged<PathBuf>` be used where a `&Path` is expected.
impl AsRef<Path> for Tagged<PathBuf> {
    fn as_ref(&self) -> &Path {
        self.item.as_ref()
    }
}
|
||||
|
||||
pub trait TaggedItem: Sized {
|
||||
fn tagged(self, tag: impl Into<Tag>) -> Tagged<Self> {
|
||||
Tagged {
|
||||
item: self,
|
||||
tag: tag.into(),
|
||||
}
|
||||
}
|
||||
|
||||
// For now, this is a temporary facility. In many cases, there are other useful spans that we
|
||||
// could be using, such as the original source spans of JSON or Toml files, but we don't yet
|
||||
// have the infrastructure to make that work.
|
||||
fn tagged_unknown(self) -> Tagged<Self> {
|
||||
Tagged {
|
||||
item: self,
|
||||
tag: Tag {
|
||||
span: Span::unknown(),
|
||||
anchor: None,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> TaggedItem for T {}
|
||||
|
||||
/// Lets a `Tagged<T>` be used wherever `&T` is expected, forwarding to the item.
impl<T> std::ops::Deref for Tagged<T> {
    type Target = T;

    fn deref(&self) -> &T {
        &self.item
    }
}
|
||||
|
||||
impl<T> Tagged<T> {
|
||||
pub fn map<U>(self, input: impl FnOnce(T) -> U) -> Tagged<U> {
|
||||
let tag = self.tag();
|
||||
|
||||
let mapped = input(self.item);
|
||||
mapped.tagged(tag)
|
||||
}
|
||||
|
||||
pub fn map_anchored(self, anchor: &Option<AnchorLocation>) -> Tagged<T> {
|
||||
let mut tag = self.tag;
|
||||
|
||||
tag.anchor = anchor.clone();
|
||||
|
||||
Tagged {
|
||||
item: self.item,
|
||||
tag,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn transpose(&self) -> Tagged<&T> {
|
||||
Tagged {
|
||||
item: &self.item,
|
||||
tag: self.tag.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn tag(&self) -> Tag {
|
||||
self.tag.clone()
|
||||
}
|
||||
|
||||
pub fn span(&self) -> Span {
|
||||
self.tag.span
|
||||
}
|
||||
|
||||
pub fn anchor(&self) -> Option<AnchorLocation> {
|
||||
self.tag.anchor.clone()
|
||||
}
|
||||
|
||||
pub fn anchor_name(&self) -> Option<String> {
|
||||
match self.tag.anchor {
|
||||
Some(AnchorLocation::File(ref file)) => Some(file.clone()),
|
||||
Some(AnchorLocation::Url(ref url)) => Some(url.clone()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn item(&self) -> &T {
|
||||
&self.item
|
||||
}
|
||||
|
||||
pub fn into_parts(self) -> (T, Tag) {
|
||||
(self.item, self.tag)
|
||||
}
|
||||
}
|
||||
|
||||
/// Clones a borrowed tag into an owned one.
impl From<&Tag> for Tag {
    fn from(input: &Tag) -> Tag {
        input.clone()
    }
}
|
||||
|
||||
/// Converts a single `nom_locate` span into a `Span` covering its fragment.
impl<T> From<nom_locate::LocatedSpanEx<&str, T>> for Span {
    fn from(input: nom_locate::LocatedSpanEx<&str, T>) -> Span {
        // The span runs from the fragment's byte offset to the end of the fragment.
        Span::new(input.offset, input.offset + input.fragment.len())
    }
}

/// Converts a pair of `nom_locate` spans into the `Span` between their
/// respective start offsets (the second span's fragment is not included).
impl<T>
    From<(
        nom_locate::LocatedSpanEx<T, u64>,
        nom_locate::LocatedSpanEx<T, u64>,
    )> for Span
{
    fn from(
        input: (
            nom_locate::LocatedSpanEx<T, u64>,
            nom_locate::LocatedSpanEx<T, u64>,
        ),
    ) -> Span {
        Span::new(input.0.offset, input.1.offset)
    }
}
|
||||
|
||||
impl From<(usize, usize)> for Span {
|
||||
fn from(input: (usize, usize)) -> Span {
|
||||
Span::new(input.0, input.1)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&std::ops::Range<usize>> for Span {
|
||||
fn from(input: &std::ops::Range<usize>) -> Span {
|
||||
Span::new(input.start, input.end)
|
||||
}
|
||||
}
|
||||
|
||||
/// A `Span` plus the optional `AnchorLocation` the spanned text came from.
#[derive(
    Debug, Clone, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters, new,
)]
pub struct Tag {
    /// Where the source text originated, if known.
    pub anchor: Option<AnchorLocation>,
    /// The region of source text this tag covers.
    pub span: Span,
}
|
||||
|
||||
impl From<Span> for Tag {
|
||||
fn from(span: Span) -> Self {
|
||||
Tag { anchor: None, span }
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&Span> for Tag {
|
||||
fn from(span: &Span) -> Self {
|
||||
Tag {
|
||||
anchor: None,
|
||||
span: *span,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Builds an anchorless `Tag` over `start..end`; the parser's `TracableContext`
/// carries no location information, so it is discarded.
impl From<(usize, usize, TracableContext)> for Tag {
    fn from((start, end, _context): (usize, usize, TracableContext)) -> Self {
        Tag {
            anchor: None,
            span: Span::new(start, end),
        }
    }
}
|
||||
|
||||
impl From<(usize, usize, AnchorLocation)> for Tag {
|
||||
fn from((start, end, anchor): (usize, usize, AnchorLocation)) -> Self {
|
||||
Tag {
|
||||
anchor: Some(anchor),
|
||||
span: Span::new(start, end),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<(usize, usize, Option<AnchorLocation>)> for Tag {
|
||||
fn from((start, end, anchor): (usize, usize, Option<AnchorLocation>)) -> Self {
|
||||
Tag {
|
||||
anchor,
|
||||
span: Span::new(start, end),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts a `nom_locate` span into an anchorless `Tag` covering its fragment.
impl From<nom_locate::LocatedSpanEx<&str, TracableContext>> for Tag {
    fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Tag {
        Tag {
            anchor: None,
            span: Span::new(input.offset, input.offset + input.fragment.len()),
        }
    }
}
|
||||
|
||||
impl From<Tag> for Span {
|
||||
fn from(tag: Tag) -> Self {
|
||||
tag.span
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&Tag> for Span {
|
||||
fn from(tag: &Tag) -> Self {
|
||||
tag.span
|
||||
}
|
||||
}
|
||||
|
||||
impl Tag {
|
||||
pub fn unknown_anchor(span: Span) -> Tag {
|
||||
Tag { anchor: None, span }
|
||||
}
|
||||
|
||||
pub fn for_char(pos: usize, anchor: AnchorLocation) -> Tag {
|
||||
Tag {
|
||||
anchor: Some(anchor),
|
||||
span: Span::new(pos, pos + 1),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unknown_span(anchor: AnchorLocation) -> Tag {
|
||||
Tag {
|
||||
anchor: Some(anchor),
|
||||
span: Span::unknown(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unknown() -> Tag {
|
||||
Tag {
|
||||
anchor: None,
|
||||
span: Span::unknown(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn anchor(&self) -> Option<AnchorLocation> {
|
||||
self.anchor.clone()
|
||||
}
|
||||
|
||||
pub fn until(&self, other: impl Into<Tag>) -> Tag {
|
||||
let other = other.into();
|
||||
debug_assert!(
|
||||
self.anchor == other.anchor,
|
||||
"Can only merge two tags with the same anchor"
|
||||
);
|
||||
|
||||
Tag {
|
||||
span: Span::new(self.span.start, other.span.end),
|
||||
anchor: self.anchor.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn until_option(&self, other: Option<impl Into<Tag>>) -> Tag {
|
||||
match other {
|
||||
Some(other) => {
|
||||
let other = other.into();
|
||||
debug_assert!(
|
||||
self.anchor == other.anchor,
|
||||
"Can only merge two tags with the same anchor"
|
||||
);
|
||||
|
||||
Tag {
|
||||
span: Span::new(self.span.start, other.span.end),
|
||||
anchor: self.anchor.clone(),
|
||||
}
|
||||
}
|
||||
None => self.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn slice<'a>(&self, source: &'a str) -> &'a str {
|
||||
self.span.slice(source)
|
||||
}
|
||||
|
||||
pub fn string<'a>(&self, source: &'a str) -> String {
|
||||
self.span.slice(source).to_string()
|
||||
}
|
||||
|
||||
pub fn tagged_slice<'a>(&self, source: &'a str) -> Tagged<&'a str> {
|
||||
self.span.slice(source).tagged(self)
|
||||
}
|
||||
|
||||
pub fn tagged_string<'a>(&self, source: &'a str) -> Tagged<String> {
|
||||
self.span.slice(source).to_string().tagged(self)
|
||||
}
|
||||
|
||||
pub fn anchor_name(&self) -> Option<String> {
|
||||
match self.anchor {
|
||||
Some(AnchorLocation::File(ref file)) => Some(file.clone()),
|
||||
Some(AnchorLocation::Url(ref url)) => Some(url.clone()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
pub fn tag_for_tagged_list(mut iter: impl Iterator<Item = Tag>) -> Tag {
|
||||
let first = iter.next();
|
||||
|
||||
let first = match first {
|
||||
None => return Tag::unknown(),
|
||||
Some(first) => first,
|
||||
};
|
||||
|
||||
let last = iter.last();
|
||||
|
||||
match last {
|
||||
None => first,
|
||||
Some(last) => first.until(last),
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
pub fn span_for_spanned_list(mut iter: impl Iterator<Item = Span>) -> Span {
|
||||
let first = iter.next();
|
||||
|
||||
let first = match first {
|
||||
None => return Span::unknown(),
|
||||
Some(first) => first,
|
||||
};
|
||||
|
||||
let last = iter.last();
|
||||
|
||||
match last {
|
||||
None => first,
|
||||
Some(last) => first.until(last),
|
||||
}
|
||||
}
|
||||
|
||||
/// A half-open `[start, end)` region of source text, in byte offsets.
///
/// `(0, 0)` is the sentinel "unknown" span (see `Span::unknown`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub struct Span {
    start: usize,
    end: usize,
}
|
||||
|
||||
impl From<&Span> for Span {
|
||||
fn from(span: &Span) -> Span {
|
||||
*span
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Option<Span>> for Span {
|
||||
fn from(input: Option<Span>) -> Span {
|
||||
match input {
|
||||
None => Span::new(0, 0),
|
||||
Some(span) => span,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Span {
|
||||
pub fn unknown() -> Span {
|
||||
Span::new(0, 0)
|
||||
}
|
||||
|
||||
pub fn new(start: usize, end: usize) -> Span {
|
||||
assert!(
|
||||
end >= start,
|
||||
"Can't create a Span whose end < start, start={}, end={}",
|
||||
start,
|
||||
end
|
||||
);
|
||||
|
||||
Span { start, end }
|
||||
}
|
||||
|
||||
pub fn for_char(pos: usize) -> Span {
|
||||
Span {
|
||||
start: pos,
|
||||
end: pos + 1,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn until(&self, other: impl Into<Span>) -> Span {
|
||||
let other = other.into();
|
||||
|
||||
Span::new(self.start, other.end)
|
||||
}
|
||||
|
||||
pub fn until_option(&self, other: Option<impl Into<Span>>) -> Span {
|
||||
match other {
|
||||
Some(other) => {
|
||||
let other = other.into();
|
||||
|
||||
Span::new(self.start, other.end)
|
||||
}
|
||||
None => *self,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn string<'a>(&self, source: &'a str) -> String {
|
||||
self.slice(source).to_string()
|
||||
}
|
||||
|
||||
pub fn spanned_slice<'a>(&self, source: &'a str) -> Spanned<&'a str> {
|
||||
self.slice(source).spanned(*self)
|
||||
}
|
||||
|
||||
pub fn spanned_string<'a>(&self, source: &'a str) -> Spanned<String> {
|
||||
self.slice(source).to_string().spanned(*self)
|
||||
}
|
||||
|
||||
pub fn start(&self) -> usize {
|
||||
self.start
|
||||
}
|
||||
|
||||
pub fn end(&self) -> usize {
|
||||
self.end
|
||||
}
|
||||
|
||||
pub fn is_unknown(&self) -> bool {
|
||||
self.start == 0 && self.end == 0
|
||||
}
|
||||
|
||||
pub fn slice<'a>(&self, source: &'a str) -> &'a str {
|
||||
&source[self.start..self.end]
|
||||
}
|
||||
}
|
||||
|
||||
impl language_reporting::ReportingSpan for Span {
|
||||
fn with_start(&self, start: usize) -> Self {
|
||||
if self.end < start {
|
||||
Span::new(start, start)
|
||||
} else {
|
||||
Span::new(start, self.end)
|
||||
}
|
||||
}
|
||||
|
||||
fn with_end(&self, end: usize) -> Self {
|
||||
if end < self.start {
|
||||
Span::new(end, end)
|
||||
} else {
|
||||
Span::new(self.start, end)
|
||||
}
|
||||
}
|
||||
|
||||
fn start(&self) -> usize {
|
||||
self.start
|
||||
}
|
||||
|
||||
fn end(&self) -> usize {
|
||||
self.end
|
||||
}
|
||||
}
|
||||
|
||||
/// Types with a definite source span.
pub trait HasSpan: PrettyDebugWithSource {
    fn span(&self) -> Span;
}

/// Types that may or may not have a source span.
pub trait HasFallibleSpan: PrettyDebugWithSource {
    fn maybe_span(&self) -> Option<Span>;
}

/// Anything with a definite span trivially has a fallible span.
impl<T: HasSpan> HasFallibleSpan for T {
    fn maybe_span(&self) -> Option<Span> {
        Some(HasSpan::span(self))
    }
}
|
||||
|
||||
/// A `Spanned<T>` always knows its own span.
impl<T> HasSpan for Spanned<T>
where
    Spanned<T>: PrettyDebugWithSource,
{
    fn span(&self) -> Span {
        self.span
    }
}
|
||||
|
||||
impl PrettyDebugWithSource for Option<Span> {
    /// `None` renders as the literal text "no span"; `Some` defers to the span.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        match self {
            None => b::description("no span"),
            Some(span) => span.pretty_debug(source),
        }
    }
}

/// An `Option<Span>` is already the fallible-span representation.
impl HasFallibleSpan for Option<Span> {
    fn maybe_span(&self) -> Option<Span> {
        *self
    }
}
|
||||
|
||||
impl PrettyDebugWithSource for Span {
    /// Renders as `spanned for "<source>"`.
    // NOTE(review): this formats the entire `source` string, not the slice
    // this span covers — confirm that is the intended debug output.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::typed(
            "spanned",
            b::keyword("for") + b::space() + b::description(format!("{:?}", source)),
        )
    }
}

/// A `Span` is trivially its own span.
impl HasSpan for Span {
    fn span(&self) -> Span {
        *self
    }
}
|
||||
|
||||
impl<T> PrettyDebugWithSource for Option<Spanned<T>>
where
    Spanned<T>: PrettyDebugWithSource,
{
    /// `None` renders as the literal text "nothing"; `Some` defers to the value.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        match self {
            None => b::description("nothing"),
            Some(v) => v.pretty_debug(source),
        }
    }
}
|
||||
|
||||
impl<T> HasFallibleSpan for Option<Spanned<T>>
|
||||
where
|
||||
Spanned<T>: PrettyDebugWithSource,
|
||||
{
|
||||
fn maybe_span(&self) -> Option<Span> {
|
||||
match self {
|
||||
None => None,
|
||||
Some(value) => Some(value.span),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> PrettyDebugWithSource for Option<Tagged<T>>
where
    Tagged<T>: PrettyDebugWithSource,
{
    /// `None` renders as the literal text "nothing"; `Some` defers to the value.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        match self {
            None => b::description("nothing"),
            Some(d) => d.pretty_debug(source),
        }
    }
}
|
||||
|
||||
impl<T> HasFallibleSpan for Option<Tagged<T>>
|
||||
where
|
||||
Tagged<T>: PrettyDebugWithSource,
|
||||
{
|
||||
fn maybe_span(&self) -> Option<Span> {
|
||||
match self {
|
||||
None => None,
|
||||
Some(value) => Some(value.tag.span),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A `Tagged<T>` always knows its span, via its tag.
impl<T> HasSpan for Tagged<T>
where
    Tagged<T>: PrettyDebugWithSource,
{
    fn span(&self) -> Span {
        self.tag.span
    }
}
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user