use crate::{
    lex::{lex, lex_signature},
    lite_parser::{lite_parse, LiteCommand, LiteElement, LitePipeline},
    parse_mut,
    parse_patterns::parse_pattern,
    parse_shape_specs::{parse_shape_name, parse_type, ShapeDescriptorUse},
    type_check::{self, math_result_type, type_compatible},
    Token, TokenContents,
};

use nu_engine::DIR_VAR_PARSER_INFO;
use nu_protocol::{
    ast::{
        Argument, Assignment, Bits, Block, Boolean, Call, CellPath, Comparison, Expr, Expression,
        ExternalArgument, FullCellPath, ImportPattern, ImportPatternHead, ImportPatternMember,
        MatchPattern, Math, Operator, PathMember, Pattern, Pipeline, PipelineElement,
        RangeInclusion, RangeOperator, RecordItem,
    },
    engine::StateWorkingSet,
    eval_const::eval_constant,
    span, BlockId, DidYouMean, Flag, ParseError, PositionalArg, Signature, Span, Spanned,
    SyntaxShape, Type, Unit, VarId, ENV_VARIABLE_ID, IN_VARIABLE_ID,
};

use crate::parse_keywords::{
    find_dirs_var, is_unaliasable_parser_keyword, parse_alias, parse_const, parse_def,
    parse_def_predecl, parse_export_in_block, parse_extern, parse_for, parse_hide, parse_keyword,
    parse_let, parse_module, parse_overlay_hide, parse_overlay_new, parse_overlay_use,
    parse_source, parse_use, parse_where, parse_where_expr, LIB_DIRS_VAR,
};

use itertools::Itertools;
use log::trace;
use std::collections::{HashMap, HashSet};
use std::{num::ParseIntError, str};

#[cfg(feature = "plugin")]
use crate::parse_keywords::parse_register;

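/// Build a placeholder "garbage" expression covering `span`, so parsing can continue
/// after an error.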
pub fn garbage(span: Span) -> Expression {
    Expression::garbage(span)
}

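/// Build a pipeline containing a single garbage expression that covers all of `spans`.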
pub fn garbage_pipeline(spans: &[Span]) -> Pipeline {
    Pipeline::from_vec(vec![garbage(span(spans))])
}

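/// A byte may appear in an identifier as long as it is not one of the separator or
/// operator characters listed below.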
fn is_identifier_byte(b: u8) -> bool {
    b != b'.'
        && b != b'['
        && b != b'('
        && b != b'{'
        && b != b'+'
        && b != b'-'
        && b != b'*'
        && b != b'^'
        && b != b'/'
        && b != b'='
        && b != b'!'
        && b != b'<'
        && b != b'>'
        && b != b'&'
        && b != b'|'
}

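/// Heuristic check for whether the span should be treated as a math expression.
/// Keyword literals and leading sigils are accepted outright; otherwise number, filesize,
/// duration, datetime, binary, and range parses are attempted speculatively, with any
/// errors they produce truncated away again afterwards.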
pub fn is_math_expression_like(working_set: &mut StateWorkingSet, span: Span) -> bool {
    let bytes = working_set.get_span_contents(span);
    if bytes.is_empty() {
        return false;
    }

    if bytes == b"true"
        || bytes == b"false"
        || bytes == b"null"
        || bytes == b"not"
        || bytes == b"if"
        || bytes == b"match"
    {
        return true;
    }

    let b = bytes[0];

    if b == b'(' || b == b'{' || b == b'[' || b == b'$' || b == b'"' || b == b'\'' || b == b'-' {
        return true;
    }

    let starting_error_count = working_set.parse_errors.len();

    // Number
    parse_number(working_set, span);
    if working_set.parse_errors.len() == starting_error_count {
        return true;
    }
    working_set.parse_errors.truncate(starting_error_count);

    // Filesize
    parse_filesize(working_set, span);
    if working_set.parse_errors.len() == starting_error_count {
        return true;
    }
    working_set.parse_errors.truncate(starting_error_count);

    parse_duration(working_set, span);
    if working_set.parse_errors.len() == starting_error_count {
        return true;
    }
    working_set.parse_errors.truncate(starting_error_count);

    parse_datetime(working_set, span);
    if working_set.parse_errors.len() == starting_error_count {
        return true;
    }
    working_set.parse_errors.truncate(starting_error_count);

    parse_binary(working_set, span);
    if working_set.parse_errors.len() == starting_error_count {
        return true;
    }
    working_set.parse_errors.truncate(starting_error_count);

    parse_range(working_set, span);

    if working_set.parse_errors.len() == starting_error_count {
        return true;
    }
    working_set.parse_errors.truncate(starting_error_count);

    false
}

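/// An identifier is a byte sequence made up entirely of identifier bytes.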
fn is_identifier(bytes: &[u8]) -> bool {
    bytes.iter().all(|x| is_identifier_byte(*x))
}

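/// A variable is an identifier, optionally prefixed with `$`.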
pub fn is_variable(bytes: &[u8]) -> bool {
    if bytes.len() > 1 && bytes[0] == b'$' {
        is_identifier(&bytes[1..])
    } else {
        is_identifier(bytes)
    }
}

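/// Strip one pair of matching double, single, or backtick quotes from `bytes`,
/// returning the input unchanged if it is not quoted.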
pub fn trim_quotes(bytes: &[u8]) -> &[u8] {
    if (bytes.starts_with(b"\"") && bytes.ends_with(b"\"") && bytes.len() > 1)
        || (bytes.starts_with(b"\'") && bytes.ends_with(b"\'") && bytes.len() > 1)
        || (bytes.starts_with(b"`") && bytes.ends_with(b"`") && bytes.len() > 1)
    {
        &bytes[1..(bytes.len() - 1)]
    } else {
        bytes
    }
}

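/// String counterpart of `trim_quotes`: strip one pair of matching quotes if present.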
pub fn trim_quotes_str(s: &str) -> &str {
    if (s.starts_with('"') && s.ends_with('"') && s.len() > 1)
        || (s.starts_with('\'') && s.ends_with('\'') && s.len() > 1)
        || (s.starts_with('`') && s.ends_with('`') && s.len() > 1)
    {
        &s[1..(s.len() - 1)]
    } else {
        s
    }
}

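/// Check a parsed call against its signature, recording parse errors for missing
/// required positional arguments and missing required flags (skipped when the help
/// flag is passed).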
pub fn check_call(working_set: &mut StateWorkingSet, command: Span, sig: &Signature, call: &Call) {
    // Allow the call to pass if the caller passed in the help flag
    if call.named_iter().any(|(n, _, _)| n.item == "help") {
        return;
    }

    if call.positional_len() < sig.required_positional.len() {
        // Compare the types of all signature positional arguments against the parsed
        // expressions found in the call. If no expression matches a positional argument's
        // type, assume that positional argument is missing from the parsed call
        for argument in &sig.required_positional {
            let found = call.positional_iter().fold(false, |ac, expr| {
                if argument.shape.to_type() == expr.ty || argument.shape == SyntaxShape::Any {
                    true
                } else {
                    ac
                }
            });
            if !found {
                if let Some(last) = call.positional_iter().last() {
                    working_set.error(ParseError::MissingPositional(
                        argument.name.clone(),
                        Span::new(last.span.end, last.span.end),
                        sig.call_signature(),
                    ));
                    return;
                } else {
                    working_set.error(ParseError::MissingPositional(
                        argument.name.clone(),
                        Span::new(command.end, command.end),
                        sig.call_signature(),
                    ));
                    return;
                }
            }
        }

        let missing = &sig.required_positional[call.positional_len()];
        if let Some(last) = call.positional_iter().last() {
            working_set.error(ParseError::MissingPositional(
                missing.name.clone(),
                Span::new(last.span.end, last.span.end),
                sig.call_signature(),
            ))
        } else {
            working_set.error(ParseError::MissingPositional(
                missing.name.clone(),
                Span::new(command.end, command.end),
                sig.call_signature(),
            ))
        }
    } else {
        for req_flag in sig.named.iter().filter(|x| x.required) {
            if call.named_iter().all(|(n, _, _)| n.item != req_flag.long) {
                working_set.error(ParseError::MissingRequiredFlag(
                    req_flag.long.clone(),
                    command,
                ));
            }
        }
    }
}

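/// Check that a definition-like command (optionally prefixed with `export`) is followed
/// by a name and an equals sign, recording an `AssignmentMismatch` error and returning
/// the offending span when the name or the equals sign is missing.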
pub fn check_name<'a>(working_set: &mut StateWorkingSet, spans: &'a [Span]) -> Option<&'a Span> {
    let command_len = if !spans.is_empty() {
        if working_set.get_span_contents(spans[0]) == b"export" {
            2
        } else {
            1
        }
    } else {
        return None;
    };

    if spans.len() == 1 {
        None
    } else if spans.len() < command_len + 3 {
        if working_set.get_span_contents(spans[command_len]) == b"=" {
            let name =
                String::from_utf8_lossy(working_set.get_span_contents(span(&spans[..command_len])));
            working_set.error(ParseError::AssignmentMismatch(
                format!("{name} missing name"),
                "missing name".into(),
                spans[command_len],
            ));
            Some(&spans[command_len])
        } else {
            None
        }
    } else if working_set.get_span_contents(spans[command_len + 1]) != b"=" {
        let name =
            String::from_utf8_lossy(working_set.get_span_contents(span(&spans[..command_len])));
        working_set.error(ParseError::AssignmentMismatch(
            format!("{name} missing sign"),
            "missing equal sign".into(),
            spans[command_len + 1],
        ));
        Some(&spans[command_len + 1])
    } else {
        None
    }
}

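/// Parse one argument to an external command: arguments starting with `$` or `(` are
/// parsed as dollar expressions, `[` as a list, and a leading `...` followed by `$`,
/// `[`, or `(` becomes a `Spread` argument whose value is parsed as a list.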
fn parse_external_arg(working_set: &mut StateWorkingSet, span: Span) -> ExternalArgument {
    let contents = working_set.get_span_contents(span);

    if contents.starts_with(b"$") || contents.starts_with(b"(") {
        ExternalArgument::Regular(parse_dollar_expr(working_set, span))
    } else if contents.starts_with(b"[") {
Allow spreading arguments to commands (#11289)
<!--
if this PR closes one or more issues, you can automatically link the PR
with
them by using one of the [*linking
keywords*](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword),
e.g.
- this PR should close #xxxx
- fixes #xxxx
you can also mention related issues, PRs or discussions!
-->
Finishes implementing https://github.com/nushell/nushell/issues/10598,
which asks for a spread operator in lists, in records, and when calling
commands.
# Description
<!--
Thank you for improving Nushell. Please, check our [contributing
guide](../CONTRIBUTING.md) and talk to the core team before making major
changes.
Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.
-->
This PR will allow spreading arguments to commands (both internal and
external). It will also deprecate spreading arguments automatically when
passing to external commands.
# User-Facing Changes
<!-- List of all changes that impact the user experience here. This
helps us keep track of breaking changes. -->
- Users will be able to use `...` to spread arguments to custom/builtin
commands that have rest parameters or allow unknown arguments, or to any
external command
- If a custom command doesn't have a rest parameter and it doesn't allow
unknown arguments either, the spread operator will not be allowed
- Passing lists to external commands without `...` will work for now but
will cause a deprecation warning saying that it'll stop working in 0.91
(is 2 versions enough time?)
Here's a function to help with demonstrating some behavior:
```nushell
> def foo [ a, b, c?, d?, ...rest ] { [$a $b $c $d $rest] | to nuon }
```
You can pass a list of arguments to fill in the `rest` parameter using
`...`:
```nushell
> foo 1 2 3 4 ...[5 6]
[1, 2, 3, 4, [5, 6]]
```
If you don't use `...`, the list `[5 6]` will be treated as a single
argument:
```nushell
> foo 1 2 3 4 [5 6] # Note the double [[]]
[1, 2, 3, 4, [[5, 6]]]
```
You can omit optional parameters before the spread arguments:
```nushell
> foo 1 2 3 ...[4 5] # d is omitted here
[1, 2, 3, null, [4, 5]]
```
If you have multiple lists, you can spread them all:
```nushell
> foo 1 2 3 ...[4 5] 6 7 ...[8] ...[]
[1, 2, 3, null, [4, 5, 6, 7, 8]]
```
Here's the kind of error you get when you try to spread arguments to a
command with no rest parameter:
![image](https://github.com/nushell/nushell/assets/45539777/93faceae-00eb-4e59-ac3f-17f98436e6e4)
And this is the warning you get when you pass a list to an external now
(without `...`):
![image](https://github.com/nushell/nushell/assets/45539777/d368f590-201e-49fb-8b20-68476ced415e)
# Tests + Formatting
<!--
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used` to
check that you're using the standard code style
- `cargo test --workspace` to check that all tests pass (on Windows make
sure to [enable developer
mode](https://learn.microsoft.com/en-us/windows/apps/get-started/developer-mode-features-and-debugging))
- `cargo run -- -c "use std testing; testing run-tests --path
crates/nu-std"` to run the tests for the standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
-->
Added tests to cover the following cases:
- Spreading arguments to a command that doesn't have a rest parameter
(unexpected spread argument error)
- Spreading arguments to a command that doesn't have a rest parameter
*but* there's also a missing positional argument (missing positional
error)
- Spreading arguments to a command that doesn't have a rest parameter
but does allow unknown arguments, such as `exec` (allowed)
- Spreading a list literal containing arguments of the wrong type (parse
error)
- Spreading a non-list value, both to internal and external commands
- Having named arguments in the middle of rest arguments
- `explain`ing a command call that spreads its arguments
# After Submitting
<!-- If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
-->
# Examples
Suppose you have multiple tables:
```nushell
let people = [[id name age]; [0 alice 100] [1 bob 200] [2 eve 300]]
let evil_twins = [[id name age]; [0 ecila 100] [-1 bob 200] [-2 eve 300]]
```
Maybe you often find yourself needing to merge multiple tables and want
a utility to do that. You could write a function like this:
```nushell
def merge_all [ ...tables ] { $tables | reduce { |it, acc| $acc | merge $it } }
```
Then you can use it like this:
```nushell
> merge_all ...([$people $evil_twins] | each { |$it| $it | select name age })
╭───┬───────┬─────╮
│ # │ name │ age │
├───┼───────┼─────┤
│ 0 │ ecila │ 100 │
│ 1 │ bob │ 200 │
│ 2 │ eve │ 300 │
╰───┴───────┴─────╯
```
Except they had duplicate columns, so now you first want to suffix every
column with a number to tell you which table the column came from. You
can make a command for that:
```nushell
def select_and_merge [ --cols: list<string>, ...tables ] {
let renamed_tables = $tables
| enumerate
| each { |it|
$it.item | select $cols | rename ...($cols | each { |col| $col + ($it.index | into string) })
};
merge_all ...$renamed_tables
}
```
And call it like this:
```nushell
> select_and_merge --cols [name age] $people $evil_twins
╭───┬───────┬──────┬───────┬──────╮
│ # │ name0 │ age0 │ name1 │ age1 │
├───┼───────┼──────┼───────┼──────┤
│ 0 │ alice │ 100 │ ecila │ 100 │
│ 1 │ bob │ 200 │ bob │ 200 │
│ 2 │ eve │ 300 │ eve │ 300 │
╰───┴───────┴──────┴───────┴──────╯
```
---
Suppose someone's made a command to search for APT packages:
```nushell
# The main command
def search-pkgs [
--install # Whether to install any packages it finds
log_level: int # Pretend it's a good idea to make this a required positional parameter
exclude?: list<string> # Packages to exclude
repositories?: list<string> # Which repositories to look in (searches in all if not given)
...pkgs # Package names to search for
] {
{ install: $install, log_level: $log_level, exclude: ($exclude | to nuon), repositories: ($repositories | to nuon), pkgs: ($pkgs | to nuon) }
}
```
It has a lot of parameters to configure it, so you might make your own
helper commands to wrap around it for specific cases. Here's one
example:
```nushell
# Only look for packages locally
def search-pkgs-local [
--install # Whether to install any packages it finds
log_level: int
exclude?: list<string> # Packages to exclude
...pkgs # Package names to search for
] {
# All required and optional positional parameters are given
search-pkgs --install=$install $log_level [] ["<local URI or something>"] ...$pkgs
}
```
And you can run it like this:
```nushell
> search-pkgs-local --install=false 5 ...["python2.7" "vim"]
╭──────────────┬──────────────────────────────╮
│ install │ false │
│ log_level │ 5 │
│ exclude │ [] │
│ repositories │ ["<local URI or something>"] │
│ pkgs │ ["python2.7", vim] │
╰──────────────┴──────────────────────────────╯
```
One thing I realized while writing this: even if we decide not to allow
passing optional arguments via the spread operator, you can still
(mis?)use the spread operator to skip optional parameters. Here, I
didn't want to give `exclude` explicitly, so I used a spread to pass the
package names instead. Without it, I would've needed
`search-pkgs-local --install=false 5 [] "python2.7" "vim"` (explicitly
passing `[]` (or `null`, in the general case) to `exclude`). There are
probably more idiomatic ways to do this, but I thought it was
interesting; see the sketch below.
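For instance, building on the `search-pkgs-local` definition above, these two calls are roughly equivalent; the first passes `[]` to `exclude` explicitly, while the second skips `exclude` entirely:
```nushell
> search-pkgs-local --install=false 5 [] "python2.7" "vim"
> search-pkgs-local --install=false 5 ...["python2.7" "vim"]
```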
If you're a virologist of the [xkcd](https://xkcd.com/350/) kind,
another helper command you might make is this:
```nushell
# Install any packages it finds
def live-dangerously [ ...pkgs ] {
# One optional argument was given (exclude), while another was not (repositories)
search-pkgs 0 [] ...$pkgs --install # Flags can go after spread arguments
}
```
Running it:
```nushell
> live-dangerously "git" "*vi*" # *vi* because I don't feel like typing out vim and neovim
╭──────────────┬─────────────╮
│ install │ true │
│ log_level │ 0 │
│ exclude │ [] │
│ repositories │ null │
│ pkgs │ [git, *vi*] │
╰──────────────┴─────────────╯
```
Here's an example that uses the spread operator more than once within
the same command call:
```nushell
let extras = [ chrome firefox python java git ]
def search-pkgs-curated [ ...pkgs ] {
(search-pkgs
1
[emacs]
["example.com", "foo.com"]
vim # A must for everyone!
...($pkgs | filter { |p| not ($p | str contains "*") }) # Remove packages with globs
python # Good tool to have
...$extras
--install=false
python3) # I forget, did I already put Python in extras?
}
```
Running it:
```nushell
> search-pkgs-curated "git" "*vi*"
╭──────────────┬───────────────────────────────────────────────────────────────────╮
│ install │ false │
│ log_level │ 1 │
│ exclude │ [emacs] │
│ repositories │ [example.com, foo.com] │
│ pkgs │ [vim, git, python, chrome, firefox, python, java, git, "python3"] │
╰──────────────┴───────────────────────────────────────────────────────────────────╯
```
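The spread operator works the same way for external commands. As noted earlier, passing a bare list to an external still spreads it for now but triggers a deprecation warning; here's a small sketch (`^echo` is just a stand-in external):
```nushell
let args = ["-n" "hello"]
^echo ...$args    # explicit spread: each element becomes its own argument
^echo $args       # still works for now, but is deprecated in favor of `...`
```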
        ExternalArgument::Regular(parse_list_expression(working_set, span, &SyntaxShape::Any))
    } else if contents.len() > 3
        && contents.starts_with(b"...")
        && (contents[3] == b'$' || contents[3] == b'[' || contents[3] == b'(')
    {
        // A `...` spread argument: parse everything after the `...` (a variable, list
        // literal, or subexpression) as a list to be spread into the external call.
        ExternalArgument::Spread(parse_value(
            working_set,
            Span::new(span.start + 3, span.end),
            &SyntaxShape::List(Box::new(SyntaxShape::Any)),
        ))
    } else {
        // Eval stage trims the quotes, so we don't have to do the same thing when parsing.
        let contents = if contents.starts_with(b"\"") {
            let (contents, err) = unescape_string(contents, span);
            if let Some(err) = err {
                working_set.error(err)
            }
            String::from_utf8_lossy(&contents).to_string()
        } else {
            String::from_utf8_lossy(contents).to_string()
        };

        ExternalArgument::Regular(Expression {
            expr: Expr::String(contents),
            span,
            ty: Type::String,
            custom_completion: None,
        })
    }
}

pub fn parse_external_call(
    working_set: &mut StateWorkingSet,
    spans: &[Span],
    is_subexpression: bool,
) -> Expression {
    trace!("parse external");

    let mut args = vec![];

    let head_contents = working_set.get_span_contents(spans[0]);

    let head_span = if head_contents.starts_with(b"^") {
        Span::new(spans[0].start + 1, spans[0].end)
    } else {
        spans[0]
    };

    let head_contents = working_set.get_span_contents(head_span).to_vec();

    let head = if head_contents.starts_with(b"$") || head_contents.starts_with(b"(") {
        // the expression is inside external_call, so it's a subexpression
        let arg = parse_expression(working_set, &[head_span], true);
        Box::new(arg)
    } else {
        let (contents, err) = unescape_unquote_string(&head_contents, head_span);
        if let Some(err) = err {
            working_set.error(err)
        }

        Box::new(Expression {
            expr: Expr::String(contents),
            span: head_span,
            ty: Type::String,
            custom_completion: None,
        })
    };

    for span in &spans[1..] {
        let arg = parse_external_arg(working_set, *span);
        args.push(arg);
    }

    Expression {
        expr: Expr::ExternalCall(head, args, is_subexpression),
        span: span(spans),
        ty: Type::Any,
        custom_completion: None,
    }
}

fn ensure_flag_arg_type(
    working_set: &mut StateWorkingSet,
    arg_name: String,
    arg: Expression,
    arg_shape: &SyntaxShape,
    long_name_span: Span,
) -> (Spanned<String>, Expression) {
    // If the argument doesn't fit the flag's declared shape, report a type mismatch
    // and substitute a garbage expression so parsing can continue.
    if !type_compatible(&arg.ty, &arg_shape.to_type()) {
        working_set.error(ParseError::TypeMismatch(
            arg_shape.to_type(),
            arg.ty,
            arg.span,
        ));
        (
            Spanned {
                item: arg_name,
                span: long_name_span,
            },
            Expression::garbage(arg.span),
        )
    } else {
        (
            Spanned {
                item: arg_name,
                span: long_name_span,
            },
            arg,
        )
    }
}

fn parse_long_flag(
    working_set: &mut StateWorkingSet,
    spans: &[Span],
    spans_idx: &mut usize,
    sig: &Signature,
) -> (Option<Spanned<String>>, Option<Expression>) {
    let arg_span = spans[*spans_idx];
    let arg_contents = working_set.get_span_contents(arg_span);

    if arg_contents.starts_with(b"--") {
        // FIXME: only use the first flag you find?
        let split: Vec<_> = arg_contents.split(|x| *x == b'=').collect();
        let long_name = String::from_utf8(split[0].into());
        if let Ok(long_name) = long_name {
            let long_name = long_name[2..].to_string();
            if let Some(flag) = sig.get_long_flag(&long_name) {
                if let Some(arg_shape) = &flag.arg {
                    if split.len() > 1 {
                        // and we also have the argument
                        let long_name_len = long_name.len();
                        let mut span = arg_span;
                        span.start += long_name_len + 3; //offset by long flag and '='

                        let arg = parse_value(working_set, span, arg_shape);
                        let (arg_name, val_expression) = ensure_flag_arg_type(
                            working_set,
                            long_name,
                            arg,
                            arg_shape,
                            Span::new(arg_span.start, arg_span.start + long_name_len + 2),
                        );
                        (Some(arg_name), Some(val_expression))
                    } else if let Some(arg) = spans.get(*spans_idx + 1) {
                        let arg = parse_value(working_set, *arg, arg_shape);

                        *spans_idx += 1;
                        let (arg_name, val_expression) =
                            ensure_flag_arg_type(working_set, long_name, arg, arg_shape, arg_span);
                        (Some(arg_name), Some(val_expression))
                    } else {
                        working_set.error(ParseError::MissingFlagParam(
                            arg_shape.to_string(),
                            arg_span,
                        ));
                        (
                            Some(Spanned {
                                item: long_name,
                                span: arg_span,
                            }),
                            None,
                        )
                    }
                } else {
                    // A flag with no argument
                    // It can also take a boolean value like --x=true
                    if split.len() > 1 {
                        // and we also have the argument
                        let long_name_len = long_name.len();
                        let mut span = arg_span;
                        span.start += long_name_len + 3; //offset by long flag and '='

                        let arg = parse_value(working_set, span, &SyntaxShape::Boolean);

                        let (arg_name, val_expression) = ensure_flag_arg_type(
                            working_set,
                            long_name,
                            arg,
                            &SyntaxShape::Boolean,
                            Span::new(arg_span.start, arg_span.start + long_name_len + 2),
                        );
                        (Some(arg_name), Some(val_expression))
                    } else {
                        (
                            Some(Spanned {
                                item: long_name,
                                span: arg_span,
                            }),
                            None,
                        )
                    }
                }
            } else {
                working_set.error(ParseError::UnknownFlag(
                    sig.name.clone(),
                    long_name.clone(),
                    arg_span,
                    sig.clone().formatted_flags(),
                ));
                (
                    Some(Spanned {
                        item: long_name.clone(),
                        span: arg_span,
                    }),
                    None,
                )
            }
        } else {
            working_set.error(ParseError::NonUtf8(arg_span));
            (
                Some(Spanned {
                    item: "--".into(),
                    span: arg_span,
                }),
                None,
            )
        }
    } else {
        (None, None)
    }
}

fn parse_short_flags(
    working_set: &mut StateWorkingSet,
    spans: &[Span],
    spans_idx: &mut usize,
    positional_idx: usize,
    sig: &Signature,
) -> Option<Vec<Flag>> {
    let arg_span = spans[*spans_idx];

    let arg_contents = working_set.get_span_contents(arg_span);

    if let Ok(arg_contents_uft8_ref) = str::from_utf8(arg_contents) {
        if arg_contents_uft8_ref.starts_with('-') && arg_contents_uft8_ref.len() > 1 {
            let short_flags = &arg_contents_uft8_ref[1..];
            let num_chars = short_flags.chars().count();
            let mut found_short_flags = vec![];
            let mut unmatched_short_flags = vec![];
            for (offset, short_flag) in short_flags.char_indices() {
                let short_flag_span = Span::new(
                    arg_span.start + 1 + offset,
                    arg_span.start + 1 + offset + short_flag.len_utf8(),
                );
                if let Some(flag) = sig.get_short_flag(short_flag) {
                    // Allow args in short flag batches as long as it is the last flag.
                    if flag.arg.is_some() && offset < num_chars - 1 {
                        working_set
                            .error(ParseError::OnlyLastFlagInBatchCanTakeArg(short_flag_span));
                        break;
                    }
                    found_short_flags.push(flag);
                } else {
                    unmatched_short_flags.push(short_flag_span);
                }
            }

            if found_short_flags.is_empty()
                // check to see if we have a negative number
                && matches!(
                    sig.get_positional(positional_idx),
                    Some(PositionalArg {
                        shape: SyntaxShape::Int | SyntaxShape::Number,
                        ..
                    })
                )
                && String::from_utf8_lossy(working_set.get_span_contents(arg_span))
                    .parse::<f64>()
                    .is_ok()
            {
                return None;
            } else if let Some(first) = unmatched_short_flags.first() {
                let contents = working_set.get_span_contents(*first);
                working_set.error(ParseError::UnknownFlag(
                    sig.name.clone(),
                    format!("-{}", String::from_utf8_lossy(contents)),
                    *first,
                    sig.clone().formatted_flags(),
                ));
            }

            Some(found_short_flags)
        } else {
            None
        }
    } else {
        working_set.error(ParseError::NonUtf8(arg_span));
        None
    }
}

fn first_kw_idx(
    working_set: &StateWorkingSet,
    signature: &Signature,
    spans: &[Span],
    spans_idx: usize,
    positional_idx: usize,
) -> (Option<usize>, usize) {
    for idx in (positional_idx + 1)..signature.num_positionals() {
        if let Some(PositionalArg {
            shape: SyntaxShape::Keyword(kw, ..),
            ..
        }) = signature.get_positional(idx)
        {
            #[allow(clippy::needless_range_loop)]
            for span_idx in spans_idx..spans.len() {
                let contents = working_set.get_span_contents(spans[span_idx]);

                if contents == kw {
                    return (Some(idx), span_idx);
                }
            }
        }
    }
    (None, spans.len())
}

fn calculate_end_span(
    working_set: &StateWorkingSet,
    signature: &Signature,
    spans: &[Span],
    spans_idx: usize,
    positional_idx: usize,
) -> usize {
    if signature.rest_positional.is_some() {
        spans.len()
    } else {
        let (kw_pos, kw_idx) =
            first_kw_idx(working_set, signature, spans, spans_idx, positional_idx);

        if let Some(kw_pos) = kw_pos {
            // We found a keyword. Keywords, once found, create a guidepost to
            // show us where the positionals will lie among the arguments. Because they're
            // keywords, they get to set this by being present

            let positionals_between = kw_pos - positional_idx - 1;
            if positionals_between > (kw_idx - spans_idx) {
                kw_idx
            } else {
                kw_idx - positionals_between
            }
        } else {
            // Make space for the remaining required positionals, if we can
            if signature.num_positionals_after(positional_idx) == 0 {
                spans.len()
            } else if positional_idx < signature.required_positional.len()
                && spans.len() > (signature.required_positional.len() - positional_idx)
            {
                spans.len() - (signature.required_positional.len() - positional_idx - 1)
            } else {
                spans_idx + 1
            }
        }
    }
}

pub fn parse_multispan_value(
    working_set: &mut StateWorkingSet,
    spans: &[Span],
    spans_idx: &mut usize,
    shape: &SyntaxShape,
) -> Expression {
    match shape {
        SyntaxShape::VarWithOptType => {
            trace!("parsing: var with opt type");
parse_var_with_opt_type(working_set, spans, spans_idx, false).0
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
SyntaxShape::RowCondition => {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("parsing: row condition");
|
2023-04-07 20:09:38 +02:00
|
|
|
let arg = parse_row_condition(working_set, &spans[*spans_idx..]);
|
2021-09-02 10:25:22 +02:00
|
|
|
*spans_idx = spans.len() - 1;
|
2021-07-16 08:24:46 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
arg
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2022-01-15 16:26:52 +01:00
|
|
|
SyntaxShape::MathExpression => {
|
|
|
|
trace!("parsing: math expression");
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
let arg = parse_math_expression(working_set, &spans[*spans_idx..], None);
|
2022-01-15 16:26:52 +01:00
|
|
|
*spans_idx = spans.len() - 1;
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
arg
|
2022-01-15 16:26:52 +01:00
|
|
|
}
|
2022-12-07 20:58:54 +01:00
|
|
|
SyntaxShape::OneOf(shapes) => {
|
2023-03-17 13:37:59 +01:00
|
|
|
// handle for `if` command.
|
2023-04-07 02:35:45 +02:00
|
|
|
//let block_then_exp = shapes.as_slice() == [SyntaxShape::Block, SyntaxShape::Expression];
|
2022-12-07 20:58:54 +01:00
|
|
|
for shape in shapes.iter() {
|
2023-04-07 02:35:45 +02:00
|
|
|
let starting_error_count = working_set.parse_errors.len();
|
2023-04-07 20:09:38 +02:00
|
|
|
let s = parse_multispan_value(working_set, spans, spans_idx, shape);
|
2023-04-07 02:35:45 +02:00
|
|
|
|
|
|
|
if starting_error_count == working_set.parse_errors.len() {
|
|
|
|
return s;
|
2023-05-24 21:53:57 +02:00
|
|
|
} else if let Some(
|
|
|
|
ParseError::Expected(..) | ParseError::ExpectedWithStringMsg(..),
|
|
|
|
) = working_set.parse_errors.last()
|
|
|
|
{
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.parse_errors.truncate(starting_error_count);
|
|
|
|
continue;
|
2022-12-07 20:58:54 +01:00
|
|
|
}
|
2023-04-07 02:35:45 +02:00
|
|
|
// `if` parses the block first and then the expression.
|
|
|
|
// when we're writing something like `else if $a`, parsing as a
|
|
|
|
// block will result in an error (because it's not a block)
|
|
|
|
//
|
|
|
|
// If parsing as an expression also failed, the user is more likely concerned
|
|
|
|
// about the expression failure rather than the "expected block" failure.
|
|
|
|
|
|
|
|
// FIXME FIXME FIXME
|
|
|
|
// if block_then_exp {
|
|
|
|
// match &err {
|
|
|
|
// Some(ParseError::Expected(expected, _)) => {
|
|
|
|
// if expected.starts_with("block") {
|
|
|
|
// err = e
|
|
|
|
// }
|
|
|
|
// }
|
|
|
|
// _ => err = err.or(e),
|
|
|
|
// }
|
|
|
|
// } else {
|
|
|
|
// err = err.or(e)
|
|
|
|
// }
|
2022-12-07 20:58:54 +01:00
|
|
|
}
|
|
|
|
let span = spans[*spans_idx];
|
2022-12-09 14:48:12 +01:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
if working_set.parse_errors.is_empty() {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::ExpectedWithStringMsg(
|
2023-04-07 02:35:45 +02:00
|
|
|
format!("one of a list of accepted shapes: {shapes:?}"),
|
|
|
|
span,
|
|
|
|
));
|
2022-12-09 14:48:12 +01:00
|
|
|
}
|
2023-04-07 02:35:45 +02:00
|
|
|
|
|
|
|
Expression::garbage(span)
|
2022-12-07 20:58:54 +01:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
SyntaxShape::Expression => {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("parsing: expression");
|
|
|
|
|
Make external command substitution work friendly (like fish shell: trim trailing newlines) (#7156)
# Description
As the title says: when an external command runs as a subexpression, its
trailing newlines are now trimmed automatically, like fish shell does.
If the command is executed directly, like `cat tmp`, the result
doesn't change.
Fixes: #6816
Fixes: #3980
Note that although nushell behaves correctly by substituting the output of
the external command directly into a variable (or other places, like string
interpolation), it's not friendly to the user: users almost always want
`str trim` to remove the trailing newline, which I think is why fish shell
does this automatically.
If this PR is accepted, no more `str trim -r` is required when
writing scripts that use external commands.
# User-Facing Changes
Before:
<img width="523" alt="img"
src="https://user-images.githubusercontent.com/22256154/202468810-86b04dbb-c147-459a-96a5-e0095eeaab3d.png">
After:
<img width="505" alt="img"
src="https://user-images.githubusercontent.com/22256154/202468599-7b537488-3d6b-458e-9d75-d85780826db0.png">
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace --features=extra -- -D warnings -D
clippy::unwrap_used -A clippy::needless_collect` to check that you're
using the standard code style
- `cargo test --workspace --features=extra` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2022-11-23 04:51:57 +01:00
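A minimal nushell sketch of the resulting behavior (illustrative only; it assumes an external `echo` is available on the system):
```nushell
# Captured external output no longer carries the trailing newline,
# so no `str trim -r` is needed afterwards.
let v = (^echo "hello")
$v | str length   # 5, not 6
```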
|
|
|
// is it subexpression?
|
|
|
|
// Not sure, but let's say it isn't, so the behavior matches previous versions of nushell.
|
2023-04-07 20:09:38 +02:00
|
|
|
let arg = parse_expression(working_set, &spans[*spans_idx..], false);
|
2021-09-02 10:25:22 +02:00
|
|
|
*spans_idx = spans.len() - 1;
|
2021-07-08 23:16:25 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
arg
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2023-07-14 23:51:28 +02:00
|
|
|
SyntaxShape::Signature => {
|
|
|
|
trace!("parsing: signature");
|
|
|
|
|
|
|
|
let sig = parse_full_signature(working_set, &spans[*spans_idx..]);
|
|
|
|
*spans_idx = spans.len() - 1;
|
|
|
|
|
|
|
|
sig
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
SyntaxShape::Keyword(keyword, arg) => {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!(
|
|
|
|
"parsing: keyword({}) {:?}",
|
|
|
|
String::from_utf8_lossy(keyword),
|
|
|
|
arg
|
|
|
|
);
|
2021-09-02 10:25:22 +02:00
|
|
|
let arg_span = spans[*spans_idx];
|
2021-07-08 23:16:25 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let arg_contents = working_set.get_span_contents(arg_span);
|
2021-07-17 07:28:25 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if arg_contents != keyword {
|
|
|
|
// When keywords mismatch, this is a strong indicator of something going wrong.
|
|
|
|
// We won't often override the current error, but as this is a strong indicator
|
|
|
|
// go ahead and override the current error and tell the user about the missing
|
|
|
|
// keyword/literal.
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::ExpectedKeyword(
|
2021-09-02 10:25:22 +02:00
|
|
|
String::from_utf8_lossy(keyword).into(),
|
|
|
|
arg_span,
|
|
|
|
))
|
|
|
|
}
|
2021-07-17 07:28:25 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
*spans_idx += 1;
|
|
|
|
if *spans_idx >= spans.len() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::KeywordMissingArgument(
|
|
|
|
arg.to_string(),
|
|
|
|
String::from_utf8_lossy(keyword).into(),
|
|
|
|
Span::new(spans[*spans_idx - 1].end, spans[*spans_idx - 1].end),
|
|
|
|
));
|
|
|
|
return Expression {
|
|
|
|
expr: Expr::Keyword(
|
|
|
|
keyword.clone(),
|
|
|
|
spans[*spans_idx - 1],
|
|
|
|
Box::new(Expression::garbage(arg_span)),
|
|
|
|
),
|
|
|
|
span: arg_span,
|
|
|
|
ty: Type::Any,
|
|
|
|
custom_completion: None,
|
|
|
|
};
|
2021-07-08 23:16:25 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
let keyword_span = spans[*spans_idx - 1];
|
2023-04-07 20:09:38 +02:00
|
|
|
let expr = parse_multispan_value(working_set, spans, spans_idx, arg);
|
2021-09-02 10:25:22 +02:00
|
|
|
let ty = expr.ty.clone();
|
2021-07-17 07:28:25 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::Keyword(keyword.clone(), keyword_span, Box::new(expr)),
|
|
|
|
span: arg_span,
|
|
|
|
ty,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
_ => {
|
|
|
|
// All other cases are single-span values
|
|
|
|
let arg_span = spans[*spans_idx];
|
2021-07-08 23:16:25 +02:00
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_value(working_set, arg_span, shape)
|
2021-07-08 23:16:25 +02:00
|
|
|
}
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-08 23:16:25 +02:00
|
|
|
|
2022-06-12 21:18:00 +02:00
|
|
|
pub struct ParsedInternalCall {
|
|
|
|
pub call: Box<Call>,
|
|
|
|
pub output: Type,
|
|
|
|
}
|
|
|
|
|
2023-04-05 18:56:48 +02:00
|
|
|
fn attach_parser_info_builtin(working_set: &StateWorkingSet, name: &str, call: &mut Call) {
|
|
|
|
match name {
|
|
|
|
"use" | "overlay use" | "source-env" | "nu-check" => {
|
|
|
|
if let Some(var_id) = find_dirs_var(working_set, LIB_DIRS_VAR) {
|
|
|
|
call.set_parser_info(
|
|
|
|
DIR_VAR_PARSER_INFO.to_owned(),
|
|
|
|
Expression {
|
|
|
|
expr: Expr::Var(var_id),
|
|
|
|
span: call.head,
|
|
|
|
ty: Type::Any,
|
|
|
|
custom_completion: None,
|
|
|
|
},
|
|
|
|
);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
_ => {}
|
|
|
|
}
|
|
|
|
}
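For orientation, the commands special-cased above are the ones that resolve module or script paths at parse time. A hedged nushell sketch (`my_module.nu` is a hypothetical file, and the library-dirs setting is assumed to correspond to `$env.NU_LIB_DIRS`):
```nushell
# `use`, `overlay use`, `source-env`, and `nu-check` resolve relative paths
# against the library dirs the parser records here (e.g. $env.NU_LIB_DIRS).
use my_module.nu    # found if my_module.nu lives in one of those dirs
```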
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
pub fn parse_internal_call(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
command_span: Span,
|
|
|
|
spans: &[Span],
|
|
|
|
decl_id: usize,
|
2022-06-12 21:18:00 +02:00
|
|
|
) -> ParsedInternalCall {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("parsing: internal call (decl id: {})", decl_id);
|
|
|
|
|
2022-02-04 03:01:45 +01:00
|
|
|
let mut call = Call::new(command_span);
|
2021-09-02 10:25:22 +02:00
|
|
|
call.decl_id = decl_id;
|
|
|
|
call.head = command_span;
|
2021-07-08 08:19:38 +02:00
|
|
|
|
2022-06-10 17:59:35 +02:00
|
|
|
let decl = working_set.get_decl(decl_id);
|
|
|
|
let signature = decl.signature();
|
Improve type hovers (#9515)
# Description
This PR does a few things to help improve type hovers and, in the
process, fixes a few outstanding issues in the type system. Here's a
list of the changes:
* `for` now will try to infer the type of the iteration variable based
on the expression it's given. This fixes things like `for x in [1, 2, 3]
{ }` where `x` now properly gets the int type.
* Removed the old input/output type fields from the signature, focusing on
the vec of signatures. Updated a bunch of dataframe commands that hadn't
moved over. This helps tie things together a bit better.
* Fixed inference of types from subexpressions to use the last
expression in the block
* Fixed handling of explicit types in `let` and `mut` calls, so we now
respect that as the authoritative type
I also tried to add `def` input/output type inference, but unfortunately
we only know the predecl types universally, which means we won't have
enough information to properly know what the types of the custom
commands are.
# User-Facing Changes
Script typechecking will get tighter in some cases.
Hovers should be more accurate in some cases that previously resorted to
`any`.
# Tests + Formatting
<!--
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect -A clippy::result_large_err` to check that
you're using the standard code style
- `cargo test --workspace` to check that all tests pass
- `cargo run -- crates/nu-std/tests/run.nu` to run the tests for the
standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
-->
# After Submitting
<!-- If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
-->
---------
Co-authored-by: Darren Schroeder <343840+fdncred@users.noreply.github.com>
2023-06-28 19:19:48 +02:00
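A small nushell sketch of the inference changes above (hedged; the types noted in the comments are what a hover would be expected to report, not exact output):
```nushell
# The iteration variable now takes its type from the list literal,
# so $x is inferred as int rather than any.
for x in [1 2 3] { print ($x * 2) }

# An explicit annotation on `let` is respected as the authoritative type.
let n: int = 5
```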
|
|
|
let output = signature.get_output_type();
|
2022-06-12 21:18:00 +02:00
|
|
|
|
2023-04-05 18:56:48 +02:00
|
|
|
if decl.is_builtin() {
|
|
|
|
attach_parser_info_builtin(working_set, decl.name(), &mut call);
|
|
|
|
}
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
// The index into the positional parameter in the definition
|
|
|
|
let mut positional_idx = 0;
|
2021-07-08 08:19:38 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
// The index into the spans of argument data given to parse
|
|
|
|
// Starting at the first argument
|
|
|
|
let mut spans_idx = 0;
|
2021-07-02 00:40:08 +02:00
|
|
|
|
Re-implement aliases (#8123)
# Description
This PR adds an alternative alias implementation. Old aliases still work
but you need to use `old-alias` instead of `alias`.
Instead of replacing spans in the original code and re-parsing, which
proved to be extremely error-prone and a constant source of panics, the
new implementation creates a new command that references the old
command. Consider the new alias defined as `alias ll = ls -l`. The
parser creates a new command called `ll` and remembers that it is
actually an `ls` command called with the `-l` flag. Then, when the parser
sees the `ll` command, it will translate it to `ls -l` and pass to it
any parameters that were passed to the call to `ll`. It works quite
similar to how known externals defined with `extern` are implemented.
The new alias implementation should work the same way as the old
aliases, including exporting from modules, referencing both known and
unknown externals. It seems to preserve custom completions and pipeline
metadata. It is quite robust in most cases but there are some rough
edges (see later).
Fixes https://github.com/nushell/nushell/issues/7648,
https://github.com/nushell/nushell/issues/8026,
https://github.com/nushell/nushell/issues/7512,
https://github.com/nushell/nushell/issues/5780,
https://github.com/nushell/nushell/issues/7754
No effect: https://github.com/nushell/nushell/issues/8122 (we might
revisit the completions code after this PR)
Should use custom command instead:
https://github.com/nushell/nushell/issues/6048
# User-Facing Changes
Since aliases are now basically commands, it has some new implications:
1. `alias spam = "spam"` (requires command call)
* **workaround**: use `alias spam = echo "spam"`
2. `def foo [] { 'foo' }; alias foo = ls -l` (foo defined more than
once)
* **workaround**: use different name (commands also have this
limitation)
4. `alias ls = (ls | sort-by type name -i)`
* **workaround**: Use custom command. _The common issue with this is
that it is currently not easy to pass flags through custom commands and
command referencing itself will lead to stack overflow. Both of these
issues are meant to be addressed._
5. TODO: Help messages, `which` command, `$nu.scope.aliases`, etc.
* Should we treat the aliases as commands or should they be separated
from regular commands?
6. Needs better error message and syntax highlight for recursed alias
(`alias f = f`)
7. Can't create alias with the same name as existing command (`alias ls
= ls -a`)
* Might be possible to add support for it (not 100% sure)
8. Standalone `alias` doesn't list aliases anymore
9. Can't alias parser keywords (e.g., stuff like `alias ou = overlay
use` won't work)
* TODO: Needs a better error message when attempting to do so
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-02-27 08:44:05 +01:00
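A brief nushell sketch of the expansion described above (illustrative only):
```nushell
# `ll` is stored as a reference to `ls` called with `-l`;
# extra arguments are forwarded to the underlying call.
alias ll = ls -l
ll -a    # effectively runs `ls -l -a`
```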
|
|
|
if let Some(alias) = decl.as_alias() {
|
|
|
|
if let Expression {
|
|
|
|
expr: Expr::Call(wrapped_call),
|
|
|
|
..
|
|
|
|
} = &alias.wrapped_call
|
|
|
|
{
|
|
|
|
// Replace this command's call with the aliased call, but keep the alias name
|
|
|
|
call = *wrapped_call.clone();
|
|
|
|
call.head = command_span;
|
|
|
|
// Skip positionals passed to aliased call
|
|
|
|
positional_idx = call.positional_len();
|
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownState(
|
|
|
|
"Alias does not point to internal call.".to_string(),
|
|
|
|
command_span,
|
|
|
|
));
|
Re-implement aliases (#8123)
2023-02-27 08:44:05 +01:00
|
|
|
return ParsedInternalCall {
|
|
|
|
call: Box::new(call),
|
|
|
|
output: Type::Any,
|
|
|
|
};
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if signature.creates_scope {
|
|
|
|
working_set.enter_scope();
|
|
|
|
}
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
while spans_idx < spans.len() {
|
|
|
|
let arg_span = spans[spans_idx];
|
2021-07-08 23:16:25 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
let starting_error_count = working_set.parse_errors.len();
|
2021-09-02 10:25:22 +02:00
|
|
|
// Check if we're on a long flag, if so, parse
|
2023-04-07 20:09:38 +02:00
|
|
|
let (long_name, arg) = parse_long_flag(working_set, spans, &mut spans_idx, &signature);
|
2022-12-21 23:33:26 +01:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if let Some(long_name) = long_name {
|
|
|
|
// We found a long flag, like --bar
|
2023-04-07 02:35:45 +02:00
|
|
|
if working_set.parse_errors[starting_error_count..]
|
|
|
|
.iter()
|
|
|
|
.any(|x| matches!(x, ParseError::UnknownFlag(_, _, _, _)))
|
2022-12-21 23:33:26 +01:00
|
|
|
&& signature.allows_unknown_args
|
|
|
|
{
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.parse_errors.truncate(starting_error_count);
|
2023-04-07 20:09:38 +02:00
|
|
|
let arg = parse_value(working_set, arg_span, &SyntaxShape::Any);
|
2022-12-21 23:33:26 +01:00
|
|
|
|
|
|
|
call.add_unknown(arg);
|
|
|
|
} else {
|
|
|
|
call.add_named((long_name, None, arg));
|
|
|
|
}
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
spans_idx += 1;
|
|
|
|
continue;
|
|
|
|
}
|
2021-07-02 00:40:08 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
let starting_error_count = working_set.parse_errors.len();
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
// Check if we're on a short flag or group of short flags, if so, parse
|
2023-04-07 02:35:45 +02:00
|
|
|
let short_flags = parse_short_flags(
|
2021-09-02 10:25:22 +02:00
|
|
|
working_set,
|
|
|
|
spans,
|
|
|
|
&mut spans_idx,
|
|
|
|
positional_idx,
|
|
|
|
&signature,
|
|
|
|
);
|
2021-07-08 22:29:00 +02:00
|
|
|
|
2022-07-17 14:46:40 +02:00
|
|
|
if let Some(mut short_flags) = short_flags {
|
|
|
|
if short_flags.is_empty() {
|
2022-12-21 23:33:26 +01:00
|
|
|
// workaround for completions (PR #6067)
|
2022-07-17 14:46:40 +02:00
|
|
|
short_flags.push(Flag {
|
|
|
|
long: "".to_string(),
|
|
|
|
short: Some('a'),
|
|
|
|
arg: None,
|
|
|
|
required: false,
|
|
|
|
desc: "".to_string(),
|
|
|
|
var_id: None,
|
|
|
|
default_value: None,
|
|
|
|
})
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
if working_set.parse_errors[starting_error_count..]
|
|
|
|
.iter()
|
|
|
|
.any(|x| matches!(x, ParseError::UnknownFlag(_, _, _, _)))
|
2022-12-21 23:33:26 +01:00
|
|
|
&& signature.allows_unknown_args
|
|
|
|
{
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.parse_errors.truncate(starting_error_count);
|
2023-04-07 20:09:38 +02:00
|
|
|
let arg = parse_value(working_set, arg_span, &SyntaxShape::Any);
|
2022-12-21 23:33:26 +01:00
|
|
|
|
|
|
|
call.add_unknown(arg);
|
|
|
|
} else {
|
|
|
|
for flag in short_flags {
|
|
|
|
if let Some(arg_shape) = flag.arg {
|
|
|
|
if let Some(arg) = spans.get(spans_idx + 1) {
|
2023-04-07 20:09:38 +02:00
|
|
|
let arg = parse_value(working_set, *arg, &arg_shape);
|
2022-12-21 23:33:26 +01:00
|
|
|
|
|
|
|
if flag.long.is_empty() {
|
|
|
|
if let Some(short) = flag.short {
|
|
|
|
call.add_named((
|
|
|
|
Spanned {
|
|
|
|
item: String::new(),
|
|
|
|
span: spans[spans_idx],
|
|
|
|
},
|
|
|
|
Some(Spanned {
|
|
|
|
item: short.to_string(),
|
|
|
|
span: spans[spans_idx],
|
|
|
|
}),
|
|
|
|
Some(arg),
|
|
|
|
));
|
|
|
|
}
|
|
|
|
} else {
|
2022-04-09 07:17:48 +02:00
|
|
|
call.add_named((
|
|
|
|
Spanned {
|
2022-12-21 23:33:26 +01:00
|
|
|
item: flag.long.clone(),
|
2022-04-09 07:17:48 +02:00
|
|
|
span: spans[spans_idx],
|
|
|
|
},
|
2022-12-21 23:33:26 +01:00
|
|
|
None,
|
2022-04-09 07:17:48 +02:00
|
|
|
Some(arg),
|
|
|
|
));
|
|
|
|
}
|
2022-12-21 23:33:26 +01:00
|
|
|
spans_idx += 1;
|
2022-04-09 07:17:48 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::MissingFlagParam(
|
|
|
|
arg_shape.to_string(),
|
|
|
|
arg_span,
|
|
|
|
))
|
2022-12-21 23:33:26 +01:00
|
|
|
}
|
|
|
|
} else if flag.long.is_empty() {
|
|
|
|
if let Some(short) = flag.short {
|
2022-04-09 07:17:48 +02:00
|
|
|
call.add_named((
|
|
|
|
Spanned {
|
2022-12-21 23:33:26 +01:00
|
|
|
item: String::new(),
|
2022-04-09 07:17:48 +02:00
|
|
|
span: spans[spans_idx],
|
|
|
|
},
|
2022-12-21 23:33:26 +01:00
|
|
|
Some(Spanned {
|
|
|
|
item: short.to_string(),
|
|
|
|
span: spans[spans_idx],
|
|
|
|
}),
|
2022-04-09 07:17:48 +02:00
|
|
|
None,
|
|
|
|
));
|
|
|
|
}
|
2021-07-08 08:19:38 +02:00
|
|
|
} else {
|
2022-04-09 07:17:48 +02:00
|
|
|
call.add_named((
|
|
|
|
Spanned {
|
2022-12-21 23:33:26 +01:00
|
|
|
item: flag.long.clone(),
|
2022-04-09 07:17:48 +02:00
|
|
|
span: spans[spans_idx],
|
|
|
|
},
|
2022-12-21 23:33:26 +01:00
|
|
|
None,
|
2022-04-09 07:17:48 +02:00
|
|
|
None,
|
|
|
|
));
|
|
|
|
}
|
2021-07-08 08:19:38 +02:00
|
|
|
}
|
2021-07-08 22:29:00 +02:00
|
|
|
}
|
2022-12-21 23:33:26 +01:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
spans_idx += 1;
|
|
|
|
continue;
|
|
|
|
}
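For reference, the branch above handles short flags given singly or as a group; a small nushell illustration (assuming `ls`'s `--long`/`-l` and `--all`/`-a` flags, as in current nushell):
```nushell
# `-la` is parsed as a group of two short flags, `-l` and `-a`,
# equivalent to `ls --long --all`.
ls -la
```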
|
2021-07-08 22:29:00 +02:00
|
|
|
|
Allow spreading arguments to commands (#11289)
<!--
if this PR closes one or more issues, you can automatically link the PR
with
them by using one of the [*linking
keywords*](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword),
e.g.
- this PR should close #xxxx
- fixes #xxxx
you can also mention related issues, PRs or discussions!
-->
Finishes implementing https://github.com/nushell/nushell/issues/10598,
which asks for a spread operator in lists, in records, and when calling
commands.
# Description
<!--
Thank you for improving Nushell. Please, check our [contributing
guide](../CONTRIBUTING.md) and talk to the core team before making major
changes.
Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.
-->
This PR will allow spreading arguments to commands (both internal and
external). It will also deprecate spreading arguments automatically when
passing to external commands.
# User-Facing Changes
<!-- List of all changes that impact the user experience here. This
helps us keep track of breaking changes. -->
- Users will be able to use `...` to spread arguments to custom/builtin
commands that have rest parameters or allow unknown arguments, or to any
external command
- If a custom command doesn't have a rest parameter and it doesn't allow
unknown arguments either, the spread operator will not be allowed
- Passing lists to external commands without `...` will work for now but
will cause a deprecation warning saying that it'll stop working in 0.91
(is 2 versions enough time?)
Here's a function to help with demonstrating some behavior:
```nushell
> def foo [ a, b, c?, d?, ...rest ] { [$a $b $c $d $rest] | to nuon }
```
You can pass a list of arguments to fill in the `rest` parameter using
`...`:
```nushell
> foo 1 2 3 4 ...[5 6]
[1, 2, 3, 4, [5, 6]]
```
If you don't use `...`, the list `[5 6]` will be treated as a single
argument:
```nushell
> foo 1 2 3 4 [5 6] # Note the double [[]]
[1, 2, 3, 4, [[5, 6]]]
```
You can omit optional parameters before the spread arguments:
```nushell
> foo 1 2 3 ...[4 5] # d is omitted here
[1, 2, 3, null, [4, 5]]
```
If you have multiple lists, you can spread them all:
```nushell
> foo 1 2 3 ...[4 5] 6 7 ...[8] ...[]
[1, 2, 3, null, [4, 5, 6, 7, 8]]
```
Here's the kind of error you get when you try to spread arguments to a
command with no rest parameter:
![image](https://github.com/nushell/nushell/assets/45539777/93faceae-00eb-4e59-ac3f-17f98436e6e4)
And this is the warning you get when you pass a list to an external now
(without `...`):
![image](https://github.com/nushell/nushell/assets/45539777/d368f590-201e-49fb-8b20-68476ced415e)
# Tests + Formatting
<!--
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used` to
check that you're using the standard code style
- `cargo test --workspace` to check that all tests pass (on Windows make
sure to [enable developer
mode](https://learn.microsoft.com/en-us/windows/apps/get-started/developer-mode-features-and-debugging))
- `cargo run -- -c "use std testing; testing run-tests --path
crates/nu-std"` to run the tests for the standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
-->
Added tests to cover the following cases:
- Spreading arguments to a command that doesn't have a rest parameter
(unexpected spread argument error)
- Spreading arguments to a command that doesn't have a rest parameter
*but* there's also a missing positional argument (missing positional
error)
- Spreading arguments to a command that doesn't have a rest parameter
but does allow unknown arguments, such as `exec` (allowed)
- Spreading a list literal containing arguments of the wrong type (parse
error)
- Spreading a non-list value, both to internal and external commands
- Having named arguments in the middle of rest arguments
- `explain`ing a command call that spreads its arguments
# After Submitting
<!-- If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
-->
# Examples
Suppose you have multiple tables:
```nushell
let people = [[id name age]; [0 alice 100] [1 bob 200] [2 eve 300]]
let evil_twins = [[id name age]; [0 ecila 100] [-1 bob 200] [-2 eve 300]]
```
Maybe you often find yourself needing to merge multiple tables and want
a utility to do that. You could write a function like this:
```nushell
def merge_all [ ...tables ] { $tables | reduce { |it, acc| $acc | merge $it } }
```
Then you can use it like this:
```nushell
> merge_all ...([$people $evil_twins] | each { |$it| $it | select name age })
╭───┬───────┬─────╮
│ # │ name │ age │
├───┼───────┼─────┤
│ 0 │ ecila │ 100 │
│ 1 │ bob │ 200 │
│ 2 │ eve │ 300 │
╰───┴───────┴─────╯
```
Except they had duplicate columns, so now you first want to suffix every
column with a number to tell you which table the column came from. You
can make a command for that:
```nushell
def select_and_merge [ --cols: list<string>, ...tables ] {
let renamed_tables = $tables
| enumerate
| each { |it|
$it.item | select $cols | rename ...($cols | each { |col| $col + ($it.index | into string) })
};
merge_all ...$renamed_tables
}
```
And call it like this:
```nushell
> select_and_merge --cols [name age] $people $evil_twins
╭───┬───────┬──────┬───────┬──────╮
│ # │ name0 │ age0 │ name1 │ age1 │
├───┼───────┼──────┼───────┼──────┤
│ 0 │ alice │ 100 │ ecila │ 100 │
│ 1 │ bob │ 200 │ bob │ 200 │
│ 2 │ eve │ 300 │ eve │ 300 │
╰───┴───────┴──────┴───────┴──────╯
```
---
Suppose someone's made a command to search for APT packages:
```nushell
# The main command
def search-pkgs [
--install # Whether to install any packages it finds
log_level: int # Pretend it's a good idea to make this a required positional parameter
exclude?: list<string> # Packages to exclude
repositories?: list<string> # Which repositories to look in (searches in all if not given)
...pkgs # Package names to search for
] {
{ install: $install, log_level: $log_level, exclude: ($exclude | to nuon), repositories: ($repositories | to nuon), pkgs: ($pkgs | to nuon) }
}
```
It has a lot of parameters to configure it, so you might make your own
helper commands to wrap around it for specific cases. Here's one
example:
```nushell
# Only look for packages locally
def search-pkgs-local [
--install # Whether to install any packages it finds
log_level: int
exclude?: list<string> # Packages to exclude
...pkgs # Package names to search for
] {
# All required and optional positional parameters are given
search-pkgs --install=$install $log_level [] ["<local URI or something>"] ...$pkgs
}
```
And you can run it like this:
```nushell
> search-pkgs-local --install=false 5 ...["python2.7" "vim"]
╭──────────────┬──────────────────────────────╮
│ install │ false │
│ log_level │ 5 │
│ exclude │ [] │
│ repositories │ ["<local URI or something>"] │
│ pkgs │ ["python2.7", vim] │
╰──────────────┴──────────────────────────────╯
```
One thing I realized when writing this was that if we decide to not
allow passing optional arguments using the spread operator, then you can
(mis?)use the spread operator to skip optional parameters. Here, I
didn't want to give `exclude` explicitly, so I used a spread operator to
pass the packages to install. Without it, I would've needed to do
`search-pkgs-local --install=false 5 [] "python2.7" "vim"` (explicitly
pass `[]` (or `null`, in the general case) to `exclude`). There are
probably more idiomatic ways to do this, but I just thought it was
something interesting.
If you're a virologist of the [xkcd](https://xkcd.com/350/) kind,
another helper command you might make is this:
```nushell
# Install any packages it finds
def live-dangerously [ ...pkgs ] {
# One optional argument was given (exclude), while another was not (repositories)
search-pkgs 0 [] ...$pkgs --install # Flags can go after spread arguments
}
```
Running it:
```nushell
> live-dangerously "git" "*vi*" # *vi* because I don't feel like typing out vim and neovim
╭──────────────┬─────────────╮
│ install │ true │
│ log_level │ 0 │
│ exclude │ [] │
│ repositories │ null │
│ pkgs │ [git, *vi*] │
╰──────────────┴─────────────╯
```
Here's an example that uses the spread operator more than once within
the same command call:
```nushell
let extras = [ chrome firefox python java git ]
def search-pkgs-curated [ ...pkgs ] {
(search-pkgs
1
[emacs]
["example.com", "foo.com"]
vim # A must for everyone!
...($pkgs | filter { |p| not ($p | str contains "*") }) # Remove packages with globs
python # Good tool to have
...$extras
--install=false
python3) # I forget, did I already put Python in extras?
}
```
Running it:
```nushell
> search-pkgs-curated "git" "*vi*"
╭──────────────┬───────────────────────────────────────────────────────────────────╮
│ install │ false │
│ log_level │ 1 │
│ exclude │ [emacs] │
│ repositories │ [example.com, foo.com] │
│ pkgs │ [vim, git, python, chrome, firefox, python, java, git, "python3"] │
╰──────────────┴───────────────────────────────────────────────────────────────────╯
```
2023-12-28 08:43:20 +01:00
|
|
|
{
|
|
|
|
let contents = working_set.get_span_contents(spans[spans_idx]);
|
|
|
|
|
|
|
|
if contents.len() > 3
|
|
|
|
&& contents.starts_with(b"...")
|
|
|
|
&& (contents[3] == b'$' || contents[3] == b'[' || contents[3] == b'(')
|
|
|
|
{
|
|
|
|
if signature.rest_positional.is_none() && !signature.allows_unknown_args {
|
|
|
|
working_set.error(ParseError::UnexpectedSpreadArg(
|
|
|
|
signature.call_signature(),
|
|
|
|
arg_span,
|
|
|
|
));
|
|
|
|
call.add_positional(Expression::garbage(arg_span));
|
|
|
|
} else if positional_idx < signature.required_positional.len() {
|
|
|
|
working_set.error(ParseError::MissingPositional(
|
|
|
|
signature.required_positional[positional_idx].name.clone(),
|
|
|
|
Span::new(spans[spans_idx].start, spans[spans_idx].start),
|
|
|
|
signature.call_signature(),
|
|
|
|
));
|
|
|
|
call.add_positional(Expression::garbage(arg_span));
|
|
|
|
} else {
|
|
|
|
let rest_shape = match &signature.rest_positional {
|
|
|
|
Some(arg) => arg.shape.clone(),
|
|
|
|
None => SyntaxShape::Any,
|
|
|
|
};
|
|
|
|
// Parse list of arguments to be spread
|
|
|
|
let args = parse_value(
|
|
|
|
working_set,
|
|
|
|
Span::new(arg_span.start + 3, arg_span.end),
|
|
|
|
&SyntaxShape::List(Box::new(rest_shape)),
|
|
|
|
);
|
|
|
|
|
|
|
|
call.add_spread(args);
|
|
|
|
// Let the parser know that it's parsing rest arguments now
|
|
|
|
positional_idx =
|
|
|
|
signature.required_positional.len() + signature.optional_positional.len();
|
|
|
|
}
|
|
|
|
|
|
|
|
spans_idx += 1;
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
}
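The block above recognizes a spread argument by the `...` prefix followed by `$`, `[`, or `(`. A hypothetical nushell sketch of the three accepted forms (assuming a `foo` command defined with a rest parameter):
```nushell
def foo [ ...rest ] { $rest }

foo ...[1 2 3]        # spread a list literal
let xs = [4 5]
foo ...$xs            # spread a variable
foo ...(seq 6 8)      # spread a subexpression
```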
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
// Parse a positional arg if there is one
|
|
|
|
if let Some(positional) = signature.get_positional(positional_idx) {
|
|
|
|
let end = calculate_end_span(working_set, &signature, spans, spans_idx, positional_idx);
|
2021-07-17 01:22:01 +02:00
|
|
|
|
2022-02-14 18:33:47 +01:00
|
|
|
let end = if spans.len() > spans_idx && end == spans_idx {
|
|
|
|
end + 1
|
|
|
|
} else {
|
|
|
|
end
|
|
|
|
};
|
|
|
|
|
|
|
|
if spans[..end].is_empty() || spans_idx == end {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::MissingPositional(
|
|
|
|
positional.name.clone(),
|
|
|
|
Span::new(spans[spans_idx].end, spans[spans_idx].end),
|
|
|
|
signature.call_signature(),
|
|
|
|
));
|
2021-12-27 21:04:48 +01:00
|
|
|
positional_idx += 1;
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
let arg = parse_multispan_value(
|
2021-09-02 10:25:22 +02:00
|
|
|
working_set,
|
|
|
|
&spans[..end],
|
|
|
|
&mut spans_idx,
|
|
|
|
&positional.shape,
|
|
|
|
);
|
2021-07-23 23:46:55 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let arg = if !type_compatible(&positional.shape.to_type(), &arg.ty) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::TypeMismatch(
|
|
|
|
positional.shape.to_type(),
|
|
|
|
arg.ty,
|
|
|
|
arg.span,
|
|
|
|
));
|
2023-03-28 23:23:10 +02:00
|
|
|
Expression::garbage(arg.span)
|
2021-07-08 08:19:38 +02:00
|
|
|
} else {
|
2021-09-02 10:25:22 +02:00
|
|
|
arg
|
|
|
|
};
|
2022-04-09 04:55:02 +02:00
|
|
|
call.add_positional(arg);
|
2021-09-02 10:25:22 +02:00
|
|
|
positional_idx += 1;
|
2022-12-21 23:33:26 +01:00
|
|
|
} else if signature.allows_unknown_args {
|
2023-04-07 20:09:38 +02:00
|
|
|
let arg = parse_value(working_set, arg_span, &SyntaxShape::Any);
|
2022-12-21 23:33:26 +01:00
|
|
|
|
|
|
|
call.add_unknown(arg);
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
2022-04-09 04:55:02 +02:00
|
|
|
call.add_positional(Expression::garbage(arg_span));
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::ExtraPositional(
|
|
|
|
signature.call_signature(),
|
|
|
|
arg_span,
|
|
|
|
))
|
2021-07-08 08:19:38 +02:00
|
|
|
}
|
2021-07-02 00:40:08 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
spans_idx += 1;
|
2021-07-08 08:19:38 +02:00
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
check_call(working_set, command_span, &signature, &call);
|
2021-07-31 07:20:40 +02:00
|
|
|
|
2021-10-09 18:10:46 +02:00
|
|
|
if signature.creates_scope {
|
|
|
|
working_set.exit_scope();
|
|
|
|
}
|
|
|
|
|
2022-06-12 21:18:00 +02:00
|
|
|
ParsedInternalCall {
|
|
|
|
call: Box::new(call),
|
|
|
|
output,
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-31 07:20:40 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
pub fn parse_call(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
spans: &[Span],
|
2021-12-19 08:46:13 +01:00
|
|
|
head: Span,
|
Make external command substitution work friendly (like fish shell: trim trailing newlines) (#7156)
2022-11-23 04:51:57 +01:00
|
|
|
is_subexpression: bool,
|
2023-04-07 02:35:45 +02:00
|
|
|
) -> Expression {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("parsing: call");
|
|
|
|
|
2021-10-29 22:50:28 +02:00
|
|
|
if spans.is_empty() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownState(
|
|
|
|
"Encountered command with zero spans".into(),
|
|
|
|
span(spans),
|
|
|
|
));
|
|
|
|
return garbage(head);
|
2021-10-27 23:52:59 +02:00
|
|
|
}
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let mut pos = 0;
|
|
|
|
let cmd_start = pos;
|
2021-10-29 22:50:28 +02:00
|
|
|
let mut name_spans = vec![];
|
2022-01-10 03:52:01 +01:00
|
|
|
let mut name = vec![];
|
2021-08-17 01:00:00 +02:00
|
|
|
|
2021-10-29 22:50:28 +02:00
|
|
|
for word_span in spans[cmd_start..].iter() {
|
|
|
|
// Find the longest group of words that could form a command
|
2022-04-07 04:01:31 +02:00
|
|
|
|
2021-10-29 22:50:28 +02:00
|
|
|
name_spans.push(*word_span);
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2022-01-10 03:52:01 +01:00
|
|
|
let name_part = working_set.get_span_contents(*word_span);
|
|
|
|
if name.is_empty() {
|
|
|
|
name.extend(name_part);
|
|
|
|
} else {
|
|
|
|
name.push(b' ');
|
|
|
|
name.extend(name_part);
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2021-10-29 22:50:28 +02:00
|
|
|
pos += 1;
|
|
|
|
}
|
|
|
|
|
2023-07-13 21:05:03 +02:00
|
|
|
let mut maybe_decl_id = working_set.find_decl(&name);
|
2021-10-29 22:50:28 +02:00
|
|
|
|
|
|
|
while maybe_decl_id.is_none() {
|
|
|
|
// Find the longest command match
|
|
|
|
if name_spans.len() <= 1 {
|
|
|
|
// Keep the first word even if it does not match -- it could be an external command
|
|
|
|
break;
|
2021-06-30 03:42:56 +02:00
|
|
|
}
|
2021-09-11 14:07:19 +02:00
|
|
|
|
2021-10-29 22:50:28 +02:00
|
|
|
name_spans.pop();
|
|
|
|
pos -= 1;
|
|
|
|
|
2022-01-10 03:52:01 +01:00
|
|
|
let mut name = vec![];
|
|
|
|
for name_span in &name_spans {
|
|
|
|
let name_part = working_set.get_span_contents(*name_span);
|
|
|
|
if name.is_empty() {
|
|
|
|
name.extend(name_part);
|
|
|
|
} else {
|
|
|
|
name.push(b' ');
|
|
|
|
name.extend(name_part);
|
|
|
|
}
|
|
|
|
}
|
2023-07-13 21:05:03 +02:00
|
|
|
maybe_decl_id = working_set.find_decl(&name);
|
2021-10-29 22:50:28 +02:00
|
|
|
}
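For context, nushell command names can span several words (`str trim`, `overlay use`, ...), so the loop above starts from the longest word sequence and pops words off the end until a known declaration matches. A hedged illustration:
```nushell
# For the pipeline below the parser first tries `str trim --right`
# (not a declaration), then `str trim`, which matches.
"  hi  " | str trim --right
```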
|
|
|
|
|
|
|
|
if let Some(decl_id) = maybe_decl_id {
|
2021-09-11 14:07:19 +02:00
|
|
|
// Before the internal parsing, we check that there are no `let` or `alias` declarations
|
|
|
|
// that are missing their name, e.g. `let = 1` or `alias = 2`
|
|
|
|
if spans.len() > 1 {
|
|
|
|
let test_equal = working_set.get_span_contents(spans[1]);
|
|
|
|
|
2021-09-11 14:16:40 +02:00
|
|
|
if test_equal == [b'='] {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("incomplete statement");
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownState(
|
|
|
|
"Incomplete statement".into(),
|
|
|
|
span(spans),
|
|
|
|
));
|
|
|
|
return garbage(span(spans));
|
2021-09-11 14:07:19 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
Re-implement aliases (#8123)
2023-02-27 08:44:05 +01:00
|
|
|
// TODO: Try to remove the clone
|
2023-07-13 21:05:03 +02:00
|
|
|
let decl = working_set.get_decl(decl_id);
|
2022-01-01 22:42:50 +01:00
|
|
|
|
Re-implement aliases (#8123)
2023-02-27 08:44:05 +01:00
|
|
|
let parsed_call = if let Some(alias) = decl.as_alias() {
|
|
|
|
if let Expression {
|
|
|
|
expr: Expr::ExternalCall(head, args, is_subexpression),
|
|
|
|
span: _,
|
|
|
|
ty,
|
|
|
|
custom_completion,
|
2023-07-13 21:05:03 +02:00
|
|
|
} = &alias.clone().wrapped_call
|
Re-implement aliases (#8123)
2023-02-27 08:44:05 +01:00
|
|
|
{
|
|
|
|
trace!("parsing: alias of external call");
|
|
|
|
|
|
|
|
let mut final_args = args.clone();
|
|
|
|
|
|
|
|
for arg_span in spans.iter().skip(1) {
|
2023-04-07 20:09:38 +02:00
|
|
|
let arg = parse_external_arg(working_set, *arg_span);
|
Re-implement aliases (#8123)
2023-02-27 08:44:05 +01:00
|
|
|
final_args.push(arg);
|
|
|
|
}
|
|
|
|
|
|
|
|
let mut head = head.clone();
|
|
|
|
head.span = spans[0]; // replacing the spans preserves syntax highlighting
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
return Expression {
|
|
|
|
expr: Expr::ExternalCall(head, final_args, *is_subexpression),
|
|
|
|
span: span(spans),
|
|
|
|
ty: ty.clone(),
|
|
|
|
custom_completion: *custom_completion,
|
|
|
|
};
|
Re-implement aliases (#8123)
2023-02-27 08:44:05 +01:00
|
|
|
} else {
|
|
|
|
trace!("parsing: alias of internal call");
|
|
|
|
parse_internal_call(
|
|
|
|
working_set,
|
|
|
|
span(&spans[cmd_start..pos]),
|
|
|
|
&spans[pos..],
|
|
|
|
decl_id,
|
|
|
|
)
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
trace!("parsing: internal call");
|
|
|
|
parse_internal_call(
|
|
|
|
working_set,
|
|
|
|
span(&spans[cmd_start..pos]),
|
|
|
|
&spans[pos..],
|
|
|
|
decl_id,
|
|
|
|
)
|
|
|
|
};
|
2022-06-12 21:18:00 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::Call(parsed_call.call),
|
|
|
|
span: span(spans),
|
|
|
|
ty: parsed_call.output,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
2021-09-12 14:48:19 +02:00
|
|
|
// We might be parsing left-unbounded range ("..10")
|
|
|
|
let bytes = working_set.get_span_contents(spans[0]);
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("parsing: range {:?} ", bytes);
|
2022-08-11 18:54:54 +02:00
|
|
|
if let (Some(b'.'), Some(b'.')) = (bytes.first(), bytes.get(1)) {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("-- found leading range indicator");
|
2023-04-07 02:35:45 +02:00
|
|
|
let starting_error_count = working_set.parse_errors.len();
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
let range_expr = parse_range(working_set, spans[0]);
|
2023-04-07 02:35:45 +02:00
|
|
|
if working_set.parse_errors.len() == starting_error_count {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("-- successfully parsed range");
|
2023-04-07 02:35:45 +02:00
|
|
|
return range_expr;
|
2021-09-12 14:48:19 +02:00
|
|
|
}
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.parse_errors.truncate(starting_error_count);
|
2021-09-12 14:48:19 +02:00
|
|
|
}
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("parsing: external call");
|
2021-10-29 22:50:28 +02:00
|
|
|
|
|
|
|
// Otherwise, try external command
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_external_call(working_set, spans, is_subexpression)
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
pub fn parse_binary(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
Improve type hovers (#9515)
# Description
This PR does a few things to help improve type hovers and, in the
process, fixes a few outstanding issues in the type system. Here's a
list of the changes:
* `for` now will try to infer the type of the iteration variable based
on the expression it's given. This fixes things like `for x in [1, 2, 3]
{ }` where `x` now properly gets the int type.
* Removed the old input/output type fields from the signature, focusing on
the vec of signatures. Updated a bunch of dataframe commands that hadn't
moved over. This helps tie things together a bit better.
* Fixed inference of types from subexpressions to use the last
expression in the block
* Fixed handling of explicit types in `let` and `mut` calls, so we now
respect that as the authoritative type
I also tried to add `def` input/output type inference, but unfortunately
we only know the predecl types universally, which means we won't have
enough information to properly know what the types of the custom
commands are.
# User-Facing Changes
Script typechecking will get tighter in some cases
Hovers should be more accurate in some cases that previously resorted to
`any`.
# Tests + Formatting
<!--
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect -A clippy::result_large_err` to check that
you're using the standard code style
- `cargo test --workspace` to check that all tests pass
- `cargo run -- crates/nu-std/tests/run.nu` to run the tests for the
standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
-->
# After Submitting
<!-- If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
-->
---------
Co-authored-by: Darren Schroeder <343840+fdncred@users.noreply.github.com>
2023-06-28 19:19:48 +02:00
|
|
|
trace!("parsing: binary");
|
2023-04-07 02:35:45 +02:00
|
|
|
let contents = working_set.get_span_contents(span);
|
|
|
|
if contents.starts_with(b"0x[") {
|
|
|
|
parse_binary_with_base(working_set, span, 16, 2, b"0x[", b"]")
|
|
|
|
} else if contents.starts_with(b"0o[") {
|
|
|
|
parse_binary_with_base(working_set, span, 8, 3, b"0o[", b"]")
|
2023-05-24 21:53:57 +02:00
|
|
|
} else if contents.starts_with(b"0b[") {
|
2023-04-07 02:35:45 +02:00
|
|
|
parse_binary_with_base(working_set, span, 2, 8, b"0b[", b"]")
|
2023-05-24 21:53:57 +02:00
|
|
|
} else {
|
|
|
|
working_set.error(ParseError::Expected("binary", span));
|
|
|
|
garbage(span)
|
2022-03-01 00:31:53 +01:00
|
|
|
}
|
2022-04-11 09:58:57 +02:00
|
|
|
}
|
2022-03-01 00:31:53 +01:00
|
|
|
|
2022-04-11 09:58:57 +02:00
|
|
|
fn parse_binary_with_base(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
span: Span,
|
|
|
|
base: u32,
|
|
|
|
min_digits_per_byte: usize,
|
|
|
|
prefix: &[u8],
|
|
|
|
suffix: &[u8],
|
2023-04-07 02:35:45 +02:00
|
|
|
) -> Expression {
|
2022-03-01 00:31:53 +01:00
|
|
|
let token = working_set.get_span_contents(span);
|
|
|
|
|
2022-04-11 09:58:57 +02:00
|
|
|
if let Some(token) = token.strip_prefix(prefix) {
|
|
|
|
if let Some(token) = token.strip_suffix(suffix) {
|
|
|
|
let (lexed, err) = lex(
|
|
|
|
token,
|
|
|
|
span.start + prefix.len(),
|
|
|
|
&[b',', b'\r', b'\n'],
|
|
|
|
&[],
|
|
|
|
true,
|
|
|
|
);
|
2023-04-07 02:35:45 +02:00
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err);
|
|
|
|
}
|
2022-03-01 00:31:53 +01:00
|
|
|
|
2022-04-11 09:58:57 +02:00
|
|
|
let mut binary_value = vec![];
|
2022-03-01 00:31:53 +01:00
|
|
|
for token in lexed {
|
|
|
|
match token.contents {
|
|
|
|
TokenContents::Item => {
|
|
|
|
let contents = working_set.get_span_contents(token.span);
|
|
|
|
|
2022-04-11 09:58:57 +02:00
|
|
|
binary_value.extend_from_slice(contents);
|
2022-03-01 00:31:53 +01:00
|
|
|
}
|
2022-12-08 00:02:11 +01:00
|
|
|
TokenContents::Pipe
|
|
|
|
| TokenContents::PipePipe
|
|
|
|
| TokenContents::OutGreaterThan
|
2023-11-27 14:52:39 +01:00
|
|
|
| TokenContents::OutGreaterGreaterThan
|
2022-12-08 00:02:11 +01:00
|
|
|
| TokenContents::ErrGreaterThan
|
2023-11-27 14:52:39 +01:00
|
|
|
| TokenContents::ErrGreaterGreaterThan
|
|
|
|
| TokenContents::OutErrGreaterThan
|
|
|
|
| TokenContents::OutErrGreaterGreaterThan => {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("binary", span));
|
2023-04-07 02:35:45 +02:00
|
|
|
return garbage(span);
|
2022-03-01 00:31:53 +01:00
|
|
|
}
|
2022-12-08 00:02:11 +01:00
|
|
|
TokenContents::Comment | TokenContents::Semicolon | TokenContents::Eol => {}
|
2022-03-01 00:31:53 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-04-11 09:58:57 +02:00
|
|
|
let required_padding = (min_digits_per_byte - binary_value.len() % min_digits_per_byte)
|
|
|
|
% min_digits_per_byte;
|
|
|
|
|
|
|
|
if required_padding != 0 {
|
|
|
|
binary_value = {
|
|
|
|
let mut tail = binary_value;
|
|
|
|
let mut binary_value: Vec<u8> = vec![b'0'; required_padding];
|
|
|
|
binary_value.append(&mut tail);
|
|
|
|
binary_value
|
|
|
|
};
|
2022-03-01 00:31:53 +01:00
|
|
|
}
|
|
|
|
|
2022-04-11 09:58:57 +02:00
|
|
|
let str = String::from_utf8_lossy(&binary_value).to_string();
|
2022-03-01 00:31:53 +01:00
|
|
|
|
2022-04-11 09:58:57 +02:00
|
|
|
match decode_with_base(&str, base, min_digits_per_byte) {
|
2022-03-01 00:31:53 +01:00
|
|
|
Ok(v) => {
|
2023-04-07 02:35:45 +02:00
|
|
|
return Expression {
|
|
|
|
expr: Expr::Binary(v),
|
|
|
|
span,
|
|
|
|
ty: Type::Binary,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2022-03-01 00:31:53 +01:00
|
|
|
}
|
|
|
|
Err(x) => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::IncorrectValue(
|
|
|
|
"not a binary value".into(),
|
|
|
|
span,
|
|
|
|
x.to_string(),
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2022-03-01 00:31:53 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2023-04-07 02:35:45 +02:00
|
|
|
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("binary", span));
|
2023-04-07 02:35:45 +02:00
|
|
|
garbage(span)
|
2022-03-01 00:31:53 +01:00
|
|
|
}
|
|
|
|
|
2022-04-11 09:58:57 +02:00
|
|
|
fn decode_with_base(s: &str, base: u32, digits_per_byte: usize) -> Result<Vec<u8>, ParseIntError> {
|
2022-06-17 20:11:48 +02:00
|
|
|
s.chars()
|
|
|
|
.chunks(digits_per_byte)
|
|
|
|
.into_iter()
|
|
|
|
.map(|chunk| {
|
|
|
|
let str: String = chunk.collect();
|
|
|
|
u8::from_str_radix(&str, base)
|
|
|
|
})
|
2022-04-11 09:58:57 +02:00
|
|
|
.collect()
|
|
|
|
}
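As a hedged illustration of the decoding strategy above -- a fixed number of
digits per byte (2 for hex, 3 for octal, 8 for binary), left-padded with '0'
and then parsed per chunk with `from_str_radix` -- here is a minimal
standalone sketch for the hex case. `decode_hex_digits` is a hypothetical
helper, not part of nu-parser, and it uses only the standard library (the
code above uses itertools' `chunks` on a char iterator instead).
```rust
use std::num::ParseIntError;

// Hypothetical standalone helper (not part of nu-parser): decode the digits
// of a `0x[...]` literal into raw bytes -- left-pad to a whole number of
// 2-digit groups, then parse each group with from_str_radix, mirroring the
// padding + chunking strategy above.
fn decode_hex_digits(digits: &str) -> Result<Vec<u8>, ParseIntError> {
    let padding = (2 - digits.len() % 2) % 2;
    let padded = format!("{}{}", "0".repeat(padding), digits);
    padded
        .as_bytes()
        .chunks(2)
        .map(|pair| u8::from_str_radix(std::str::from_utf8(pair).expect("ascii"), 16))
        .collect()
}

fn main() {
    // 0x[FFAA] -> [0xFF, 0xAA]; 0x[FAA] is padded to "0FAA" -> [0x0F, 0xAA]
    assert_eq!(decode_hex_digits("FFAA").unwrap(), vec![0xFF, 0xAA]);
    assert_eq!(decode_hex_digits("FAA").unwrap(), vec![0x0F, 0xAA]);
    // invalid digits surface as a ParseIntError, like the IncorrectValue path above
    assert!(decode_hex_digits("GG").is_err());
}
```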
|
|
|
|
|
2023-01-15 16:03:57 +01:00
|
|
|
fn strip_underscores(token: &[u8]) -> String {
|
|
|
|
String::from_utf8_lossy(token)
|
|
|
|
.chars()
|
|
|
|
.filter(|c| *c != '_')
|
|
|
|
.collect()
|
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
pub fn parse_int(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
|
|
|
let token = working_set.get_span_contents(span);
|
|
|
|
|
|
|
|
fn extract_int(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
token: &str,
|
|
|
|
span: Span,
|
|
|
|
radix: u32,
|
|
|
|
) -> Expression {
|
2023-01-15 16:03:57 +01:00
|
|
|
if let Ok(num) = i64::from_str_radix(token, radix) {
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::Int(num),
|
|
|
|
span,
|
|
|
|
ty: Type::Int,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-08-08 22:21:21 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::InvalidLiteral(
|
|
|
|
format!("invalid digits for radix {}", radix),
|
|
|
|
"int".into(),
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
|
|
|
|
garbage(span)
|
2021-08-08 22:21:21 +02:00
|
|
|
}
|
2023-01-15 16:03:57 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
let token = strip_underscores(token);
|
|
|
|
|
Syntax errors for string and int (#7952)
# Description
Added a few syntax errors in ints and strings, changed parser to stop
and show that error rather than continue trying to parse those tokens as
some other shape. However, I don't see how to push this direction much
further, and most of the classic confusing errors can't be changed.
Flagged as WIP for the moment, but passes all checks and works better
than current release:
1. I have yet to figure out how to make these errors refer back to the
book, as I see some other errors do.
2. How to give syntax error when malformed int is first token in line?
Currently parsed as external command, user gets confusing error message.
3. Would like to be more strict with *decimal* int literals (lacking,
e.g., a `0x` prefix). Need to tinker more with the order of parse shape
calls; currently, float is tried after int, so '1.4' has to be passed.
_(Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.)_
```bash
〉"\z"
Error:
╭─[entry #3:1:1]
1 │ "\z"
· ─┬─
· ╰── Syntax error in string, unrecognized character after escape '\'.
╰────
```
Canonic presentation of a syntax error.
```bash
〉" \u{01ffbogus}"
Error:
× Invalid syntax
╭─[entry #2:1:1]
1 │ " \u{01ffbogus}"
· ───────┬──────
· ╰── Syntax error in string, expecting 1 to 6 hex digits in unicode escape '\u{X...}', max value 10FFFF.
╰────
```
Malformed unicode escape in string, flagged as error.
String parse can be opinionated, it's the last shape tried.
```bash
〉0x22bogus
Error: nu::shell::external_command (link)
× External command failed
╭─[entry #4:1:1]
1 │ 0x22bogus
· ────┬────
· ╰── executable was not found
╰────
help: No such file or directory (os error 2)
```
A *correct* number in the first token would be evaluated, but an
*incorrect* one is treated as an external command? Confusing to users.
```bash
〉0 + 0x22bogus
Error:
× Invalid syntax
╭─[entry #5:1:1]
1 │ 0 + 0x22bogus
· ────┬────
· ╰── Syntax error in int, invalid digits in radix 16 int.
╰────
```
Can give a syntax error if the token is unambiguously an int literal,
e.g. it has a 0b or 0x prefix and could not be a float.
```bash
〉0 + 098bogus
Error: nu::parser::unsupported_operation (link)
× Types mismatched for operation.
╭─[entry #6:1:1]
1 │ 0 + 098bogus
· ┬ ┬ ────┬───
· │ │ ╰── string
· │ ╰── doesn't support these values.
· ╰── int
╰────
help: Change int or string to be the right types and try again.
```
But *decimal* literal (no prefix) can't be too strict. Parser is going
to try float later. So '1.4' must be passed.
# User-Facing Changes
First and foremost, more specific error messages for typos in string and
int literals. Probably improves interactive user experience.
But a script that was causing and then checking for specific error might
notice a different error message.
_(List of all changes that impact the user experience here. This helps
us keep track of breaking changes.)_
# Tests + Formatting
Added positive and negative unit tests in `cargo test -p nu-parser`.
Didn't add integration tests.
Make sure you've run and fixed any issues with these commands:
- [x] `cargo fmt --all -- --check` to check standard code formatting
(`cargo fmt --all` applies these changes)
- [x] `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- [x] `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
---------
Co-authored-by: Stefan Holderbach <sholderbach@users.noreply.github.com>
2023-02-13 17:09:50 +01:00
|
|
|
if token.is_empty() {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("int", span));
|
2023-04-07 02:35:45 +02:00
|
|
|
return garbage(span);
|
Syntax errors for string and int (#7952)
2023-02-13 17:09:50 +01:00
|
|
|
}
|
|
|
|
|
2023-01-15 16:03:57 +01:00
|
|
|
if let Some(num) = token.strip_prefix("0b") {
|
2023-04-07 02:35:45 +02:00
|
|
|
extract_int(working_set, num, span, 2)
|
2023-01-15 16:03:57 +01:00
|
|
|
} else if let Some(num) = token.strip_prefix("0o") {
|
2023-04-07 02:35:45 +02:00
|
|
|
extract_int(working_set, num, span, 8)
|
2023-01-15 16:03:57 +01:00
|
|
|
} else if let Some(num) = token.strip_prefix("0x") {
|
2023-04-07 02:35:45 +02:00
|
|
|
extract_int(working_set, num, span, 16)
|
2023-01-15 16:03:57 +01:00
|
|
|
} else if let Ok(num) = token.parse::<i64>() {
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::Int(num),
|
|
|
|
span,
|
|
|
|
ty: Type::Int,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("int", span));
|
2023-04-07 02:35:45 +02:00
|
|
|
garbage(span)
|
2021-07-01 02:01:04 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
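The following is a minimal standalone sketch of the same integer-literal
rules implemented above: underscores are ignored, a `0b`/`0o`/`0x` prefix
selects the radix, and anything else is parsed as a plain decimal `i64`.
`parse_int_literal` is a hypothetical helper for illustration, not the
nu-parser API (which records a `ParseError` and returns garbage rather than
`None`).
```rust
// Hypothetical helper mirroring the literal rules above.
fn parse_int_literal(token: &str) -> Option<i64> {
    // strip underscores, as strip_underscores does above
    let token: String = token.chars().filter(|c| *c != '_').collect();
    if let Some(num) = token.strip_prefix("0b") {
        i64::from_str_radix(num, 2).ok()
    } else if let Some(num) = token.strip_prefix("0o") {
        i64::from_str_radix(num, 8).ok()
    } else if let Some(num) = token.strip_prefix("0x") {
        i64::from_str_radix(num, 16).ok()
    } else {
        token.parse::<i64>().ok()
    }
}

fn main() {
    assert_eq!(parse_int_literal("0x_ff"), Some(255));
    assert_eq!(parse_int_literal("1_000"), Some(1000));
    assert_eq!(parse_int_literal("0b101"), Some(5));
    assert_eq!(parse_int_literal("nope"), None);
}
```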
|
2021-07-01 02:01:04 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
pub fn parse_float(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
|
|
|
let token = working_set.get_span_contents(span);
|
2023-01-15 16:03:57 +01:00
|
|
|
let token = strip_underscores(token);
|
|
|
|
|
|
|
|
if let Ok(x) = token.parse::<f64>() {
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::Float(x),
|
|
|
|
span,
|
|
|
|
ty: Type::Float,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("float", span));
|
2023-04-07 02:35:45 +02:00
|
|
|
|
|
|
|
garbage(span)
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
}
|
2021-07-30 05:26:06 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
pub fn parse_number(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
|
|
|
let starting_error_count = working_set.parse_errors.len();
|
|
|
|
|
|
|
|
let result = parse_int(working_set, span);
|
|
|
|
if starting_error_count == working_set.parse_errors.len() {
|
|
|
|
return result;
|
|
|
|
} else if !matches!(
|
|
|
|
working_set.parse_errors.last(),
|
|
|
|
Some(ParseError::Expected(_, _))
|
|
|
|
) {
|
|
|
|
} else {
|
|
|
|
working_set.parse_errors.truncate(starting_error_count);
|
2021-07-30 05:26:06 +02:00
|
|
|
}
|
2023-04-07 02:35:45 +02:00
|
|
|
|
|
|
|
let result = parse_float(working_set, span);
|
|
|
|
|
|
|
|
if starting_error_count == working_set.parse_errors.len() {
|
|
|
|
return result;
|
Syntax errors for string and int (#7952)
2023-02-13 17:09:50 +01:00
|
|
|
}
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.parse_errors.truncate(starting_error_count);
|
Syntax errors for string and int (#7952)
2023-02-13 17:09:50 +01:00
|
|
|
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("number", span));
|
2023-04-07 02:35:45 +02:00
|
|
|
garbage(span)
|
2021-09-02 10:25:22 +02:00
|
|
|
}
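The fall-through from `parse_int` to `parse_float` above relies on
checkpointing the error count and truncating the error list back on failure.
Below is a deliberately simplified standalone sketch of that rollback
pattern; it omits the branch that keeps a more specific (non-`Expected`)
error, and the `WorkingSet` struct and helpers are illustrative only, not
the real `StateWorkingSet`.
```rust
// Illustrative stand-ins for the speculative-parse pattern above.
struct WorkingSet {
    errors: Vec<String>,
}

fn try_int(ws: &mut WorkingSet, tok: &str) -> Option<i64> {
    tok.parse()
        .map_err(|_| ws.errors.push(format!("expected int: {tok}")))
        .ok()
}

fn try_float(ws: &mut WorkingSet, tok: &str) -> Option<f64> {
    tok.parse()
        .map_err(|_| ws.errors.push(format!("expected float: {tok}")))
        .ok()
}

fn parse_number(ws: &mut WorkingSet, tok: &str) -> Option<String> {
    let start = ws.errors.len(); // checkpoint the error count
    if let Some(i) = try_int(ws, tok) {
        return Some(format!("int {i}"));
    }
    ws.errors.truncate(start); // roll back the failed int attempt
    if let Some(f) = try_float(ws, tok) {
        return Some(format!("float {f}"));
    }
    ws.errors.truncate(start); // roll back again and report one combined error
    ws.errors.push(format!("expected number: {tok}"));
    None
}

fn main() {
    let mut ws = WorkingSet { errors: vec![] };
    assert_eq!(parse_number(&mut ws, "3"), Some("int 3".into()));
    assert_eq!(parse_number(&mut ws, "1.5"), Some("float 1.5".into()));
    assert_eq!(parse_number(&mut ws, "abc"), None);
    assert_eq!(ws.errors.len(), 1); // only the final "expected number" remains
}
```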
|
2021-07-30 05:26:06 +02:00
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
pub fn parse_range(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("parsing: range");
|
|
|
|
|
2021-09-11 13:13:04 +02:00
|
|
|
// Range follows the following syntax: [<from>][<next_operator><next>]<range_operator>[<to>]
|
|
|
|
// where <next_operator> is ".."
|
2023-04-07 13:40:05 +02:00
|
|
|
// and <range_operator> is "..", "..=" or "..<"
|
2021-09-04 23:52:57 +02:00
|
|
|
// and one of the <from> or <to> bounds must be present (just '..' is not allowed since it
|
|
|
|
// looks like parent directory)
|
Syntax errors for string and int (#7952)
2023-02-13 17:09:50 +01:00
|
|
|
//bugbug range cannot be [..] because that looks like parent directory
|
2021-09-04 23:52:57 +02:00
|
|
|
|
|
|
|
let contents = working_set.get_span_contents(span);
|
2022-02-24 13:58:53 +01:00
|
|
|
|
2021-09-04 23:52:57 +02:00
|
|
|
let token = if let Ok(s) = String::from_utf8(contents.into()) {
|
|
|
|
s
|
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::NonUtf8(span));
|
|
|
|
return garbage(span);
|
2021-09-04 23:52:57 +02:00
|
|
|
};
|
|
|
|
|
2022-02-24 13:58:53 +01:00
|
|
|
if !token.contains("..") {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("at least one range bound set", span));
|
2023-04-07 02:35:45 +02:00
|
|
|
return garbage(span);
|
2022-02-24 13:58:53 +01:00
|
|
|
}
|
|
|
|
|
2021-09-04 23:52:57 +02:00
|
|
|
// First, figure out what exact operators are used and determine their positions
|
|
|
|
let dotdot_pos: Vec<_> = token.match_indices("..").map(|(pos, _)| pos).collect();
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
let (next_op_pos, range_op_pos) = match dotdot_pos.len() {
|
|
|
|
1 => (None, dotdot_pos[0]),
|
|
|
|
2 => (Some(dotdot_pos[0]), dotdot_pos[1]),
|
|
|
|
_ => {
|
|
|
|
working_set.error(ParseError::Expected(
|
2023-05-24 21:53:57 +02:00
|
|
|
"one range operator ('..' or '..<') and optionally one next operator ('..')",
|
2023-04-07 02:35:45 +02:00
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
|
|
|
}
|
|
|
|
};
|
Prevent cubic time on nested parentheses (#10467)
<!--
if this PR closes one or more issues, you can automatically link the PR
with
them by using one of the [*linking
keywords*](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword),
e.g.
- this PR should close #xxxx
- fixes #xxxx
you can also mention related issues, PRs or discussions!
-->
# Description
<!--
Thank you for improving Nushell. Please, check our [contributing
guide](../CONTRIBUTING.md) and talk to the core team before making major
changes.
Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.
-->
When parse_range gets an item like ((((1..2)))), it would try to parse
"((((1" with a long chain of recursive parsers, namely:
- parse_value
- parse_paren_expr
- parse_full_cell_path
- parse_block
- parse_pipeline
- parse_builtin_commands
- parse_expression
- parse_math_expression
- parse_value
- ...
where `parse_paren_expr` calls `parse_range` in turn. Because
`parse_paren_expr` can call `parse_range` at any point in the chain, which
then continues the chain, we get a quadratic number of function calls, each
linear in the size of the input (see the sketch below).
By checking with the lexer that the parens are matched, we prevent the
long chain from being called on unmatched parens. This is still more
quadratic than it needs to be; to fix that, we should process parens only
once instead of on each recursive call.
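As a simplified stand-in for the guard this describes (the real fix lexes
the text before the first `..` and bails out if the lexer reports an error),
here is a hedged sketch using a plain depth counter; `parens_balanced` is
hypothetical and not the nu-parser lexer.
```rust
// Hypothetical helper, not the nu-parser lexer: a depth counter is enough to
// reject prefixes like "((((1" before handing them to expensive sub-parsers.
fn parens_balanced(s: &str) -> bool {
    let mut depth: i64 = 0;
    for ch in s.chars() {
        match ch {
            '(' => depth += 1,
            ')' => {
                depth -= 1;
                if depth < 0 {
                    return false; // closing paren with no opener
                }
            }
            _ => {}
        }
    }
    depth == 0
}

fn main() {
    assert!(parens_balanced("(1 + 2)"));
    assert!(!parens_balanced("((((1")); // unmatched -- skip the sub-parsers
    assert!(!parens_balanced("1)"));
}
```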
# User-Facing Changes
<!-- List of all changes that impact the user experience here. This
helps us keep track of breaking changes. -->
Speed improvements in some edge cases
# Tests + Formatting
Not sure how to test this, maybe I could add a benchmark
<!--
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used` to
check that you're using the standard code style
- `cargo test --workspace` to check that all tests pass (on Windows make
sure to [enable developer
mode](https://learn.microsoft.com/en-us/windows/apps/get-started/developer-mode-features-and-debugging))
- `cargo run -- -c "use std testing; testing run-tests --path
crates/nu-std"` to run the tests for the standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
-->
# After Submitting
<!-- If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
-->
# Other notes
Found using the fuzzer, by setting a timeout on max run-time. It also
found a stack-overflow on too many parentheses, which this doesn't fix.
2023-09-22 18:24:35 +02:00
|
|
|
// Avoid calling sub-parsers on unmatched parens, to prevent quadratic time on things like ((((1..2))))
|
|
|
|
// No need to call the expensive parse_value on "((((1"
|
|
|
|
if dotdot_pos[0] > 0 {
|
|
|
|
let (_tokens, err) = lex(
|
|
|
|
&contents[..dotdot_pos[0]],
|
|
|
|
span.start,
|
|
|
|
&[],
|
|
|
|
&[b'.', b'?'],
|
|
|
|
true,
|
|
|
|
);
|
|
|
|
if let Some(_err) = err {
|
|
|
|
working_set.error(ParseError::Expected("Valid expression before ..", span));
|
|
|
|
return garbage(span);
|
|
|
|
}
|
|
|
|
}
|
2021-09-04 23:52:57 +02:00
|
|
|
|
2021-09-11 13:13:04 +02:00
|
|
|
let (inclusion, range_op_str, range_op_span) = if let Some(pos) = token.find("..<") {
|
2021-09-04 23:52:57 +02:00
|
|
|
if pos == range_op_pos {
|
|
|
|
let op_str = "..<";
|
|
|
|
let op_span = Span::new(
|
|
|
|
span.start + range_op_pos,
|
|
|
|
span.start + range_op_pos + op_str.len(),
|
|
|
|
);
|
2021-09-11 13:13:04 +02:00
|
|
|
(RangeInclusion::RightExclusive, "..<", op_span)
|
2021-09-04 23:52:57 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
2023-05-24 21:53:57 +02:00
|
|
|
"inclusive operator preceding second range bound",
|
2023-04-07 02:35:45 +02:00
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2021-09-04 23:52:57 +02:00
|
|
|
}
|
|
|
|
} else {
|
2023-04-07 13:40:05 +02:00
|
|
|
let op_str = if token.contains("..=") { "..=" } else { ".." };
|
2021-09-04 23:52:57 +02:00
|
|
|
let op_span = Span::new(
|
|
|
|
span.start + range_op_pos,
|
|
|
|
span.start + range_op_pos + op_str.len(),
|
|
|
|
);
|
2023-04-07 13:40:05 +02:00
|
|
|
(RangeInclusion::Inclusive, op_str, op_span)
|
2021-09-04 23:52:57 +02:00
|
|
|
};
|
|
|
|
|
2021-09-11 13:13:04 +02:00
|
|
|
// Now, based on the operator positions, figure out where the bounds & next are located and
|
2021-09-04 23:52:57 +02:00
|
|
|
// parse them
|
2021-10-12 19:44:23 +02:00
|
|
|
// TODO: Actually parse the next number in the range
|
2021-09-04 23:52:57 +02:00
|
|
|
let from = if token.starts_with("..") {
|
2021-09-11 13:13:04 +02:00
|
|
|
// token starts with either next operator, or range operator -- we don't care which one
|
2021-09-04 23:52:57 +02:00
|
|
|
None
|
|
|
|
} else {
|
|
|
|
let from_span = Span::new(span.start, span.start + dotdot_pos[0]);
|
2023-04-07 02:35:45 +02:00
|
|
|
Some(Box::new(parse_value(
|
2022-03-18 20:03:57 +01:00
|
|
|
working_set,
|
|
|
|
from_span,
|
|
|
|
&SyntaxShape::Number,
|
2023-04-07 02:35:45 +02:00
|
|
|
)))
|
2021-09-04 23:52:57 +02:00
|
|
|
};
|
|
|
|
|
|
|
|
let to = if token.ends_with(range_op_str) {
|
|
|
|
None
|
|
|
|
} else {
|
|
|
|
let to_span = Span::new(range_op_span.end, span.end);
|
2023-04-07 02:35:45 +02:00
|
|
|
Some(Box::new(parse_value(
|
2022-03-18 20:03:57 +01:00
|
|
|
working_set,
|
|
|
|
to_span,
|
|
|
|
&SyntaxShape::Number,
|
2023-04-07 02:35:45 +02:00
|
|
|
)))
|
2021-09-04 23:52:57 +02:00
|
|
|
};
|
|
|
|
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("-- from: {:?} to: {:?}", from, to);
|
|
|
|
|
2021-09-04 23:52:57 +02:00
|
|
|
if let (None, None) = (&from, &to) {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("at least one range bound set", span));
|
2023-04-07 02:35:45 +02:00
|
|
|
return garbage(span);
|
2021-09-04 23:52:57 +02:00
|
|
|
}
|
|
|
|
|
2021-09-11 13:13:04 +02:00
|
|
|
let (next, next_op_span) = if let Some(pos) = next_op_pos {
|
|
|
|
let next_op_span = Span::new(span.start + pos, span.start + pos + "..".len());
|
|
|
|
let next_span = Span::new(next_op_span.end, range_op_span.start);
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
(
|
|
|
|
Some(Box::new(parse_value(
|
|
|
|
working_set,
|
|
|
|
next_span,
|
|
|
|
&SyntaxShape::Number,
|
|
|
|
))),
|
|
|
|
next_op_span,
|
|
|
|
)
|
2021-09-11 13:13:04 +02:00
|
|
|
} else {
|
2021-12-19 08:46:13 +01:00
|
|
|
(None, span)
|
2021-09-11 13:13:04 +02:00
|
|
|
};
|
|
|
|
|
|
|
|
let range_op = RangeOperator {
|
|
|
|
inclusion,
|
|
|
|
span: range_op_span,
|
|
|
|
next_op_span,
|
|
|
|
};
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::Range(from, next, to, range_op),
|
|
|
|
span,
|
|
|
|
ty: Type::Range,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-09-04 23:52:57 +02:00
|
|
|
}
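To summarize the operator handling implemented in `parse_range` above, here
is a minimal standalone sketch that classifies the range operator and its
inclusion from a raw token. `classify_range_op` and this local
`RangeInclusion` enum are illustrative stand-ins, not the nu-protocol types,
and the sketch ignores the optional next operator (stepped ranges such as
`1..2..10`).
```rust
// Illustrative stand-ins only.
#[derive(Debug, PartialEq)]
enum RangeInclusion {
    Inclusive,
    RightExclusive,
}

fn classify_range_op(token: &str) -> Option<(RangeInclusion, &'static str)> {
    if !token.contains("..") {
        return None; // needs an operator plus at least one bound
    }
    if token.contains("..<") {
        Some((RangeInclusion::RightExclusive, "..<"))
    } else if token.contains("..=") {
        Some((RangeInclusion::Inclusive, "..="))
    } else {
        Some((RangeInclusion::Inclusive, ".."))
    }
}

fn main() {
    assert_eq!(classify_range_op("1..10"), Some((RangeInclusion::Inclusive, "..")));
    assert_eq!(classify_range_op("1..<10"), Some((RangeInclusion::RightExclusive, "..<")));
    assert_eq!(classify_range_op("1..=10"), Some((RangeInclusion::Inclusive, "..=")));
    assert_eq!(classify_range_op("abc"), None);
}
```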
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
pub(crate) fn parse_dollar_expr(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2022-02-24 13:58:53 +01:00
|
|
|
trace!("parsing: dollar expression");
|
2021-09-02 10:25:22 +02:00
|
|
|
let contents = working_set.get_span_contents(span);
|
2021-07-30 05:26:06 +02:00
|
|
|
|
2021-12-25 21:50:02 +01:00
|
|
|
if contents.starts_with(b"$\"") || contents.starts_with(b"$'") {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_string_interpolation(working_set, span)
|
2023-03-17 03:19:41 +01:00
|
|
|
} else if contents.starts_with(b"$.") {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_simple_cell_path(working_set, Span::new(span.start + 2, span.end))
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
let starting_error_count = working_set.parse_errors.len();
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
let expr = parse_range(working_set, span);
|
2023-04-07 02:35:45 +02:00
|
|
|
if starting_error_count == working_set.parse_errors.len() {
|
|
|
|
expr
|
|
|
|
} else {
|
|
|
|
working_set.parse_errors.truncate(starting_error_count);
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_full_cell_path(working_set, None, span)
|
2023-04-07 02:35:45 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
}
|
2021-07-30 05:26:06 +02:00
|
|
|
|
2023-03-16 04:06:43 +01:00
|
|
|
pub fn parse_paren_expr(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
span: Span,
|
2023-03-16 21:08:41 +01:00
|
|
|
shape: &SyntaxShape,
|
2023-04-07 02:35:45 +02:00
|
|
|
) -> Expression {
|
|
|
|
let starting_error_count = working_set.parse_errors.len();
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
let expr = parse_range(working_set, span);
|
2023-04-07 02:35:45 +02:00
|
|
|
|
|
|
|
if starting_error_count == working_set.parse_errors.len() {
|
|
|
|
expr
|
2023-03-16 04:06:43 +01:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.parse_errors.truncate(starting_error_count);
|
|
|
|
|
|
|
|
if matches!(shape, SyntaxShape::Signature) {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_signature(working_set, span)
|
2023-04-07 02:35:45 +02:00
|
|
|
} else {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_full_cell_path(working_set, None, span)
|
2023-04-07 02:35:45 +02:00
|
|
|
}
|
2023-03-16 04:06:43 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn parse_brace_expr(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
span: Span,
|
|
|
|
shape: &SyntaxShape,
|
2023-04-07 02:35:45 +02:00
|
|
|
) -> Expression {
|
2023-03-16 04:06:43 +01:00
|
|
|
// Try to detect what kind of value we're about to parse
|
|
|
|
// FIXME: In the future, we should work over the token stream so we only have to do this once
|
|
|
|
// before parsing begins
|
|
|
|
|
|
|
|
// FIXME: we're still using the shape because we rely on it to know how to handle syntax where
|
|
|
|
// the parse is ambiguous. We'll need to update the parts of the grammar where this is ambiguous
|
|
|
|
// and then revisit the parsing.
|
|
|
|
|
2023-03-17 03:19:23 +01:00
|
|
|
if span.end <= (span.start + 1) {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::ExpectedWithStringMsg(
|
2023-04-07 02:35:45 +02:00
|
|
|
format!("non-block value: {shape}"),
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return Expression::garbage(span);
|
2023-03-17 03:19:23 +01:00
|
|
|
}
|
|
|
|
|
2023-03-16 04:06:43 +01:00
|
|
|
let bytes = working_set.get_span_contents(Span::new(span.start + 1, span.end - 1));
|
|
|
|
let (tokens, _) = lex(bytes, span.start + 1, &[b'\r', b'\n', b'\t'], &[b':'], true);
|
|
|
|
|
|
|
|
let second_token = tokens
|
2023-11-17 16:15:55 +01:00
|
|
|
.first()
|
2023-03-16 04:06:43 +01:00
|
|
|
.map(|token| working_set.get_span_contents(token.span));
|
|
|
|
|
2023-11-17 16:15:55 +01:00
|
|
|
let second_token_contents = tokens.first().map(|token| token.contents);
|
2023-03-16 04:06:43 +01:00
|
|
|
|
|
|
|
let third_token = tokens
|
|
|
|
.get(1)
|
|
|
|
.map(|token| working_set.get_span_contents(token.span));
|
|
|
|
|
2023-06-08 18:49:58 +02:00
|
|
|
if second_token.is_none() {
|
2023-03-16 04:06:43 +01:00
|
|
|
// If we're empty, that means an empty record or closure
|
2023-04-11 19:21:52 +02:00
|
|
|
if matches!(shape, SyntaxShape::Closure(_)) {
|
|
|
|
parse_closure_expression(working_set, shape, span)
|
2023-03-16 04:06:43 +01:00
|
|
|
} else if matches!(shape, SyntaxShape::Block) {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_block_expression(working_set, span)
|
2023-03-24 02:52:01 +01:00
|
|
|
} else if matches!(shape, SyntaxShape::MatchBlock) {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_match_block_expression(working_set, span)
|
2023-03-16 04:06:43 +01:00
|
|
|
} else {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_record(working_set, span)
|
2023-03-16 04:06:43 +01:00
|
|
|
}
|
|
|
|
} else if matches!(second_token_contents, Some(TokenContents::Pipe))
|
|
|
|
|| matches!(second_token_contents, Some(TokenContents::PipePipe))
|
|
|
|
{
|
2023-04-11 19:21:52 +02:00
|
|
|
parse_closure_expression(working_set, shape, span)
|
2023-03-16 04:06:43 +01:00
|
|
|
} else if matches!(third_token, Some(b":")) {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_full_cell_path(working_set, None, span)
|
Spread operator in record literals (#11144)
Goes towards implementing #10598, which asks for a spread operator in
lists, in records, and when calling commands (continuation of #11006,
which only implements it in lists)
# Description
This PR is for adding a spread operator that can be used when building
records. Additional functionality can be added later.
Changes:
- Previously, the `Expr::Record` variant held `(Expression, Expression)`
pairs. It now holds instances of an enum `RecordItem` (the name isn't
amazing) that allows either a key-value mapping or a spread operator.
- `...` will be treated as the spread operator when it appears before
`$`, `{`, or `(` inside records (no whitespace allowed in between) (not
implemented yet)
- The error message for duplicate columns now includes the column name
itself, because if two spread records are involved in such an error, you
can't tell which field was duplicated from the spans alone
`...` will still be treated as a normal string outside records, and even
in records, it is not treated as a spread operator when not followed
immediately by a `$`, `{`, or `(`.
# User-Facing Changes
Users will be able to use `...` when building records.
```
> let rec = { x: 1, ...{ a: 2 } }
> $rec
╭───┬───╮
│ x │ 1 │
│ a │ 2 │
╰───┴───╯
> { foo: bar, ...$rec, baz: blah }
╭─────┬──────╮
│ foo │ bar │
│ x │ 1 │
│ a │ 2 │
│ baz │ blah │
╰─────┴──────╯
```
If you want to update a field of a record, you'll have to use `merge`
instead:
```
> { ...$rec, x: 5 }
Error: nu::shell::column_defined_twice
× Record field or table column used twice: x
╭─[entry #2:1:1]
1 │ { ...$rec, x: 5 }
· ──┬─ ┬
· │ ╰── field redefined here
· ╰── field first defined here
╰────
> $rec | merge { x: 5 }
╭───┬───╮
│ x │ 5 │
│ a │ 2 │
╰───┴───╯
```
# Tests + Formatting
# After Submitting
2023-11-29 18:31:31 +01:00
|
|
|
} else if second_token.is_some_and(|c| {
|
|
|
|
c.len() > 3 && c.starts_with(b"...") && (c[3] == b'$' || c[3] == b'{' || c[3] == b'(')
|
|
|
|
}) {
|
|
|
|
parse_record(working_set, span)
|
2023-04-11 19:21:52 +02:00
|
|
|
} else if matches!(shape, SyntaxShape::Closure(_)) || matches!(shape, SyntaxShape::Any) {
|
|
|
|
parse_closure_expression(working_set, shape, span)
|
2023-03-16 04:06:43 +01:00
|
|
|
} else if matches!(shape, SyntaxShape::Block) {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_block_expression(working_set, span)
|
2023-03-24 02:52:01 +01:00
|
|
|
} else if matches!(shape, SyntaxShape::MatchBlock) {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_match_block_expression(working_set, span)
|
2023-03-16 04:06:43 +01:00
|
|
|
} else {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::ExpectedWithStringMsg(
|
2023-04-07 02:35:45 +02:00
|
|
|
format!("non-block value: {shape}"),
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
|
|
|
|
Expression::garbage(span)
|
2023-03-16 04:06:43 +01:00
|
|
|
}
|
|
|
|
}
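A hedged, standalone summary of the shape detection in `parse_brace_expr`
above: a closure when the first token inside the braces is a pipe, a record
when the second token is `:` or the first token is a `...$`/`...{`/`...(`
spread, otherwise a block, closure, or match block depending on the expected
shape. `classify_brace` works over plain string tokens purely for
illustration; it is not the real token-stream logic.
```rust
// Hypothetical illustration over plain string tokens, not the real lexer.
fn classify_brace(first: Option<&str>, second: Option<&str>) -> &'static str {
    match (first, second) {
        (None, _) => "empty record or closure",
        (Some(t), _) if t == "|" || t == "||" => "closure",
        (_, Some(":")) => "record",
        (Some(t), _)
            if t.len() > 3
                && t.starts_with("...")
                && matches!(t.as_bytes()[3], b'$' | b'{' | b'(') =>
        {
            "record (spread)"
        }
        _ => "block, closure, or match block (decided by the expected shape)",
    }
}

fn main() {
    assert_eq!(classify_brace(None, None), "empty record or closure");
    assert_eq!(classify_brace(Some("|"), Some("x")), "closure");
    assert_eq!(classify_brace(Some("a"), Some(":")), "record");
    assert_eq!(classify_brace(Some("...$rec"), None), "record (spread)");
    assert_eq!(
        classify_brace(Some("ls"), None),
        "block, closure, or match block (decided by the expected shape)"
    );
}
```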
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
pub fn parse_string_interpolation(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2021-09-02 10:25:22 +02:00
|
|
|
#[derive(PartialEq, Eq, Debug)]
|
|
|
|
enum InterpolationMode {
|
|
|
|
String,
|
|
|
|
Expression,
|
|
|
|
}
|
2021-07-30 05:26:06 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let contents = working_set.get_span_contents(span);
|
2021-07-30 05:26:06 +02:00
|
|
|
|
2022-03-03 19:14:03 +01:00
|
|
|
let mut double_quote = false;
|
|
|
|
|
2021-12-25 21:50:02 +01:00
|
|
|
    let (start, end) = if contents.starts_with(b"$\"") {
        double_quote = true;
        let end = if contents.ends_with(b"\"") && contents.len() > 2 {
            span.end - 1
        } else {
            span.end
        };
        (span.start + 2, end)
    } else if contents.starts_with(b"$'") {
        let end = if contents.ends_with(b"'") && contents.len() > 2 {
            span.end - 1
        } else {
            span.end
        };
        (span.start + 2, end)
    } else {
        (span.start, span.end)
    };

    let inner_span = Span::new(start, end);
    let contents = working_set.get_span_contents(inner_span).to_vec();

    let mut output = vec![];
    let mut mode = InterpolationMode::String;
    let mut token_start = start;
    let mut delimiter_stack = vec![];

    let mut consecutive_backslashes: usize = 0;

    let mut b = start;

    while b != end {
        let current_byte = contents[b - start];

        if mode == InterpolationMode::String {
            let preceding_consecutive_backslashes = consecutive_backslashes;

            let is_backslash = current_byte == b'\\';
            consecutive_backslashes = if is_backslash {
                preceding_consecutive_backslashes + 1
            } else {
                0
            };

            if current_byte == b'(' && (!double_quote || preceding_consecutive_backslashes % 2 == 0)
            {
                mode = InterpolationMode::Expression;
                if token_start < b {
                    let span = Span::new(token_start, b);
                    let str_contents = working_set.get_span_contents(span);

                    let (str_contents, err) = if double_quote {
                        unescape_string(str_contents, span)
                    } else {
                        (str_contents.to_vec(), None)
                    };
                    if let Some(err) = err {
                        working_set.error(err);
                    }

                    output.push(Expression {
                        expr: Expr::String(String::from_utf8_lossy(&str_contents).to_string()),
                        span,
                        ty: Type::String,
                        custom_completion: None,
                    });
                    token_start = b;
                }
            }
        }

        if mode == InterpolationMode::Expression {
            let byte = current_byte;
            if let Some(b'\'') = delimiter_stack.last() {
                if byte == b'\'' {
                    delimiter_stack.pop();
                }
            } else if let Some(b'"') = delimiter_stack.last() {
                if byte == b'"' {
                    delimiter_stack.pop();
                }
            } else if let Some(b'`') = delimiter_stack.last() {
                if byte == b'`' {
                    delimiter_stack.pop();
                }
            } else if byte == b'\'' {
                delimiter_stack.push(b'\'')
            } else if byte == b'"' {
                delimiter_stack.push(b'"');
            } else if byte == b'`' {
                delimiter_stack.push(b'`')
            } else if byte == b'(' {
                delimiter_stack.push(b')');
            } else if byte == b')' {
                if let Some(b')') = delimiter_stack.last() {
                    delimiter_stack.pop();
                }
                if delimiter_stack.is_empty() {
                    mode = InterpolationMode::String;

                    if token_start < b {
                        let span = Span::new(token_start, b + 1);

                        let expr = parse_full_cell_path(working_set, None, span);
                        output.push(expr);
                    }

                    token_start = b + 1;
                    continue;
                }
            }
        }
        b += 1;
    }

    match mode {
        InterpolationMode::String => {
            if token_start < end {
                let span = Span::new(token_start, end);
                let str_contents = working_set.get_span_contents(span);

                let (str_contents, err) = if double_quote {
                    unescape_string(str_contents, span)
                } else {
                    (str_contents.to_vec(), None)
                };
                if let Some(err) = err {
                    working_set.error(err);
                }

                output.push(Expression {
                    expr: Expr::String(String::from_utf8_lossy(&str_contents).to_string()),
                    span,
                    ty: Type::String,
                    custom_completion: None,
                });
            }
        }
        InterpolationMode::Expression => {
            if token_start < end {
                let span = Span::new(token_start, end);

                let expr = parse_full_cell_path(working_set, None, span);
                output.push(expr);
            }
        }
    }

    Expression {
        expr: Expr::StringInterpolation(output),
        span,
        ty: Type::String,
        custom_completion: None,
    }
}

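/// Parse a variable reference such as `$nu`, `$in`, `$env`, or a user-defined `$foo`,
/// reporting an error when the name is unknown or names an environment variable instead.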
pub fn parse_variable_expr(working_set: &mut StateWorkingSet, span: Span) -> Expression {
    let contents = working_set.get_span_contents(span);

    if contents == b"$nu" {
        return Expression {
            expr: Expr::Var(nu_protocol::NU_VARIABLE_ID),
            span,
            ty: Type::Any,
            custom_completion: None,
        };
    } else if contents == b"$in" {
        return Expression {
            expr: Expr::Var(nu_protocol::IN_VARIABLE_ID),
            span,
            ty: Type::Any,
            custom_completion: None,
        };
    } else if contents == b"$env" {
        return Expression {
            expr: Expr::Var(nu_protocol::ENV_VARIABLE_ID),
            span,
            ty: Type::Any,
            custom_completion: None,
        };
    }

    let name = if contents.starts_with(b"$") {
        String::from_utf8_lossy(&contents[1..]).to_string()
    } else {
        String::from_utf8_lossy(contents).to_string()
    };

    if let Some(id) = parse_variable(working_set, span) {
        Expression {
            expr: Expr::Var(id),
            span,
            ty: working_set.get_variable(id).ty.clone(),
            custom_completion: None,
        }
    } else if working_set.get_env_var(&name).is_some() {
        working_set.error(ParseError::EnvVarNotVar(name, span));
        garbage(span)
    } else {
        let ws = &*working_set;
        let suggestion = DidYouMean::new(&ws.list_variables(), ws.get_span_contents(span));
        working_set.error(ParseError::VariableNotFound(suggestion, span));
        garbage(span)
    }
}

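/// Parse the member tokens of a cell path (the `foo.1?.bar` part of `$x.foo.1?.bar`).
/// `expect_dot` controls whether the first token must be a `.` separator.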
pub fn parse_cell_path(
    working_set: &mut StateWorkingSet,
    tokens: impl Iterator<Item = Token>,
    expect_dot: bool,
) -> Vec<PathMember> {
    enum TokenType {
        Dot,           // .
        QuestionOrDot, // ? or .
        PathMember,    // an int or string, like `1` or `foo`
    }

    // Parsing a cell path is essentially a state machine, and this is the state
    let mut expected_token = if expect_dot {
        TokenType::Dot
    } else {
        TokenType::PathMember
    };

    let mut tail = vec![];

    for path_element in tokens {
        let bytes = working_set.get_span_contents(path_element.span);

        match expected_token {
            TokenType::Dot => {
                if bytes.len() != 1 || bytes[0] != b'.' {
                    working_set.error(ParseError::Expected(".", path_element.span));
                    return tail;
                }
                expected_token = TokenType::PathMember;
            }
            TokenType::QuestionOrDot => {
                if bytes.len() == 1 && bytes[0] == b'.' {
                    expected_token = TokenType::PathMember;
                } else if bytes.len() == 1 && bytes[0] == b'?' {
                    if let Some(last) = tail.last_mut() {
                        match last {
                            PathMember::String {
                                ref mut optional, ..
                            } => *optional = true,
                            PathMember::Int {
                                ref mut optional, ..
                            } => *optional = true,
                        }
                    }
                    expected_token = TokenType::Dot;
                } else {
                    working_set.error(ParseError::Expected(". or ?", path_element.span));
                    return tail;
                }
            }
            TokenType::PathMember => {
                let starting_error_count = working_set.parse_errors.len();

                let expr = parse_int(working_set, path_element.span);
                working_set.parse_errors.truncate(starting_error_count);

                match expr {
                    Expression {
                        expr: Expr::Int(val),
                        span,
                        ..
                    } => tail.push(PathMember::Int {
                        val: val as usize,
                        span,
                        optional: false,
                    }),
                    _ => {
                        let result = parse_string(working_set, path_element.span);
                        match result {
                            Expression {
                                expr: Expr::String(string),
                                span,
                                ..
                            } => {
                                tail.push(PathMember::String {
                                    val: string,
                                    span,
                                    optional: false,
                                });
                            }
                            _ => {
                                working_set
                                    .error(ParseError::Expected("string", path_element.span));
                                return tail;
                            }
                        }
                    }
                }
                expected_token = TokenType::QuestionOrDot;
            }
        }
    }

    tail
}

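/// Parse a bare cell path (no head expression) into an `Expr::CellPath` value.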
pub fn parse_simple_cell_path(working_set: &mut StateWorkingSet, span: Span) -> Expression {
    let source = working_set.get_span_contents(span);

    let (tokens, err) = lex(source, span.start, &[b'\n', b'\r'], &[b'.', b'?'], true);
    if let Some(err) = err {
        working_set.error(err)
    }

    let tokens = tokens.into_iter().peekable();

    let cell_path = parse_cell_path(working_set, tokens, false);

    Expression {
        expr: Expr::CellPath(CellPath { members: cell_path }),
        span,
        ty: Type::CellPath,
        custom_completion: None,
    }
}

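/// Parse a full cell path: a head expression (a `(..)` subexpression, `[..]` table,
/// `{..}` record, `$variable`, or a caller-supplied implicit head variable) followed by
/// an optional tail of path members, e.g. `(ls).name.0` or `$env.PATH.0`.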
pub fn parse_full_cell_path(
    working_set: &mut StateWorkingSet,
    implicit_head: Option<VarId>,
    span: Span,
) -> Expression {
    trace!("parsing: full cell path");
    let full_cell_span = span;
    let source = working_set.get_span_contents(span);

    let (tokens, err) = lex(source, span.start, &[b'\n', b'\r'], &[b'.', b'?'], true);
    if let Some(err) = err {
        working_set.error(err)
    }

    let mut tokens = tokens.into_iter().peekable();
    if let Some(head) = tokens.peek() {
        let bytes = working_set.get_span_contents(head.span);
        let (head, expect_dot) = if bytes.starts_with(b"(") {
            trace!("parsing: paren-head of full cell path");

            let head_span = head.span;
            let mut start = head.span.start;
            let mut end = head.span.end;

            if bytes.starts_with(b"(") {
                start += 1;
            }
            if bytes.ends_with(b")") {
                end -= 1;
            } else {
                working_set.error(ParseError::Unclosed(")".into(), Span::new(end, end)));
            }

            let span = Span::new(start, end);

            let source = working_set.get_span_contents(span);

            let (output, err) = lex(source, span.start, &[b'\n', b'\r'], &[], true);
            if let Some(err) = err {
                working_set.error(err)
            }

            // Creating a Type scope to parse the new block. This will keep track of
            // the previous input type found in that block
            let output = parse_block(working_set, &output, span, true, true);
            let ty = output.output_type();

            let block_id = working_set.add_block(output);
            tokens.next();

            (
                Expression {
                    expr: Expr::Subexpression(block_id),
                    span: head_span,
                    ty,
                    custom_completion: None,
                },
                true,
            )
        } else if bytes.starts_with(b"[") {
            trace!("parsing: table head of full cell path");

            let output = parse_table_expression(working_set, head.span);

            tokens.next();

            (output, true)
        } else if bytes.starts_with(b"{") {
            trace!("parsing: record head of full cell path");
            let output = parse_record(working_set, head.span);

            tokens.next();

            (output, true)
        } else if bytes.starts_with(b"$") {
            trace!("parsing: $variable head of full cell path");

            let out = parse_variable_expr(working_set, head.span);

            tokens.next();

            (out, true)
        } else if let Some(var_id) = implicit_head {
            trace!("parsing: implicit head of full cell path");
            (
                Expression {
                    expr: Expr::Var(var_id),
                    span: head.span,
                    ty: Type::Any,
                    custom_completion: None,
                },
                false,
            )
        } else {
            working_set.error(ParseError::Mismatch(
                "variable or subexpression".into(),
                String::from_utf8_lossy(bytes).to_string(),
                span,
            ));
            return garbage(span);
        };

        let tail = parse_cell_path(working_set, tokens, expect_dot);

        Expression {
            // FIXME: Get the type of the data at the tail using follow_cell_path() (or something)
            ty: if !tail.is_empty() {
                // Until the aforementioned fix is implemented, this is necessary to allow mutable list upserts
                // such as $a.1 = 2 to work correctly.
                Type::Any
            } else {
                head.ty.clone()
            },
            expr: Expr::FullCellPath(Box::new(FullCellPath { head, tail })),
            span: full_cell_span,
            custom_completion: None,
        }
    } else {
        garbage(span)
    }
}

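/// Parse a directory path literal, recording whether it was originally quoted.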
pub fn parse_directory(working_set: &mut StateWorkingSet, span: Span) -> Expression {
    let bytes = working_set.get_span_contents(span);
    let quoted = is_quoted(bytes);
    let (token, err) = unescape_unquote_string(bytes, span);
    trace!("parsing: directory");

    if err.is_none() {
        trace!("-- found {}", token);

        Expression {
            expr: Expr::Directory(token, quoted),
            span,
            ty: Type::String,
            custom_completion: None,
        }
    } else {
        working_set.error(ParseError::Expected("directory", span));

        garbage(span)
    }
}

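/// Parse a file path literal, recording whether it was originally quoted.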
pub fn parse_filepath(working_set: &mut StateWorkingSet, span: Span) -> Expression {
    let bytes = working_set.get_span_contents(span);
    let quoted = is_quoted(bytes);
    let (token, err) = unescape_unquote_string(bytes, span);
    trace!("parsing: filepath");

    if err.is_none() {
        trace!("-- found {}", token);

        Expression {
            expr: Expr::Filepath(token, quoted),
            span,
            ty: Type::String,
            custom_completion: None,
        }
    } else {
        working_set.error(ParseError::Expected("filepath", span));

        garbage(span)
    }
}

/// Parse a datetime type, eg '2022-02-02'
pub fn parse_datetime(working_set: &mut StateWorkingSet, span: Span) -> Expression {
    trace!("parsing: datetime");

    let bytes = working_set.get_span_contents(span);

    if bytes.len() < 6
        || !bytes[0].is_ascii_digit()
        || !bytes[1].is_ascii_digit()
        || !bytes[2].is_ascii_digit()
        || !bytes[3].is_ascii_digit()
        || bytes[4] != b'-'
    {
        working_set.error(ParseError::Expected("datetime", span));
        return garbage(span);
    }

    let token = String::from_utf8_lossy(bytes).to_string();

    if let Ok(datetime) = chrono::DateTime::parse_from_rfc3339(&token) {
        return Expression {
            expr: Expr::DateTime(datetime),
            span,
            ty: Type::Date,
            custom_completion: None,
        };
    }

    // Just the date
    let just_date = token.clone() + "T00:00:00+00:00";
    if let Ok(datetime) = chrono::DateTime::parse_from_rfc3339(&just_date) {
        return Expression {
            expr: Expr::DateTime(datetime),
            span,
            ty: Type::Date,
            custom_completion: None,
        };
    }

    // Date and time, assume UTC
    let datetime = token + "+00:00";
    if let Ok(datetime) = chrono::DateTime::parse_from_rfc3339(&datetime) {
        return Expression {
            expr: Expr::DateTime(datetime),
            span,
            ty: Type::Date,
            custom_completion: None,
        };
    }

    working_set.error(ParseError::Expected("datetime", span));

    garbage(span)
}

/// Parse a duration type, eg '10day'
pub fn parse_duration(working_set: &mut StateWorkingSet, span: Span) -> Expression {
    trace!("parsing: duration");

    let bytes = working_set.get_span_contents(span);

    match parse_unit_value(bytes, span, DURATION_UNIT_GROUPS, Type::Duration, |x| x) {
        Some(Ok(expr)) => expr,
        Some(Err(mk_err_for)) => {
            working_set.error(mk_err_for("duration"));
            garbage(span)
        }
        None => {
            working_set.error(ParseError::Expected("duration with valid units", span));
            garbage(span)
        }
    }
}

/// Parse a filesize type, eg '10kb'
pub fn parse_filesize(working_set: &mut StateWorkingSet, span: Span) -> Expression {
    trace!("parsing: filesize");

    let bytes = working_set.get_span_contents(span);

    // the hex digit `b` might be mistaken for the unit `b`, so check that first
    if bytes.starts_with(b"0x") {
        working_set.error(ParseError::Expected("filesize with valid units", span));
        return garbage(span);
    }

    match parse_unit_value(bytes, span, FILESIZE_UNIT_GROUPS, Type::Filesize, |x| {
        x.to_ascii_uppercase()
    }) {
        Some(Ok(expr)) => expr,
        Some(Err(mk_err_for)) => {
            working_set.error(mk_err_for("filesize"));
            garbage(span)
        }
        None => {
            working_set.error(ParseError::Expected("filesize with valid units", span));
            garbage(span)
        }
    }
}

type ParseUnitResult<'res> = Result<Expression, Box<dyn Fn(&'res str) -> ParseError>>;
type UnitGroup<'unit> = (Unit, &'unit str, Option<(Unit, i64)>);

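/// Parse a numeric literal with a trailing unit (e.g. `10kb`, `-2hr`) against the given
/// unit table. Each `UnitGroup` entry is `(unit, suffix, Option<(smaller_unit, factor)>)`;
/// when a conversion is present, the value is normalized into the smaller unit.
/// Returns `None` if the token does not even start like a number.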
pub fn parse_unit_value<'res>(
    bytes: &[u8],
    span: Span,
    unit_groups: &[UnitGroup],
    ty: Type,
    transform: fn(String) -> String,
) -> Option<ParseUnitResult<'res>> {
    if bytes.len() < 2
        || !(bytes[0].is_ascii_digit() || (bytes[0] == b'-' && bytes[1].is_ascii_digit()))
    {
        return None;
    }

    let value = transform(String::from_utf8_lossy(bytes).into());

    if let Some((unit, name, convert)) = unit_groups.iter().find(|x| value.ends_with(x.1)) {
        let lhs_len = value.len() - name.len();
        let lhs = strip_underscores(value[..lhs_len].as_bytes());
        let lhs_span = Span::new(span.start, span.start + lhs_len);
        let unit_span = Span::new(span.start + lhs_len, span.end);

        let (decimal_part, number_part) = modf(match lhs.parse::<f64>() {
            Ok(it) => it,
            Err(_) => {
                let mk_err = move |name| {
                    ParseError::LabeledError(
                        format!("{name} value must be a number"),
                        "not a number".into(),
                        lhs_span,
                    )
                };
                return Some(Err(Box::new(mk_err)));
            }
        });

        let (num, unit) = match convert {
            Some(convert_to) => (
                ((number_part * convert_to.1 as f64) + (decimal_part * convert_to.1 as f64)) as i64,
                convert_to.0,
            ),
            None => (number_part as i64, *unit),
        };

        trace!("-- found {} {:?}", num, unit);
        let expr = Expression {
            expr: Expr::ValueWithUnit(
                Box::new(Expression {
                    expr: Expr::Int(num),
                    span: lhs_span,
                    ty: Type::Number,
                    custom_completion: None,
                }),
                Spanned {
                    item: unit,
                    span: unit_span,
                },
            ),
            span,
            ty,
            custom_completion: None,
        };

        Some(Ok(expr))
    } else {
        None
    }
}

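// Filesize suffixes are matched case-insensitively: `parse_filesize` uppercases the token
// before lookup, so this table stores uppercase suffixes.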
pub const FILESIZE_UNIT_GROUPS: &[UnitGroup] = &[
    (Unit::Kilobyte, "KB", Some((Unit::Byte, 1000))),
    (Unit::Megabyte, "MB", Some((Unit::Kilobyte, 1000))),
    (Unit::Gigabyte, "GB", Some((Unit::Megabyte, 1000))),
    (Unit::Terabyte, "TB", Some((Unit::Gigabyte, 1000))),
    (Unit::Petabyte, "PB", Some((Unit::Terabyte, 1000))),
    (Unit::Exabyte, "EB", Some((Unit::Petabyte, 1000))),
    (Unit::Kibibyte, "KIB", Some((Unit::Byte, 1024))),
    (Unit::Mebibyte, "MIB", Some((Unit::Kibibyte, 1024))),
    (Unit::Gibibyte, "GIB", Some((Unit::Mebibyte, 1024))),
    (Unit::Tebibyte, "TIB", Some((Unit::Gibibyte, 1024))),
    (Unit::Pebibyte, "PIB", Some((Unit::Tebibyte, 1024))),
    (Unit::Exbibyte, "EIB", Some((Unit::Pebibyte, 1024))),
    (Unit::Byte, "B", None),
];

pub const DURATION_UNIT_GROUPS: &[UnitGroup] = &[
    (Unit::Nanosecond, "ns", None),
    // todo start adding aliases for duration units here
    (Unit::Microsecond, "us", Some((Unit::Nanosecond, 1000))),
    (
        // µ Micro Sign
        Unit::Microsecond,
        "\u{00B5}s",
        Some((Unit::Nanosecond, 1000)),
    ),
    (
        // μ Greek small letter Mu
        Unit::Microsecond,
        "\u{03BC}s",
        Some((Unit::Nanosecond, 1000)),
    ),
    (Unit::Millisecond, "ms", Some((Unit::Microsecond, 1000))),
    (Unit::Second, "sec", Some((Unit::Millisecond, 1000))),
    (Unit::Minute, "min", Some((Unit::Second, 60))),
    (Unit::Hour, "hr", Some((Unit::Minute, 60))),
    (Unit::Day, "day", Some((Unit::Minute, 1440))),
    (Unit::Week, "wk", Some((Unit::Day, 7))),
];

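// Splits a float into its fractional and integral parts, returned as (fractional, integral).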
// Borrowed from libm at https://github.com/rust-lang/libm/blob/master/src/math/modf.rs
fn modf(x: f64) -> (f64, f64) {
    let rv2: f64;
    let mut u = x.to_bits();
    let e = ((u >> 52 & 0x7ff) as i32) - 0x3ff;

    /* no fractional part */
    if e >= 52 {
        rv2 = x;
        if e == 0x400 && (u << 12) != 0 {
            /* nan */
            return (x, rv2);
        }
        u &= 1 << 63;
        return (f64::from_bits(u), rv2);
    }

    /* no integral part*/
    if e < 0 {
        u &= 1 << 63;
        rv2 = f64::from_bits(u);
        return (x, rv2);
    }

    let mask = ((!0) >> 12) >> e;
    if (u & mask) == 0 {
        rv2 = x;
        u &= 1 << 63;
        return (f64::from_bits(u), rv2);
    }
    u &= !mask;
    rv2 = f64::from_bits(u);
    (x - rv2, rv2)
}

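/// Parse a glob pattern literal (e.g. `*.rs`), recording whether it was originally quoted.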
pub fn parse_glob_pattern(working_set: &mut StateWorkingSet, span: Span) -> Expression {
    let bytes = working_set.get_span_contents(span);
    let quoted = is_quoted(bytes);
    let (token, err) = unescape_unquote_string(bytes, span);
    trace!("parsing: glob pattern");

    if err.is_none() {
        trace!("-- found {}", token);

        Expression {
            expr: Expr::GlobPattern(token, quoted),
            span,
            ty: Type::String,
            custom_completion: None,
        }
    } else {
        working_set.error(ParseError::Expected("glob pattern string", span));

        garbage(span)
    }
}

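/// Resolve backslash escapes (e.g. `\n`, `\t`, `\u{263A}`) in a double-quoted string,
/// returning the unescaped bytes along with any parse error encountered.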
pub fn unescape_string(bytes: &[u8], span: Span) -> (Vec<u8>, Option<ParseError>) {
    let mut output = Vec::new();
    let mut error = None;

    let mut idx = 0;

    if !bytes.contains(&b'\\') {
        return (bytes.to_vec(), None);
    }

    'us_loop: while idx < bytes.len() {
        if bytes[idx] == b'\\' {
            // We're in an escape
            idx += 1;

            match bytes.get(idx) {
                Some(b'"') => {
                    output.push(b'"');
                    idx += 1;
                }
                Some(b'\'') => {
                    output.push(b'\'');
                    idx += 1;
                }
                Some(b'\\') => {
                    output.push(b'\\');
                    idx += 1;
                }
                Some(b'/') => {
                    output.push(b'/');
                    idx += 1;
                }
                Some(b'(') => {
                    output.push(b'(');
                    idx += 1;
                }
                Some(b')') => {
                    output.push(b')');
                    idx += 1;
                }
                Some(b'{') => {
                    output.push(b'{');
                    idx += 1;
                }
                Some(b'}') => {
                    output.push(b'}');
                    idx += 1;
                }
                Some(b'$') => {
                    output.push(b'$');
                    idx += 1;
                }
                Some(b'^') => {
                    output.push(b'^');
                    idx += 1;
                }
                Some(b'#') => {
                    output.push(b'#');
                    idx += 1;
                }
                Some(b'|') => {
                    output.push(b'|');
                    idx += 1;
                }
                Some(b'~') => {
                    output.push(b'~');
                    idx += 1;
                }
                Some(b'a') => {
                    output.push(0x7);
                    idx += 1;
                }
                Some(b'b') => {
                    output.push(0x8);
                    idx += 1;
                }
                Some(b'e') => {
                    output.push(0x1b);
                    idx += 1;
                }
                Some(b'f') => {
                    output.push(0xc);
                    idx += 1;
                }
                Some(b'n') => {
                    output.push(b'\n');
                    idx += 1;
                }
                Some(b'r') => {
                    output.push(b'\r');
                    idx += 1;
                }
                Some(b't') => {
                    output.push(b'\t');
                    idx += 1;
                }
                Some(b'u') => {
                    let mut digits = String::with_capacity(10);
                    let mut cur_idx = idx + 1; // index of first beyond current end of token

                    if let Some(b'{') = bytes.get(idx + 1) {
                        cur_idx = idx + 2;
                        loop {
                            match bytes.get(cur_idx) {
                                Some(b'}') => {
                                    cur_idx += 1;
                                    break;
                                }
                                Some(c) => {
                                    digits.push(*c as char);
                                    cur_idx += 1;
                                }
                                _ => {
                                    error = error.or(Some(ParseError::InvalidLiteral(
"missing '}' for unicode escape '\\u{X...}'".into(),
|
|
|
|
"string".into(),
|
2023-01-28 21:25:53 +01:00
|
|
|
Span::new(span.start + idx, span.end),
|
2023-04-07 02:35:45 +02:00
|
|
|
)));
|
2023-01-28 21:25:53 +01:00
|
|
|
break 'us_loop;
|
2022-03-03 19:14:03 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2023-01-28 21:25:53 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
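// A '\u{...}' escape must contain 1 to 6 hex digits and decode to a valid Unicode scalar value (at most 0x10FFFF).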
if (1..=6).contains(&digits.len()) {
|
|
|
|
let int = u32::from_str_radix(&digits, 16);
|
|
|
|
|
|
|
|
if let Ok(int) = int {
|
|
|
|
if int <= 0x10ffff {
|
|
|
|
let result = char::from_u32(int);
|
|
|
|
|
|
|
|
if let Some(result) = result {
|
|
|
|
let mut buffer = vec![0; 4];
|
|
|
|
let result = result.encode_utf8(&mut buffer);
|
|
|
|
|
|
|
|
for elem in result.bytes() {
|
|
|
|
output.push(elem);
|
|
|
|
}
|
|
|
|
|
|
|
|
idx = cur_idx;
|
|
|
|
continue 'us_loop;
|
|
|
|
}
|
|
|
|
}
|
2022-03-03 19:14:03 +01:00
|
|
|
}
|
|
|
|
}
|
2023-01-28 21:25:53 +01:00
|
|
|
// fall through -- escape not accepted above, must be error.
|
2023-04-07 02:35:45 +02:00
|
|
|
error = error.or(Some(ParseError::InvalidLiteral(
|
2023-03-10 22:20:31 +01:00
|
|
|
"invalid unicode escape '\\u{X...}', must be 1-6 hex digits, max value 10FFFF".into(),
|
|
|
|
"string".into(),
|
|
|
|
Span::new(span.start + idx, span.end),
|
2023-04-07 02:35:45 +02:00
|
|
|
)));
|
2023-01-28 21:25:53 +01:00
|
|
|
break 'us_loop;
|
2022-03-03 19:14:03 +01:00
|
|
|
}
|
2023-01-28 21:25:53 +01:00
|
|
|
|
2022-03-03 19:14:03 +01:00
|
|
|
_ => {
|
2023-04-07 02:35:45 +02:00
|
|
|
error = error.or(Some(ParseError::InvalidLiteral(
|
Syntax errors for string and int (#7952)
# Description
Added a few syntax errors in ints and strings, and changed the parser to stop
and show that error rather than continue trying to parse those tokens as
some other shape. However, I don't see how to push this direction much
further, and most of the classic confusing errors can't be changed.
Flagged as WIP for the moment, but it passes all checks and works better
than the current release:
1. I have yet to figure out how to make these errors refer back to the
book, as I see some other errors do.
2. How to give a syntax error when a malformed int is the first token in a line?
Currently it is parsed as an external command, and the user gets a confusing error message.
3. Would like to be more strict with *decimal* int literals (those lacking,
e.g., a `0x` prefix). Need to tinker more with the order of parse shape
calls: currently float is tried after int, so a token like '1.4' has to be passed through.
```bash
〉"\z"
Error:
╭─[entry #3:1:1]
1 │ "\z"
· ─┬─
· ╰── Syntax error in string, unrecognized character after escape '\'.
╰────
```
Canonical presentation of a syntax error.
```bash
〉" \u{01ffbogus}"
Error:
× Invalid syntax
╭─[entry #2:1:1]
1 │ " \u{01ffbogus}"
· ───────┬──────
· ╰── Syntax error in string, expecting 1 to 6 hex digits in unicode escape '\u{X...}', max value 10FFFF.
╰────
```
Malformed unicode escape in a string, flagged as an error.
String parsing can be opinionated here, since string is the last shape tried.
```bash
〉0x22bogus
Error: nu::shell::external_command (link)
× External command failed
╭─[entry #4:1:1]
1 │ 0x22bogus
· ────┬────
· ╰── executable was not found
╰────
help: No such file or directory (os error 2)
```
A *correct* number in the first token would be evaluated, but an *incorrect*
one is treated as an external command? Confusing to users.
```bash
〉0 + 0x22bogus
Error:
× Invalid syntax
╭─[entry #5:1:1]
1 │ 0 + 0x22bogus
· ────┬────
· ╰── Syntax error in int, invalid digits in radix 16 int.
╰────
```
We can give a syntax error if the token is unambiguously an int literal, e.g. it has a `0b`
or `0x` prefix and so could not be a float.
```bash
〉0 + 098bogus
Error: nu::parser::unsupported_operation (link)
× Types mismatched for operation.
╭─[entry #6:1:1]
1 │ 0 + 098bogus
· ┬ ┬ ────┬───
· │ │ ╰── string
· │ ╰── doesn't support these values.
· ╰── int
╰────
help: Change int or string to be the right types and try again.
```
But a *decimal* literal (no prefix) can't be handled too strictly: the parser is going
to try float later, so '1.4' must be passed through.
# User-Facing Changes
First and foremost, more specific error messages for typos in string and
int literals. Probably improves the interactive user experience.
But a script that was provoking and then checking for a specific error might
notice a different error message.
# Tests + Formatting
Added positive and negative unit tests in `cargo test -p nu-parser`.
Didn't add integration tests.
Make sure you've run and fixed any issues with these commands:
- [x] `cargo fmt --all -- --check` to check standard code formatting
(`cargo fmt --all` applies these changes)
- [x] `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- [x] `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
---------
Co-authored-by: Stefan Holderbach <sholderbach@users.noreply.github.com>
2023-02-13 17:09:50 +01:00
|
|
|
"unrecognized escape after '\\'".into(),
|
|
|
|
"string".into(),
|
2022-12-03 10:44:12 +01:00
|
|
|
Span::new(span.start + idx, span.end),
|
2023-04-07 02:35:45 +02:00
|
|
|
)));
|
2023-01-28 21:25:53 +01:00
|
|
|
break 'us_loop;
|
2022-03-03 19:14:03 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
output.push(bytes[idx]);
|
|
|
|
idx += 1;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
(output, error)
|
2022-03-03 19:14:03 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
pub fn unescape_unquote_string(bytes: &[u8], span: Span) -> (String, Option<ParseError>) {
|
|
|
|
if bytes.starts_with(b"\"") {
|
|
|
|
// Needs unescaping
|
|
|
|
let bytes = trim_quotes(bytes);
|
|
|
|
|
|
|
|
let (bytes, err) = unescape_string(bytes, span);
|
|
|
|
|
|
|
|
if let Ok(token) = String::from_utf8(bytes) {
|
|
|
|
(token, err)
|
|
|
|
} else {
|
2023-05-24 21:53:57 +02:00
|
|
|
(String::new(), Some(ParseError::Expected("string", span)))
|
2022-03-03 19:14:03 +01:00
|
|
|
}
|
|
|
|
} else {
|
|
|
|
let bytes = trim_quotes(bytes);
|
|
|
|
|
|
|
|
if let Ok(token) = String::from_utf8(bytes.into()) {
|
|
|
|
(token, None)
|
|
|
|
} else {
|
2023-05-24 21:53:57 +02:00
|
|
|
(String::new(), Some(ParseError::Expected("string", span)))
|
2022-03-03 19:14:03 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
pub fn parse_string(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("parsing: string");
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let bytes = working_set.get_span_contents(span);
|
2022-01-19 15:58:12 +01:00
|
|
|
|
2023-03-10 21:26:14 +01:00
|
|
|
if bytes.is_empty() {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("String", span));
|
2023-04-07 02:35:45 +02:00
|
|
|
return Expression::garbage(span);
|
2023-03-10 21:26:14 +01:00
|
|
|
}
|
|
|
|
|
2022-04-26 01:44:44 +02:00
|
|
|
// Check for bare word interpolation
|
|
|
|
if bytes[0] != b'\'' && bytes[0] != b'"' && bytes[0] != b'`' && bytes.contains(&b'(') {
|
2023-04-07 20:09:38 +02:00
|
|
|
return parse_string_interpolation(working_set, span);
|
2022-04-26 01:44:44 +02:00
|
|
|
}
|
|
|
|
|
2022-03-03 19:14:03 +01:00
|
|
|
let (s, err) = unescape_unquote_string(bytes, span);
|
2023-04-07 02:35:45 +02:00
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err);
|
|
|
|
}
|
2021-07-16 22:26:40 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::String(s),
|
|
|
|
span,
|
|
|
|
ty: Type::String,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-02 09:15:30 +02:00
|
|
|
|
do not attempt to glob expand if the file path is wrapped in quotes (#11569)
# Description
Fixes: #11455
### For arguments which are annotated with `:path/:directory/:glob`
To fix the issue, we need a way to know at runtime whether a path was originally
quoted. So the information needs to be added at several
levels:
* parse time (from user input to expression)
We need to add quoted information into `Expr::Filepath`,
`Expr::Directory`, `Expr::GlobPattern`
* eval time
When converting from `Expr::Filepath`, `Expr::Directory`,
`Expr::GlobPattern` to `Value::String` during runtime, we won't
auto-expand the path if it's quoted (see the sketch after this list)
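A minimal sketch of that eval-time rule, with made-up names standing in for the real conversion code and nu-protocol types:
```rust
// Illustrative only: `quoted` is the flag recorded at parse time alongside
// Expr::Filepath / Expr::Directory / Expr::GlobPattern.
fn filepath_to_string(path: &str, quoted: bool, home_dir: &str) -> String {
    if quoted {
        // Quoted by the user: keep the text exactly as written, no expansion.
        path.to_string()
    } else if let Some(rest) = path.strip_prefix('~') {
        // Unquoted: expand the leading tilde (the same flag gates glob expansion).
        format!("{home_dir}{rest}")
    } else {
        path.to_string()
    }
}
```
With this rule, `foo "~/a"` keeps `~/a` as written, while an unquoted `foo ~/a` still expands to the home directory, matching the Before/After tables below.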
### For `ls`
It's really special, because it accepts a `String` as a pattern, and it
generates the `glob` expression inside the command itself.
So the idea behind the change is introducing a special SyntaxShape for
ls: `SyntaxShape::LsGlobPattern`. That way we can track more easily whether the pattern
was originally quoted, and we don't auto-expand the path either.
Then, when constructing a glob pattern inside ls, we check if the input
pattern is quoted; if so, we escape the input pattern, so we can run `ls
a[123]b` because it's already escaped.
Finally, to accomplish the checking process, we also need to introduce a
new value type called `Value::QuotedString` to distinguish it from
`Value::String`. It's used to generate an enum called `NuPath`, which is
finally used in the `ls` function. `ls` learns from `NuPath` whether the
user input was quoted.
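And a rough sketch of the escaping step for quoted `ls` patterns, hand-rolled here for illustration (the actual implementation may rely on a library escape such as `glob::Pattern::escape`):
```rust
// Illustrative only: wrap glob metacharacters in brackets so they match
// themselves literally, e.g. "[uwu]" becomes "[[]uwu[]]".
fn escape_glob(pattern: &str) -> String {
    let mut out = String::with_capacity(pattern.len());
    for c in pattern.chars() {
        match c {
            '?' | '*' | '[' | ']' => {
                out.push('[');
                out.push(c);
                out.push(']');
            }
            _ => out.push(c),
        }
    }
    out
}
```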
# User-Facing Changes
Actually it contains several changes
### For arguments which are annotated with `:path/:directory/:glob`
#### Before
```nushell
> def foo [p: path] { echo $p }; print (foo "~/a"); print (foo '~/a')
/home/windsoilder/a
/home/windsoilder/a
> def foo [p: directory] { echo $p }; print (foo "~/a"); print (foo '~/a')
/home/windsoilder/a
/home/windsoilder/a
> def foo [p: glob] { echo $p }; print (foo "~/a"); print (foo '~/a')
/home/windsoilder/a
/home/windsoilder/a
```
#### After
```nushell
> def foo [p: path] { echo $p }; print (foo "~/a"); print (foo '~/a')
~/a
~/a
> def foo [p: directory] { echo $p }; print (foo "~/a"); print (foo '~/a')
~/a
~/a
> def foo [p: glob] { echo $p }; print (foo "~/a"); print (foo '~/a')
~/a
~/a
```
### For ls command
`touch '[uwu]'`
#### Before
```
❯ ls -D "[uwu]"
Error: × No matches found for [uwu]
╭─[entry #6:1:1]
1 │ ls -D "[uwu]"
· ───┬───
· ╰── Pattern, file or folder not found
╰────
help: no matches found
```
#### After
```
❯ ls -D "[uwu]"
╭───┬───────┬──────┬──────┬──────────╮
│ # │ name │ type │ size │ modified │
├───┼───────┼──────┼──────┼──────────┤
│ 0 │ [uwu] │ file │ 0 B │ now │
╰───┴───────┴──────┴──────┴──────────╯
```
# Tests + Formatting
Done
# After Submitting
NaN
2024-01-21 16:22:25 +01:00
|
|
|
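// True when the token is wrapped in matching double or single quotes; used to decide whether a path/glob argument should skip expansion.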
fn is_quoted(bytes: &[u8]) -> bool {
|
|
|
|
(bytes.starts_with(b"\"") && bytes.ends_with(b"\"") && bytes.len() > 1)
|
|
|
|
|| (bytes.starts_with(b"\'") && bytes.ends_with(b"\'") && bytes.len() > 1)
|
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
pub fn parse_string_strict(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("parsing: string, with required delimiters");
|
|
|
|
|
2021-11-04 03:32:35 +01:00
|
|
|
let bytes = working_set.get_span_contents(span);
|
2022-01-19 15:58:12 +01:00
|
|
|
|
|
|
|
// Check for unbalanced quotes:
|
2022-03-24 17:57:03 +01:00
|
|
|
{
|
|
|
|
let bytes = if bytes.starts_with(b"$") {
|
|
|
|
&bytes[1..]
|
|
|
|
} else {
|
|
|
|
bytes
|
|
|
|
};
|
|
|
|
if bytes.starts_with(b"\"") && (bytes.len() == 1 || !bytes.ends_with(b"\"")) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Unclosed("\"".into(), span));
|
|
|
|
return garbage(span);
|
2022-03-24 17:57:03 +01:00
|
|
|
}
|
|
|
|
if bytes.starts_with(b"\'") && (bytes.len() == 1 || !bytes.ends_with(b"\'")) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Unclosed("\'".into(), span));
|
|
|
|
return garbage(span);
|
2022-03-24 17:57:03 +01:00
|
|
|
}
|
2022-01-19 15:58:12 +01:00
|
|
|
}
|
|
|
|
|
2021-11-04 03:32:35 +01:00
|
|
|
let (bytes, quoted) = if (bytes.starts_with(b"\"") && bytes.ends_with(b"\"") && bytes.len() > 1)
|
|
|
|
|| (bytes.starts_with(b"\'") && bytes.ends_with(b"\'") && bytes.len() > 1)
|
|
|
|
{
|
|
|
|
(&bytes[1..(bytes.len() - 1)], true)
|
2022-03-24 17:57:03 +01:00
|
|
|
} else if (bytes.starts_with(b"$\"") && bytes.ends_with(b"\"") && bytes.len() > 2)
|
|
|
|
|| (bytes.starts_with(b"$\'") && bytes.ends_with(b"\'") && bytes.len() > 2)
|
|
|
|
{
|
|
|
|
(&bytes[2..(bytes.len() - 1)], true)
|
2021-11-04 03:32:35 +01:00
|
|
|
} else {
|
|
|
|
(bytes, false)
|
|
|
|
};
|
|
|
|
|
|
|
|
if let Ok(token) = String::from_utf8(bytes.into()) {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("-- found {}", token);
|
|
|
|
|
2021-11-04 03:32:35 +01:00
|
|
|
if quoted {
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::String(token),
|
|
|
|
span,
|
|
|
|
ty: Type::String,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-11-04 03:32:35 +01:00
|
|
|
} else if token.contains(' ') {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("string", span));
|
2023-04-07 02:35:45 +02:00
|
|
|
|
|
|
|
garbage(span)
|
2021-11-04 03:32:35 +01:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::String(token),
|
|
|
|
span,
|
|
|
|
ty: Type::String,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-11-04 03:32:35 +01:00
|
|
|
}
|
|
|
|
} else {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("string", span));
|
2023-04-07 02:35:45 +02:00
|
|
|
garbage(span)
|
2021-11-04 03:32:35 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
pub fn parse_import_pattern(working_set: &mut StateWorkingSet, spans: &[Span]) -> Expression {
|
2023-11-17 16:15:55 +01:00
|
|
|
let Some(head_span) = spans.first() else {
|
2023-05-06 20:39:54 +02:00
|
|
|
working_set.error(ParseError::WrongImportPattern(
|
|
|
|
"needs at least one component of import pattern".to_string(),
|
|
|
|
span(spans),
|
|
|
|
));
|
2023-04-07 02:35:45 +02:00
|
|
|
return garbage(span(spans));
|
2021-10-26 23:06:08 +02:00
|
|
|
};
|
2021-09-26 20:39:19 +02:00
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
let head_expr = parse_value(working_set, *head_span, &SyntaxShape::Any);
|
2022-12-21 23:21:03 +01:00
|
|
|
|
|
|
|
let (maybe_module_id, head_name) = match eval_constant(working_set, &head_expr) {
|
2023-12-04 20:13:47 +01:00
|
|
|
Ok(val) => match val.as_string() {
|
2022-12-21 23:21:03 +01:00
|
|
|
Ok(s) => (working_set.find_module(s.as_bytes()), s.into_bytes()),
|
|
|
|
Err(err) => {
|
2023-08-26 15:41:29 +02:00
|
|
|
working_set.error(err.wrap(working_set, span(spans)));
|
2023-04-07 02:35:45 +02:00
|
|
|
return garbage(span(spans));
|
2022-12-21 23:21:03 +01:00
|
|
|
}
|
|
|
|
},
|
|
|
|
Err(err) => {
|
2023-08-26 15:41:29 +02:00
|
|
|
working_set.error(err.wrap(working_set, span(spans)));
|
2023-04-07 02:35:45 +02:00
|
|
|
return garbage(span(spans));
|
2022-12-21 23:21:03 +01:00
|
|
|
}
|
|
|
|
};
|
2022-02-18 02:58:24 +01:00
|
|
|
|
2023-05-06 20:39:54 +02:00
|
|
|
let mut import_pattern = ImportPattern {
|
|
|
|
head: ImportPatternHead {
|
|
|
|
name: head_name,
|
|
|
|
id: maybe_module_id,
|
|
|
|
span: *head_span,
|
|
|
|
},
|
|
|
|
members: vec![],
|
|
|
|
hidden: HashSet::new(),
|
Recursively export constants from modules (#10049)
<!--
if this PR closes one or more issues, you can automatically link the PR
with
them by using one of the [*linking
keywords*](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword),
e.g.
- this PR should close #xxxx
- fixes #xxxx
you can also mention related issues, PRs or discussions!
-->
# Description
<!--
Thank you for improving Nushell. Please, check our [contributing
guide](../CONTRIBUTING.md) and talk to the core team before making major
changes.
Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.
-->
https://github.com/nushell/nushell/pull/9773 introduced constants to
modules and allowed exporting them, but only one level deep. This PR:
* allows recursive exporting of constants from all submodules
* fixes submodule imports in a list import pattern
* makes sure exported constants are actual constants
Should unblock https://github.com/nushell/nushell/pull/9678
### Example:
```nushell
module spam {
export module eggs {
export module bacon {
export const viking = 'eats'
}
}
}
use spam
print $spam.eggs.bacon.viking # prints 'eats'
use spam [eggs]
print $eggs.bacon.viking # prints 'eats'
use spam eggs bacon viking
print $viking # prints 'eats'
```
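Conceptually, the recursive walk behind this behavior can be pictured with the sketch below; the `Module` type and its fields are stand-ins, not nu-protocol's actual definitions. With the `spam` example above, this kind of walk is what makes `$spam.eggs.bacon.viking` reachable.
```rust
// Hypothetical module shape; constant values are simplified to strings.
struct Module {
    constants: Vec<(String, String)>,
    submodules: Vec<(String, Module)>,
}

// Collect exported constants from a module and, recursively, from every
// exported submodule, qualifying each name with its module path.
fn collect_constants(module: &Module, prefix: &str, out: &mut Vec<(String, String)>) {
    for (name, value) in &module.constants {
        out.push((format!("{prefix}{name}"), value.clone()));
    }
    for (name, sub) in &module.submodules {
        collect_constants(sub, &format!("{prefix}{name}."), out);
    }
}
```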
### Limitation 1:
Considering the above `spam` module, attempting to get `eggs bacon` from
the `spam` module doesn't work directly:
```nushell
use spam [ eggs bacon ] # attempts to load `eggs`, then `bacon`
use spam [ "eggs bacon" ] # obviously wrong name for a constant, but doesn't work also for commands
```
Workaround (for example):
```nushell
use spam eggs
use eggs [ bacon ]
print $bacon.viking # prints 'eats'
```
I'm thinking I'll just leave it in, as you can easily work around this.
It is also a limitation of the import pattern in general, not just
constants.
### Limitation 2:
`overlay use` successfully imports the constants, but `overlay hide`
does not hide them, even though it seems to hide normal variables
successfully. This needs more investigation.
# User-Facing Changes
<!-- List of all changes that impact the user experience here. This
helps us keep track of breaking changes. -->
Allows recursive constant exports from submodules.
# Tests + Formatting
<!--
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect -A clippy::result_large_err` to check that
you're using the standard code style
- `cargo test --workspace` to check that all tests pass
- `cargo run -- -c "use std testing; testing run-tests --path
crates/nu-std"` to run the tests for the standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
-->
# After Submitting
<!-- If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
-->
2023-08-20 14:51:35 +02:00
|
|
|
constants: vec![],
|
2023-05-06 20:39:54 +02:00
|
|
|
};
|
2021-09-27 02:23:22 +02:00
|
|
|
|
2023-05-06 20:39:54 +02:00
|
|
|
if spans.len() > 1 {
|
|
|
|
let mut leaf_member_span = None;
|
2021-09-27 02:23:22 +02:00
|
|
|
|
2023-05-06 20:39:54 +02:00
|
|
|
for tail_span in spans[1..].iter() {
|
|
|
|
if let Some(prev_span) = leaf_member_span {
|
|
|
|
let what = if working_set.get_span_contents(prev_span) == b"*" {
|
|
|
|
"glob"
|
|
|
|
} else {
|
|
|
|
"list"
|
|
|
|
};
|
|
|
|
working_set.error(ParseError::WrongImportPattern(
|
|
|
|
format!(
|
|
|
|
"{} member can be only at the end of an import pattern",
|
|
|
|
what
|
|
|
|
),
|
|
|
|
prev_span,
|
|
|
|
));
|
|
|
|
return Expression {
|
|
|
|
expr: Expr::ImportPattern(import_pattern),
|
|
|
|
span: prev_span,
|
|
|
|
ty: Type::List(Box::new(Type::String)),
|
|
|
|
custom_completion: None,
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
|
|
|
let tail = working_set.get_span_contents(*tail_span);
|
|
|
|
|
|
|
|
if tail == b"*" {
|
|
|
|
import_pattern
|
|
|
|
.members
|
|
|
|
.push(ImportPatternMember::Glob { span: *tail_span });
|
|
|
|
|
|
|
|
leaf_member_span = Some(*tail_span);
|
|
|
|
} else if tail.starts_with(b"[") {
|
|
|
|
let result = parse_list_expression(working_set, *tail_span, &SyntaxShape::String);
|
|
|
|
|
|
|
|
let mut output = vec![];
|
|
|
|
|
|
|
|
if let Expression {
|
2021-09-27 02:23:22 +02:00
|
|
|
expr: Expr::List(list),
|
|
|
|
..
|
2023-05-06 20:39:54 +02:00
|
|
|
} = result
|
|
|
|
{
|
2022-07-29 10:57:10 +02:00
|
|
|
for expr in list {
|
|
|
|
let contents = working_set.get_span_contents(expr.span);
|
|
|
|
output.push((trim_quotes(contents).to_vec(), expr.span));
|
2021-09-27 02:23:22 +02:00
|
|
|
}
|
|
|
|
|
2023-05-06 20:39:54 +02:00
|
|
|
import_pattern
|
|
|
|
.members
|
|
|
|
.push(ImportPatternMember::List { names: output });
|
|
|
|
} else {
|
|
|
|
working_set.error(ParseError::ExportNotFound(result.span));
|
|
|
|
return Expression {
|
|
|
|
expr: Expr::ImportPattern(import_pattern),
|
|
|
|
span: span(spans),
|
|
|
|
ty: Type::List(Box::new(Type::String)),
|
|
|
|
custom_completion: None,
|
|
|
|
};
|
2021-09-27 02:23:22 +02:00
|
|
|
}
|
2023-05-06 20:39:54 +02:00
|
|
|
|
|
|
|
leaf_member_span = Some(*tail_span);
|
|
|
|
} else {
|
|
|
|
let tail = trim_quotes(tail);
|
|
|
|
|
|
|
|
import_pattern.members.push(ImportPatternMember::Name {
|
|
|
|
name: tail.to_vec(),
|
|
|
|
span: *tail_span,
|
|
|
|
});
|
2021-09-27 02:23:22 +02:00
|
|
|
}
|
2021-09-26 20:39:19 +02:00
|
|
|
}
|
2023-04-07 02:35:45 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
Expression {
|
|
|
|
expr: Expr::ImportPattern(import_pattern),
|
|
|
|
span: span(&spans[1..]),
|
|
|
|
ty: Type::List(Box::new(Type::String)),
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-09-26 20:39:19 +02:00
|
|
|
}
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
pub fn parse_var_with_opt_type(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
spans: &[Span],
|
|
|
|
spans_idx: &mut usize,
|
2022-11-11 07:51:08 +01:00
|
|
|
mutable: bool,
|
Improve type hovers (#9515)
# Description
This PR does a few things to help improve type hovers and, in the
process, fixes a few outstanding issues in the type system. Here's a
list of the changes:
* `for` now will try to infer the type of the iteration variable based
on the expression it's given. This fixes things like `for x in [1, 2, 3]
{ }` where `x` now properly gets the int type.
* Removed old input/output type fields from the signature, focuses on
the vec of signatures. Updated a bunch of dataframe commands that hadn't
moved over. This helps tie things together a bit better
* Fixed inference of types from subexpressions to use the last
expression in the block
* Fixed handling of explicit types in `let` and `mut` calls, so we now
respect that as the authoritative type
I also tried to add `def` input/output type inference, but unfortunately
we only know the predecl types universally, which means we won't have
enough information to properly know what the types of the custom
commands are.
# User-Facing Changes
Script typechecking will get tighter in some cases
Hovers should be more accurate in some cases that previously resorted to
any.
# Tests + Formatting
<!--
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect -A clippy::result_large_err` to check that
you're using the standard code style
- `cargo test --workspace` to check that all tests pass
- `cargo run -- crates/nu-std/tests/run.nu` to run the tests for the
standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
-->
# After Submitting
<!-- If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
-->
---------
Co-authored-by: Darren Schroeder <343840+fdncred@users.noreply.github.com>
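A tiny sketch of the inference rule described in the first bullet above; the `Ty` enum is a stand-in, not nu-protocol's `Type`:
```rust
#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Any,
    Int,
    List(Box<Ty>),
}

// If the expression being iterated is a list, the loop variable takes the
// element type; otherwise fall back to `Any`.
fn iteration_var_type(iterated: &Ty) -> Ty {
    match iterated {
        Ty::List(elem) => (**elem).clone(),
        _ => Ty::Any,
    }
}

// e.g. for `for x in [1, 2, 3] { }`, the literal has type list<int>,
// so `x` is inferred as int:
// assert_eq!(iteration_var_type(&Ty::List(Box::new(Ty::Int))), Ty::Int);
```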
2023-06-28 19:19:48 +02:00
|
|
|
) -> (Expression, Option<Type>) {
|
2021-09-02 10:25:22 +02:00
|
|
|
let bytes = working_set.get_span_contents(spans[*spans_idx]).to_vec();
|
2021-07-16 08:24:46 +02:00
|
|
|
|
2022-04-04 22:42:26 +02:00
|
|
|
if bytes.contains(&b' ')
|
|
|
|
|| bytes.contains(&b'"')
|
|
|
|
|| bytes.contains(&b'\'')
|
|
|
|
|| bytes.contains(&b'`')
|
|
|
|
{
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::VariableNotValid(spans[*spans_idx]));
|
Improve type hovers (#9515)
# Description
This PR does a few things to help improve type hovers and, in the
process, fixes a few outstanding issues in the type system. Here's a
list of the changes:
* `for` now will try to infer the type of the iteration variable based
on the expression it's given. This fixes things like `for x in [1, 2, 3]
{ }` where `x` now properly gets the int type.
* Removed old input/output type fields from the signature, focuses on
the vec of signatures. Updated a bunch of dataframe commands that hadn't
moved over. This helps tie things together a bit better
* Fixed inference of types from subexpressions to use the last
expression in the block
* Fixed handling of explicit types in `let` and `mut` calls, so we now
respect that as the authoritative type
I also tried to add `def` input/output type inference, but unfortunately
we only know the predecl types universally, which means we won't have
enough information to properly know what the types of the custom
commands are.
# User-Facing Changes
Script typechecking will get tighter in some cases
Hovers should be more accurate in some cases that previously resorted to
any.
# Tests + Formatting
<!--
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect -A clippy::result_large_err` to check that
you're using the standard code style
- `cargo test --workspace` to check that all tests pass
- `cargo run -- crates/nu-std/tests/run.nu` to run the tests for the
standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
-->
# After Submitting
<!-- If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
-->
---------
Co-authored-by: Darren Schroeder <343840+fdncred@users.noreply.github.com>
2023-06-28 19:19:48 +02:00
|
|
|
return (garbage(spans[*spans_idx]), None);
|
2021-10-12 07:08:55 +02:00
|
|
|
}
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if bytes.ends_with(b":") {
|
|
|
|
// We end with colon, so the next span should be the type
|
|
|
|
if *spans_idx + 1 < spans.len() {
|
2023-09-24 11:01:21 +02:00
|
|
|
let span_beginning = *spans_idx;
|
2021-09-02 10:25:22 +02:00
|
|
|
*spans_idx += 1;
|
2023-09-24 11:01:21 +02:00
|
|
|
// signature like record<a: int b: int> is broken into multiple spans due to
|
|
|
|
// whitespaces. Collect the rest into one span and work on it
|
|
|
|
let full_span = span(&spans[*spans_idx..]);
|
|
|
|
let type_bytes = working_set.get_span_contents(full_span).to_vec();
|
2021-07-16 08:24:46 +02:00
|
|
|
|
2023-09-24 11:01:21 +02:00
|
|
|
let (tokens, parse_error) =
|
|
|
|
lex_signature(&type_bytes, full_span.start, &[b','], &[], true);
|
|
|
|
|
|
|
|
if let Some(parse_error) = parse_error {
|
|
|
|
working_set.parse_errors.push(parse_error);
|
|
|
|
}
|
|
|
|
|
|
|
|
let ty = parse_type(working_set, &type_bytes, tokens[0].span);
|
|
|
|
*spans_idx += spans.len() - *spans_idx - 1;
|
2021-07-16 08:24:46 +02:00
|
|
|
|
2022-07-27 04:08:54 +02:00
|
|
|
let var_name = bytes[0..(bytes.len() - 1)].to_vec();
|
|
|
|
|
|
|
|
if !is_variable(&var_name) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
2023-05-24 21:53:57 +02:00
|
|
|
"valid variable name",
|
2023-07-02 21:40:56 +02:00
|
|
|
spans[*spans_idx - 1],
|
2023-04-07 02:35:45 +02:00
|
|
|
));
|
2023-07-02 21:40:56 +02:00
|
|
|
return (garbage(spans[*spans_idx - 1]), None);
|
2022-07-27 04:08:54 +02:00
|
|
|
}
|
|
|
|
|
2022-11-11 07:51:08 +01:00
|
|
|
let id = working_set.add_variable(var_name, spans[*spans_idx - 1], ty.clone(), mutable);
|
2021-07-16 08:24:46 +02:00
|
|
|
|
Improve type hovers (#9515)
# Description
This PR does a few things to help improve type hovers and, in the
process, fixes a few outstanding issues in the type system. Here's a
list of the changes:
* `for` now will try to infer the type of the iteration variable based
on the expression it's given. This fixes things like `for x in [1, 2, 3]
{ }` where `x` now properly gets the int type.
* Removed old input/output type fields from the signature, focuses on
the vec of signatures. Updated a bunch of dataframe commands that hadn't
moved over. This helps tie things together a bit better
* Fixed inference of types from subexpressions to use the last
expression in the block
* Fixed handling of explicit types in `let` and `mut` calls, so we now
respect that as the authoritative type
I also tried to add `def` input/output type inference, but unfortunately
we only know the predecl types universally, which means we won't have
enough information to properly know what the types of the custom
commands are.
# User-Facing Changes
Script typechecking will get tighter in some cases
Hovers should be more accurate in some cases that previously resorted to
any.
# Tests + Formatting
<!--
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect -A clippy::result_large_err` to check that
you're using the standard code style
- `cargo test --workspace` to check that all tests pass
- `cargo run -- crates/nu-std/tests/run.nu` to run the tests for the
standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
-->
# After Submitting
<!-- If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
-->
---------
Co-authored-by: Darren Schroeder <343840+fdncred@users.noreply.github.com>
2023-06-28 19:19:48 +02:00
|
|
|
(
|
|
|
|
Expression {
|
|
|
|
expr: Expr::VarDecl(id),
|
2023-09-24 11:01:21 +02:00
|
|
|
span: span(&spans[span_beginning..*spans_idx + 1]),
|
Improve type hovers (#9515)
# Description
This PR does a few things to help improve type hovers and, in the
process, fixes a few outstanding issues in the type system. Here's a
list of the changes:
* `for` now will try to infer the type of the iteration variable based
on the expression it's given. This fixes things like `for x in [1, 2, 3]
{ }` where `x` now properly gets the int type.
* Removed old input/output type fields from the signature, focuses on
the vec of signatures. Updated a bunch of dataframe commands that hadn't
moved over. This helps tie things together a bit better
* Fixed inference of types from subexpressions to use the last
expression in the block
* Fixed handling of explicit types in `let` and `mut` calls, so we now
respect that as the authoritative type
I also tried to add `def` input/output type inference, but unfortunately
we only know the predecl types universally, which means we won't have
enough information to properly know what the types of the custom
commands are.
# User-Facing Changes
Script typechecking will get tighter in some cases
Hovers should be more accurate in some cases that previously resorted to
any.
# Tests + Formatting
<!--
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect -A clippy::result_large_err` to check that
you're using the standard code style
- `cargo test --workspace` to check that all tests pass
- `cargo run -- crates/nu-std/tests/run.nu` to run the tests for the
standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
-->
# After Submitting
<!-- If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
-->
---------
Co-authored-by: Darren Schroeder <343840+fdncred@users.noreply.github.com>
2023-06-28 19:19:48 +02:00
|
|
|
ty: ty.clone(),
|
|
|
|
custom_completion: None,
|
|
|
|
},
|
|
|
|
Some(ty),
|
|
|
|
)
|
2021-07-16 08:24:46 +02:00
|
|
|
} else {
|
2022-07-27 04:08:54 +02:00
|
|
|
let var_name = bytes[0..(bytes.len() - 1)].to_vec();
|
|
|
|
|
|
|
|
if !is_variable(&var_name) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
2023-05-24 21:53:57 +02:00
|
|
|
"valid variable name",
|
2023-04-07 02:35:45 +02:00
|
|
|
spans[*spans_idx],
|
|
|
|
));
|
Improve type hovers (#9515)
# Description
This PR does a few things to help improve type hovers and, in the
process, fixes a few outstanding issues in the type system. Here's a
list of the changes:
* `for` now will try to infer the type of the iteration variable based
on the expression it's given. This fixes things like `for x in [1, 2, 3]
{ }` where `x` now properly gets the int type.
* Removed old input/output type fields from the signature, focuses on
the vec of signatures. Updated a bunch of dataframe commands that hadn't
moved over. This helps tie things together a bit better
* Fixed inference of types from subexpressions to use the last
expression in the block
* Fixed handling of explicit types in `let` and `mut` calls, so we now
respect that as the authoritative type
I also tried to add `def` input/output type inference, but unfortunately
we only know the predecl types universally, which means we won't have
enough information to properly know what the types of the custom
commands are.
# User-Facing Changes
Script typechecking will get tighter in some cases
Hovers should be more accurate in some cases that previously resorted to
any.
# Tests + Formatting
<!--
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect -A clippy::result_large_err` to check that
you're using the standard code style
- `cargo test --workspace` to check that all tests pass
- `cargo run -- crates/nu-std/tests/run.nu` to run the tests for the
standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
-->
# After Submitting
<!-- If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
-->
---------
Co-authored-by: Darren Schroeder <343840+fdncred@users.noreply.github.com>
2023-06-28 19:19:48 +02:00
|
|
|
return (garbage(spans[*spans_idx]), None);
|
2022-07-27 04:08:54 +02:00
|
|
|
}
|
|
|
|
|
2022-11-11 07:51:08 +01:00
|
|
|
let id = working_set.add_variable(var_name, spans[*spans_idx], Type::Any, mutable);
|
2023-04-07 02:35:45 +02:00
|
|
|
|
|
|
|
working_set.error(ParseError::MissingType(spans[*spans_idx]));
|
Improve type hovers (#9515)
# Description
This PR does a few things to help improve type hovers and, in the
process, fixes a few outstanding issues in the type system. Here's a
list of the changes:
* `for` now will try to infer the type of the iteration variable based
on the expression it's given. This fixes things like `for x in [1, 2, 3]
{ }` where `x` now properly gets the int type.
* Removed old input/output type fields from the signature, focuses on
the vec of signatures. Updated a bunch of dataframe commands that hadn't
moved over. This helps tie things together a bit better
* Fixed inference of types from subexpressions to use the last
expression in the block
* Fixed handling of explicit types in `let` and `mut` calls, so we now
respect that as the authoritative type
I also tried to add `def` input/output type inference, but unfortunately
we only know the predecl types universally, which means we won't have
enough information to properly know what the types of the custom
commands are.
# User-Facing Changes
Script typechecking will get tighter in some cases
Hovers should be more accurate in some cases that previously resorted to
any.
# Tests + Formatting
<!--
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect -A clippy::result_large_err` to check that
you're using the standard code style
- `cargo test --workspace` to check that all tests pass
- `cargo run -- crates/nu-std/tests/run.nu` to run the tests for the
standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
-->
# After Submitting
<!-- If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
-->
---------
Co-authored-by: Darren Schroeder <343840+fdncred@users.noreply.github.com>
2023-06-28 19:19:48 +02:00
|
|
|
(
|
|
|
|
Expression {
|
|
|
|
expr: Expr::VarDecl(id),
|
|
|
|
span: spans[*spans_idx],
|
|
|
|
ty: Type::Any,
|
|
|
|
custom_completion: None,
|
|
|
|
},
|
|
|
|
None,
|
|
|
|
)
|
2021-07-16 08:24:46 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
2022-07-27 04:08:54 +02:00
|
|
|
let var_name = bytes;
|
|
|
|
|
|
|
|
if !is_variable(&var_name) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
2023-05-24 21:53:57 +02:00
|
|
|
"valid variable name",
|
2023-04-07 02:35:45 +02:00
|
|
|
spans[*spans_idx],
|
|
|
|
));
|
Improve type hovers (#9515)
# Description
This PR does a few things to help improve type hovers and, in the
process, fixes a few outstanding issues in the type system. Here's a
list of the changes:
* `for` now will try to infer the type of the iteration variable based
on the expression it's given. This fixes things like `for x in [1, 2, 3]
{ }` where `x` now properly gets the int type.
* Removed old input/output type fields from the signature, focuses on
the vec of signatures. Updated a bunch of dataframe commands that hadn't
moved over. This helps tie things together a bit better
* Fixed inference of types from subexpressions to use the last
expression in the block
* Fixed handling of explicit types in `let` and `mut` calls, so we now
respect that as the authoritative type
I also tried to add `def` input/output type inference, but unfortunately
we only know the predecl types universally, which means we won't have
enough information to properly know what the types of the custom
commands are.
# User-Facing Changes
Script typechecking will get tighter in some cases
Hovers should be more accurate in some cases that previously resorted to
any.
# Tests + Formatting
<!--
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect -A clippy::result_large_err` to check that
you're using the standard code style
- `cargo test --workspace` to check that all tests pass
- `cargo run -- crates/nu-std/tests/run.nu` to run the tests for the
standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
-->
# After Submitting
<!-- If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
-->
---------
Co-authored-by: Darren Schroeder <343840+fdncred@users.noreply.github.com>
2023-06-28 19:19:48 +02:00
|
|
|
return (garbage(spans[*spans_idx]), None);
|
2022-07-27 04:08:54 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
let id = working_set.add_variable(
|
|
|
|
var_name,
|
|
|
|
span(&spans[*spans_idx..*spans_idx + 1]),
|
|
|
|
Type::Any,
|
2022-11-11 07:51:08 +01:00
|
|
|
mutable,
|
2022-07-27 04:08:54 +02:00
|
|
|
);
|
2021-07-08 00:55:46 +02:00
|
|
|
|
Improve type hovers (#9515)
# Description
This PR does a few things to help improve type hovers and, in the
process, fixes a few outstanding issues in the type system. Here's a
list of the changes:
* `for` now will try to infer the type of the iteration variable based
on the expression it's given. This fixes things like `for x in [1, 2, 3]
{ }` where `x` now properly gets the int type.
* Removed old input/output type fields from the signature, focuses on
the vec of signatures. Updated a bunch of dataframe commands that hadn't
moved over. This helps tie things together a bit better
* Fixed inference of types from subexpressions to use the last
expression in the block
* Fixed handling of explicit types in `let` and `mut` calls, so we now
respect that as the authoritative type
I also tried to add `def` input/output type inference, but unfortunately
we only know the predecl types universally, which means we won't have
enough information to properly know what the types of the custom
commands are.
# User-Facing Changes
Script typechecking will get tighter in some cases
Hovers should be more accurate in some cases that previously resorted to
any.
# Tests + Formatting
<!--
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect -A clippy::result_large_err` to check that
you're using the standard code style
- `cargo test --workspace` to check that all tests pass
- `cargo run -- crates/nu-std/tests/run.nu` to run the tests for the
standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
-->
# After Submitting
<!-- If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
-->
---------
Co-authored-by: Darren Schroeder <343840+fdncred@users.noreply.github.com>
2023-06-28 19:19:48 +02:00
|
|
|
(
|
|
|
|
Expression {
|
|
|
|
expr: Expr::VarDecl(id),
|
|
|
|
span: span(&spans[*spans_idx..*spans_idx + 1]),
|
|
|
|
ty: Type::Any,
|
|
|
|
custom_completion: None,
|
|
|
|
},
|
|
|
|
None,
|
|
|
|
)
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
}
|
2021-09-09 23:47:20 +02:00
|
|
|
|
|
|
|
pub fn expand_to_cell_path(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
expression: &mut Expression,
|
|
|
|
var_id: VarId,
|
|
|
|
) {
|
2022-12-10 18:23:24 +01:00
|
|
|
trace!("parsing: expanding to cell path");
|
2021-09-09 23:47:20 +02:00
|
|
|
if let Expression {
|
|
|
|
expr: Expr::String(_),
|
|
|
|
span,
|
|
|
|
..
|
|
|
|
} = expression
|
|
|
|
{
|
|
|
|
// Re-parse the string as if it were a cell-path
|
2023-04-07 20:09:38 +02:00
|
|
|
let new_expression = parse_full_cell_path(working_set, Some(var_id), *span);
|
2021-09-09 23:47:20 +02:00
|
|
|
|
|
|
|
*expression = new_expression;
|
|
|
|
}
|
2024-01-29 20:42:27 +01:00
|
|
|
|
|
|
|
if let Expression {
|
|
|
|
expr: Expr::UnaryNot(inner),
|
|
|
|
..
|
|
|
|
} = expression
|
|
|
|
{
|
|
|
|
expand_to_cell_path(working_set, inner, var_id);
|
|
|
|
}
|
2021-09-09 23:47:20 +02:00
|
|
|
}
|
|
|
|
|
2023-07-14 23:51:28 +02:00
|
|
|
pub fn parse_input_output_types(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
spans: &[Span],
|
|
|
|
) -> Vec<(Type, Type)> {
|
|
|
|
let mut full_span = span(spans);
|
|
|
|
|
|
|
|
let mut bytes = working_set.get_span_contents(full_span);
|
|
|
|
|
|
|
|
if bytes.starts_with(b"[") {
|
|
|
|
bytes = &bytes[1..];
|
|
|
|
full_span.start += 1;
|
|
|
|
}
|
|
|
|
|
|
|
|
if bytes.ends_with(b"]") {
|
|
|
|
bytes = &bytes[..(bytes.len() - 1)];
|
|
|
|
full_span.end -= 1;
|
|
|
|
}
|
|
|
|
|
2023-09-24 11:01:21 +02:00
|
|
|
let (tokens, parse_error) = lex_signature(bytes, full_span.start, &[b','], &[], true);
|
2023-07-14 23:51:28 +02:00
|
|
|
|
|
|
|
if let Some(parse_error) = parse_error {
|
|
|
|
working_set.parse_errors.push(parse_error);
|
|
|
|
}
|
|
|
|
|
|
|
|
let mut output = vec![];
|
|
|
|
|
|
|
|
let mut idx = 0;
|
|
|
|
while idx < tokens.len() {
|
|
|
|
let type_bytes = working_set.get_span_contents(tokens[idx].span).to_vec();
|
|
|
|
let input_type = parse_type(working_set, &type_bytes, tokens[idx].span);
|
|
|
|
|
|
|
|
idx += 1;
|
|
|
|
if idx >= tokens.len() {
|
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"arrow (->)",
|
|
|
|
Span::new(tokens[idx - 1].span.end, tokens[idx - 1].span.end),
|
|
|
|
));
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
let arrow = working_set.get_span_contents(tokens[idx].span);
|
|
|
|
if arrow != b"->" {
|
|
|
|
working_set.error(ParseError::Expected("arrow (->)", tokens[idx].span));
|
|
|
|
}
|
|
|
|
|
|
|
|
idx += 1;
|
|
|
|
if idx >= tokens.len() {
|
|
|
|
working_set.error(ParseError::MissingType(Span::new(
|
|
|
|
tokens[idx - 1].span.end,
|
|
|
|
tokens[idx - 1].span.end,
|
|
|
|
)));
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
let type_bytes = working_set.get_span_contents(tokens[idx].span).to_vec();
|
|
|
|
let output_type = parse_type(working_set, &type_bytes, tokens[idx].span);
|
|
|
|
|
|
|
|
output.push((input_type, output_type));
|
|
|
|
|
|
|
|
idx += 1;
|
|
|
|
}
|
|
|
|
|
|
|
|
output
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn parse_full_signature(working_set: &mut StateWorkingSet, spans: &[Span]) -> Expression {
|
|
|
|
let arg_signature = working_set.get_span_contents(spans[0]);
|
|
|
|
|
|
|
|
if arg_signature.ends_with(b":") {
|
|
|
|
let mut arg_signature =
|
|
|
|
parse_signature(working_set, Span::new(spans[0].start, spans[0].end - 1));
|
|
|
|
|
|
|
|
let input_output_types = parse_input_output_types(working_set, &spans[1..]);
|
|
|
|
|
|
|
|
if let Expression {
|
|
|
|
expr: Expr::Signature(sig),
|
|
|
|
span: expr_span,
|
|
|
|
..
|
|
|
|
} = &mut arg_signature
|
|
|
|
{
|
|
|
|
sig.input_output_types = input_output_types;
|
|
|
|
expr_span.end = span(&spans[1..]).end;
|
|
|
|
}
|
|
|
|
arg_signature
|
|
|
|
} else {
|
|
|
|
parse_signature(working_set, spans[0])
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
pub fn parse_row_condition(working_set: &mut StateWorkingSet, spans: &[Span]) -> Expression {
|
2022-11-11 07:51:08 +01:00
|
|
|
let var_id = working_set.add_variable(b"$it".to_vec(), span(spans), Type::Any, false);
|
2023-04-07 20:09:38 +02:00
|
|
|
let expression = parse_math_expression(working_set, spans, Some(var_id));
|
2021-09-09 23:47:20 +02:00
|
|
|
let span = span(spans);
|
2021-11-26 04:49:03 +01:00
|
|
|
|
|
|
|
let block_id = match expression.expr {
|
|
|
|
Expr::Block(block_id) => block_id,
|
2022-11-10 09:21:49 +01:00
|
|
|
Expr::Closure(block_id) => block_id,
|
2021-11-26 04:49:03 +01:00
|
|
|
_ => {
|
|
|
|
// We have an expression, so let's convert this into a block.
|
|
|
|
let mut block = Block::new();
|
|
|
|
let mut pipeline = Pipeline::new();
|
2022-11-18 22:46:48 +01:00
|
|
|
pipeline
|
|
|
|
.elements
|
2022-11-22 19:26:13 +01:00
|
|
|
.push(PipelineElement::Expression(None, expression));
|
2021-11-26 04:49:03 +01:00
|
|
|
|
2022-02-15 20:31:14 +01:00
|
|
|
block.pipelines.push(pipeline);
|
2021-11-26 04:49:03 +01:00
|
|
|
|
|
|
|
block.signature.required_positional.push(PositionalArg {
|
|
|
|
name: "$it".into(),
|
|
|
|
desc: "row condition".into(),
|
|
|
|
shape: SyntaxShape::Any,
|
|
|
|
var_id: Some(var_id),
|
2022-03-07 21:08:56 +01:00
|
|
|
default_value: None,
|
2021-11-26 04:49:03 +01:00
|
|
|
});
|
|
|
|
|
|
|
|
working_set.add_block(block)
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
ty: Type::Bool,
|
|
|
|
span,
|
|
|
|
expr: Expr::RowCondition(block_id),
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-16 23:55:12 +02:00
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
pub fn parse_signature(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2021-09-02 10:25:22 +02:00
|
|
|
let bytes = working_set.get_span_contents(span);
|
2021-07-16 23:55:12 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let mut start = span.start;
|
|
|
|
let mut end = span.end;
|
2021-07-16 23:55:12 +02:00
|
|
|
|
2022-08-30 06:17:10 +02:00
|
|
|
let mut has_paren = false;
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if bytes.starts_with(b"[") {
|
|
|
|
start += 1;
|
2022-08-30 06:17:10 +02:00
|
|
|
} else if bytes.starts_with(b"(") {
|
|
|
|
has_paren = true;
|
|
|
|
start += 1;
|
2021-10-11 22:58:38 +02:00
|
|
|
} else {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("[ or (", Span::new(start, start + 1)));
|
2023-04-07 02:35:45 +02:00
|
|
|
return garbage(span);
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-10-11 22:58:38 +02:00
|
|
|
|
2022-08-30 06:17:10 +02:00
|
|
|
if (has_paren && bytes.ends_with(b")")) || (!has_paren && bytes.ends_with(b"]")) {
|
2021-09-02 10:25:22 +02:00
|
|
|
end -= 1;
|
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Unclosed("] or )".into(), Span::new(end, end)));
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-16 23:55:12 +02:00
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
let sig = parse_signature_helper(working_set, Span::new(start, end));
|
2021-09-06 01:16:27 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::Signature(sig),
|
|
|
|
span,
|
|
|
|
ty: Type::Signature,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-09-06 01:16:27 +02:00
|
|
|
}
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
pub fn parse_signature_helper(working_set: &mut StateWorkingSet, span: Span) -> Box<Signature> {
|
2022-03-07 21:08:56 +01:00
|
|
|
#[allow(clippy::enum_variant_names)]
|
2021-09-06 01:16:27 +02:00
|
|
|
enum ParseMode {
|
|
|
|
ArgMode,
|
2022-12-31 12:18:53 +01:00
|
|
|
AfterCommaArgMode,
|
2021-09-06 01:16:27 +02:00
|
|
|
TypeMode,
|
2022-03-07 21:08:56 +01:00
|
|
|
DefaultValueMode,
|
2021-09-06 01:16:27 +02:00
|
|
|
}
|
|
|
|
|
2022-03-07 21:08:56 +01:00
|
|
|
#[derive(Debug)]
|
2021-09-06 01:16:27 +02:00
|
|
|
enum Arg {
|
2023-11-14 13:46:05 +01:00
|
|
|
Positional {
|
|
|
|
arg: PositionalArg,
|
|
|
|
required: bool,
|
|
|
|
type_annotated: bool,
|
|
|
|
},
|
2022-03-07 17:44:27 +01:00
|
|
|
RestPositional(PositionalArg),
|
2023-11-14 13:46:05 +01:00
|
|
|
Flag {
|
|
|
|
flag: Flag,
|
|
|
|
type_annotated: bool,
|
|
|
|
},
|
2021-09-06 01:16:27 +02:00
|
|
|
}
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let source = working_set.get_span_contents(span);
|
2021-07-16 23:55:12 +02:00
|
|
|
|
2023-03-24 12:54:06 +01:00
|
|
|
let (output, err) = lex_signature(
|
2022-03-07 21:08:56 +01:00
|
|
|
source,
|
|
|
|
span.start,
|
2022-12-31 12:18:53 +01:00
|
|
|
&[b'\n', b'\r'],
|
|
|
|
&[b':', b'=', b','],
|
2022-03-07 21:08:56 +01:00
|
|
|
false,
|
|
|
|
);
|
2023-04-07 02:35:45 +02:00
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err);
|
|
|
|
}
|
2021-07-16 23:55:12 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let mut args: Vec<Arg> = vec![];
|
|
|
|
let mut parse_mode = ParseMode::ArgMode;
|
2021-07-16 23:55:12 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
for token in &output {
|
|
|
|
match token {
|
|
|
|
Token {
|
|
|
|
contents: crate::TokenContents::Item,
|
|
|
|
span,
|
|
|
|
} => {
|
|
|
|
let span = *span;
|
2023-04-07 02:35:45 +02:00
|
|
|
let contents = working_set.get_span_contents(span).to_vec();
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2022-12-28 00:00:44 +01:00
|
|
|
// The : symbol separates types
|
2021-09-02 10:25:22 +02:00
|
|
|
if contents == b":" {
|
|
|
|
match parse_mode {
|
|
|
|
ParseMode::ArgMode => {
|
|
|
|
parse_mode = ParseMode::TypeMode;
|
2021-07-16 23:55:12 +02:00
|
|
|
}
|
2022-12-31 12:18:53 +01:00
|
|
|
ParseMode::AfterCommaArgMode => {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("parameter or flag", span));
|
2022-12-31 12:18:53 +01:00
|
|
|
}
|
2022-03-07 21:08:56 +01:00
|
|
|
ParseMode::TypeMode | ParseMode::DefaultValueMode => {
|
2021-09-02 10:25:22 +02:00
|
|
|
// We're seeing two types for the same thing for some reason, error
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("type", span));
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
}
|
2022-12-28 00:00:44 +01:00
|
|
|
}
|
|
|
|
// The = symbol separates a variable from its default value
|
|
|
|
else if contents == b"=" {
|
2022-03-07 21:08:56 +01:00
|
|
|
match parse_mode {
|
2023-11-14 13:46:05 +01:00
|
|
|
ParseMode::TypeMode | ParseMode::ArgMode => {
|
2022-03-07 21:08:56 +01:00
|
|
|
parse_mode = ParseMode::DefaultValueMode;
|
|
|
|
}
|
2022-12-31 12:18:53 +01:00
|
|
|
ParseMode::AfterCommaArgMode => {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("parameter or flag", span));
|
2022-12-31 12:18:53 +01:00
|
|
|
}
|
2022-03-07 21:08:56 +01:00
|
|
|
ParseMode::DefaultValueMode => {
|
|
|
|
// We're seeing two default values for some reason, error
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("default value", span));
|
2022-03-07 21:08:56 +01:00
|
|
|
}
|
|
|
|
}
|
2022-12-31 12:18:53 +01:00
|
|
|
}
|
|
|
|
// The , symbol separates params only
|
|
|
|
else if contents == b"," {
|
|
|
|
match parse_mode {
|
|
|
|
ParseMode::ArgMode => parse_mode = ParseMode::AfterCommaArgMode,
|
|
|
|
ParseMode::AfterCommaArgMode => {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("parameter or flag", span));
|
2022-12-31 12:18:53 +01:00
|
|
|
}
|
|
|
|
ParseMode::TypeMode => {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("type", span));
|
2022-12-31 12:18:53 +01:00
|
|
|
}
|
|
|
|
ParseMode::DefaultValueMode => {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("default value", span));
|
2022-12-31 12:18:53 +01:00
|
|
|
}
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
|
|
|
match parse_mode {
|
2022-12-31 12:18:53 +01:00
|
|
|
ParseMode::ArgMode | ParseMode::AfterCommaArgMode => {
|
|
|
|
// Long flag with optional short form following with no whitespace, e.g. --output, --age(-a)
|
2021-09-02 10:25:22 +02:00
|
|
|
if contents.starts_with(b"--") && contents.len() > 2 {
|
2022-12-28 00:00:44 +01:00
|
|
|
// Split the long flag from the short flag with the ( character as delimiter.
|
|
|
|
// The trailing ) is removed further down.
|
2021-09-02 10:25:22 +02:00
|
|
|
let flags: Vec<_> =
|
|
|
|
contents.split(|x| x == &b'(').map(|x| x.to_vec()).collect();
|
|
|
|
|
2021-10-13 19:53:27 +02:00
|
|
|
let long = String::from_utf8_lossy(&flags[0][2..]).to_string();
|
2022-07-27 04:08:54 +02:00
|
|
|
let mut variable_name = flags[0][2..].to_vec();
|
|
|
|
// Replace the '-' in a variable name with '_'
|
|
|
|
(0..variable_name.len()).for_each(|idx| {
|
|
|
|
if variable_name[idx] == b'-' {
|
|
|
|
variable_name[idx] = b'_';
|
|
|
|
}
|
|
|
|
});
|
|
|
|
|
|
|
|
if !is_variable(&variable_name) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
2023-05-24 21:53:57 +02:00
|
|
|
"valid variable name for this long flag",
|
2023-04-07 02:35:45 +02:00
|
|
|
span,
|
|
|
|
))
|
2022-07-27 04:08:54 +02:00
|
|
|
}
|
|
|
|
|
2022-03-09 10:42:19 +01:00
|
|
|
let var_id =
|
2022-11-11 07:51:08 +01:00
|
|
|
working_set.add_variable(variable_name, span, Type::Any, false);
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2022-12-28 00:00:44 +01:00
|
|
|
// If there's no short flag, exit now. Otherwise, parse it.
|
2021-09-02 10:25:22 +02:00
|
|
|
if flags.len() == 1 {
|
2023-11-14 13:46:05 +01:00
|
|
|
args.push(Arg::Flag {
|
|
|
|
flag: Flag {
|
|
|
|
arg: None,
|
|
|
|
desc: String::new(),
|
|
|
|
long,
|
|
|
|
short: None,
|
|
|
|
required: false,
|
|
|
|
var_id: Some(var_id),
|
|
|
|
default_value: None,
|
|
|
|
},
|
|
|
|
type_annotated: false,
|
|
|
|
});
|
2022-07-10 10:32:52 +02:00
|
|
|
} else if flags.len() >= 3 {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
2023-05-24 21:53:57 +02:00
|
|
|
"only one short flag alternative",
|
2023-04-07 02:35:45 +02:00
|
|
|
span,
|
|
|
|
));
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
|
|
|
let short_flag = &flags[1];
|
|
|
|
let short_flag = if !short_flag.starts_with(b"-")
|
|
|
|
|| !short_flag.ends_with(b")")
|
|
|
|
{
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
2023-05-24 21:53:57 +02:00
|
|
|
"short flag alternative for the long flag",
|
2023-04-07 02:35:45 +02:00
|
|
|
span,
|
|
|
|
));
|
2021-09-02 10:25:22 +02:00
|
|
|
short_flag
|
2021-07-17 20:52:50 +02:00
|
|
|
} else {
|
2022-12-28 00:00:44 +01:00
|
|
|
// Obtain the flag's name by removing the starting - and trailing )
|
2021-09-02 10:25:22 +02:00
|
|
|
&short_flag[1..(short_flag.len() - 1)]
|
|
|
|
};
|
2022-12-28 00:00:44 +01:00
|
|
|
// Note that it is currently possible to make a short flag with non-alphanumeric characters,
|
|
|
|
// like -).
|
2021-07-17 00:39:30 +02:00
|
|
|
|
|
|
|
let short_flag =
|
|
|
|
String::from_utf8_lossy(short_flag).to_string();
|
|
|
|
let chars: Vec<char> = short_flag.chars().collect();
|
2021-10-13 19:53:27 +02:00
|
|
|
let long = String::from_utf8_lossy(&flags[0][2..]).to_string();
|
2022-07-27 09:27:28 +02:00
|
|
|
let mut variable_name = flags[0][2..].to_vec();
|
|
|
|
|
|
|
|
(0..variable_name.len()).for_each(|idx| {
|
|
|
|
if variable_name[idx] == b'-' {
|
|
|
|
variable_name[idx] = b'_';
|
|
|
|
}
|
|
|
|
});
|
|
|
|
|
2022-07-27 04:08:54 +02:00
|
|
|
if !is_variable(&variable_name) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
2023-05-24 21:53:57 +02:00
|
|
|
"valid variable name for this short flag",
|
2023-04-07 02:35:45 +02:00
|
|
|
span,
|
|
|
|
))
|
2022-07-27 04:08:54 +02:00
|
|
|
}
|
|
|
|
|
2022-11-11 07:51:08 +01:00
|
|
|
let var_id = working_set.add_variable(
|
|
|
|
variable_name,
|
|
|
|
span,
|
|
|
|
Type::Any,
|
|
|
|
false,
|
|
|
|
);
|
2021-07-17 00:39:30 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if chars.len() == 1 {
|
2023-11-14 13:46:05 +01:00
|
|
|
args.push(Arg::Flag {
|
|
|
|
flag: Flag {
|
|
|
|
arg: None,
|
|
|
|
desc: String::new(),
|
|
|
|
long,
|
|
|
|
short: Some(chars[0]),
|
|
|
|
required: false,
|
|
|
|
var_id: Some(var_id),
|
|
|
|
default_value: None,
|
|
|
|
},
|
|
|
|
type_annotated: false,
|
|
|
|
});
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("short flag", span));
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
}
|
2022-12-31 12:18:53 +01:00
|
|
|
parse_mode = ParseMode::ArgMode;
|
2022-12-28 00:00:44 +01:00
|
|
|
}
|
|
|
|
// Mandatory short flag, e.g. -e (must be one character)
|
|
|
|
else if contents.starts_with(b"-") && contents.len() > 1 {
|
2021-09-02 10:25:22 +02:00
|
|
|
let short_flag = &contents[1..];
|
|
|
|
let short_flag = String::from_utf8_lossy(short_flag).to_string();
|
|
|
|
let chars: Vec<char> = short_flag.chars().collect();
|
|
|
|
|
|
|
|
if chars.len() > 1 {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("short flag", span));
|
2022-01-06 22:06:54 +01:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2022-01-06 22:06:54 +01:00
|
|
|
let mut encoded_var_name = vec![0u8; 4];
|
|
|
|
let len = chars[0].encode_utf8(&mut encoded_var_name).len();
|
|
|
|
let variable_name = encoded_var_name[0..len].to_vec();
|
2022-12-28 00:00:44 +01:00
|
|
|
|
2022-07-27 04:08:54 +02:00
|
|
|
if !is_variable(&variable_name) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
2023-05-24 21:53:57 +02:00
|
|
|
"valid variable name for this short flag",
|
2023-04-07 02:35:45 +02:00
|
|
|
span,
|
|
|
|
))
|
2022-07-27 04:08:54 +02:00
|
|
|
}
|
|
|
|
|
2022-03-09 10:42:19 +01:00
|
|
|
let var_id =
|
2022-11-11 07:51:08 +01:00
|
|
|
working_set.add_variable(variable_name, span, Type::Any, false);
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2023-11-14 13:46:05 +01:00
|
|
|
args.push(Arg::Flag {
|
|
|
|
flag: Flag {
|
|
|
|
arg: None,
|
|
|
|
desc: String::new(),
|
|
|
|
long: String::new(),
|
|
|
|
short: Some(chars[0]),
|
|
|
|
required: false,
|
|
|
|
var_id: Some(var_id),
|
|
|
|
default_value: None,
|
|
|
|
},
|
|
|
|
type_annotated: false,
|
|
|
|
});
|
2022-12-31 12:18:53 +01:00
|
|
|
parse_mode = ParseMode::ArgMode;
|
2022-12-28 00:00:44 +01:00
|
|
|
}
|
2022-12-31 12:18:53 +01:00
|
|
|
// Short flag alias for long flag, e.g. --b (-a)
|
|
|
|
// This is the same as the short flag in --b(-a)
|
2022-12-28 00:00:44 +01:00
|
|
|
else if contents.starts_with(b"(-") {
|
2022-12-31 12:18:53 +01:00
|
|
|
if matches!(parse_mode, ParseMode::AfterCommaArgMode) {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set
|
|
|
|
.error(ParseError::Expected("parameter or flag", span));
|
2022-12-31 12:18:53 +01:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
let short_flag = &contents[2..];
|
|
|
|
|
|
|
|
let short_flag = if !short_flag.ends_with(b")") {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("short flag", span));
|
2021-09-02 10:25:22 +02:00
|
|
|
short_flag
|
|
|
|
} else {
|
|
|
|
&short_flag[..(short_flag.len() - 1)]
|
|
|
|
};
|
2021-07-17 20:52:50 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let short_flag = String::from_utf8_lossy(short_flag).to_string();
|
|
|
|
let chars: Vec<char> = short_flag.chars().collect();
|
2021-07-17 20:52:50 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if chars.len() == 1 {
|
|
|
|
match args.last_mut() {
|
2023-11-14 13:46:05 +01:00
|
|
|
Some(Arg::Flag { flag, .. }) => {
|
2021-09-02 10:25:22 +02:00
|
|
|
if flag.short.is_some() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
2023-05-24 21:53:57 +02:00
|
|
|
"one short flag",
|
2023-04-07 02:35:45 +02:00
|
|
|
span,
|
|
|
|
));
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
|
|
|
flag.short = Some(chars[0]);
|
2021-07-17 20:52:50 +02:00
|
|
|
}
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
_ => {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set
|
|
|
|
.error(ParseError::Expected("unknown flag", span));
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-17 20:52:50 +02:00
|
|
|
}
|
2021-07-30 00:56:51 +02:00
|
|
|
} else {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("short flag", span));
|
2021-07-16 23:55:12 +02:00
|
|
|
}
|
2022-12-28 00:00:44 +01:00
|
|
|
}
|
|
|
|
// Positional arg, optional
|
|
|
|
else if contents.ends_with(b"?") {
|
2021-09-02 10:25:22 +02:00
|
|
|
let contents: Vec<_> = contents[..(contents.len() - 1)].into();
|
|
|
|
let name = String::from_utf8_lossy(&contents).to_string();
|
|
|
|
|
2022-07-27 04:08:54 +02:00
|
|
|
if !is_variable(&contents) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
2023-05-24 21:53:57 +02:00
|
|
|
"valid variable name for this optional parameter",
|
2023-04-07 02:35:45 +02:00
|
|
|
span,
|
|
|
|
))
|
2022-07-27 04:08:54 +02:00
|
|
|
}
|
|
|
|
|
2022-11-11 07:51:08 +01:00
|
|
|
let var_id =
|
|
|
|
working_set.add_variable(contents, span, Type::Any, false);
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2023-11-14 13:46:05 +01:00
|
|
|
args.push(Arg::Positional {
|
|
|
|
arg: PositionalArg {
|
2021-09-02 10:25:22 +02:00
|
|
|
desc: String::new(),
|
|
|
|
name,
|
|
|
|
shape: SyntaxShape::Any,
|
|
|
|
var_id: Some(var_id),
|
2022-03-07 21:08:56 +01:00
|
|
|
default_value: None,
|
2021-09-02 10:25:22 +02:00
|
|
|
},
|
2023-11-14 13:46:05 +01:00
|
|
|
required: false,
|
|
|
|
type_annotated: false,
|
|
|
|
});
|
2022-12-31 12:18:53 +01:00
|
|
|
parse_mode = ParseMode::ArgMode;
|
2022-12-28 00:00:44 +01:00
|
|
|
}
|
|
|
|
// Rest param
|
|
|
|
else if let Some(contents) = contents.strip_prefix(b"...") {
|
2021-09-07 05:37:02 +02:00
|
|
|
let name = String::from_utf8_lossy(contents).to_string();
|
|
|
|
let contents_vec: Vec<u8> = contents.to_vec();
|
2022-12-28 00:00:44 +01:00
|
|
|
|
2022-07-27 04:08:54 +02:00
|
|
|
if !is_variable(&contents_vec) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
2023-05-24 21:53:57 +02:00
|
|
|
"valid variable name for this rest parameter",
|
2023-04-07 02:35:45 +02:00
|
|
|
span,
|
|
|
|
))
|
2022-07-27 04:08:54 +02:00
|
|
|
}
|
2021-09-07 05:37:02 +02:00
|
|
|
|
2022-03-09 10:42:19 +01:00
|
|
|
let var_id =
|
2022-11-11 07:51:08 +01:00
|
|
|
working_set.add_variable(contents_vec, span, Type::Any, false);
|
2021-09-07 05:37:02 +02:00
|
|
|
|
2022-03-07 17:44:27 +01:00
|
|
|
args.push(Arg::RestPositional(PositionalArg {
|
|
|
|
desc: String::new(),
|
|
|
|
name,
|
|
|
|
shape: SyntaxShape::Any,
|
|
|
|
var_id: Some(var_id),
|
2022-03-07 21:08:56 +01:00
|
|
|
default_value: None,
|
2022-03-07 17:44:27 +01:00
|
|
|
}));
|
2022-12-31 12:18:53 +01:00
|
|
|
parse_mode = ParseMode::ArgMode;
|
2022-12-28 00:00:44 +01:00
|
|
|
}
|
|
|
|
// Normal param
|
|
|
|
else {
|
2023-04-07 02:35:45 +02:00
|
|
|
let name = String::from_utf8_lossy(&contents).to_string();
|
2021-09-02 10:25:22 +02:00
|
|
|
let contents_vec = contents.to_vec();
|
|
|
|
|
2022-07-27 04:08:54 +02:00
|
|
|
if !is_variable(&contents_vec) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
2023-05-24 21:53:57 +02:00
|
|
|
"valid variable name for this parameter",
|
2023-04-07 02:35:45 +02:00
|
|
|
span,
|
|
|
|
))
|
2022-07-27 04:08:54 +02:00
|
|
|
}
|
|
|
|
|
2022-03-09 10:42:19 +01:00
|
|
|
let var_id =
|
2022-11-11 07:51:08 +01:00
|
|
|
working_set.add_variable(contents_vec, span, Type::Any, false);
|
2021-09-02 10:25:22 +02:00
|
|
|
|
|
|
|
// Positional arg, required
|
2023-11-14 13:46:05 +01:00
|
|
|
args.push(Arg::Positional {
|
|
|
|
arg: PositionalArg {
|
2021-09-02 10:25:22 +02:00
|
|
|
desc: String::new(),
|
|
|
|
name,
|
|
|
|
shape: SyntaxShape::Any,
|
|
|
|
var_id: Some(var_id),
|
2022-03-07 21:08:56 +01:00
|
|
|
default_value: None,
|
2021-09-02 10:25:22 +02:00
|
|
|
},
|
2023-11-14 13:46:05 +01:00
|
|
|
required: true,
|
|
|
|
type_annotated: false,
|
|
|
|
});
|
2022-12-31 12:18:53 +01:00
|
|
|
parse_mode = ParseMode::ArgMode;
|
2021-07-16 23:55:12 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
ParseMode::TypeMode => {
|
|
|
|
if let Some(last) = args.last_mut() {
|
2023-10-05 22:39:37 +02:00
|
|
|
let syntax_shape = parse_shape_name(
|
|
|
|
working_set,
|
|
|
|
&contents,
|
|
|
|
span,
|
|
|
|
ShapeDescriptorUse::Argument,
|
|
|
|
);
|
2021-10-12 19:44:23 +02:00
|
|
|
//TODO check if we're replacing a custom parameter already
|
2021-09-02 10:25:22 +02:00
|
|
|
match last {
|
2023-11-14 13:46:05 +01:00
|
|
|
Arg::Positional {
|
|
|
|
arg: PositionalArg { shape, var_id, .. },
|
|
|
|
required: _,
|
|
|
|
type_annotated,
|
|
|
|
} => {
|
2021-09-02 10:25:22 +02:00
|
|
|
working_set.set_variable_type(var_id.expect("internal error: all custom parameters must have var_ids"), syntax_shape.to_type());
|
|
|
|
*shape = syntax_shape;
|
2023-11-14 13:46:05 +01:00
|
|
|
*type_annotated = true;
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2022-03-07 17:44:27 +01:00
|
|
|
Arg::RestPositional(PositionalArg {
|
|
|
|
shape, var_id, ..
|
|
|
|
}) => {
|
2023-07-26 20:22:08 +02:00
|
|
|
working_set.set_variable_type(var_id.expect("internal error: all custom parameters must have var_ids"), Type::List(Box::new(syntax_shape.to_type())));
|
2022-03-07 17:44:27 +01:00
|
|
|
*shape = syntax_shape;
|
|
|
|
}
|
2023-11-14 13:46:05 +01:00
|
|
|
Arg::Flag {
|
|
|
|
flag: Flag { arg, var_id, .. },
|
|
|
|
type_annotated,
|
|
|
|
} => {
|
2023-09-23 10:20:48 +02:00
|
|
|
working_set.set_variable_type(var_id.expect("internal error: all custom parameters must have var_ids"), syntax_shape.to_type());
|
Deprecate `--flag: bool` in custom command (#11365)
# Description
Now that #11057 is merged, it's hard to tell the difference between
`--flag: bool` and `--flag`, which makes custom commands' signatures
harder to read and harder to use correctly.
After discussion, I think we can deprecate `--flag: bool` usage and
encourage using `--flag` instead.
# User-Facing Changes
The following code will raise a warning message, but it won't stop the
command from running.
```nushell
❯ def florb [--dry-run: bool, --another-flag] { "aaa" }; florb
Error: × Deprecated: --flag: bool
╭─[entry #7:1:1]
1 │ def florb [--dry-run: bool, --another-flag] { "aaa" }; florb
· ──┬─
· ╰── `--flag: bool` is deprecated. Please use `--flag` instead, more info: https://www.nushell.sh/book/custom_commands.html
╰────
aaa
```
cc @kubouch
# Tests + Formatting
Done
# After Submitting
- [ ] Add more information under
https://www.nushell.sh/book/custom_commands.html to indicate `--dry-run:
bool` is not allowed,
- [ ] remove `: bool` from custom commands between 0.89 and 0.90
---------
Co-authored-by: Antoine Stevan <44101798+amtoine@users.noreply.github.com>
2023-12-21 10:07:08 +01:00
|
|
|
if syntax_shape == SyntaxShape::Boolean {
|
2024-01-25 07:16:49 +01:00
|
|
|
working_set.error(ParseError::LabeledError(
|
|
|
|
"Type annotations are not allowed for boolean switches.".to_string(),
|
|
|
|
"Remove the `: bool` type annotation.".to_string(),
|
2023-12-21 10:07:08 +01:00
|
|
|
span,
|
|
|
|
));
|
|
|
|
}
|
2023-09-23 10:20:48 +02:00
|
|
|
*arg = Some(syntax_shape);
|
2023-11-14 13:46:05 +01:00
|
|
|
*type_annotated = true;
|
2021-07-16 23:55:12 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
parse_mode = ParseMode::ArgMode;
|
2021-07-16 23:55:12 +02:00
|
|
|
}
|
2022-03-07 21:08:56 +01:00
|
|
|
ParseMode::DefaultValueMode => {
|
|
|
|
if let Some(last) = args.last_mut() {
|
2023-04-07 20:09:38 +02:00
|
|
|
let expression = parse_value(working_set, span, &SyntaxShape::Any);
|
2022-03-07 21:08:56 +01:00
|
|
|
|
|
|
|
//TODO check if we're replacing a custom parameter already
|
|
|
|
match last {
|
2023-11-14 13:46:05 +01:00
|
|
|
Arg::Positional {
|
|
|
|
arg:
|
|
|
|
PositionalArg {
|
|
|
|
shape,
|
|
|
|
var_id,
|
|
|
|
default_value,
|
|
|
|
..
|
|
|
|
},
|
2022-03-07 21:08:56 +01:00
|
|
|
required,
|
2023-11-14 13:46:05 +01:00
|
|
|
type_annotated,
|
|
|
|
} => {
|
2022-03-07 21:08:56 +01:00
|
|
|
let var_id = var_id.expect("internal error: all custom parameters must have var_ids");
|
2022-03-09 10:42:19 +01:00
|
|
|
let var_type = &working_set.get_variable(var_id).ty;
|
2022-03-07 21:08:56 +01:00
|
|
|
match var_type {
|
2022-04-07 06:34:09 +02:00
|
|
|
Type::Any => {
|
2023-11-14 13:46:05 +01:00
|
|
|
if !*type_annotated {
|
2023-09-20 17:58:29 +02:00
|
|
|
working_set.set_variable_type(
|
|
|
|
var_id,
|
|
|
|
expression.ty.clone(),
|
|
|
|
);
|
|
|
|
}
|
2022-03-07 21:08:56 +01:00
|
|
|
}
|
allow records to have type annotations (#8914)
# Description
follow up to #8529
cleaned up version of #8892
- the original syntax is okay
```nu
def okay [rec: record] {}
```
- you can now add type annotations for fields if you know
them beforehand
```nu
def okay [rec: record<name: string>] {}
```
- you can specify multiple fields
```nu
def okay [person: record<name: string age: int>] {}
# an optional comma is allowed
def okay [person: record<name: string, age: int>] {}
```
- if annotations are specified, any use of the command will be type
checked against the specified type
```nu
def unwrap [result: record<ok: bool, value: any>] {}
unwrap {ok: 2, value: "value"}
# errors with
Error: nu::parser::type_mismatch
× Type mismatch.
╭─[entry #4:1:1]
1 │ unwrap {ok: 2, value: "value"}
· ───────┬─────
· ╰── expected record<ok: bool, value: any>, found record<ok: int, value: string>
╰────
```
> here the error is in the `ok` field, since `any` is coerced into any
type
> as a result `unwrap {ok: true, value: "value"}` is okay
- the key must be a string, either quoted or unquoted
```nu
def err [rec: record<{}: list>] {}
# errors with
Error:
× `record` type annotations key not string
╭─[entry #7:1:1]
1 │ def unwrap [result: record<{}: bool, value: any>] {}
· ─┬
· ╰── must be a string
╰────
```
- a key doesn't have to have a type in which case it is assumed to be
`any`
```nu
def okay [person: record<name age>] {}
def okay [person: record<name: string age>] {}
```
- however, if you put a colon, you have to specify a type
```nu
def err [person: record<name: >] {}
# errors with
Error: nu::parser::parse_mismatch
× Parse mismatch during operation.
╭─[entry #12:1:1]
1 │ def unwrap [res: record<name: >] { $res }
· ┬
· ╰── expected type after colon
╰────
```
# User-Facing Changes
**[BREAKING CHANGES]**
- this change adds a field to `SyntaxShape::Record`, so any plugins that
used it will have to update and include the field. Though if you are
unsure of the type the record expects, `SyntaxShape::Record(vec![])`
will suffice
2023-04-26 15:16:55 +02:00
|
|
|
_ => {
|
|
|
|
if !type_compatible(var_type, &expression.ty) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(
|
|
|
|
ParseError::AssignmentMismatch(
|
2022-03-07 21:08:56 +01:00
|
|
|
"Default value wrong type".into(),
|
2023-04-07 02:35:45 +02:00
|
|
|
format!(
|
2023-04-26 15:16:55 +02:00
|
|
|
"expected default value to be `{var_type}`"
|
2023-04-07 02:35:45 +02:00
|
|
|
),
|
2022-03-07 21:08:56 +01:00
|
|
|
expression.span,
|
2023-04-07 02:35:45 +02:00
|
|
|
),
|
|
|
|
)
|
2022-03-07 21:08:56 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2023-04-26 16:14:02 +02:00
|
|
|
|
|
|
|
*default_value = if let Ok(constant) =
|
|
|
|
eval_constant(working_set, &expression)
|
|
|
|
{
|
|
|
|
Some(constant)
|
|
|
|
} else {
|
|
|
|
working_set.error(ParseError::NonConstantDefaultValue(
|
|
|
|
expression.span,
|
|
|
|
));
|
|
|
|
None
|
|
|
|
};
|
|
|
|
|
2023-11-14 13:46:05 +01:00
|
|
|
if !*type_annotated {
|
2023-09-20 17:58:29 +02:00
|
|
|
*shape = expression.ty.to_shape();
|
|
|
|
}
|
2022-03-07 21:08:56 +01:00
|
|
|
*required = false;
|
|
|
|
}
|
|
|
|
Arg::RestPositional(..) => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::AssignmentMismatch(
|
|
|
|
"Rest parameter was given a default value".into(),
|
|
|
|
"can't have default value".into(),
|
|
|
|
expression.span,
|
|
|
|
))
|
2022-03-07 21:08:56 +01:00
|
|
|
}
|
2023-11-14 13:46:05 +01:00
|
|
|
Arg::Flag {
|
|
|
|
flag:
|
|
|
|
Flag {
|
|
|
|
arg,
|
|
|
|
var_id,
|
|
|
|
default_value,
|
|
|
|
..
|
|
|
|
},
|
|
|
|
type_annotated,
|
|
|
|
} => {
|
2023-05-03 23:09:36 +02:00
|
|
|
let expression_span = expression.span;
|
|
|
|
|
|
|
|
*default_value = if let Ok(value) =
|
|
|
|
eval_constant(working_set, &expression)
|
|
|
|
{
|
|
|
|
Some(value)
|
|
|
|
} else {
|
|
|
|
working_set.error(ParseError::NonConstantDefaultValue(
|
|
|
|
expression_span,
|
|
|
|
));
|
|
|
|
None
|
|
|
|
};
|
|
|
|
|
2022-03-07 21:08:56 +01:00
|
|
|
let var_id = var_id.expect("internal error: all custom parameters must have var_ids");
|
2022-03-09 10:42:19 +01:00
|
|
|
let var_type = &working_set.get_variable(var_id).ty;
|
2022-03-07 21:08:56 +01:00
|
|
|
let expression_ty = expression.ty.clone();
|
|
|
|
|
2023-09-23 10:20:48 +02:00
|
|
|
// Flags with no TypeMode are just present/not-present switches
|
|
|
|
// in that case, `var_type` is `Any`.
|
|
|
|
match var_type {
|
|
|
|
Type::Any => {
|
2023-11-14 13:46:05 +01:00
|
|
|
if !*type_annotated {
|
2023-09-23 10:20:48 +02:00
|
|
|
*arg = Some(expression_ty.to_shape());
|
|
|
|
working_set
|
|
|
|
.set_variable_type(var_id, expression_ty);
|
2022-03-07 21:08:56 +01:00
|
|
|
}
|
2023-09-23 10:20:48 +02:00
|
|
|
}
|
|
|
|
t => {
|
2023-09-24 11:30:58 +02:00
|
|
|
if !type_compatible(t, &expression_ty) {
|
2023-09-23 10:20:48 +02:00
|
|
|
working_set.error(
|
|
|
|
ParseError::AssignmentMismatch(
|
|
|
|
"Default value is the wrong type"
|
|
|
|
.into(),
|
|
|
|
format!(
|
2023-04-26 15:16:55 +02:00
|
|
|
"expected default value to be `{t}`"
|
2022-12-31 12:18:53 +01:00
|
|
|
),
|
2023-09-23 10:20:48 +02:00
|
|
|
expression_span,
|
|
|
|
),
|
|
|
|
)
|
2022-03-07 21:08:56 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
parse_mode = ParseMode::ArgMode;
|
|
|
|
}
|
2021-07-16 23:55:12 +02:00
|
|
|
}
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
Token {
|
|
|
|
contents: crate::TokenContents::Comment,
|
|
|
|
span,
|
|
|
|
} => {
|
2022-12-03 10:44:12 +01:00
|
|
|
let contents = working_set.get_span_contents(Span::new(span.start + 1, span.end));
|
2021-09-02 10:25:22 +02:00
|
|
|
|
|
|
|
let mut contents = String::from_utf8_lossy(contents).to_string();
|
|
|
|
contents = contents.trim().into();
|
|
|
|
|
|
|
|
if let Some(last) = args.last_mut() {
|
|
|
|
match last {
|
2023-11-14 13:46:05 +01:00
|
|
|
Arg::Flag { flag, .. } => {
|
2021-09-02 10:25:22 +02:00
|
|
|
if !flag.desc.is_empty() {
|
|
|
|
flag.desc.push('\n');
|
2021-07-17 00:31:36 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
flag.desc.push_str(&contents);
|
|
|
|
}
|
2023-11-14 13:46:05 +01:00
|
|
|
Arg::Positional {
|
|
|
|
arg: positional, ..
|
|
|
|
} => {
|
2021-09-02 10:25:22 +02:00
|
|
|
if !positional.desc.is_empty() {
|
|
|
|
positional.desc.push('\n');
|
2021-07-17 00:31:36 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
positional.desc.push_str(&contents);
|
2021-07-17 00:31:36 +02:00
|
|
|
}
|
2022-03-07 17:44:27 +01:00
|
|
|
Arg::RestPositional(positional) => {
|
|
|
|
if !positional.desc.is_empty() {
|
|
|
|
positional.desc.push('\n');
|
|
|
|
}
|
|
|
|
positional.desc.push_str(&contents);
|
|
|
|
}
|
2021-07-17 00:31:36 +02:00
|
|
|
}
|
|
|
|
}
|
2021-07-16 23:55:12 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
_ => {}
|
2021-07-16 23:55:12 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-16 23:55:12 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let mut sig = Signature::new(String::new());
|
|
|
|
|
|
|
|
for arg in args {
|
|
|
|
match arg {
|
2023-11-14 13:46:05 +01:00
|
|
|
Arg::Positional {
|
|
|
|
arg: positional,
|
|
|
|
required,
|
|
|
|
..
|
|
|
|
} => {
|
2021-09-07 05:37:02 +02:00
|
|
|
if required {
|
2022-03-07 21:08:56 +01:00
|
|
|
if !sig.optional_positional.is_empty() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::RequiredAfterOptional(
|
|
|
|
positional.name.clone(),
|
|
|
|
span,
|
|
|
|
))
|
2022-03-07 21:08:56 +01:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
sig.required_positional.push(positional)
|
|
|
|
} else {
|
|
|
|
sig.optional_positional.push(positional)
|
2021-07-17 00:53:45 +02:00
|
|
|
}
|
2021-07-16 23:55:12 +02:00
|
|
|
}
|
2023-11-14 13:46:05 +01:00
|
|
|
Arg::Flag { flag, .. } => sig.named.push(flag),
|
2022-03-07 17:44:27 +01:00
|
|
|
Arg::RestPositional(positional) => {
|
|
|
|
if positional.name.is_empty() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::RestNeedsName(span))
|
2022-03-07 17:44:27 +01:00
|
|
|
} else if sig.rest_positional.is_none() {
|
|
|
|
sig.rest_positional = Some(PositionalArg {
|
|
|
|
name: positional.name,
|
|
|
|
..positional
|
|
|
|
})
|
|
|
|
} else {
|
|
|
|
// Too many rest params
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::MultipleRestParams(span))
|
2022-03-07 17:44:27 +01:00
|
|
|
}
|
|
|
|
}
|
2021-07-16 23:55:12 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Box::new(sig)
|
2021-09-02 10:25:22 +02:00
|
|
|
}
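Taken together, the branches above define the surface syntax of a signature: long flags with an optional parenthesized short form, bare one-character short flags, `?` for optional positionals, `...` for a rest parameter, a comma to separate entries, `:` to switch into type mode, and `=` to switch into default-value mode. A minimal nushell sketch of signatures this code accepts (command and parameter names are illustrative only):

```nushell
# long flag with a short alternative, a type annotation, and a default value
def serve [--port (-p): int = 8080] { $port }

# optional positional (`?`), a bare switch, and a typed rest parameter (`...`)
def greet [name?: string, --loud, ...rest: string] {
    let who = ($name | default "world")
    if $loud { $"HELLO ($who)!" } else { $"hello ($who)" }
}
```

Note that a switch is written as plain `--loud`; an explicit `--loud: bool` annotation is rejected by the `SyntaxShape::Boolean` check in the type-mode branch above.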
|
2021-07-08 09:49:17 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
pub fn parse_list_expression(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
span: Span,
|
|
|
|
element_shape: &SyntaxShape,
|
2023-04-07 02:35:45 +02:00
|
|
|
) -> Expression {
|
2021-09-02 10:25:22 +02:00
|
|
|
let bytes = working_set.get_span_contents(span);
|
2021-07-08 09:49:17 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let mut start = span.start;
|
|
|
|
let mut end = span.end;
|
2021-07-08 09:49:17 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if bytes.starts_with(b"[") {
|
|
|
|
start += 1;
|
|
|
|
}
|
|
|
|
if bytes.ends_with(b"]") {
|
|
|
|
end -= 1;
|
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Unclosed("]".into(), Span::new(end, end)));
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-08 09:49:17 +02:00
|
|
|
|
2022-12-03 10:44:12 +01:00
|
|
|
let inner_span = Span::new(start, end);
|
2022-01-03 04:18:23 +01:00
|
|
|
let source = working_set.get_span_contents(inner_span);
|
2021-07-08 09:49:17 +02:00
|
|
|
|
2022-01-03 04:18:23 +01:00
|
|
|
let (output, err) = lex(source, inner_span.start, &[b'\n', b'\r', b','], &[], true);
|
2023-04-07 02:35:45 +02:00
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err)
|
|
|
|
}
|
2021-07-08 09:49:17 +02:00
|
|
|
|
2023-11-22 22:10:08 +01:00
|
|
|
let (mut output, err) = lite_parse(&output);
|
2023-04-07 02:35:45 +02:00
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err)
|
|
|
|
}
|
2021-07-08 23:45:56 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let mut args = vec![];
|
2021-08-17 02:26:05 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let mut contained_type: Option<Type> = None;
|
2021-07-08 09:49:17 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if !output.block.is_empty() {
|
2023-11-22 22:10:08 +01:00
|
|
|
for arg in output.block.remove(0).commands {
|
2021-09-02 10:25:22 +02:00
|
|
|
let mut spans_idx = 0;
|
2021-07-08 23:45:56 +02:00
|
|
|
|
2023-11-22 22:10:08 +01:00
|
|
|
if let LiteElement::Command(_, mut command) = arg {
|
2022-11-18 22:46:48 +01:00
|
|
|
while spans_idx < command.parts.len() {
|
2023-11-22 22:10:08 +01:00
|
|
|
let curr_span = command.parts[spans_idx];
|
|
|
|
let curr_tok = working_set.get_span_contents(curr_span);
|
|
|
|
let (arg, ty) = if curr_tok.starts_with(b"...")
|
|
|
|
&& curr_tok.len() > 3
|
|
|
|
&& (curr_tok[3] == b'$' || curr_tok[3] == b'[' || curr_tok[3] == b'(')
|
|
|
|
{
|
|
|
|
// Parse the spread operator
|
|
|
|
// Remove "..." before parsing argument to spread operator
|
|
|
|
command.parts[spans_idx] = Span::new(curr_span.start + 3, curr_span.end);
|
|
|
|
let spread_arg = parse_multispan_value(
|
|
|
|
working_set,
|
|
|
|
&command.parts,
|
|
|
|
&mut spans_idx,
|
|
|
|
&SyntaxShape::List(Box::new(element_shape.clone())),
|
|
|
|
);
|
|
|
|
let elem_ty = match &spread_arg.ty {
|
|
|
|
Type::List(elem_ty) => *elem_ty.clone(),
|
|
|
|
_ => Type::Any,
|
|
|
|
};
|
|
|
|
let span = Span::new(curr_span.start, spread_arg.span.end);
|
|
|
|
let spread_expr = Expression {
|
|
|
|
expr: Expr::Spread(Box::new(spread_arg)),
|
|
|
|
span,
|
|
|
|
ty: elem_ty.clone(),
|
|
|
|
custom_completion: None,
|
|
|
|
};
|
|
|
|
(spread_expr, elem_ty)
|
|
|
|
} else {
|
|
|
|
let arg = parse_multispan_value(
|
|
|
|
working_set,
|
|
|
|
&command.parts,
|
|
|
|
&mut spans_idx,
|
|
|
|
element_shape,
|
|
|
|
);
|
|
|
|
let ty = arg.ty.clone();
|
|
|
|
(arg, ty)
|
|
|
|
};
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2022-11-18 22:46:48 +01:00
|
|
|
if let Some(ref ctype) = contained_type {
|
2023-11-22 22:10:08 +01:00
|
|
|
if *ctype != ty {
|
2022-11-18 22:46:48 +01:00
|
|
|
contained_type = Some(Type::Any);
|
|
|
|
}
|
|
|
|
} else {
|
2023-11-22 22:10:08 +01:00
|
|
|
contained_type = Some(ty);
|
2021-08-17 02:26:05 +02:00
|
|
|
}
|
|
|
|
|
2022-11-18 22:46:48 +01:00
|
|
|
args.push(arg);
|
2021-07-16 08:24:46 +02:00
|
|
|
|
2022-11-18 22:46:48 +01:00
|
|
|
spans_idx += 1;
|
|
|
|
}
|
2021-07-08 09:49:17 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::List(args),
|
|
|
|
span,
|
|
|
|
ty: Type::List(Box::new(if let Some(ty) = contained_type {
|
|
|
|
ty
|
|
|
|
} else {
|
|
|
|
Type::Any
|
|
|
|
})),
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
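As a usage-level sketch of what `parse_list_expression` accepts, including the `...` spread branch that feeds the spread list's element type into the list's contained type (the values are illustrative only):

```nushell
# a homogeneous list keeps its element type: list<int>
[1 2 3] | describe

# `...` immediately followed by `$`, `[` or `(` takes the spread branch above
let extra = [4 5]
[1 2 3 ...$extra ...[6 7]] | describe    # still list<int>

# mixing element types widens the contained type: list<any>
[1 "two" 3] | describe
```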
|
|
|
|
|
2023-07-07 11:06:09 +02:00
|
|
|
fn parse_table_expression(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
|
|
|
let bytes = working_set.get_span_contents(span);
|
|
|
|
let inner_span = {
|
|
|
|
let start = if bytes.starts_with(b"[") {
|
|
|
|
span.start + 1
|
|
|
|
} else {
|
|
|
|
span.start
|
|
|
|
};
|
2021-07-06 00:58:56 +02:00
|
|
|
|
2023-07-07 11:06:09 +02:00
|
|
|
let end = if bytes.ends_with(b"]") {
|
|
|
|
span.end - 1
|
|
|
|
} else {
|
|
|
|
let end = span.end;
|
|
|
|
working_set.error(ParseError::Unclosed("]".into(), Span::new(end, end)));
|
|
|
|
span.end
|
|
|
|
};
|
2021-07-06 00:58:56 +02:00
|
|
|
|
2023-07-07 11:06:09 +02:00
|
|
|
Span::new(start, end)
|
|
|
|
};
|
2021-07-06 00:58:56 +02:00
|
|
|
|
2022-01-03 04:18:23 +01:00
|
|
|
let source = working_set.get_span_contents(inner_span);
|
2023-07-07 11:06:09 +02:00
|
|
|
let (tokens, err) = lex(source, inner_span.start, &[b'\n', b'\r', b','], &[], true);
|
2023-04-07 02:35:45 +02:00
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err);
|
|
|
|
}
|
2021-07-06 00:58:56 +02:00
|
|
|
|
Fix exponential parser time on sequence of [[[[ (#10439)
# Description
Before this change, parsing `[[[[[[[[[[[[[[[[[[[[[[` would cause nushell
to consume several gigabytes of memory; now parsing should be linear in time.
The old code first tried parsing the head of the table as a list and
then checked whether it got more arguments. If it didn't, it threw away
the previous result and tried to parse the whole thing as a list, which
means we called `parse_list_expression` twice for each call to
`parse_table_expression`, resulting in the exponential growth.
The fix is to simply check that we have all the arguments we need before
parsing the head of the table, so we know that we will either call
`parse_list_expression` only on sub-expressions or on the whole thing,
never both.
Fixes #10438
# User-Facing Changes
Should give a noticeable speedup when typing a sequence of `[[[[[[` open
brackets.
# Tests + Formatting
I would like to add tests, but I'm not sure how to do that without
crashing CI with OOM on regression
- [x] Don't forget to add tests that cover your changes.
- [x] `cargo fmt --all -- --check` to check standard code formatting
(`cargo fmt --all` applies these changes)
- [x] `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used`
to check that you're using the standard code style
- [x] `cargo test --workspace` to check that all tests pass (on Windows
make sure to [enable developer
mode](https://learn.microsoft.com/en-us/windows/apps/get-started/developer-mode-features-and-debugging))
- [x] `cargo run -- -c "use std testing; testing run-tests --path
crates/nu-std"` to run the tests for the standard library
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-09-20 17:53:48 +02:00
|
|
|
// Check that we have all arguments first, before trying to parse the head,
|
|
|
|
// in order to avoid exponential parsing time
|
|
|
|
let [first, second, rest @ ..] = &tokens[..] else {
|
2023-07-07 11:06:09 +02:00
|
|
|
return parse_list_expression(working_set, span, &SyntaxShape::Any);
|
|
|
|
};
|
2023-09-20 17:53:48 +02:00
|
|
|
if !working_set.get_span_contents(first.span).starts_with(b"[")
|
|
|
|
|| second.contents != TokenContents::Semicolon
|
|
|
|
|| rest.is_empty()
|
2023-07-07 11:06:09 +02:00
|
|
|
{
|
|
|
|
return parse_list_expression(working_set, span, &SyntaxShape::Any);
|
|
|
|
};
|
2023-09-20 17:53:48 +02:00
|
|
|
let head = parse_list_expression(working_set, first.span, &SyntaxShape::Any);
|
2023-07-07 11:06:09 +02:00
|
|
|
let head = {
|
2023-09-04 09:42:31 +02:00
|
|
|
let Expression {
|
|
|
|
expr: Expr::List(vals),
|
|
|
|
..
|
|
|
|
} = head
|
|
|
|
else {
|
2023-07-07 11:06:09 +02:00
|
|
|
unreachable!("head must be a list by now")
|
|
|
|
};
|
2022-11-18 22:46:48 +01:00
|
|
|
|
2023-07-07 11:06:09 +02:00
|
|
|
vals
|
|
|
|
};
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2023-07-07 11:06:09 +02:00
|
|
|
let errors = working_set.parse_errors.len();
|
|
|
|
|
|
|
|
let rows = rest
|
|
|
|
.iter()
|
|
|
|
.fold(Vec::with_capacity(rest.len()), |mut acc, it| {
|
|
|
|
use std::cmp::Ordering;
|
|
|
|
let text = working_set.get_span_contents(it.span).to_vec();
|
|
|
|
match text.as_slice() {
|
|
|
|
b"," => acc,
|
|
|
|
_ if !&text.starts_with(b"[") => {
|
|
|
|
let err = ParseError::LabeledErrorWithHelp {
|
|
|
|
error: String::from("Table item not list"),
|
|
|
|
label: String::from("not a list"),
|
|
|
|
span: it.span,
|
|
|
|
help: String::from("All table items must be lists"),
|
|
|
|
};
|
|
|
|
working_set.error(err);
|
|
|
|
acc
|
|
|
|
}
|
|
|
|
_ => {
|
|
|
|
let ls = parse_list_expression(working_set, it.span, &SyntaxShape::Any);
|
|
|
|
let Expression {
|
|
|
|
expr: Expr::List(item),
|
|
|
|
span,
|
2022-11-18 22:46:48 +01:00
|
|
|
..
|
2023-09-04 09:42:31 +02:00
|
|
|
} = ls
|
|
|
|
else {
|
2023-07-07 11:06:09 +02:00
|
|
|
unreachable!("the item must be a list")
|
|
|
|
};
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2023-07-07 11:06:09 +02:00
|
|
|
match item.len().cmp(&head.len()) {
|
|
|
|
Ordering::Less => {
|
|
|
|
let err = ParseError::MissingColumns(head.len(), span);
|
|
|
|
working_set.error(err);
|
Avoid blocking when `o+e>` redirects too much stderr message (#8784)
# Description
Fixes: #8565
Here is another pr #7240 tried to address the issue, but it works in a
wrong way.
After this change, `o+e>` won't redirect all stdout messages and then all
stderr messages; it works more like bash does.
# User-Facing Changes
For the given python code:
```python
# test.py
import sys
print('aa'*300, flush=True)
print('bb'*999999, file=sys.stderr, flush=True)
print('cc'*300, flush=True)
```
Running `python test.py out+err> a.txt` shouldn't hang nushell, and
`a.txt` keeps output in the same order
## About the change
The core idea is that, during lite-parsing, we introduce a new variant
`LiteElement::SameTargetRedirection` when we meet an `out+err>` redirection
token (which is generated by the lex function).
When converting from a lite block to a block,
`LiteElement::SameTargetRedirection` is converted to
`PipelineElement::SameTargetRedirection`.
Then, in the block eval process, if we get
`PipelineElement::SameTargetRedirection`, we invoke `run-external` with the
`--redirect-combine` flag, then pipe the result into the `save` command.
## What happened internally?
Take the following command as an example:
`^ls o+e> log.txt`
The lex parsing result (`Tokens`) is not changed, but `LiteBlock` and
`Block` are changed after this PR.
### LiteBlock before
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(None, LiteCommand { comments: [], parts: [Span { start: 39041, end: 39044 }] }),
// actually the span of first Redirection is wrong too..
Redirection(Span { start: 39058, end: 39062 }, StdoutAndStderr, LiteCommand { comments: [], parts: [Span { start: 39050, end: 39057 }] }),
]
}]
}
```
### LiteBlock after
```rust
LiteBlock {
block: [
LitePipeline {
commands: [
SameTargetRedirection {
cmd: (None, LiteCommand { comments: [], parts: [Span { start: 147945, end: 147948}]}),
redirection: (Span { start: 147949, end: 147957 }, LiteCommand { comments: [], parts: [Span { start: 147958, end: 147965 }]})
}
]
}
]
}
```
### Block before
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 39042, end: 39044 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 39041, end: 39044 },
ty: Any, custom_completion: None
}),
Redirection(Span { start: 39058, end: 39062 }, StdoutAndStderr, Expression { expr: String("out.txt"), span: Span { start: 39050, end: 39057 }, ty: String, custom_completion: None })] }
```
### Block after
```rust
Pipeline {
elements: [
SameTargetRedirection {
cmd: (None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 147946, end: 147948 }, ty: String, custom_completion: None}, [], false),
span: Span { start: 147945, end: 147948},
ty: Any, custom_completion: None
}),
redirection: (Span { start: 147949, end: 147957}, Expression {expr: String("log.txt"), span: Span { start: 147958, end: 147965 },ty: String,custom_completion: None}
}
]
}
```
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
- `cargo run -- crates/nu-utils/standard_library/tests.nu` to run the
tests for the standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-05-18 00:47:03 +02:00
|
|
|
}
|
2023-07-07 11:06:09 +02:00
|
|
|
Ordering::Greater => {
|
|
|
|
let span = {
|
|
|
|
let start = item[head.len()].span.start;
|
|
|
|
let end = span.end;
|
|
|
|
Span::new(start, end)
|
|
|
|
};
|
|
|
|
let err = ParseError::ExtraColumns(head.len(), span);
|
|
|
|
working_set.error(err);
|
2021-09-08 20:54:27 +02:00
|
|
|
}
|
2023-07-07 11:06:09 +02:00
|
|
|
Ordering::Equal => {}
|
2021-09-08 20:54:27 +02:00
|
|
|
}
|
2023-07-07 11:06:09 +02:00
|
|
|
|
|
|
|
acc.push(item);
|
|
|
|
acc
|
2021-07-06 03:48:45 +02:00
|
|
|
}
|
|
|
|
}
|
2023-07-07 11:06:09 +02:00
|
|
|
});
|
|
|
|
|
|
|
|
let ty = if working_set.parse_errors.len() == errors {
|
|
|
|
let (ty, errs) = table_type(&head, &rows);
|
2023-07-12 00:00:31 +02:00
|
|
|
working_set.parse_errors.extend(errs);
|
2023-07-07 11:06:09 +02:00
|
|
|
ty
|
|
|
|
} else {
|
|
|
|
Type::Table(vec![])
|
|
|
|
};
|
|
|
|
|
|
|
|
Expression {
|
|
|
|
expr: Expr::Table(head, rows),
|
|
|
|
span,
|
|
|
|
ty,
|
|
|
|
custom_completion: None,
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
}
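In user-facing terms, `parse_table_expression` recognizes the `[[header]; [row] ...]` form, falls back to `parse_list_expression` whenever that shape is not present, and checks every row's length against the header. A small sketch (the values are illustrative only):

```nushell
# a header list, a semicolon, then one list per row parses as a table
[[name age]; [alice 30] [bob 25]]

# without the semicolon this is just a list of lists
[[1 2] [3 4]] | describe    # list<list<int>>

# a row longer or shorter than the header is rejected via the
# ExtraColumns / MissingColumns errors above, e.g.
#     [[a b]; [1 2 3]]
```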
|
2021-07-02 09:32:30 +02:00
|
|
|
|
2023-07-07 11:06:09 +02:00
|
|
|
fn table_type(head: &[Expression], rows: &[Vec<Expression>]) -> (Type, Vec<ParseError>) {
|
|
|
|
let mut errors = vec![];
|
|
|
|
let mut rows = rows.to_vec();
|
|
|
|
let mut mk_ty = || -> Type {
|
|
|
|
rows.iter_mut()
|
|
|
|
.map(|row| row.pop().map(|x| x.ty).unwrap_or_default())
|
|
|
|
.reduce(|acc, ty| -> Type {
|
|
|
|
if type_compatible(&acc, &ty) {
|
|
|
|
ty
|
|
|
|
} else {
|
|
|
|
Type::Any
|
|
|
|
}
|
|
|
|
})
|
|
|
|
.unwrap_or_default()
|
|
|
|
};
|
|
|
|
|
|
|
|
let mk_error = |span| ParseError::LabeledErrorWithHelp {
|
|
|
|
error: "Table column name not string".into(),
|
|
|
|
label: "must be a string".into(),
|
|
|
|
help: "Table column names should be able to be converted into strings".into(),
|
|
|
|
span,
|
|
|
|
};
|
|
|
|
|
|
|
|
let mut ty = head
|
|
|
|
.iter()
|
|
|
|
.rev()
|
|
|
|
.map(|expr| {
|
|
|
|
if let Some(str) = expr.as_string() {
|
|
|
|
str
|
|
|
|
} else {
|
|
|
|
errors.push(mk_error(expr.span));
|
|
|
|
String::from("{ column }")
|
|
|
|
}
|
|
|
|
})
|
|
|
|
.map(|title| (title, mk_ty()))
|
|
|
|
.collect_vec();
|
|
|
|
|
|
|
|
ty.reverse();
|
|
|
|
|
|
|
|
(Type::Table(ty), errors)
|
|
|
|
}
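`table_type` derives each column's type by folding over that column's cells, widening to `Type::Any` as soon as two cells disagree, and it requires every header entry to be convertible to a string. Roughly, at the nushell level (values illustrative, type spellings approximate):

```nushell
# both `age` cells are ints, so the column keeps that type:
# roughly table<name: string, age: int>
[[name age]; [alice 30] [bob 25]]

# if one `age` cell were a string instead, that column would widen to `any`
# rather than raising a type error:
#     [[name age]; [alice 30] [bob "unknown"]]   # ~ table<name: string, age: any>
```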
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
pub fn parse_block_expression(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("parsing: block expression");
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let bytes = working_set.get_span_contents(span);
|
2021-07-02 09:32:30 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let mut start = span.start;
|
|
|
|
let mut end = span.end;
|
2021-07-02 09:32:30 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if bytes.starts_with(b"{") {
|
|
|
|
start += 1;
|
|
|
|
} else {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("block", span));
|
2023-04-07 02:35:45 +02:00
|
|
|
return garbage(span);
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
if bytes.ends_with(b"}") {
|
|
|
|
end -= 1;
|
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Unclosed("}".into(), Span::new(end, end)));
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-02 09:32:30 +02:00
|
|
|
|
2022-12-03 10:44:12 +01:00
|
|
|
let inner_span = Span::new(start, end);
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2022-01-03 06:21:26 +01:00
|
|
|
let source = working_set.get_span_contents(inner_span);
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2022-01-22 19:24:47 +01:00
|
|
|
let (output, err) = lex(source, start, &[], &[], false);
|
2023-04-07 02:35:45 +02:00
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err);
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2021-09-06 04:20:02 +02:00
|
|
|
working_set.enter_scope();
|
2021-09-08 00:00:20 +02:00
|
|
|
|
2022-11-10 09:21:49 +01:00
|
|
|
// Check to see if we have parameters
|
|
|
|
let (signature, amt_to_skip): (Option<(Box<Signature>, Span)>, usize) = match output.first() {
|
|
|
|
Some(Token {
|
|
|
|
contents: TokenContents::Pipe,
|
|
|
|
span,
|
|
|
|
}) => {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("block but found closure", *span));
|
2022-11-10 09:21:49 +01:00
|
|
|
(None, 0)
|
|
|
|
}
|
|
|
|
_ => (None, 0),
|
|
|
|
};
|
|
|
|
|
2023-04-21 21:00:33 +02:00
|
|
|
let mut output = parse_block(working_set, &output[amt_to_skip..], span, false, false);
|
2022-11-10 09:21:49 +01:00
|
|
|
|
|
|
|
if let Some(signature) = signature {
|
|
|
|
output.signature = signature.0;
|
|
|
|
} else if let Some(last) = working_set.delta.scope.last() {
|
|
|
|
// FIXME: this only supports the top $it. Is this sufficient?
|
|
|
|
|
|
|
|
if let Some(var_id) = last.get_var(b"$it") {
|
|
|
|
let mut signature = Signature::new("");
|
|
|
|
signature.required_positional.push(PositionalArg {
|
|
|
|
var_id: Some(*var_id),
|
|
|
|
name: "$it".into(),
|
|
|
|
desc: String::new(),
|
|
|
|
shape: SyntaxShape::Any,
|
|
|
|
default_value: None,
|
|
|
|
});
|
|
|
|
output.signature = Box::new(signature);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
output.span = Some(span);
|
|
|
|
|
|
|
|
working_set.exit_scope();
|
|
|
|
|
|
|
|
let block_id = working_set.add_block(output);
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::Block(block_id),
|
|
|
|
span,
|
|
|
|
ty: Type::Block,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2022-11-10 09:21:49 +01:00
|
|
|
}
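`parse_block_expression` accepts only a bare `{ ... }` body: a leading `|` parameter list is rejected with the "block but found closure" error, and an in-scope `$it` is wired in as an implicit parameter instead. A short sketch of the block/closure distinction as users see it:

```nushell
# `for` takes a block for its body, so no parameter list is allowed
for x in [1 2 3] { print $x }

# `each` takes a closure, so `|x|` parameters are fine
[1 2 3] | each {|x| $x * 2 }

# writing a closure where a block is expected, e.g.
#     for x in [1 2 3] {|y| print $y }
# trips the Expected("block but found closure") check above
```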
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
pub fn parse_match_block_expression(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2023-03-24 02:52:01 +01:00
|
|
|
let bytes = working_set.get_span_contents(span);
|
|
|
|
|
|
|
|
let mut start = span.start;
|
|
|
|
let mut end = span.end;
|
|
|
|
|
|
|
|
if bytes.starts_with(b"{") {
|
|
|
|
start += 1;
|
|
|
|
} else {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("closure", span));
|
2023-04-07 02:35:45 +02:00
|
|
|
return garbage(span);
|
2023-03-24 02:52:01 +01:00
|
|
|
}
|
|
|
|
if bytes.ends_with(b"}") {
|
|
|
|
end -= 1;
|
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Unclosed("}".into(), Span::new(end, end)));
|
2023-03-24 02:52:01 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
let inner_span = Span::new(start, end);
|
|
|
|
|
|
|
|
let source = working_set.get_span_contents(inner_span);
|
|
|
|
|
2023-03-27 00:31:57 +02:00
|
|
|
let (output, err) = lex(source, start, &[b' ', b'\r', b'\n', b',', b'|'], &[], false);
|
2023-04-07 02:35:45 +02:00
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err);
|
|
|
|
}
|
2023-03-24 02:52:01 +01:00
|
|
|
|
|
|
|
let mut position = 0;
|
|
|
|
|
|
|
|
let mut output_matches = vec![];
|
|
|
|
|
|
|
|
while position < output.len() {
|
|
|
|
// Each match gets its own scope
|
|
|
|
|
|
|
|
working_set.enter_scope();
|
|
|
|
|
|
|
|
// First parse the pattern
|
2023-04-07 02:35:45 +02:00
|
|
|
let mut pattern = parse_pattern(working_set, output[position].span);
|
2023-03-24 02:52:01 +01:00
|
|
|
|
|
|
|
position += 1;
|
|
|
|
|
|
|
|
if position >= output.len() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Mismatch(
|
2023-03-24 02:52:01 +01:00
|
|
|
"=>".into(),
|
|
|
|
"end of input".into(),
|
|
|
|
Span::new(output[position - 1].span.end, output[position - 1].span.end),
|
2023-04-07 02:35:45 +02:00
|
|
|
));
|
2023-03-24 02:52:01 +01:00
|
|
|
|
|
|
|
working_set.exit_scope();
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2023-03-27 00:31:57 +02:00
|
|
|
let mut connector = working_set.get_span_contents(output[position].span);
|
2023-07-16 02:25:12 +02:00
|
|
|
|
|
|
|
// Multiple patterns connected by '|'
|
2023-03-27 00:31:57 +02:00
|
|
|
if connector == b"|" && position < output.len() {
|
|
|
|
let mut or_pattern = vec![pattern];
|
|
|
|
|
|
|
|
while connector == b"|" && position < output.len() {
|
|
|
|
connector = b"";
|
|
|
|
|
|
|
|
position += 1;
|
|
|
|
|
|
|
|
if position >= output.len() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Mismatch(
|
2023-03-27 00:31:57 +02:00
|
|
|
"pattern".into(),
|
|
|
|
"end of input".into(),
|
|
|
|
Span::new(output[position - 1].span.end, output[position - 1].span.end),
|
2023-04-07 02:35:45 +02:00
|
|
|
));
|
2023-03-27 00:31:57 +02:00
|
|
|
|
|
|
|
working_set.exit_scope();
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
let pattern = parse_pattern(working_set, output[position].span);
|
2023-03-27 00:31:57 +02:00
|
|
|
or_pattern.push(pattern);
|
|
|
|
|
|
|
|
position += 1;
|
|
|
|
if position >= output.len() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Mismatch(
|
2023-03-27 00:31:57 +02:00
|
|
|
"=>".into(),
|
|
|
|
"end of input".into(),
|
|
|
|
Span::new(output[position - 1].span.end, output[position - 1].span.end),
|
2023-04-07 02:35:45 +02:00
|
|
|
));
|
2023-03-27 00:31:57 +02:00
|
|
|
|
|
|
|
working_set.exit_scope();
|
|
|
|
break;
|
|
|
|
} else {
|
|
|
|
connector = working_set.get_span_contents(output[position].span);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
let start = or_pattern
|
|
|
|
.first()
|
|
|
|
.expect("internal error: unexpected state of or-pattern")
|
|
|
|
.span
|
|
|
|
.start;
|
|
|
|
let end = or_pattern
|
|
|
|
.last()
|
|
|
|
.expect("internal error: unexpected state of or-pattern")
|
|
|
|
.span
|
|
|
|
.end;
|
|
|
|
|
|
|
|
pattern = MatchPattern {
|
|
|
|
pattern: Pattern::Or(or_pattern),
|
2023-07-16 02:25:12 +02:00
|
|
|
guard: None,
|
2023-03-27 00:31:57 +02:00
|
|
|
span: Span::new(start, end),
|
|
|
|
}
|
2023-07-16 02:25:12 +02:00
|
|
|
// A match guard
|
|
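        // e.g. `_ if $x > 100 => "large"`: the arm is taken only when the guard expression is true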
        } else if connector == b"if" {
            let if_end = {
                let end = output[position].span.end;
                Span::new(end, end)
            };

            position += 1;

            let mk_err = || ParseError::LabeledErrorWithHelp {
                error: "Match guard without an expression".into(),
                label: "expected an expression".into(),
                help: "The `if` keyword must be followed with an expression".into(),
                span: if_end,
            };

            if output.get(position).is_none() {
                working_set.error(mk_err());
                return garbage(span);
            };

            let (tokens, found) = if let Some((pos, _)) = output[position..]
                .iter()
                .find_position(|t| working_set.get_span_contents(t.span) == b"=>")
            {
                if position + pos == position {
                    working_set.error(mk_err());
                    return garbage(span);
                }

                (&output[position..position + pos], true)
            } else {
                (&output[position..], false)
            };

            let mut start = 0;
            let guard = parse_multispan_value(
                working_set,
                &tokens.iter().map(|tok| tok.span).collect_vec(),
                &mut start,
                &SyntaxShape::MathExpression,
            );

            pattern.guard = Some(guard);
            position += if found { start + 1 } else { start };
            connector = working_set.get_span_contents(output[position].span);
        }
        // Then the `=>` arrow
        if connector != b"=>" {
            working_set.error(ParseError::Mismatch(
                "=>".into(),
                "end of input".into(),
                Span::new(output[position - 1].span.end, output[position - 1].span.end),
            ));
        } else {
            position += 1;
        }

        // Finally, the value/expression/block that we will run to produce the result
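        // e.g. `_ => "other"` or `_ => { print "other" }`: either a bare expression or a block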
        if position >= output.len() {
            working_set.error(ParseError::Mismatch(
                "match result".into(),
                "end of input".into(),
                Span::new(output[position - 1].span.end, output[position - 1].span.end),
            ));

            working_set.exit_scope();
            break;
        }

        let result = parse_multispan_value(
            working_set,
            &[output[position].span],
            &mut 0,
            &SyntaxShape::OneOf(vec![SyntaxShape::Block, SyntaxShape::Expression]),
        );
        position += 1;
        working_set.exit_scope();

        output_matches.push((pattern, result));
    }

    Expression {
        expr: Expr::MatchBlock(output_matches),
        span,
        ty: Type::Any,
        custom_completion: None,
    }
}

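/// Parse a closure literal such as `{|x, y| $x + $y }` or `{|| 42 }`.
///
/// A leading parameter list is parsed as a signature and the remaining tokens are
/// parsed as the closure body. Illustrative Nu usage (example only):
///
/// ```text
/// [1 2 3] | each {|it| $it * 2 }
/// ```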
pub fn parse_closure_expression(
    working_set: &mut StateWorkingSet,
    shape: &SyntaxShape,
    span: Span,
) -> Expression {
    trace!("parsing: closure expression");

    let bytes = working_set.get_span_contents(span);

    let mut start = span.start;
    let mut end = span.end;

    if bytes.starts_with(b"{") {
        start += 1;
    } else {
        working_set.error(ParseError::Expected("closure", span));
        return garbage(span);
    }
    if bytes.ends_with(b"}") {
        end -= 1;
    } else {
        working_set.error(ParseError::Unclosed("}".into(), Span::new(end, end)));
    }

    let inner_span = Span::new(start, end);

    let source = working_set.get_span_contents(inner_span);

    let (output, err) = lex(source, start, &[], &[], false);
    if let Some(err) = err {
        working_set.error(err);
    }

    working_set.enter_scope();

    // Check to see if we have parameters
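    // A leading `|` token starts a parameter list (e.g. `{|x, y| ... }`), while a single
    // `||` token marks an explicitly empty one (e.g. `{|| ... }`).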
    let (signature, amt_to_skip): (Option<(Box<Signature>, Span)>, usize) = match output.first() {
        Some(Token {
            contents: TokenContents::Pipe,
            span,
        }) => {
            // We've found a parameter list
            let start_point = span.start;
            let mut token_iter = output.iter().enumerate().skip(1);
            let mut end_span = None;
            let mut amt_to_skip = 1;

            for token in &mut token_iter {
                if let Token {
                    contents: TokenContents::Pipe,
                    span,
                } = token.1
                {
                    end_span = Some(span);
                    amt_to_skip = token.0;
                    break;
                }
            }

            let end_point = if let Some(span) = end_span {
                span.end
            } else {
                end
            };

            let signature_span = Span::new(start_point, end_point);
            let signature = parse_signature_helper(working_set, signature_span);

            (Some((signature, signature_span)), amt_to_skip)
        }
        Some(Token {
            contents: TokenContents::PipePipe,
            span,
        }) => (
            Some((Box::new(Signature::new("closure".to_string())), *span)),
            1,
        ),
        _ => (None, 0),
    };

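    // If the expected shape spells out parameter shapes (i.e. `SyntaxShape::Closure(Some(..))`),
    // check the closure's declared parameters against them and report arity or type mismatches.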
    // TODO: Finish this
    if let SyntaxShape::Closure(Some(v)) = shape {
        if let Some((sig, sig_span)) = &signature {
            if sig.num_positionals() > v.len() {
                working_set.error(ParseError::ExpectedWithStringMsg(
                    format!(
                        "{} closure parameter{}",
                        v.len(),
                        if v.len() > 1 { "s" } else { "" }
                    ),
                    *sig_span,
                ));
            }

            for (expected, PositionalArg { name, shape, .. }) in
                v.iter().zip(sig.required_positional.iter())
            {
                if expected != shape && *shape != SyntaxShape::Any {
                    working_set.error(ParseError::ParameterMismatchType(
                        name.to_owned(),
                        expected.to_string(),
                        shape.to_string(),
                        *sig_span,
                    ));
                }
            }
        }
    }

    let mut output = parse_block(working_set, &output[amt_to_skip..], span, false, false);

    if let Some(signature) = signature {
        output.signature = signature.0;
    } else if let Some(last) = working_set.delta.scope.last() {
        // FIXME: this only supports the top $it. Is this sufficient?

        if let Some(var_id) = last.get_var(b"$it") {
            let mut signature = Signature::new("");
            signature.required_positional.push(PositionalArg {
                var_id: Some(*var_id),
                name: "$it".into(),
                desc: String::new(),
                shape: SyntaxShape::Any,
                default_value: None,
            });
            output.signature = Box::new(signature);
        }
    }

    output.span = Some(span);

    working_set.exit_scope();

    let block_id = working_set.add_block(output);

    Expression {
        expr: Expr::Closure(block_id),
        span,
        ty: Type::Closure,
        custom_completion: None,
    }
}

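/// Parse a single value of the expected `SyntaxShape`.
///
/// Reserved words (`true`, `false`, `null`, `inf`, `-inf`, `NaN`) are handled first,
/// then the leading byte selects a specialized parser (`$`, `(`, `{`, `[`), and finally
/// the requested shape picks the literal parser. For `SyntaxShape::Any`, a list of
/// candidate shapes is tried in order until one of them parses without error.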
pub fn parse_value(
    working_set: &mut StateWorkingSet,
    span: Span,
    shape: &SyntaxShape,
) -> Expression {
    trace!("parsing: value: {}", shape);

    let bytes = working_set.get_span_contents(span);

    if bytes.is_empty() {
        working_set.error(ParseError::IncompleteParser(span));
        return garbage(span);
    }

    // Check for reserved keyword values
    match bytes {
        b"true" => {
            if matches!(shape, SyntaxShape::Boolean) || matches!(shape, SyntaxShape::Any) {
                return Expression {
                    expr: Expr::Bool(true),
                    span,
                    ty: Type::Bool,
                    custom_completion: None,
                };
            } else {
                working_set.error(ParseError::Expected("non-boolean value", span));
                return Expression::garbage(span);
            }
        }
        b"false" => {
            if matches!(shape, SyntaxShape::Boolean) || matches!(shape, SyntaxShape::Any) {
                return Expression {
                    expr: Expr::Bool(false),
                    span,
                    ty: Type::Bool,
                    custom_completion: None,
                };
            } else {
                working_set.error(ParseError::Expected("non-boolean value", span));
                return Expression::garbage(span);
            }
        }
        b"null" => {
            return Expression {
                expr: Expr::Nothing,
                span,
                ty: Type::Nothing,
                custom_completion: None,
            };
        }
        b"-inf" | b"inf" | b"NaN" => {
            return parse_float(working_set, span);
        }
        _ => {}
    }

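    // Dispatch on the leading byte: `$` (variables, cell paths, interpolation),
    // `(` (parenthesized expressions), `{` (blocks, closures, records),
    // `[` (lists, tables, signatures).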
    match bytes[0] {
        b'$' => return parse_dollar_expr(working_set, span),
        b'(' => return parse_paren_expr(working_set, span, shape),
        b'{' => return parse_brace_expr(working_set, span, shape),
        b'[' => match shape {
            SyntaxShape::Any
            | SyntaxShape::List(_)
            | SyntaxShape::Table(_)
            | SyntaxShape::Signature
            | SyntaxShape::Filepath
            | SyntaxShape::String
            | SyntaxShape::GlobPattern => {}
            _ => {
                working_set.error(ParseError::Expected("non-[] value", span));
                return Expression::garbage(span);
            }
        },
        _ => {}
    }

    match shape {
        SyntaxShape::CompleterWrapper(shape, custom_completion) => {
            let mut expression = parse_value(working_set, span, shape);
            expression.custom_completion = Some(*custom_completion);
            expression
        }
        SyntaxShape::Number => parse_number(working_set, span),
        SyntaxShape::Float => parse_float(working_set, span),
        SyntaxShape::Int => parse_int(working_set, span),
        SyntaxShape::Duration => parse_duration(working_set, span),
        SyntaxShape::DateTime => parse_datetime(working_set, span),
        SyntaxShape::Filesize => parse_filesize(working_set, span),
        SyntaxShape::Range => parse_range(working_set, span),
        SyntaxShape::Filepath => parse_filepath(working_set, span),
        SyntaxShape::Directory => parse_directory(working_set, span),
        SyntaxShape::GlobPattern => parse_glob_pattern(working_set, span),
        SyntaxShape::String => parse_string(working_set, span),
        SyntaxShape::Binary => parse_binary(working_set, span),
        SyntaxShape::Signature => {
            if bytes.starts_with(b"[") {
                parse_signature(working_set, span)
            } else {
                working_set.error(ParseError::Expected("signature", span));

                Expression::garbage(span)
            }
        }
        SyntaxShape::List(elem) => {
            if bytes.starts_with(b"[") {
                parse_list_expression(working_set, span, elem)
            } else {
                working_set.error(ParseError::Expected("list", span));

                Expression::garbage(span)
            }
        }
        SyntaxShape::Table(_) => {
            if bytes.starts_with(b"[") {
                parse_table_expression(working_set, span)
            } else {
                working_set.error(ParseError::Expected("table", span));

                Expression::garbage(span)
            }
        }
        SyntaxShape::CellPath => parse_simple_cell_path(working_set, span),
        SyntaxShape::Boolean => {
            // Redundant, though we catch bad boolean parses here
            if bytes == b"true" || bytes == b"false" {
                Expression {
                    expr: Expr::Bool(bytes == b"true"),
                    span,
                    ty: Type::Bool,
                    custom_completion: None,
                }
            } else {
                working_set.error(ParseError::Expected("bool", span));

                Expression::garbage(span)
            }
        }
|
Syntax errors for string and int (#7952)
# Description
Added a few syntax errors in ints and strings, changed parser to stop
and show that error rather than continue trying to parse those tokens as
some other shape. However, I don't see how to push this direction much
further, and most of the classic confusing errors can't be changed.
Flagged as WIP for the moment, but passes all checks and works better
than current release:
1. I have yet to figure out how to make these errors refer back to the
book, as I see some other errors do.
2. How to give syntax error when malformed int is first token in line?
Currently parsed as external command, user gets confusing error message.
3. Would like to be more strict with *decimal* int literals (lacking,
e.g, `0x' prefix). Need to tinker more with the order of parse shape
calls, currently, float is tried after int, so '1.4' has to be passed.
_(Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.)_
```bash
〉"\z"
Error:
╭─[entry #3:1:1]
1 │ "\z"
· ─┬─
· ╰── Syntax error in string, unrecognized character after escape '\'.
╰────
```
Canonic presentation of a syntax error.
```bash
〉" \u{01ffbogus}"
Error:
× Invalid syntax
╭─[entry #2:1:1]
1 │ " \u{01ffbogus}"
· ───────┬──────
· ╰── Syntax error in string, expecting 1 to 6 hex digits in unicode escape '\u{X...}', max value 10FFFF.
╰────
```
Malformed unicode escape in string, flagged as error.
String parse can be opinionated, it's the last shape tried.
```bash
〉0x22bogus
Error: nu::shell::external_command (link)
× External command failed
╭─[entry #4:1:1]
1 │ 0x22bogus
· ────┬────
· ╰── executable was not found
╰────
help: No such file or directory (os error 2)
```
A *correct* number in first token would be evaluated, but an *incorrect*
one is treated as external command? Confusing to users.
```bash
〉0 + 0x22bogus
Error:
× Invalid syntax
╭─[entry #5:1:1]
1 │ 0 + 0x22bogus
· ────┬────
· ╰── Syntax error in int, invalid digits in radix 16 int.
╰────
```
Can give syntax error if token is unambiguously int literal. e.g has 0b
or 0x prefix, could not be a float.
```bash
〉0 + 098bogus
Error: nu::parser::unsupported_operation (link)
× Types mismatched for operation.
╭─[entry #6:1:1]
1 │ 0 + 098bogus
· ┬ ┬ ────┬───
· │ │ ╰── string
· │ ╰── doesn't support these values.
· ╰── int
╰────
help: Change int or string to be the right types and try again.
```
But *decimal* literal (no prefix) can't be too strict. Parser is going
to try float later. So '1.4' must be passed.
# User-Facing Changes
First and foremost, more specific error messages for typos in string and
int literals. Probably improves interactive user experience.
But a script that was causing and then checking for specific error might
notice a different error message.
_(List of all changes that impact the user experience here. This helps
us keep track of breaking changes.)_
# Tests + Formatting
Added (positive and negative unit tests in `cargo test -p nu-parser`.
Didn't add integration tests.
Make sure you've run and fixed any issues with these commands:
- [x] `cargo fmt --all -- --check` to check standard code formatting
(`cargo fmt --all` applies these changes)
- [x] `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- [x] `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
---------
Co-authored-by: Stefan Holderbach <sholderbach@users.noreply.github.com>
2023-02-13 17:09:50 +01:00
|
|
|
|
|
|
|
// Be sure to return ParseError::Expected(..) if invoked for one of these shapes, but lex
|
|
|
|
// stream doesn't start with '{'} -- parsing in SyntaxShape::Any arm depends on this error variant.
|
allow records to have type annotations (#8914)
# Description
follow up to #8529
cleaned up version of #8892
- the original syntax is okay
```nu
def okay [rec: record] {}
```
- you can now add type annotations for fields if you know
them before hand
```nu
def okay [rec: record<name: string>] {}
```
- you can specify multiple fields
```nu
def okay [person: record<name: string age: int>] {}
# an optional comma is allowed
def okay [person: record<name: string, age: int>] {}
```
- if annotations are specified, any use of the command will be type
checked against the specified type
```nu
def unwrap [result: record<ok: bool, value: any>] {}
unwrap {ok: 2, value: "value"}
# errors with
Error: nu::parser::type_mismatch
× Type mismatch.
╭─[entry #4:1:1]
1 │ unwrap {ok: 2, value: "value"}
· ───────┬─────
· ╰── expected record<ok: bool, value: any>, found record<ok: int, value: string>
╰────
```
> here the error is in the `ok` field, since `any` is coerced into any
type
> as a result `unwrap {ok: true, value: "value"}` is okay
- the key must be a string, either quoted or unquoted
```nu
def err [rec: record<{}: list>] {}
# errors with
Error:
× `record` type annotations key not string
╭─[entry #7:1:1]
1 │ def unwrap [result: record<{}: bool, value: any>] {}
· ─┬
· ╰── must be a string
╰────
```
- a key doesn't have to have a type in which case it is assumed to be
`any`
```nu
def okay [person: record<name age>] {}
def okay [person: record<name: string age>] {}
```
- however, if you put a colon, you have to specify a type
```nu
def err [person: record<name: >] {}
# errors with
Error: nu::parser::parse_mismatch
× Parse mismatch during operation.
╭─[entry #12:1:1]
1 │ def unwrap [res: record<name: >] { $res }
· ┬
· ╰── expected type after colon
╰────
```
# User-Facing Changes
**[BREAKING CHANGES]**
- this change adds a field to `SyntaxShape::Record` so any plugins that
used it will have to update and include the field. though if you are
unsure of the type the record expects, `SyntaxShape::Record(vec![])`
will suffice
2023-04-26 15:16:55 +02:00
|
|
|
SyntaxShape::Block | SyntaxShape::Closure(..) | SyntaxShape::Record(_) => {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("block, closure or record", span));
|
2023-04-07 02:35:45 +02:00
|
|
|
|
|
|
|
Expression::garbage(span)
|
|
|
|
}
|
Syntax errors for string and int (#7952)
# Description
Added a few syntax errors in ints and strings, changed parser to stop
and show that error rather than continue trying to parse those tokens as
some other shape. However, I don't see how to push this direction much
further, and most of the classic confusing errors can't be changed.
Flagged as WIP for the moment, but passes all checks and works better
than current release:
1. I have yet to figure out how to make these errors refer back to the
book, as I see some other errors do.
2. How to give syntax error when malformed int is first token in line?
Currently parsed as external command, user gets confusing error message.
3. Would like to be more strict with *decimal* int literals (lacking,
e.g, `0x' prefix). Need to tinker more with the order of parse shape
calls, currently, float is tried after int, so '1.4' has to be passed.
_(Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.)_
```bash
〉"\z"
Error:
╭─[entry #3:1:1]
1 │ "\z"
· ─┬─
· ╰── Syntax error in string, unrecognized character after escape '\'.
╰────
```
Canonic presentation of a syntax error.
```bash
〉" \u{01ffbogus}"
Error:
× Invalid syntax
╭─[entry #2:1:1]
1 │ " \u{01ffbogus}"
· ───────┬──────
· ╰── Syntax error in string, expecting 1 to 6 hex digits in unicode escape '\u{X...}', max value 10FFFF.
╰────
```
Malformed unicode escape in string, flagged as error.
String parse can be opinionated, it's the last shape tried.
```bash
〉0x22bogus
Error: nu::shell::external_command (link)
× External command failed
╭─[entry #4:1:1]
1 │ 0x22bogus
· ────┬────
· ╰── executable was not found
╰────
help: No such file or directory (os error 2)
```
A *correct* number in first token would be evaluated, but an *incorrect*
one is treated as external command? Confusing to users.
```bash
〉0 + 0x22bogus
Error:
× Invalid syntax
╭─[entry #5:1:1]
1 │ 0 + 0x22bogus
· ────┬────
· ╰── Syntax error in int, invalid digits in radix 16 int.
╰────
```
Can give syntax error if token is unambiguously int literal. e.g has 0b
or 0x prefix, could not be a float.
```bash
〉0 + 098bogus
Error: nu::parser::unsupported_operation (link)
× Types mismatched for operation.
╭─[entry #6:1:1]
1 │ 0 + 098bogus
· ┬ ┬ ────┬───
· │ │ ╰── string
· │ ╰── doesn't support these values.
· ╰── int
╰────
help: Change int or string to be the right types and try again.
```
But *decimal* literal (no prefix) can't be too strict. Parser is going
to try float later. So '1.4' must be passed.
# User-Facing Changes
First and foremost, more specific error messages for typos in string and
int literals. Probably improves interactive user experience.
But a script that was causing and then checking for specific error might
notice a different error message.
_(List of all changes that impact the user experience here. This helps
us keep track of breaking changes.)_
# Tests + Formatting
Added (positive and negative unit tests in `cargo test -p nu-parser`.
Didn't add integration tests.
Make sure you've run and fixed any issues with these commands:
- [x] `cargo fmt --all -- --check` to check standard code formatting
(`cargo fmt --all` applies these changes)
- [x] `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- [x] `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
---------
Co-authored-by: Stefan Holderbach <sholderbach@users.noreply.github.com>
2023-02-13 17:09:50 +01:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
SyntaxShape::Any => {
|
2021-09-07 05:56:30 +02:00
|
|
|
if bytes.starts_with(b"[") {
|
2021-11-08 00:18:00 +01:00
|
|
|
//parse_value(working_set, span, &SyntaxShape::Table)
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_full_cell_path(working_set, None, span)
|
2021-09-07 05:56:30 +02:00
|
|
|
} else {
|
Syntax errors for string and int (#7952)
# Description
Added a few syntax errors in ints and strings, changed parser to stop
and show that error rather than continue trying to parse those tokens as
some other shape. However, I don't see how to push this direction much
further, and most of the classic confusing errors can't be changed.
Flagged as WIP for the moment, but passes all checks and works better
than current release:
1. I have yet to figure out how to make these errors refer back to the
book, as I see some other errors do.
2. How to give syntax error when malformed int is first token in line?
Currently parsed as external command, user gets confusing error message.
3. Would like to be more strict with *decimal* int literals (lacking,
e.g, `0x' prefix). Need to tinker more with the order of parse shape
calls, currently, float is tried after int, so '1.4' has to be passed.
_(Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.)_
```bash
〉"\z"
Error:
╭─[entry #3:1:1]
1 │ "\z"
· ─┬─
· ╰── Syntax error in string, unrecognized character after escape '\'.
╰────
```
Canonic presentation of a syntax error.
```bash
〉" \u{01ffbogus}"
Error:
× Invalid syntax
╭─[entry #2:1:1]
1 │ " \u{01ffbogus}"
· ───────┬──────
· ╰── Syntax error in string, expecting 1 to 6 hex digits in unicode escape '\u{X...}', max value 10FFFF.
╰────
```
Malformed unicode escape in string, flagged as error.
String parse can be opinionated, it's the last shape tried.
```bash
〉0x22bogus
Error: nu::shell::external_command (link)
× External command failed
╭─[entry #4:1:1]
1 │ 0x22bogus
· ────┬────
· ╰── executable was not found
╰────
help: No such file or directory (os error 2)
```
A *correct* number in first token would be evaluated, but an *incorrect*
one is treated as external command? Confusing to users.
```bash
〉0 + 0x22bogus
Error:
× Invalid syntax
╭─[entry #5:1:1]
1 │ 0 + 0x22bogus
· ────┬────
· ╰── Syntax error in int, invalid digits in radix 16 int.
╰────
```
Can give syntax error if token is unambiguously int literal. e.g has 0b
or 0x prefix, could not be a float.
```bash
〉0 + 098bogus
Error: nu::parser::unsupported_operation (link)
× Types mismatched for operation.
╭─[entry #6:1:1]
1 │ 0 + 098bogus
· ┬ ┬ ────┬───
· │ │ ╰── string
· │ ╰── doesn't support these values.
· ╰── int
╰────
help: Change int or string to be the right types and try again.
```
But *decimal* literal (no prefix) can't be too strict. Parser is going
to try float later. So '1.4' must be passed.
# User-Facing Changes
First and foremost, more specific error messages for typos in string and
int literals. Probably improves interactive user experience.
But a script that was causing and then checking for specific error might
notice a different error message.
_(List of all changes that impact the user experience here. This helps
us keep track of breaking changes.)_
# Tests + Formatting
Added (positive and negative unit tests in `cargo test -p nu-parser`.
Didn't add integration tests.
Make sure you've run and fixed any issues with these commands:
- [x] `cargo fmt --all -- --check` to check standard code formatting
(`cargo fmt --all` applies these changes)
- [x] `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- [x] `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
---------
Co-authored-by: Stefan Holderbach <sholderbach@users.noreply.github.com>
2023-02-13 17:09:50 +01:00
|
|
|
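                // Try the more specific shapes first; `String` comes last because it accepts
                // almost any token and would otherwise shadow the other parses.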
                let shapes = [
                    SyntaxShape::Binary,
                    SyntaxShape::Filesize,
                    SyntaxShape::Duration,
                    SyntaxShape::Range,
                    SyntaxShape::DateTime,
                    SyntaxShape::Int,
                    SyntaxShape::Number,
                    SyntaxShape::String,
                ];
                for shape in shapes.iter() {
                    let starting_error_count = working_set.parse_errors.len();

                    let s = parse_value(working_set, span, shape);

                    if starting_error_count == working_set.parse_errors.len() {
                        return s;
                    } else {
                        match working_set.parse_errors.get(starting_error_count) {
                            Some(
                                ParseError::Expected(_, _)
                                | ParseError::ExpectedWithStringMsg(_, _),
                            ) => {
                                working_set.parse_errors.truncate(starting_error_count);
                                continue;
                            }
                            _ => {
                                return s;
                            }
                        }
                    }
                }
                working_set.error(ParseError::Expected("any shape", span));
                garbage(span)
            }
        }
        x => {
            working_set.error(ParseError::ExpectedWithStringMsg(
                x.to_type().to_string(),
                span,
            ));
            garbage(span)
        }
    }
}

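/// Parse an operator token (e.g. `+`, `==`, `=~`, `bit-shl`) into the corresponding
/// `Operator`. Spellings borrowed from other languages (`%`, `&`, `<<`, `>>`, `^`,
/// `===`, `contains`, ...) are rejected with a hint suggesting the Nu equivalent.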
pub fn parse_operator(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2021-09-02 10:25:22 +02:00
|
|
|
let contents = working_set.get_span_contents(span);
|
|
|
|
|
|
|
|
let operator = match contents {
|
2022-11-11 07:51:08 +01:00
|
|
|
b"=" => Operator::Assignment(Assignment::Assign),
|
2022-11-11 19:50:43 +01:00
|
|
|
b"+=" => Operator::Assignment(Assignment::PlusAssign),
|
2022-12-09 17:20:58 +01:00
|
|
|
b"++=" => Operator::Assignment(Assignment::AppendAssign),
|
2022-11-11 19:50:43 +01:00
|
|
|
b"-=" => Operator::Assignment(Assignment::MinusAssign),
|
|
|
|
b"*=" => Operator::Assignment(Assignment::MultiplyAssign),
|
|
|
|
b"/=" => Operator::Assignment(Assignment::DivideAssign),
|
2022-11-11 07:51:08 +01:00
|
|
|
b"==" => Operator::Comparison(Comparison::Equal),
|
|
|
|
b"!=" => Operator::Comparison(Comparison::NotEqual),
|
|
|
|
b"<" => Operator::Comparison(Comparison::LessThan),
|
|
|
|
b"<=" => Operator::Comparison(Comparison::LessThanOrEqual),
|
|
|
|
b">" => Operator::Comparison(Comparison::GreaterThan),
|
|
|
|
b">=" => Operator::Comparison(Comparison::GreaterThanOrEqual),
|
|
|
|
b"=~" => Operator::Comparison(Comparison::RegexMatch),
|
|
|
|
b"!~" => Operator::Comparison(Comparison::NotRegexMatch),
|
|
|
|
b"+" => Operator::Math(Math::Plus),
|
|
|
|
b"++" => Operator::Math(Math::Append),
|
|
|
|
b"-" => Operator::Math(Math::Minus),
|
|
|
|
b"*" => Operator::Math(Math::Multiply),
|
|
|
|
b"/" => Operator::Math(Math::Divide),
|
|
|
|
b"//" => Operator::Math(Math::FloorDivision),
|
|
|
|
b"in" => Operator::Comparison(Comparison::In),
|
|
|
|
b"not-in" => Operator::Comparison(Comparison::NotIn),
|
|
|
|
b"mod" => Operator::Math(Math::Modulo),
|
|
|
|
b"bit-or" => Operator::Bits(Bits::BitOr),
|
|
|
|
b"bit-xor" => Operator::Bits(Bits::BitXor),
|
|
|
|
b"bit-and" => Operator::Bits(Bits::BitAnd),
|
|
|
|
b"bit-shl" => Operator::Bits(Bits::ShiftLeft),
|
|
|
|
b"bit-shr" => Operator::Bits(Bits::ShiftRight),
|
|
|
|
b"starts-with" => Operator::Comparison(Comparison::StartsWith),
|
|
|
|
b"ends-with" => Operator::Comparison(Comparison::EndsWith),
|
2022-12-08 00:02:11 +01:00
|
|
|
b"and" => Operator::Boolean(Boolean::And),
|
|
|
|
b"or" => Operator::Boolean(Boolean::Or),
|
2022-11-26 17:02:37 +01:00
|
|
|
b"xor" => Operator::Boolean(Boolean::Xor),
|
2022-11-11 07:51:08 +01:00
|
|
|
b"**" => Operator::Math(Math::Pow),
|
2022-11-26 22:59:43 +01:00
|
|
|
// WARNING: not actual operators below! Error handling only
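// (These arms catch near-miss spellings such as `^`, `is`, `===`, `contains`, `%`, `&`,
// `<<`, `>>` and the `bits-*` family, reporting an UnknownOperator error with a hint.)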
|
|
|
|
pow @ (b"^" | b"pow") => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownOperator(
|
|
|
|
match pow {
|
|
|
|
b"^" => "^",
|
|
|
|
b"pow" => "pow",
|
|
|
|
_ => unreachable!(),
|
|
|
|
},
|
|
|
|
"Use '**' for exponentiation or 'bit-xor' for bitwise XOR.",
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2022-11-26 22:59:43 +01:00
|
|
|
}
|
|
|
|
equality @ (b"is" | b"===") => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownOperator(
|
|
|
|
match equality {
|
|
|
|
b"is" => "is",
|
|
|
|
b"===" => "===",
|
|
|
|
_ => unreachable!(),
|
|
|
|
},
|
|
|
|
"Did you mean '=='?",
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2022-11-26 22:59:43 +01:00
|
|
|
}
|
|
|
|
b"contains" => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownOperator(
|
|
|
|
"contains",
|
|
|
|
"Did you mean '$string =~ $pattern' or '$element in $container'?",
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2022-11-26 22:59:43 +01:00
|
|
|
}
|
|
|
|
b"%" => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownOperator(
|
|
|
|
"%",
|
|
|
|
"Did you mean 'mod'?",
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2022-11-26 22:59:43 +01:00
|
|
|
}
|
2022-12-01 11:34:41 +01:00
|
|
|
b"&" => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownOperator(
|
|
|
|
"&",
|
|
|
|
"Did you mean 'bit-and'?",
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2022-12-01 11:34:41 +01:00
|
|
|
}
|
|
|
|
b"<<" => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownOperator(
|
|
|
|
"<<",
|
|
|
|
"Did you mean 'bit-shl'?",
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2022-12-01 11:34:41 +01:00
|
|
|
}
|
|
|
|
b">>" => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownOperator(
|
|
|
|
">>",
|
|
|
|
"Did you mean 'bit-shr'?",
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2022-12-01 11:34:41 +01:00
|
|
|
}
|
|
|
|
bits @ (b"bits-and" | b"bits-xor" | b"bits-or" | b"bits-shl" | b"bits-shr") => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownOperator(
|
|
|
|
match bits {
|
|
|
|
b"bits-and" => "bits-and",
|
|
|
|
b"bits-xor" => "bits-xor",
|
|
|
|
b"bits-or" => "bits-or",
|
|
|
|
b"bits-shl" => "bits-shl",
|
|
|
|
b"bits-shr" => "bits-shr",
|
|
|
|
_ => unreachable!(),
|
|
|
|
},
|
|
|
|
match bits {
|
|
|
|
b"bits-and" => "Did you mean 'bit-and'?",
|
|
|
|
b"bits-xor" => "Did you mean 'bit-xor'?",
|
|
|
|
b"bits-or" => "Did you mean 'bit-or'?",
|
|
|
|
b"bits-shl" => "Did you mean 'bit-shl'?",
|
|
|
|
b"bits-shr" => "Did you mean 'bit-shr'?",
|
|
|
|
_ => unreachable!(),
|
|
|
|
},
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2022-12-01 11:34:41 +01:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
_ => {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("operator", span));
|
2023-04-07 02:35:45 +02:00
|
|
|
return garbage(span);
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
};
|
2021-07-02 08:44:37 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::Operator(operator),
|
|
|
|
span,
|
|
|
|
ty: Type::Any,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-02 08:44:37 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
pub fn parse_math_expression(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
spans: &[Span],
|
2021-09-09 23:47:20 +02:00
|
|
|
lhs_row_var_id: Option<VarId>,
|
2023-04-07 02:35:45 +02:00
|
|
|
) -> Expression {
|
2022-12-10 18:23:24 +01:00
|
|
|
trace!("parsing: math expression");
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
// As the expr_stack grows, we increase the required precedence to grow larger
|
|
|
|
// If, at any time, the operator we're looking at has the same or lower precedence
|
|
|
|
// than what is in the expression stack, we collapse the expression stack.
|
|
|
|
//
|
|
|
|
// This leads to an expression stack that grows under increasing precedence and collapses
|
|
|
|
// under decreasing/sustained precedence
|
|
|
|
//
|
|
|
|
// The end result is a stack that we can fold into binary operations as right associations
|
|
|
|
// safely.
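//
// Illustrative trace (an editorial example, not taken from the source): parsing
// `1 + 2 * 3 - 4`, the stack grows to [1, +, 2, *, 3] because `*` binds tighter
// than `+`; when the lower-precedence `-` arrives, the stack collapses to
// [(1 + (2 * 3))] before `-` and `4` are pushed, and the final fold below
// produces ((1 + (2 * 3)) - 4).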
|
|
|
|
|
|
|
|
let mut expr_stack: Vec<Expression> = vec![];
|
|
|
|
|
|
|
|
let mut idx = 0;
|
|
|
|
let mut last_prec = 1000000;
|
|
|
|
|
2022-04-06 21:10:25 +02:00
|
|
|
let first_span = working_set.get_span_contents(spans[0]);
|
|
|
|
|
2024-01-29 20:42:27 +01:00
|
|
|
let mut not_start_spans = vec![];
|
|
|
|
|
2023-03-24 02:52:01 +01:00
|
|
|
if first_span == b"if" || first_span == b"match" {
|
2023-03-22 21:14:10 +01:00
|
|
|
// If expression
|
|
|
|
if spans.len() > 1 {
|
2023-04-07 20:09:38 +02:00
|
|
|
return parse_call(working_set, spans, spans[0], false);
|
2023-03-22 21:14:10 +01:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
2023-05-24 21:53:57 +02:00
|
|
|
"expression",
|
2023-04-07 02:35:45 +02:00
|
|
|
Span::new(spans[0].end, spans[0].end),
|
|
|
|
));
|
|
|
|
return garbage(spans[0]);
|
2023-03-22 21:14:10 +01:00
|
|
|
}
|
|
|
|
} else if first_span == b"not" {
|
2024-01-29 20:42:27 +01:00
|
|
|
not_start_spans.push(spans[idx].start);
|
|
|
|
idx += 1;
|
|
|
|
while idx < spans.len() {
|
|
|
|
let next_value = working_set.get_span_contents(spans[idx]);
|
|
|
|
|
|
|
|
if next_value == b"not" {
|
|
|
|
not_start_spans.push(spans[idx].start);
|
|
|
|
idx += 1;
|
|
|
|
} else {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if idx == spans.len() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
2023-05-24 21:53:57 +02:00
|
|
|
"expression",
|
2024-01-29 20:42:27 +01:00
|
|
|
Span::new(spans[idx - 1].end, spans[idx - 1].end),
|
2023-04-07 02:35:45 +02:00
|
|
|
));
|
2024-01-29 20:42:27 +01:00
|
|
|
return garbage(spans[idx - 1]);
|
2022-04-06 21:10:25 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2024-01-29 20:42:27 +01:00
|
|
|
let mut lhs = parse_value(working_set, spans[idx], &SyntaxShape::Any);
|
|
|
|
|
|
|
|
for not_start_span in not_start_spans.iter().rev() {
|
|
|
|
lhs = Expression {
|
|
|
|
expr: Expr::UnaryNot(Box::new(lhs)),
|
|
|
|
span: Span::new(*not_start_span, spans[idx].end),
|
|
|
|
ty: Type::Bool,
|
|
|
|
custom_completion: None,
|
|
|
|
};
|
|
|
|
}
|
|
|
|
not_start_spans.clear();
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
idx += 1;
|
|
|
|
|
2022-04-03 00:41:36 +02:00
|
|
|
if idx >= spans.len() {
|
|
|
|
// We already found the one part of our expression, so let's expand
|
|
|
|
if let Some(row_var_id) = lhs_row_var_id {
|
2023-04-07 20:09:38 +02:00
|
|
|
expand_to_cell_path(working_set, &mut lhs, row_var_id);
|
2022-04-03 00:41:36 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
expr_stack.push(lhs);
|
|
|
|
|
|
|
|
while idx < spans.len() {
|
2023-04-07 02:35:45 +02:00
|
|
|
let op = parse_operator(working_set, spans[idx]);
|
2021-07-02 08:44:37 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let op_prec = op.precedence();
|
2021-07-02 08:44:37 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
idx += 1;
|
2021-07-02 08:44:37 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if idx == spans.len() {
|
|
|
|
// Handle broken math expr `1 +` etc
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::IncompleteMathExpression(spans[idx - 1]));
|
2021-07-02 08:44:37 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
expr_stack.push(Expression::garbage(spans[idx - 1]));
|
|
|
|
expr_stack.push(Expression::garbage(spans[idx - 1]));
|
2021-07-22 21:50:59 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
break;
|
|
|
|
}
|
2021-07-22 21:50:59 +02:00
|
|
|
|
2023-07-13 10:55:41 +02:00
|
|
|
let content = working_set.get_span_contents(spans[idx]);
|
|
|
|
// allow `if` to be a special value for assignment.
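// (e.g. `$x = if $cond { 1 } else { 2 }`: the right-hand side is parsed as a call to
// `if`/`match` rather than as a plain value.)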
|
2024-01-29 20:42:27 +01:00
|
|
|
|
2023-07-13 10:55:41 +02:00
|
|
|
if content == b"if" || content == b"match" {
|
|
|
|
let rhs = parse_call(working_set, &spans[idx..], spans[0], false);
|
|
|
|
expr_stack.push(op);
|
|
|
|
expr_stack.push(rhs);
|
|
|
|
break;
|
2024-01-29 20:42:27 +01:00
|
|
|
} else if content == b"not" {
|
|
|
|
not_start_spans.push(spans[idx].start);
|
|
|
|
idx += 1;
|
|
|
|
while idx < spans.len() {
|
|
|
|
let next_value = working_set.get_span_contents(spans[idx]);
|
|
|
|
|
|
|
|
if next_value == b"not" {
|
|
|
|
not_start_spans.push(spans[idx].start);
|
|
|
|
idx += 1;
|
|
|
|
} else {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if idx == spans.len() {
|
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"expression",
|
|
|
|
Span::new(spans[idx - 1].end, spans[idx - 1].end),
|
|
|
|
));
|
|
|
|
return garbage(spans[idx - 1]);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
let mut rhs = parse_value(working_set, spans[idx], &SyntaxShape::Any);
|
|
|
|
|
|
|
|
for not_start_span in not_start_spans.iter().rev() {
|
|
|
|
rhs = Expression {
|
|
|
|
expr: Expr::UnaryNot(Box::new(rhs)),
|
|
|
|
span: Span::new(*not_start_span, spans[idx].end),
|
|
|
|
ty: Type::Bool,
|
|
|
|
custom_completion: None,
|
|
|
|
};
|
2023-07-13 10:55:41 +02:00
|
|
|
}
|
2024-01-29 20:42:27 +01:00
|
|
|
not_start_spans.clear();
|
2021-07-02 08:44:37 +02:00
|
|
|
|
2022-03-25 04:23:08 +01:00
|
|
|
while op_prec <= last_prec && expr_stack.len() > 1 {
|
2021-11-06 08:31:28 +01:00
|
|
|
// Collapse the right associated operations first
|
|
|
|
// so that we can get back to a stack with a lower precedence
|
|
|
|
let mut rhs = expr_stack
|
|
|
|
.pop()
|
|
|
|
.expect("internal error: expression stack empty");
|
|
|
|
let mut op = expr_stack
|
|
|
|
.pop()
|
|
|
|
.expect("internal error: expression stack empty");
|
|
|
|
|
2022-03-25 04:23:08 +01:00
|
|
|
last_prec = op.precedence();
|
|
|
|
|
|
|
|
if last_prec < op_prec {
|
|
|
|
expr_stack.push(op);
|
|
|
|
expr_stack.push(rhs);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2021-11-06 08:31:28 +01:00
|
|
|
let mut lhs = expr_stack
|
|
|
|
.pop()
|
|
|
|
.expect("internal error: expression stack empty");
|
|
|
|
|
|
|
|
if let Some(row_var_id) = lhs_row_var_id {
|
2023-04-07 20:09:38 +02:00
|
|
|
expand_to_cell_path(working_set, &mut lhs, row_var_id);
|
2021-11-06 08:31:28 +01:00
|
|
|
}
|
2021-09-09 23:47:20 +02:00
|
|
|
|
2021-11-06 08:31:28 +01:00
|
|
|
let (result_ty, err) = math_result_type(working_set, &mut lhs, &mut op, &mut rhs);
|
2023-04-07 02:35:45 +02:00
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err);
|
|
|
|
}
|
2021-07-02 08:44:37 +02:00
|
|
|
|
2021-11-06 08:31:28 +01:00
|
|
|
let op_span = span(&[lhs.span, rhs.span]);
|
|
|
|
expr_stack.push(Expression {
|
|
|
|
expr: Expr::BinaryOp(Box::new(lhs), Box::new(op), Box::new(rhs)),
|
|
|
|
span: op_span,
|
|
|
|
ty: result_ty,
|
|
|
|
custom_completion: None,
|
|
|
|
});
|
2021-07-02 08:44:37 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
expr_stack.push(op);
|
|
|
|
expr_stack.push(rhs);
|
2021-07-02 08:44:37 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
last_prec = op_prec;
|
2021-07-23 23:19:30 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
idx += 1;
|
|
|
|
}
|
2021-07-02 08:44:37 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
while expr_stack.len() != 1 {
|
|
|
|
let mut rhs = expr_stack
|
|
|
|
.pop()
|
|
|
|
.expect("internal error: expression stack empty");
|
|
|
|
let mut op = expr_stack
|
2021-07-02 08:44:37 +02:00
|
|
|
.pop()
|
|
|
|
.expect("internal error: expression stack empty");
|
2021-09-02 10:25:22 +02:00
|
|
|
let mut lhs = expr_stack
|
|
|
|
.pop()
|
|
|
|
.expect("internal error: expression stack empty");
|
|
|
|
|
2021-09-09 23:47:20 +02:00
|
|
|
if let Some(row_var_id) = lhs_row_var_id {
|
2023-04-07 20:09:38 +02:00
|
|
|
expand_to_cell_path(working_set, &mut lhs, row_var_id);
|
2021-09-09 23:47:20 +02:00
|
|
|
}
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let (result_ty, err) = math_result_type(working_set, &mut lhs, &mut op, &mut rhs);
|
2023-04-07 02:35:45 +02:00
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err)
|
|
|
|
}
|
2021-07-02 08:44:37 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let binary_op_span = span(&[lhs.span, rhs.span]);
|
|
|
|
expr_stack.push(Expression {
|
|
|
|
expr: Expr::BinaryOp(Box::new(lhs), Box::new(op), Box::new(rhs)),
|
|
|
|
span: binary_op_span,
|
|
|
|
ty: result_ty,
|
2021-09-14 06:59:46 +02:00
|
|
|
custom_completion: None,
|
2021-09-02 10:25:22 +02:00
|
|
|
});
|
2021-07-01 02:01:04 +02:00
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
expr_stack
|
2021-09-02 10:25:22 +02:00
|
|
|
.pop()
|
2023-04-07 02:35:45 +02:00
|
|
|
.expect("internal error: expression stack empty")
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
pub fn parse_expression(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
spans: &[Span],
|
Make external command substitution works friendly(like fish shell, trailing ending newlines) (#7156)
# Description
As the title says, when executing an external sub command, trailing
newlines are automatically trimmed, like fish shell does.
If the command is executed directly, like `cat tmp`, the result
won't change.
Fixes: #6816
Fixes: #3980
Note that although nushell works correctly by directly substituting the output of
an external command into a variable (or other places, like string interpolation),
it's not friendly to the user: users almost always want `str trim` to
remove the trailing newline, and I think that's why fish shell does this
automatically.
If this PR is OK, as a result, no more `str trim -r` will be required when
users write scripts that use external commands.
# User-Facing Changes
Before:
<img width="523" alt="img"
src="https://user-images.githubusercontent.com/22256154/202468810-86b04dbb-c147-459a-96a5-e0095eeaab3d.png">
After:
<img width="505" alt="img"
src="https://user-images.githubusercontent.com/22256154/202468599-7b537488-3d6b-458e-9d75-d85780826db0.png">
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace --features=extra -- -D warnings -D
clippy::unwrap_used -A clippy::needless_collect` to check that you're
using the standard code style
- `cargo test --workspace --features=extra` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2022-11-23 04:51:57 +01:00
|
|
|
is_subexpression: bool,
|
2023-04-07 02:35:45 +02:00
|
|
|
) -> Expression {
|
2022-12-10 18:23:24 +01:00
|
|
|
trace!("parsing: expression");
|
|
|
|
|
2021-11-04 03:32:35 +01:00
|
|
|
let mut pos = 0;
|
|
|
|
let mut shorthand = vec![];
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2021-11-04 03:32:35 +01:00
|
|
|
while pos < spans.len() {
|
|
|
|
// Check if there is any environment shorthand
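// (Illustrative: in `FOO=bar some-cmd`, the leading `FOO=bar` token is an environment
// shorthand; any such pairs are collected and the rest of the pipeline is wrapped in a
// `with-env` call further below, so the variables apply only to that command.)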
|
|
|
|
let name = working_set.get_span_contents(spans[pos]);
|
2022-03-03 01:55:03 +01:00
|
|
|
|
2022-03-29 22:56:55 +02:00
|
|
|
let split = name.splitn(2, |x| *x == b'=');
|
2021-11-04 03:32:35 +01:00
|
|
|
let split: Vec<_> = split.collect();
|
2023-04-07 13:40:05 +02:00
|
|
|
if !name.starts_with(b"^")
|
|
|
|
&& split.len() == 2
|
|
|
|
&& !split[0].is_empty()
|
|
|
|
&& !split[0].ends_with(b"..")
|
|
|
|
// (otherwise the `=` would be part of a `..=` range operator)
|
|
|
|
{
|
2021-11-04 03:32:35 +01:00
|
|
|
let point = split[0].len() + 1;
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
let starting_error_count = working_set.parse_errors.len();
|
|
|
|
|
2021-11-04 03:32:35 +01:00
|
|
|
let lhs = parse_string_strict(
|
|
|
|
working_set,
|
2022-12-03 10:44:12 +01:00
|
|
|
Span::new(spans[pos].start, spans[pos].start + point - 1),
|
2021-11-04 03:32:35 +01:00
|
|
|
);
|
|
|
|
let rhs = if spans[pos].start + point < spans[pos].end {
|
2022-12-03 10:44:12 +01:00
|
|
|
let rhs_span = Span::new(spans[pos].start + point, spans[pos].end);
|
2022-05-07 13:21:29 +02:00
|
|
|
|
|
|
|
if working_set.get_span_contents(rhs_span).starts_with(b"$") {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_dollar_expr(working_set, rhs_span)
|
2022-05-07 13:21:29 +02:00
|
|
|
} else {
|
|
|
|
parse_string_strict(working_set, rhs_span)
|
|
|
|
}
|
2021-11-04 03:32:35 +01:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::String(String::new()),
|
|
|
|
span: Span::unknown(),
|
|
|
|
ty: Type::Nothing,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-11-04 03:32:35 +01:00
|
|
|
};
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
if starting_error_count == working_set.parse_errors.len() {
|
|
|
|
shorthand.push((lhs, rhs));
|
2021-11-04 03:32:35 +01:00
|
|
|
pos += 1;
|
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.parse_errors.truncate(starting_error_count);
|
2021-11-04 03:32:35 +01:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if pos == spans.len() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownCommand(spans[0]));
|
|
|
|
return garbage(span(spans));
|
2021-11-04 03:32:35 +01:00
|
|
|
}
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
let output = if is_math_expression_like(working_set, spans[pos]) {
|
|
|
|
parse_math_expression(working_set, &spans[pos..], None)
|
2021-11-04 03:32:35 +01:00
|
|
|
} else {
|
2023-02-22 13:14:20 +01:00
|
|
|
let bytes = working_set.get_span_contents(spans[pos]).to_vec();
|
2022-04-07 04:01:31 +02:00
|
|
|
|
2022-01-15 16:26:52 +01:00
|
|
|
// For now, check for special parses of certain keywords
|
2023-02-22 13:14:20 +01:00
|
|
|
match bytes.as_slice() {
|
2023-11-16 23:44:28 +01:00
|
|
|
b"def" | b"extern" | b"for" | b"module" | b"use" | b"source" | b"alias" | b"export"
|
|
|
|
| b"hide" => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::BuiltinCommandInPipeline(
|
2023-02-22 13:14:20 +01:00
|
|
|
String::from_utf8(bytes)
|
|
|
|
.expect("builtin commands bytes should be able to convert to string"),
|
2022-02-15 20:31:14 +01:00
|
|
|
spans[0],
|
2023-04-07 02:35:45 +02:00
|
|
|
));
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_call(working_set, &spans[pos..], spans[0], is_subexpression)
|
2023-04-07 02:35:45 +02:00
|
|
|
}
|
|
|
|
b"let" | b"const" | b"mut" => {
|
|
|
|
working_set.error(ParseError::AssignInPipeline(
|
2023-02-22 13:14:20 +01:00
|
|
|
String::from_utf8(bytes)
|
|
|
|
.expect("builtin commands bytes should be able to convert to string"),
|
2022-05-29 22:16:41 +02:00
|
|
|
String::from_utf8_lossy(match spans.len() {
|
2023-07-12 00:00:31 +02:00
|
|
|
1..=3 => b"value",
|
2023-01-20 00:11:48 +01:00
|
|
|
_ => working_set.get_span_contents(spans[3]),
|
|
|
|
})
|
|
|
|
.to_string(),
|
|
|
|
String::from_utf8_lossy(match spans.len() {
|
|
|
|
1 => b"variable",
|
|
|
|
_ => working_set.get_span_contents(spans[1]),
|
|
|
|
})
|
|
|
|
.to_string(),
|
|
|
|
spans[0],
|
2023-04-07 02:35:45 +02:00
|
|
|
));
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_call(working_set, &spans[pos..], spans[0], is_subexpression)
|
2023-04-07 02:35:45 +02:00
|
|
|
}
|
2022-05-07 21:39:22 +02:00
|
|
|
b"overlay" => {
|
|
|
|
if spans.len() > 1 && working_set.get_span_contents(spans[1]) == b"list" {
|
|
|
|
// whitelist 'overlay list'
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_call(working_set, &spans[pos..], spans[0], is_subexpression)
|
2022-05-07 21:39:22 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::BuiltinCommandInPipeline(
|
|
|
|
"overlay".into(),
|
|
|
|
spans[0],
|
|
|
|
));
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_call(working_set, &spans[pos..], spans[0], is_subexpression)
|
2022-05-07 21:39:22 +02:00
|
|
|
}
|
|
|
|
}
|
2023-04-07 20:09:38 +02:00
|
|
|
b"where" => parse_where_expr(working_set, &spans[pos..]),
|
2022-01-15 16:26:52 +01:00
|
|
|
#[cfg(feature = "plugin")]
|
2023-04-07 02:35:45 +02:00
|
|
|
b"register" => {
|
|
|
|
working_set.error(ParseError::BuiltinCommandInPipeline(
|
|
|
|
"plugin".into(),
|
|
|
|
spans[0],
|
|
|
|
));
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_call(working_set, &spans[pos..], spans[0], is_subexpression)
|
2023-04-07 02:35:45 +02:00
|
|
|
}
|
2022-01-15 16:26:52 +01:00
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
_ => parse_call(working_set, &spans[pos..], spans[0], is_subexpression),
|
2022-01-15 16:26:52 +01:00
|
|
|
}
|
2021-11-04 03:32:35 +01:00
|
|
|
};
|
|
|
|
|
2023-07-13 21:05:03 +02:00
|
|
|
let with_env = working_set.find_decl(b"with-env");
|
2021-11-04 03:32:35 +01:00
|
|
|
|
|
|
|
if !shorthand.is_empty() {
|
|
|
|
if let Some(decl_id) = with_env {
|
|
|
|
let mut block = Block::default();
|
|
|
|
let ty = output.ty.clone();
|
2022-11-18 22:46:48 +01:00
|
|
|
block.pipelines = vec![Pipeline::from_vec(vec![output])];
|
2021-11-04 03:32:35 +01:00
|
|
|
|
|
|
|
let block_id = working_set.add_block(block);
|
|
|
|
|
|
|
|
let mut env_vars = vec![];
|
|
|
|
for sh in shorthand {
|
|
|
|
env_vars.push(sh.0);
|
|
|
|
env_vars.push(sh.1);
|
|
|
|
}
|
|
|
|
|
2022-04-09 04:55:02 +02:00
|
|
|
let arguments = vec![
|
|
|
|
Argument::Positional(Expression {
|
2021-11-04 03:32:35 +01:00
|
|
|
expr: Expr::List(env_vars),
|
|
|
|
span: span(&spans[..pos]),
|
2022-04-07 06:34:09 +02:00
|
|
|
ty: Type::Any,
|
2021-11-04 03:32:35 +01:00
|
|
|
custom_completion: None,
|
2022-04-09 04:55:02 +02:00
|
|
|
}),
|
|
|
|
Argument::Positional(Expression {
|
2022-11-10 09:21:49 +01:00
|
|
|
expr: Expr::Closure(block_id),
|
2021-11-04 03:32:35 +01:00
|
|
|
span: span(&spans[pos..]),
|
2022-11-10 09:21:49 +01:00
|
|
|
ty: Type::Closure,
|
2021-11-04 03:32:35 +01:00
|
|
|
custom_completion: None,
|
2022-04-09 04:55:02 +02:00
|
|
|
}),
|
2021-11-04 03:32:35 +01:00
|
|
|
];
|
|
|
|
|
2022-02-21 18:58:04 +01:00
|
|
|
let expr = Expr::Call(Box::new(Call {
|
2022-12-03 10:44:12 +01:00
|
|
|
head: Span::unknown(),
|
2022-02-21 18:58:04 +01:00
|
|
|
decl_id,
|
2022-04-09 04:55:02 +02:00
|
|
|
arguments,
|
2022-02-21 23:22:21 +01:00
|
|
|
redirect_stdout: true,
|
|
|
|
redirect_stderr: false,
|
2023-04-05 18:56:48 +02:00
|
|
|
parser_info: HashMap::new(),
|
2022-02-21 18:58:04 +01:00
|
|
|
}));
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr,
|
|
|
|
custom_completion: None,
|
|
|
|
span: span(spans),
|
|
|
|
ty,
|
|
|
|
}
|
2021-11-04 03:32:35 +01:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
output
|
2021-11-04 03:32:35 +01:00
|
|
|
}
|
2021-10-27 23:52:59 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
output
|
2021-07-01 02:01:04 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-01 02:01:04 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
pub fn parse_variable(working_set: &mut StateWorkingSet, span: Span) -> Option<VarId> {
|
2021-09-02 10:25:22 +02:00
|
|
|
let bytes = working_set.get_span_contents(span);
|
2021-07-01 02:01:04 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if is_variable(bytes) {
|
2023-07-13 21:05:03 +02:00
|
|
|
working_set.find_variable(bytes)
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("valid variable name", span));
|
2023-04-07 02:35:45 +02:00
|
|
|
|
|
|
|
None
|
2021-07-01 02:01:04 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-01 02:01:04 +02:00
|
|
|
|
2022-02-15 20:31:14 +01:00
|
|
|
pub fn parse_builtin_commands(
|
2021-09-27 14:10:18 +02:00
|
|
|
working_set: &mut StateWorkingSet,
|
2022-01-22 19:24:47 +01:00
|
|
|
lite_command: &LiteCommand,
|
Make external command substitution works friendly(like fish shell, trailing ending newlines) (#7156)
2022-11-23 04:51:57 +01:00
|
|
|
is_subexpression: bool,
|
2023-04-07 02:35:45 +02:00
|
|
|
) -> Pipeline {
|
Improve type hovers (#9515)
# Description
This PR does a few things to help improve type hovers and, in the
process, fixes a few outstanding issues in the type system. Here's a
list of the changes:
* `for` now will try to infer the type of the iteration variable based
on the expression it's given. This fixes things like `for x in [1, 2, 3]
{ }` where `x` now properly gets the int type.
* Removed old input/output type fields from the signature, focuses on
the vec of signatures. Updated a bunch of dataframe commands that hadn't
moved over. This helps tie things together a bit better
* Fixed inference of types from subexpressions to use the last
expression in the block
* Fixed handling of explicit types in `let` and `mut` calls, so we now
respect that as the authoritative type
I also tried to add `def` input/output type inference, but unfortunately
we only know the predecl types universally, which means we won't have
enough information to properly know what the types of the custom
commands are.
# User-Facing Changes
Script typechecking will get tighter in some cases.
Hovers should be more accurate in some cases that previously resorted to
`any`.
# Tests + Formatting
<!--
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect -A clippy::result_large_err` to check that
you're using the standard code style
- `cargo test --workspace` to check that all tests pass
- `cargo run -- crates/nu-std/tests/run.nu` to run the tests for the
standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
-->
# After Submitting
<!-- If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
-->
---------
Co-authored-by: Darren Schroeder <343840+fdncred@users.noreply.github.com>
2023-06-28 19:19:48 +02:00
|
|
|
trace!("parsing: builtin commands");
|
2023-04-07 20:09:38 +02:00
|
|
|
if !is_math_expression_like(working_set, lite_command.parts[0])
|
2023-03-10 22:20:31 +01:00
|
|
|
&& !is_unaliasable_parser_keyword(working_set, &lite_command.parts)
|
|
|
|
{
|
Improve type hovers (#9515)
2023-06-28 19:19:48 +02:00
|
|
|
trace!("parsing: not math expression or unaliasable parser keyword");
|
2023-03-10 22:20:31 +01:00
|
|
|
let name = working_set.get_span_contents(lite_command.parts[0]);
|
2023-07-13 21:05:03 +02:00
|
|
|
if let Some(decl_id) = working_set.find_decl(name) {
|
2023-03-10 22:20:31 +01:00
|
|
|
let cmd = working_set.get_decl(decl_id);
|
|
|
|
if cmd.is_alias() {
|
|
|
|
// Parse keywords that can be aliased. Note that we check for "unaliasable" keywords
|
|
|
|
// because alias can have any name, therefore, we can't check for "aliasable" keywords.
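// (Illustrative: after `alias ou = overlay use`, the keyword is reachable under an
// arbitrary name, so we parse the call first and then check which declaration it
// resolved to.)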
|
2023-04-07 02:35:45 +02:00
|
|
|
let call_expr = parse_call(
|
2023-03-10 22:20:31 +01:00
|
|
|
working_set,
|
|
|
|
&lite_command.parts,
|
|
|
|
lite_command.parts[0],
|
|
|
|
is_subexpression,
|
|
|
|
);
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
if let Expression {
|
|
|
|
expr: Expr::Call(call),
|
|
|
|
..
|
|
|
|
} = call_expr
|
|
|
|
{
|
|
|
|
// Apply parse keyword side effects
|
|
|
|
let cmd = working_set.get_decl(call.decl_id);
|
|
|
|
match cmd.name() {
|
|
|
|
"overlay hide" => return parse_overlay_hide(working_set, call),
|
|
|
|
"overlay new" => return parse_overlay_new(working_set, call),
|
2023-04-07 20:09:38 +02:00
|
|
|
"overlay use" => return parse_overlay_use(working_set, call),
|
2023-04-07 02:35:45 +02:00
|
|
|
_ => { /* this alias is not a parser keyword */ }
|
2023-03-10 22:20:31 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
Improve type hovers (#9515)
2023-06-28 19:19:48 +02:00
|
|
|
trace!("parsing: checking for keywords");
|
2022-01-22 19:24:47 +01:00
|
|
|
let name = working_set.get_span_contents(lite_command.parts[0]);
|
2021-09-27 14:10:18 +02:00
|
|
|
|
2021-09-13 21:59:11 +02:00
|
|
|
match name {
|
2023-11-19 16:25:09 +01:00
|
|
|
b"def" => parse_def(working_set, lite_command, None).0,
|
2023-11-16 23:44:28 +01:00
|
|
|
b"extern" => parse_extern(working_set, lite_command, None),
|
2023-07-03 07:45:10 +02:00
|
|
|
b"let" => parse_let(working_set, &lite_command.parts),
|
|
|
|
b"const" => parse_const(working_set, &lite_command.parts),
|
2023-04-07 20:09:38 +02:00
|
|
|
b"mut" => parse_mut(working_set, &lite_command.parts),
|
2022-01-15 16:26:52 +01:00
|
|
|
b"for" => {
|
2023-04-07 20:09:38 +02:00
|
|
|
let expr = parse_for(working_set, &lite_command.parts);
|
2023-04-07 02:35:45 +02:00
|
|
|
Pipeline::from_vec(vec![expr])
|
2022-01-15 16:26:52 +01:00
|
|
|
}
|
2023-04-07 20:09:38 +02:00
|
|
|
b"alias" => parse_alias(working_set, lite_command, None),
|
2023-05-06 22:55:10 +02:00
|
|
|
b"module" => parse_module(working_set, lite_command, None).0,
|
2022-07-29 10:57:10 +02:00
|
|
|
b"use" => {
|
2023-04-07 20:09:38 +02:00
|
|
|
let (pipeline, _) = parse_use(working_set, &lite_command.parts);
|
2023-04-07 02:35:45 +02:00
|
|
|
pipeline
|
2022-07-29 10:57:10 +02:00
|
|
|
}
|
2023-04-07 20:09:38 +02:00
|
|
|
b"overlay" => parse_keyword(working_set, lite_command, is_subexpression),
|
|
|
|
b"source" | b"source-env" => parse_source(working_set, &lite_command.parts),
|
|
|
|
b"export" => parse_export_in_block(working_set, lite_command),
|
|
|
|
b"hide" => parse_hide(working_set, &lite_command.parts),
|
|
|
|
b"where" => parse_where(working_set, &lite_command.parts),
|
2021-11-02 21:56:00 +01:00
|
|
|
#[cfg(feature = "plugin")]
|
2023-04-07 20:09:38 +02:00
|
|
|
b"register" => parse_register(working_set, &lite_command.parts),
|
2021-09-13 21:59:11 +02:00
|
|
|
_ => {
|
2023-04-07 20:09:38 +02:00
|
|
|
let expr = parse_expression(working_set, &lite_command.parts, is_subexpression);
|
2023-03-10 22:20:31 +01:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Pipeline::from_vec(vec![expr])
|
2021-09-27 14:10:18 +02:00
|
|
|
}
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-06-30 03:42:56 +02:00
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
pub fn parse_record(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2021-11-11 00:14:00 +01:00
|
|
|
let bytes = working_set.get_span_contents(span);
|
|
|
|
|
|
|
|
let mut start = span.start;
|
|
|
|
let mut end = span.end;
|
|
|
|
|
|
|
|
if bytes.starts_with(b"{") {
|
|
|
|
start += 1;
|
|
|
|
} else {
|
2023-05-24 21:53:57 +02:00
|
|
|
working_set.error(ParseError::Expected("{", Span::new(start, start + 1)));
|
2023-04-07 02:35:45 +02:00
|
|
|
return garbage(span);
|
2021-11-11 00:14:00 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
if bytes.ends_with(b"}") {
|
|
|
|
end -= 1;
|
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Unclosed("}".into(), Span::new(end, end)));
|
2021-11-11 00:14:00 +01:00
|
|
|
}
|
|
|
|
|
2022-12-03 10:44:12 +01:00
|
|
|
let inner_span = Span::new(start, end);
|
2022-01-03 06:21:26 +01:00
|
|
|
let source = working_set.get_span_contents(inner_span);
|
2021-11-11 00:14:00 +01:00
|
|
|
|
2021-11-21 19:13:09 +01:00
|
|
|
let (tokens, err) = lex(source, start, &[b'\n', b'\r', b','], &[b':'], true);
|
2023-04-07 02:35:45 +02:00
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err);
|
|
|
|
}
|
2021-11-11 00:14:00 +01:00
|
|
|
|
|
|
|
let mut output = vec![];
|
|
|
|
let mut idx = 0;
|
|
|
|
|
2023-03-28 23:23:10 +02:00
|
|
|
let mut field_types = Some(vec![]);
|
2021-11-11 00:14:00 +01:00
|
|
|
while idx < tokens.len() {
|
Spread operator in record literals (#11144)
Goes towards implementing #10598, which asks for a spread operator in
lists, in records, and when calling commands (continuation of #11006,
which only implements it in lists)
# Description
This PR is for adding a spread operator that can be used when building
records. Additional functionality can be added later.
Changes:
- Previously, the `Expr::Record` variant held `(Expression, Expression)`
pairs. It now holds instances of an enum `RecordItem` (the name isn't
amazing) that allows either a key-value mapping or a spread operator.
- `...` will be treated as the spread operator when it appears before
`$`, `{`, or `(` inside records (no whitespace allowed in between) (not
implemented yet)
- The error message for duplicate columns now includes the column name
itself, because if two spread records are involved in such an error, you
can't tell which field was duplicated from the spans alone
`...` will still be treated as a normal string outside records, and even
in records, it is not treated as a spread operator when not followed
immediately by a `$`, `{`, or `(`.
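For reference, a minimal sketch of the shape this introduces (simplified for this
description; the actual definition lives in nu-protocol's AST and may carry extra
derives, but the variant shapes match what `parse_record` constructs below):
```
// Types re-used from nu-protocol, as imported by the parser.
use nu_protocol::{ast::Expression, Span};

/// One entry of a record literal: either a `key: value` pair or a `...` spread.
pub enum RecordItem {
    /// A `key: value` mapping.
    Pair(Expression, Expression),
    /// A spread: the span covers the `...` token, the expression is what gets spread.
    Spread(Span, Expression),
}
```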
# User-Facing Changes
Users will be able to use `...` when building records.
```
> let rec = { x: 1, ...{ a: 2 } }
> $rec
╭───┬───╮
│ x │ 1 │
│ a │ 2 │
╰───┴───╯
> { foo: bar, ...$rec, baz: blah }
╭─────┬──────╮
│ foo │ bar │
│ x │ 1 │
│ a │ 2 │
│ baz │ blah │
╰─────┴──────╯
```
If you want to update a field of a record, you'll have to use `merge`
instead:
```
> { ...$rec, x: 5 }
Error: nu::shell::column_defined_twice
× Record field or table column used twice: x
╭─[entry #2:1:1]
1 │ { ...$rec, x: 5 }
· ──┬─ ┬
· │ ╰── field redefined here
· ╰── field first defined here
╰────
> $rec | merge { x: 5 }
╭───┬───╮
│ x │ 5 │
│ a │ 2 │
╰───┴───╯
```
# Tests + Formatting
# After Submitting
2023-11-29 18:31:31 +01:00
|
|
|
let curr_span = tokens[idx].span;
|
|
|
|
let curr_tok = working_set.get_span_contents(curr_span);
|
|
|
|
if curr_tok.starts_with(b"...")
|
|
|
|
&& curr_tok.len() > 3
|
|
|
|
&& (curr_tok[3] == b'$' || curr_tok[3] == b'{' || curr_tok[3] == b'(')
|
|
|
|
{
|
|
|
|
// Parse spread operator
|
|
|
|
let inner = parse_value(
|
|
|
|
working_set,
|
|
|
|
Span::new(curr_span.start + 3, curr_span.end),
|
|
|
|
&SyntaxShape::Record(vec![]),
|
|
|
|
);
|
|
|
|
idx += 1;
|
2021-11-11 00:14:00 +01:00
|
|
|
|
Spread operator in record literals (#11144)
2023-11-29 18:31:31 +01:00
|
|
|
match &inner.ty {
|
|
|
|
Type::Record(inner_fields) => {
|
|
|
|
if let Some(fields) = &mut field_types {
|
|
|
|
for (field, ty) in inner_fields {
|
|
|
|
fields.push((field.clone(), ty.clone()));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
_ => {
|
|
|
|
// We can't properly see all the field types
|
|
|
|
// so fall back to the Any type later
|
|
|
|
field_types = None;
|
|
|
|
}
|
2023-03-28 23:23:10 +02:00
|
|
|
}
|
Spread operator in record literals (#11144)
2023-11-29 18:31:31 +01:00
|
|
|
output.push(RecordItem::Spread(
|
|
|
|
Span::new(curr_span.start, curr_span.start + 3),
|
|
|
|
inner,
|
|
|
|
));
|
2023-03-28 23:23:10 +02:00
|
|
|
} else {
|
Spread operator in record literals (#11144)
2023-11-29 18:31:31 +01:00
|
|
|
// Normal key-value pair
|
|
|
|
let field = parse_value(working_set, curr_span, &SyntaxShape::Any);
|
|
|
|
|
|
|
|
idx += 1;
|
|
|
|
if idx == tokens.len() {
|
More specific errors for missing values in records (#11423)
# Description
Currently, when writing a record, if you don't give the value for a
field, the syntax error highlights the entire record instead of
pinpointing the issue. Here are some examples:
```nushell
> { a: 2, 3 } # Missing colon (and value)
Error: nu::parser::parse_mismatch
× Parse mismatch during operation.
╭─[entry #2:1:1]
1 │ { a: 2, 3 }
· ─────┬─────
· ╰── expected record
╰────
> { a: 2, 3: } # Missing value
Error: nu::parser::parse_mismatch
× Parse mismatch during operation.
╭─[entry #3:1:1]
1 │ { a: 2, 3: }
· ──────┬─────
· ╰── expected record
╰────
> { a: 2, 3 4 } # Missing colon
Error: nu::parser::parse_mismatch
× Parse mismatch during operation.
╭─[entry #4:1:1]
1 │ { a: 2, 3 4 }
· ──────┬──────
· ╰── expected record
╰────
```
In all of them, the entire record is highlighted red because an
`Expr::Garbage` is returned covering that whole span:
![image](https://github.com/nushell/nushell/assets/45539777/36660b50-23be-4353-b180-3f84eff3c220)
This PR is for highlighting only the part inside the record that could
not be parsed. If the record literal is big, an error message pointing
to the start of where the parser thinks things went wrong should help
people fix their code.
# User-Facing Changes
Below are screenshots of the new errors:
If there's a stray record key right before the record ends, it
highlights only that key and tells the user it expected a colon after
it:
![image](https://github.com/nushell/nushell/assets/45539777/94503256-8ea2-47dd-b69a-4b520c66f7b6)
If the record ends before the value for the last field was given, it
highlights the key and colon of that field and tells the user it
expected a value after the colon:
![image](https://github.com/nushell/nushell/assets/45539777/2f3837ec-3b35-4b81-8c57-706f8056ac04)
If there are two consecutive expressions without a colon between them,
it highlights everything from the second expression to the end of the
record and tells the user it expected a colon. I was tempted to add a
help message suggesting adding a colon in between, but that may not
always be the right thing to do.
![image](https://github.com/nushell/nushell/assets/45539777/1abaaaa8-1896-4909-bbb7-9a38cece5250)
# Tests + Formatting
# After Submitting
2023-12-27 10:15:12 +01:00
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"':'",
|
|
|
|
Span::new(curr_span.end, curr_span.end),
|
|
|
|
));
|
|
|
|
output.push(RecordItem::Pair(
|
|
|
|
garbage(curr_span),
|
|
|
|
garbage(Span::new(curr_span.end, curr_span.end)),
|
|
|
|
));
|
|
|
|
break;
|
Spread operator in record literals (#11144)
2023-11-29 18:31:31 +01:00
|
|
|
}
|
More specific errors for missing values in records (#11423)
2023-12-27 10:15:12 +01:00
|
|
|
let colon_span = tokens[idx].span;
|
|
|
|
let colon = working_set.get_span_contents(colon_span);
|
Spread operator in record literals (#11144)
2023-11-29 18:31:31 +01:00
|
|
|
idx += 1;
|
More specific errors for missing values in records (#11423)
# Description
Currently, when writing a record, if you don't give the value for a
field, the syntax error highlights the entire record instead of
pinpointing the issue. Here's some examples:
```nushell
> { a: 2, 3 } # Missing colon (and value)
Error: nu::parser::parse_mismatch
× Parse mismatch during operation.
╭─[entry #2:1:1]
1 │ { a: 2, 3 }
· ─────┬─────
· ╰── expected record
╰────
> { a: 2, 3: } # Missing value
Error: nu::parser::parse_mismatch
× Parse mismatch during operation.
╭─[entry #3:1:1]
1 │ { a: 2, 3: }
· ──────┬─────
· ╰── expected record
╰────
> { a: 2, 3 4 } # Missing colon
Error: nu::parser::parse_mismatch
× Parse mismatch during operation.
╭─[entry #4:1:1]
1 │ { a: 2, 3 4 }
· ──────┬──────
· ╰── expected record
╰────
```
In all of them, the entire record is highlighted red because an
`Expr::Garbage` is returned covering that whole span:
![image](https://github.com/nushell/nushell/assets/45539777/36660b50-23be-4353-b180-3f84eff3c220)
This PR is for highlighting only the part inside the record that could
not be parsed. If the record literal is big, an error message pointing
to the start of where the parser thinks things went wrong should help
people fix their code.
# User-Facing Changes
Below are screenshots of the new errors:
If there's a stray record key right before the record ends, it
highlights only that key and tells the user it expected a colon after
it:
![image](https://github.com/nushell/nushell/assets/45539777/94503256-8ea2-47dd-b69a-4b520c66f7b6)
If the record ends before the value for the last field was given, it
highlights the key and colon of that field and tells the user it
expected a value after the colon:
![image](https://github.com/nushell/nushell/assets/45539777/2f3837ec-3b35-4b81-8c57-706f8056ac04)
If there are two consecutive expressions without a colon between them,
it highlights everything from the second expression to the end of the
record and tells the user it expected a colon. I was tempted to add a
help message suggesting adding a colon in between, but that may not
always be the right thing to do.
![image](https://github.com/nushell/nushell/assets/45539777/1abaaaa8-1896-4909-bbb7-9a38cece5250)
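For reference, here are the same inputs from above annotated with the narrower spans these errors now point at (a sketch of the intent; the exact wording and highlighting are in the screenshots):
```nushell
{ a: 2, 3 }      # only `3` is flagged: expected ':' after the stray key
{ a: 2, 3: }     # `3:` is flagged: expected a value after the colon
{ a: 2, 3 4 }    # `4` through `}` is flagged: expected ':' between the expressions
```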
# Tests + Formatting
# After Submitting
2023-12-27 10:15:12 +01:00
|
|
|
if colon != b":" {
|
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"':'",
|
|
|
|
Span::new(colon_span.start, colon_span.start),
|
|
|
|
));
|
|
|
|
output.push(RecordItem::Pair(
|
|
|
|
field,
|
|
|
|
garbage(Span::new(
|
|
|
|
colon_span.start,
|
|
|
|
tokens[tokens.len() - 1].span.end,
|
|
|
|
)),
|
|
|
|
));
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
if idx == tokens.len() {
|
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"value for record field",
|
|
|
|
Span::new(colon_span.end, colon_span.end),
|
|
|
|
));
|
|
|
|
output.push(RecordItem::Pair(
|
|
|
|
garbage(Span::new(curr_span.start, colon_span.end)),
|
|
|
|
garbage(Span::new(colon_span.end, tokens[tokens.len() - 1].span.end)),
|
|
|
|
));
|
|
|
|
break;
|
2023-11-29 18:31:31 +01:00
|
|
|
}
|
|
|
|
let value = parse_value(working_set, tokens[idx].span, &SyntaxShape::Any);
|
|
|
|
idx += 1;
|
|
|
|
|
|
|
|
if let Some(field) = field.as_string() {
|
|
|
|
if let Some(fields) = &mut field_types {
|
|
|
|
fields.push((field, value.ty.clone()));
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
// We can't properly see all the field types
|
|
|
|
// so fall back to the Any type later
|
|
|
|
field_types = None;
|
|
|
|
}
|
|
|
|
output.push(RecordItem::Pair(field, value));
|
2023-03-28 23:23:10 +02:00
|
|
|
}
|
2021-11-11 00:14:00 +01:00
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::Record(output),
|
|
|
|
span,
|
|
|
|
ty: (if let Some(fields) = field_types {
|
|
|
|
Type::Record(fields)
|
|
|
|
} else {
|
|
|
|
Type::Any
|
|
|
|
}),
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-11-11 00:14:00 +01:00
|
|
|
}
|
|
|
|
|
2023-07-03 07:45:10 +02:00
|
|
|
pub fn parse_pipeline(
|
2021-09-02 10:25:22 +02:00
|
|
|
working_set: &mut StateWorkingSet,
|
2023-07-03 07:45:10 +02:00
|
|
|
pipeline: &LitePipeline,
|
2022-04-08 23:41:05 +02:00
|
|
|
is_subexpression: bool,
|
2023-07-03 07:45:10 +02:00
|
|
|
pipeline_index: usize,
|
|
|
|
) -> Pipeline {
|
|
|
|
if pipeline.commands.len() > 1 {
|
2023-07-11 20:36:34 +02:00
|
|
|
// Special case: allow `let` and `mut` to consume the whole pipeline, eg) `let abc = "foo" | str length`
|
2023-07-03 07:45:10 +02:00
|
|
|
match &pipeline.commands[0] {
|
|
|
|
LiteElement::Command(_, command) if !command.parts.is_empty() => {
|
2023-07-11 20:36:34 +02:00
|
|
|
if working_set.get_span_contents(command.parts[0]) == b"let"
|
|
|
|
|| working_set.get_span_contents(command.parts[0]) == b"mut"
|
|
|
|
{
|
2023-07-03 07:45:10 +02:00
|
|
|
let mut new_command = LiteCommand {
|
|
|
|
comments: vec![],
|
|
|
|
parts: command.parts.clone(),
|
|
|
|
};
|
2022-11-18 22:46:48 +01:00
|
|
|
|
2023-07-03 07:45:10 +02:00
|
|
|
for command in &pipeline.commands[1..] {
|
|
|
|
match command {
|
|
|
|
LiteElement::Command(Some(pipe_span), command) => {
|
|
|
|
new_command.parts.push(*pipe_span);
|
2022-01-01 22:42:50 +01:00
|
|
|
|
2023-07-03 07:45:10 +02:00
|
|
|
new_command.comments.extend_from_slice(&command.comments);
|
|
|
|
new_command.parts.extend_from_slice(&command.parts);
|
|
|
|
}
|
2023-09-04 02:21:45 +02:00
|
|
|
LiteElement::Redirection(span, ..) => {
|
|
|
|
working_set.error(ParseError::RedirectionInLetMut(*span, None))
|
|
|
|
}
|
|
|
|
LiteElement::SeparateRedirection { out, err } => {
|
|
|
|
working_set.error(ParseError::RedirectionInLetMut(
|
|
|
|
out.0.min(err.0),
|
|
|
|
Some(out.0.max(err.0)),
|
|
|
|
))
|
|
|
|
}
|
|
|
|
LiteElement::SameTargetRedirection { redirection, .. } => working_set
|
|
|
|
.error(ParseError::RedirectionInLetMut(redirection.0, None)),
|
2023-07-03 07:45:10 +02:00
|
|
|
_ => panic!("unsupported"),
|
|
|
|
}
|
|
|
|
}
|
2021-06-30 03:42:56 +02:00
|
|
|
|
2023-07-03 07:45:10 +02:00
|
|
|
// if the 'let' is complete enough, use it, if not, fall through for now
|
|
|
|
if new_command.parts.len() > 3 {
|
|
|
|
let rhs_span = nu_protocol::span(&new_command.parts[3..]);
|
2021-07-01 02:01:04 +02:00
|
|
|
|
2023-07-03 07:45:10 +02:00
|
|
|
new_command.parts.truncate(3);
|
|
|
|
new_command.parts.push(rhs_span);
|
2022-11-18 22:46:48 +01:00
|
|
|
|
2023-07-03 07:45:10 +02:00
|
|
|
let mut pipeline =
|
|
|
|
parse_builtin_commands(working_set, &new_command, is_subexpression);
|
2022-11-18 22:46:48 +01:00
|
|
|
|
2023-07-03 07:45:10 +02:00
|
|
|
if pipeline_index == 0 {
|
2023-07-13 21:05:03 +02:00
|
|
|
let let_decl_id = working_set.find_decl(b"let");
|
|
|
|
let mut_decl_id = working_set.find_decl(b"mut");
|
2023-07-11 20:36:34 +02:00
|
|
|
for element in pipeline.elements.iter_mut() {
|
|
|
|
if let PipelineElement::Expression(
|
|
|
|
_,
|
|
|
|
Expression {
|
|
|
|
expr: Expr::Call(call),
|
|
|
|
..
|
|
|
|
},
|
|
|
|
) = element
|
|
|
|
{
|
|
|
|
if Some(call.decl_id) == let_decl_id
|
|
|
|
|| Some(call.decl_id) == mut_decl_id
|
2023-07-03 07:45:10 +02:00
|
|
|
{
|
2023-07-11 20:36:34 +02:00
|
|
|
// Do an expansion
|
|
|
|
if let Some(Expression {
|
|
|
|
expr: Expr::Block(block_id),
|
|
|
|
..
|
|
|
|
}) = call.positional_iter_mut().nth(1)
|
|
|
|
{
|
|
|
|
let block = working_set.get_block(*block_id);
|
2023-04-18 10:19:08 +02:00
|
|
|
|
2023-08-28 13:38:11 +02:00
|
|
|
if let Some(PipelineElement::Expression(
|
|
|
|
prepend,
|
|
|
|
expr,
|
|
|
|
)) = block
|
|
|
|
.pipelines
|
|
|
|
.first()
|
|
|
|
.and_then(|p| p.elements.first())
|
|
|
|
.cloned()
|
2023-07-11 20:36:34 +02:00
|
|
|
{
|
|
|
|
if expr.has_in_variable(working_set) {
|
|
|
|
let new_expr = PipelineElement::Expression(
|
|
|
|
prepend,
|
|
|
|
wrap_expr_with_collect(working_set, &expr),
|
|
|
|
);
|
|
|
|
|
|
|
|
let block =
|
|
|
|
working_set.get_block_mut(*block_id);
|
|
|
|
block.pipelines[0].elements[0] = new_expr;
|
2023-07-03 07:45:10 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2023-07-11 20:36:34 +02:00
|
|
|
continue;
|
2023-07-03 07:45:10 +02:00
|
|
|
} else if element.has_in_variable(working_set)
|
|
|
|
&& !is_subexpression
|
|
|
|
{
|
|
|
|
*element = wrap_element_with_collect(working_set, element);
|
|
|
|
}
|
2023-07-11 20:36:34 +02:00
|
|
|
} else if element.has_in_variable(working_set) && !is_subexpression
|
|
|
|
{
|
|
|
|
*element = wrap_element_with_collect(working_set, element);
|
2023-07-03 07:45:10 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
Support redirect `err` and `out` to different streams (#7685)
# Description
Closes: #7364
# User-Facing Changes
Given the following shell script:
```bash
x=$(printf '=%.0s' {1..100})
echo $x
echo $x 1>&2
```
It supports the following command:
```
bash test.sh out> out.txt err> err.txt
```
Then both `out.txt` and `err.txt` will contain `=` (100 times).
## About the change
The core idea is that, during lite-parsing, we introduce a new variant
`LiteElement::SeparateRedirection` when we meet two `Redirection`
tokens (which are generated by the `lex` function).
When converting from the lite block to a block,
`LiteElement::SeparateRedirection` is converted to
`PipelineElement::SeparateRedirection`.
Then, in the block eval process, if we get
`PipelineElement::SeparateRedirection`, we invoke the `save` command with
the `--stderr` argument to achieve this behavior.
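Conceptually, that means a command with separate redirections behaves roughly like piping into `save` with a separate stderr target (a sketch of the idea, not the exact internal call):
```nushell
# what the user writes:
^ls out> out.txt err> err.txt
# roughly what it behaves like, per the description above:
^ls | save out.txt --stderr err.txt
```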
## What happened internally?
Take the following command as example:
```
^ls out> out.txt err> err.txt
```
The lex parsing result (`Tokens`) is not changed, but `LiteBlock` and
`Block` are changed after this PR.
### LiteBlock before
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(None, LiteCommand { comments: [], parts: [Span { start: 39041, end: 39044 }] }),
// actually the span of first Redirection is wrong too..
Redirection(Span { start: 39058, end: 39062 }, Stdout, LiteCommand { comments: [], parts: [Span { start: 39050, end: 39057 }] }),
Redirection(Span { start: 39058, end: 39062 }, Stderr, LiteCommand { comments: [], parts: [Span { start: 39063, end: 39070 }] })
]
}]
}
```
### LiteBlock after
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(
None,
LiteCommand { comments: [], parts: [Span { start: 38525, end: 38528 }] }),
// new one! two Redirection merged into one SeparateRedirection.
SeparateRedirection {
out: (Span { start: 38529, end: 38533 }, LiteCommand { comments: [], parts: [Span { start: 38534, end: 38541 }] }),
err: (Span { start: 38542, end: 38546 }, LiteCommand { comments: [], parts: [Span { start: 38547, end: 38554 }] })
}
]
}]
}
```
### Block before
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 39042, end: 39044 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 39041, end: 39044 },
ty: Any, custom_completion: None
}),
Redirection(Span { start: 39058, end: 39062 }, Stdout, Expression { expr: String("out.txt"), span: Span { start: 39050, end: 39057 }, ty: String, custom_completion: None }),
Redirection(Span { start: 39058, end: 39062 }, Stderr, Expression { expr: String("err.txt"), span: Span { start: 39063, end: 39070 }, ty: String, custom_completion: None })] }
```
### Block after
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 38526, end: 38528 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 38525, end: 38528 },
ty: Any,
custom_completion: None
}),
// new one! SeparateRedirection
SeparateRedirection {
out: (Span { start: 38529, end: 38533 }, Expression { expr: String("out.txt"), span: Span { start: 38534, end: 38541 }, ty: String, custom_completion: None }),
err: (Span { start: 38542, end: 38546 }, Expression { expr: String("err.txt"), span: Span { start: 38547, end: 38554 }, ty: String, custom_completion: None })
}
]
}
```
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-01-12 10:22:30 +01:00
|
|
|
|
2023-07-03 07:45:10 +02:00
|
|
|
return pipeline;
|
2023-04-18 10:19:08 +02:00
|
|
|
}
|
2023-07-03 07:45:10 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
_ => {}
|
|
|
|
};
|
|
|
|
|
|
|
|
let mut output = pipeline
|
|
|
|
.commands
|
|
|
|
.iter()
|
|
|
|
.map(|command| match command {
|
|
|
|
LiteElement::Command(span, command) => {
|
|
|
|
trace!("parsing: pipeline element: command");
|
|
|
|
let expr = parse_expression(working_set, &command.parts, is_subexpression);
|
2023-01-12 10:22:30 +01:00
|
|
|
|
2023-07-03 07:45:10 +02:00
|
|
|
PipelineElement::Expression(*span, expr)
|
|
|
|
}
|
2023-11-27 14:52:39 +01:00
|
|
|
LiteElement::Redirection(span, redirection, command, is_append_mode) => {
|
2023-07-20 13:56:46 +02:00
|
|
|
let expr = parse_value(working_set, command.parts[0], &SyntaxShape::Any);
|
2023-01-12 10:22:30 +01:00
|
|
|
|
2023-11-27 14:52:39 +01:00
|
|
|
PipelineElement::Redirection(*span, redirection.clone(), expr, *is_append_mode)
|
2023-07-03 07:45:10 +02:00
|
|
|
}
|
|
|
|
LiteElement::SeparateRedirection {
|
2023-11-27 14:52:39 +01:00
|
|
|
out: (out_span, out_command, out_append_mode),
|
|
|
|
err: (err_span, err_command, err_append_mode),
|
2023-07-03 07:45:10 +02:00
|
|
|
} => {
|
|
|
|
trace!("parsing: pipeline element: separate redirection");
|
2023-07-20 13:56:46 +02:00
|
|
|
let out_expr =
|
|
|
|
parse_value(working_set, out_command.parts[0], &SyntaxShape::Any);
|
2021-09-10 09:28:43 +02:00
|
|
|
|
2023-07-20 13:56:46 +02:00
|
|
|
let err_expr =
|
|
|
|
parse_value(working_set, err_command.parts[0], &SyntaxShape::Any);
|
2023-07-03 07:45:10 +02:00
|
|
|
|
|
|
|
PipelineElement::SeparateRedirection {
|
2023-11-27 14:52:39 +01:00
|
|
|
out: (*out_span, out_expr, *out_append_mode),
|
|
|
|
err: (*err_span, err_expr, *err_append_mode),
|
2021-11-08 07:21:24 +01:00
|
|
|
}
|
|
|
|
}
|
2023-07-03 07:45:10 +02:00
|
|
|
LiteElement::SameTargetRedirection {
|
|
|
|
cmd: (cmd_span, command),
|
2023-11-27 14:52:39 +01:00
|
|
|
redirection: (redirect_span, redirect_command, is_append_mode),
|
2023-07-03 07:45:10 +02:00
|
|
|
} => {
|
|
|
|
trace!("parsing: pipeline element: same target redirection");
|
|
|
|
let expr = parse_expression(working_set, &command.parts, is_subexpression);
|
2023-07-20 13:56:46 +02:00
|
|
|
let redirect_expr =
|
|
|
|
parse_value(working_set, redirect_command.parts[0], &SyntaxShape::Any);
|
2023-07-03 07:45:10 +02:00
|
|
|
PipelineElement::SameTargetRedirection {
|
|
|
|
cmd: (*cmd_span, expr),
|
2023-11-27 14:52:39 +01:00
|
|
|
redirection: (*redirect_span, redirect_expr, *is_append_mode),
|
2023-04-18 10:19:08 +02:00
|
|
|
}
|
|
|
|
}
|
2023-07-03 07:45:10 +02:00
|
|
|
})
|
|
|
|
.collect::<Vec<PipelineElement>>();
|
2022-11-18 22:46:48 +01:00
|
|
|
|
2023-07-03 07:45:10 +02:00
|
|
|
if is_subexpression {
|
|
|
|
for element in output.iter_mut().skip(1) {
|
|
|
|
if element.has_in_variable(working_set) {
|
|
|
|
*element = wrap_element_with_collect(working_set, element);
|
|
|
|
}
|
|
|
|
}
|
2023-04-18 10:19:08 +02:00
|
|
|
} else {
|
2023-07-03 07:45:10 +02:00
|
|
|
for element in output.iter_mut() {
|
|
|
|
if element.has_in_variable(working_set) {
|
|
|
|
*element = wrap_element_with_collect(working_set, element);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
Pipeline { elements: output }
|
|
|
|
} else {
|
|
|
|
match &pipeline.commands[0] {
|
|
|
|
LiteElement::Command(_, command)
|
2023-11-27 14:52:39 +01:00
|
|
|
| LiteElement::Redirection(_, _, command, _)
|
2023-07-03 07:45:10 +02:00
|
|
|
| LiteElement::SeparateRedirection {
|
2023-11-27 14:52:39 +01:00
|
|
|
out: (_, command, _),
|
|
|
|
..
|
2023-07-03 07:45:10 +02:00
|
|
|
} => {
|
|
|
|
let mut pipeline = parse_builtin_commands(working_set, command, is_subexpression);
|
|
|
|
|
2023-07-13 21:05:03 +02:00
|
|
|
let let_decl_id = working_set.find_decl(b"let");
|
|
|
|
let mut_decl_id = working_set.find_decl(b"mut");
|
2023-07-11 20:36:34 +02:00
|
|
|
|
2023-07-03 07:45:10 +02:00
|
|
|
if pipeline_index == 0 {
|
2023-07-11 20:36:34 +02:00
|
|
|
for element in pipeline.elements.iter_mut() {
|
|
|
|
if let PipelineElement::Expression(
|
|
|
|
_,
|
|
|
|
Expression {
|
|
|
|
expr: Expr::Call(call),
|
|
|
|
..
|
|
|
|
},
|
|
|
|
) = element
|
|
|
|
{
|
|
|
|
if Some(call.decl_id) == let_decl_id
|
|
|
|
|| Some(call.decl_id) == mut_decl_id
|
2023-07-03 07:45:10 +02:00
|
|
|
{
|
2023-07-11 20:36:34 +02:00
|
|
|
// Do an expansion
|
|
|
|
if let Some(Expression {
|
|
|
|
expr: Expr::Block(block_id),
|
|
|
|
..
|
|
|
|
}) = call.positional_iter_mut().nth(1)
|
|
|
|
{
|
|
|
|
let block = working_set.get_block(*block_id);
|
2023-07-03 07:45:10 +02:00
|
|
|
|
2023-08-28 13:38:11 +02:00
|
|
|
if let Some(PipelineElement::Expression(prepend, expr)) = block
|
|
|
|
.pipelines
|
|
|
|
.first()
|
|
|
|
.and_then(|p| p.elements.first())
|
|
|
|
.cloned()
|
|
|
|
{
|
2023-07-11 20:36:34 +02:00
|
|
|
if expr.has_in_variable(working_set) {
|
|
|
|
let new_expr = PipelineElement::Expression(
|
|
|
|
prepend,
|
|
|
|
wrap_expr_with_collect(working_set, &expr),
|
|
|
|
);
|
2023-07-03 07:45:10 +02:00
|
|
|
|
2023-07-11 20:36:34 +02:00
|
|
|
let block = working_set.get_block_mut(*block_id);
|
|
|
|
block.pipelines[0].elements[0] = new_expr;
|
2022-01-27 00:46:13 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2023-07-11 20:36:34 +02:00
|
|
|
continue;
|
2023-07-03 07:45:10 +02:00
|
|
|
} else if element.has_in_variable(working_set) && !is_subexpression {
|
|
|
|
*element = wrap_element_with_collect(working_set, element);
|
2022-01-27 00:46:13 +01:00
|
|
|
}
|
2023-07-11 20:36:34 +02:00
|
|
|
} else if element.has_in_variable(working_set) && !is_subexpression {
|
|
|
|
*element = wrap_element_with_collect(working_set, element);
|
2022-01-27 00:46:13 +01:00
|
|
|
}
|
2022-12-13 04:36:13 +01:00
|
|
|
}
|
|
|
|
}
|
2023-07-03 07:45:10 +02:00
|
|
|
pipeline
|
|
|
|
}
|
|
|
|
LiteElement::SameTargetRedirection {
|
|
|
|
cmd: (span, command),
|
2023-11-27 14:52:39 +01:00
|
|
|
redirection: (redirect_span, redirect_cmd, is_append_mode),
|
2023-07-03 07:45:10 +02:00
|
|
|
} => {
|
|
|
|
trace!("parsing: pipeline element: same target redirection");
|
|
|
|
let expr = parse_expression(working_set, &command.parts, is_subexpression);
|
Avoid blocking when `o+e>` redirects too much stderr message (#8784)
# Description
Fixes: #8565
Another PR, #7240, tried to address the issue, but it works in the
wrong way.
After this change, `o+e>` no longer redirects all stdout output followed
by all stderr output; it works more like bash does.
# User-Facing Changes
For the given python code:
```python
# test.py
import sys
print('aa'*300, flush=True)
print('bb'*999999, file=sys.stderr, flush=True)
print('cc'*300, flush=True)
```
Running `python test.py out+err> a.txt` shouldn't hang nushell, and
`a.txt` keeps the output in the same order.
## About the change
The core idea is that, during lite-parsing, we introduce a new variant
`LiteElement::SameTargetRedirection` when we meet an `out+err>` redirection
token (which is generated by the `lex` function).
When converting from the lite block to a block,
`LiteElement::SameTargetRedirection` is converted to
`PipelineElement::SameTargetRedirection`.
Then, in the block eval process, if we get
`PipelineElement::SameTargetRedirection`, we invoke `run-external` with the
`--redirect-combine` flag, then pipe the result into the `save` command.
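Conceptually, the combined redirection then behaves roughly like the following pipeline (a sketch based on the description above; the real wiring happens during block eval):
```nushell
# what the user writes:
^ls o+e> log.txt
# roughly what it behaves like, per the description above:
run-external --redirect-combine ls | save log.txt
```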
## What happened internally?
Take the following command as example:
`^ls o+e> log.txt`
The lex parsing result (`Tokens`) is not changed, but `LiteBlock` and
`Block` are changed after this PR.
### LiteBlock before
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(None, LiteCommand { comments: [], parts: [Span { start: 39041, end: 39044 }] }),
// actually the span of first Redirection is wrong too..
Redirection(Span { start: 39058, end: 39062 }, StdoutAndStderr, LiteCommand { comments: [], parts: [Span { start: 39050, end: 39057 }] }),
]
}]
}
```
### LiteBlock after
```rust
LiteBlock {
block: [
LitePipeline {
commands: [
SameTargetRedirection {
cmd: (None, LiteCommand { comments: [], parts: [Span { start: 147945, end: 147948}]}),
redirection: (Span { start: 147949, end: 147957 }, LiteCommand { comments: [], parts: [Span { start: 147958, end: 147965 }]})
}
]
}
]
}
```
### Block before
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 39042, end: 39044 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 39041, end: 39044 },
ty: Any, custom_completion: None
}),
Redirection(Span { start: 39058, end: 39062 }, StdoutAndStderr, Expression { expr: String("out.txt"), span: Span { start: 39050, end: 39057 }, ty: String, custom_completion: None })] }
```
### Block after
```rust
Pipeline {
elements: [
SameTargetRedirection {
cmd: (None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 147946, end: 147948 }, ty: String, custom_completion: None}, [], false),
span: Span { start: 147945, end: 147948},
ty: Any, custom_completion: None
}),
redirection: (Span { start: 147949, end: 147957}, Expression {expr: String("log.txt"), span: Span { start: 147958, end: 147965 },ty: String,custom_completion: None}
}
]
}
```
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
- `cargo run -- crates/nu-utils/standard_library/tests.nu` to run the
tests for the standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it automatically
> toolkit check pr
> ```
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-05-18 00:47:03 +02:00
|
|
|
|
2023-07-20 13:56:46 +02:00
|
|
|
let redirect_expr =
|
|
|
|
parse_value(working_set, redirect_cmd.parts[0], &SyntaxShape::Any);
|
2023-05-18 00:47:03 +02:00
|
|
|
|
2023-07-03 07:45:10 +02:00
|
|
|
Pipeline {
|
|
|
|
elements: vec![PipelineElement::SameTargetRedirection {
|
|
|
|
cmd: (*span, expr),
|
2023-11-27 14:52:39 +01:00
|
|
|
redirection: (*redirect_span, redirect_expr, *is_append_mode),
|
2023-07-03 07:45:10 +02:00
|
|
|
}],
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn parse_block(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
tokens: &[Token],
|
|
|
|
span: Span,
|
|
|
|
scoped: bool,
|
|
|
|
is_subexpression: bool,
|
|
|
|
) -> Block {
|
|
|
|
let (lite_block, err) = lite_parse(tokens);
|
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err);
|
|
|
|
}
|
|
|
|
|
|
|
|
trace!("parsing block: {:?}", lite_block);
|
|
|
|
|
|
|
|
if scoped {
|
|
|
|
working_set.enter_scope();
|
|
|
|
}
|
|
|
|
|
|
|
|
// Pre-declare any definition so that definitions
|
|
|
|
// that share the same block can see each other
|
|
|
|
for pipeline in &lite_block.block {
|
|
|
|
if pipeline.commands.len() == 1 {
|
|
|
|
match &pipeline.commands[0] {
|
|
|
|
LiteElement::Command(_, command)
|
2023-11-27 14:52:39 +01:00
|
|
|
| LiteElement::Redirection(_, _, command, _)
|
2023-07-03 07:45:10 +02:00
|
|
|
| LiteElement::SeparateRedirection {
|
2023-11-27 14:52:39 +01:00
|
|
|
out: (_, command, _),
|
|
|
|
..
|
2023-05-18 00:47:03 +02:00
|
|
|
}
|
2023-07-03 07:45:10 +02:00
|
|
|
| LiteElement::SameTargetRedirection {
|
|
|
|
cmd: (_, command), ..
|
|
|
|
} => parse_def_predecl(working_set, &command.parts),
|
2021-09-10 09:28:43 +02:00
|
|
|
}
|
2023-04-18 10:19:08 +02:00
|
|
|
}
|
|
|
|
}
|
2021-06-30 03:42:56 +02:00
|
|
|
|
2023-07-03 07:45:10 +02:00
|
|
|
let mut block = Block::new_with_capacity(lite_block.block.len());
|
|
|
|
|
|
|
|
for (idx, lite_pipeline) in lite_block.block.iter().enumerate() {
|
|
|
|
let pipeline = parse_pipeline(working_set, lite_pipeline, is_subexpression, idx);
|
|
|
|
block.pipelines.push(pipeline);
|
|
|
|
}
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if scoped {
|
|
|
|
working_set.exit_scope();
|
2021-06-30 03:42:56 +02:00
|
|
|
}
|
|
|
|
|
2023-04-21 21:00:33 +02:00
|
|
|
block.span = Some(span);
|
|
|
|
|
2023-07-14 23:51:28 +02:00
|
|
|
let errors = type_check::check_block_input_output(working_set, &block);
|
|
|
|
if !errors.is_empty() {
|
|
|
|
working_set.parse_errors.extend_from_slice(&errors);
|
|
|
|
}
|
2023-07-14 05:20:35 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
block
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-06-30 03:42:56 +02:00
|
|
|
|
2022-11-10 09:21:49 +01:00
|
|
|
pub fn discover_captures_in_closure(
|
2021-10-25 22:04:23 +02:00
|
|
|
working_set: &StateWorkingSet,
|
|
|
|
block: &Block,
|
|
|
|
seen: &mut Vec<VarId>,
|
2022-11-11 07:51:08 +01:00
|
|
|
seen_blocks: &mut HashMap<BlockId, Vec<(VarId, Span)>>,
|
2023-04-17 00:24:56 +02:00
|
|
|
output: &mut Vec<(VarId, Span)>,
|
|
|
|
) -> Result<(), ParseError> {
|
2021-10-25 22:04:23 +02:00
|
|
|
for flag in &block.signature.named {
|
|
|
|
if let Some(var_id) = flag.var_id {
|
|
|
|
seen.push(var_id);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
for positional in &block.signature.required_positional {
|
|
|
|
if let Some(var_id) = positional.var_id {
|
|
|
|
seen.push(var_id);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
for positional in &block.signature.optional_positional {
|
|
|
|
if let Some(var_id) = positional.var_id {
|
|
|
|
seen.push(var_id);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
for positional in &block.signature.rest_positional {
|
|
|
|
if let Some(var_id) = positional.var_id {
|
|
|
|
seen.push(var_id);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-02-15 20:31:14 +01:00
|
|
|
for pipeline in &block.pipelines {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_pipeline(working_set, pipeline, seen, seen_blocks, output)?;
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
|
|
|
|
2023-04-17 00:24:56 +02:00
|
|
|
Ok(())
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
|
|
|
|
2022-02-11 00:15:15 +01:00
|
|
|
fn discover_captures_in_pipeline(
|
2021-10-25 22:04:23 +02:00
|
|
|
working_set: &StateWorkingSet,
|
|
|
|
pipeline: &Pipeline,
|
|
|
|
seen: &mut Vec<VarId>,
|
2022-11-11 07:51:08 +01:00
|
|
|
seen_blocks: &mut HashMap<BlockId, Vec<(VarId, Span)>>,
|
2023-04-17 00:24:56 +02:00
|
|
|
output: &mut Vec<(VarId, Span)>,
|
|
|
|
) -> Result<(), ParseError> {
|
2022-11-18 22:46:48 +01:00
|
|
|
for element in &pipeline.elements {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_pipeline_element(working_set, element, seen, seen_blocks, output)?;
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
|
|
|
|
2023-04-17 00:24:56 +02:00
|
|
|
Ok(())
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
|
|
|
|
2022-11-18 22:46:48 +01:00
|
|
|
// Closes over captured variables
|
|
|
|
pub fn discover_captures_in_pipeline_element(
|
|
|
|
working_set: &StateWorkingSet,
|
|
|
|
element: &PipelineElement,
|
|
|
|
seen: &mut Vec<VarId>,
|
|
|
|
seen_blocks: &mut HashMap<BlockId, Vec<(VarId, Span)>>,
|
2023-04-17 00:24:56 +02:00
|
|
|
output: &mut Vec<(VarId, Span)>,
|
|
|
|
) -> Result<(), ParseError> {
|
2022-11-18 22:46:48 +01:00
|
|
|
match element {
|
2022-11-22 19:26:13 +01:00
|
|
|
PipelineElement::Expression(_, expression)
|
2023-11-27 14:52:39 +01:00
|
|
|
| PipelineElement::Redirection(_, _, expression, _)
|
2022-12-13 04:36:13 +01:00
|
|
|
| PipelineElement::And(_, expression)
|
2022-11-22 19:26:13 +01:00
|
|
|
| PipelineElement::Or(_, expression) => {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_expr(working_set, expression, seen, seen_blocks, output)
|
2022-11-18 22:46:48 +01:00
|
|
|
}
|
2023-01-12 10:22:30 +01:00
|
|
|
PipelineElement::SeparateRedirection {
|
2023-11-27 14:52:39 +01:00
|
|
|
out: (_, out_expr, _),
|
|
|
|
err: (_, err_expr, _),
|
2023-01-12 10:22:30 +01:00
|
|
|
} => {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_expr(working_set, out_expr, seen, seen_blocks, output)?;
|
|
|
|
discover_captures_in_expr(working_set, err_expr, seen, seen_blocks, output)?;
|
|
|
|
Ok(())
|
2023-01-12 10:22:30 +01:00
|
|
|
}
|
Avoid blocking when `o+e>` redirects too much stderr message (#8784)
# Description
Fixes: #8565
An earlier PR, #7240, tried to address the issue, but it took the wrong
approach.
After this change, `o+e>` no longer redirects all stdout output followed
by all stderr output; it behaves more like bash does.
# User-Facing Changes
For the given python code:
```python
# test.py
import sys
print('aa'*300, flush=True)
print('bb'*999999, file=sys.stderr, flush=True)
print('cc'*300, flush=True)
```
Running `python test.py out+err> a.txt` shouldn't hang nushell, and
`a.txt` keeps the output in the same order.
## About the change
The core idea is that during lite-parsing we introduce a new variant,
`LiteElement::SameTargetRedirection`, whenever we meet an `out+err>`
redirection token (which is generated by the `lex` function).
When converting from lite block to block,
`LiteElement::SameTargetRedirection` is converted to
`PipelineElement::SameTargetRedirection`.
Then, in the block eval process, when we encounter
`PipelineElement::SameTargetRedirection`, we invoke `run-external` with
the `--redirect-combine` flag and pipe the result into the `save` command.
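To see why sharing a single target matters, here is a minimal standalone Rust
sketch (not Nushell's implementation, which goes through
`run-external --redirect-combine` and `save` as described above). Because both
streams write to the same file, the output lands in `a.txt` in the order the
child produces it, and neither stream can block waiting for the other to be
drained; `test.py` is the script from the example above:
```rust
use std::fs::File;
use std::process::{Command, Stdio};

fn main() -> std::io::Result<()> {
    // Both streams point at the same open file (the clone shares the file
    // offset), so the output is interleaved in the order it is produced and
    // nothing blocks, even if the child writes far more to stderr.
    let log = File::create("a.txt")?;
    let status = Command::new("python3")
        .arg("test.py")
        .stdout(Stdio::from(log.try_clone()?))
        .stderr(Stdio::from(log))
        .status()?;
    println!("child exited with {status}");
    Ok(())
}
```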
## What happened internally?
Take the following command as an example:
`^ls o+e> log.txt`
The lex parsing result (`Tokens`) is unchanged, but `LiteBlock` and
`Block` are changed after this PR.
### LiteBlock before
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(None, LiteCommand { comments: [], parts: [Span { start: 39041, end: 39044 }] }),
// actually the span of first Redirection is wrong too..
Redirection(Span { start: 39058, end: 39062 }, StdoutAndStderr, LiteCommand { comments: [], parts: [Span { start: 39050, end: 39057 }] }),
]
}]
}
```
### LiteBlock after
```rust
LiteBlock {
block: [
LitePipeline {
commands: [
SameTargetRedirection {
cmd: (None, LiteCommand { comments: [], parts: [Span { start: 147945, end: 147948}]}),
redirection: (Span { start: 147949, end: 147957 }, LiteCommand { comments: [], parts: [Span { start: 147958, end: 147965 }]})
}
]
}
]
}
```
### Block before
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 39042, end: 39044 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 39041, end: 39044 },
ty: Any, custom_completion: None
}),
Redirection(Span { start: 39058, end: 39062 }, StdoutAndStderr, Expression { expr: String("out.txt"), span: Span { start: 39050, end: 39057 }, ty: String, custom_completion: None })] }
```
### Block after
```rust
Pipeline {
elements: [
SameTargetRedirection {
cmd: (None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 147946, end: 147948 }, ty: String, custom_completion: None}, [], false),
span: Span { start: 147945, end: 147948},
ty: Any, custom_completion: None
}),
redirection: (Span { start: 147949, end: 147957}, Expression {expr: String("log.txt"), span: Span { start: 147958, end: 147965 },ty: String,custom_completion: None}
}
]
}
```
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
- `cargo run -- crates/nu-utils/standard_library/tests.nu` to run the
tests for the standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-05-18 00:47:03 +02:00
|
|
|
PipelineElement::SameTargetRedirection {
|
|
|
|
cmd: (_, cmd_expr),
|
2023-11-27 14:52:39 +01:00
|
|
|
redirection: (_, redirect_expr, _),
|
2023-05-18 00:47:03 +02:00
|
|
|
} => {
|
|
|
|
discover_captures_in_expr(working_set, cmd_expr, seen, seen_blocks, output)?;
|
|
|
|
discover_captures_in_expr(working_set, redirect_expr, seen, seen_blocks, output)?;
|
|
|
|
Ok(())
|
|
|
|
}
|
2022-11-18 22:46:48 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-03-24 10:50:23 +01:00
|
|
|
pub fn discover_captures_in_pattern(pattern: &MatchPattern, seen: &mut Vec<VarId>) {
|
|
|
|
match &pattern.pattern {
|
|
|
|
Pattern::Variable(var_id) => seen.push(*var_id),
|
|
|
|
Pattern::List(items) => {
|
|
|
|
for item in items {
|
|
|
|
discover_captures_in_pattern(item, seen)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Pattern::Record(items) => {
|
|
|
|
for item in items {
|
|
|
|
discover_captures_in_pattern(&item.1, seen)
|
|
|
|
}
|
|
|
|
}
|
2023-03-27 00:31:57 +02:00
|
|
|
Pattern::Or(patterns) => {
|
|
|
|
for pattern in patterns {
|
|
|
|
discover_captures_in_pattern(pattern, seen)
|
|
|
|
}
|
|
|
|
}
|
2023-03-31 00:08:53 +02:00
|
|
|
Pattern::Rest(var_id) => seen.push(*var_id),
|
|
|
|
Pattern::Value(_) | Pattern::IgnoreValue | Pattern::IgnoreRest | Pattern::Garbage => {}
|
2023-03-24 10:50:23 +01:00
|
|
|
}
|
|
|
|
}
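As an aside, the traversal above is easy to model in isolation. Below is a
self-contained, simplified sketch (hypothetical `Pattern` type and
`collect_bindings` helper, not the real `nu-protocol` ones) of how every
variable bound in a match pattern ends up in `seen`:
```rust
// Hypothetical, simplified types, not the real nu-protocol ones.
#[allow(dead_code)] // not every variant is exercised in `main` below
enum Pattern {
    Variable(usize),
    Rest(usize),
    List(Vec<Pattern>),
    Record(Vec<(String, Pattern)>),
    Or(Vec<Pattern>),
    IgnoreValue,
}

// Push every variable bound by the pattern onto `seen`, recursing through
// lists, records and or-patterns, just like the function above.
fn collect_bindings(pattern: &Pattern, seen: &mut Vec<usize>) {
    match pattern {
        Pattern::Variable(id) | Pattern::Rest(id) => seen.push(*id),
        Pattern::List(items) | Pattern::Or(items) => {
            for item in items {
                collect_bindings(item, seen);
            }
        }
        Pattern::Record(fields) => {
            for (_, value) in fields {
                collect_bindings(value, seen);
            }
        }
        Pattern::IgnoreValue => {}
    }
}

fn main() {
    // Roughly mirrors a pattern like `[ $a { name: $b } _ ..$rest ]`.
    let pat = Pattern::List(vec![
        Pattern::Variable(0),
        Pattern::Record(vec![("name".into(), Pattern::Variable(1))]),
        Pattern::IgnoreValue,
        Pattern::Rest(2),
    ]);
    let mut seen = Vec::new();
    collect_bindings(&pat, &mut seen);
    println!("bound vars: {seen:?}"); // [0, 1, 2]
}
```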
|
|
|
|
|
2022-11-11 07:51:08 +01:00
|
|
|
// Closes over captured variables
|
2022-02-11 00:15:15 +01:00
|
|
|
pub fn discover_captures_in_expr(
|
2021-10-25 22:04:23 +02:00
|
|
|
working_set: &StateWorkingSet,
|
|
|
|
expr: &Expression,
|
|
|
|
seen: &mut Vec<VarId>,
|
2022-11-11 07:51:08 +01:00
|
|
|
seen_blocks: &mut HashMap<BlockId, Vec<(VarId, Span)>>,
|
2023-04-17 00:24:56 +02:00
|
|
|
output: &mut Vec<(VarId, Span)>,
|
|
|
|
) -> Result<(), ParseError> {
|
2021-10-25 22:04:23 +02:00
|
|
|
match &expr.expr {
|
|
|
|
Expr::BinaryOp(lhs, _, rhs) => {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_expr(working_set, lhs, seen, seen_blocks, output)?;
|
|
|
|
discover_captures_in_expr(working_set, rhs, seen, seen_blocks, output)?;
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
2022-04-06 21:10:25 +02:00
|
|
|
Expr::UnaryNot(expr) => {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_expr(working_set, expr, seen, seen_blocks, output)?;
|
2022-04-06 21:10:25 +02:00
|
|
|
}
|
2022-11-10 09:21:49 +01:00
|
|
|
Expr::Closure(block_id) => {
|
|
|
|
let block = working_set.get_block(*block_id);
|
|
|
|
let results = {
|
|
|
|
let mut seen = vec![];
|
2023-04-17 00:24:56 +02:00
|
|
|
let mut results = vec![];
|
|
|
|
|
|
|
|
discover_captures_in_closure(
|
|
|
|
working_set,
|
|
|
|
block,
|
|
|
|
&mut seen,
|
|
|
|
seen_blocks,
|
|
|
|
&mut results,
|
|
|
|
)?;
|
2022-11-11 07:51:08 +01:00
|
|
|
|
|
|
|
for (var_id, span) in results.iter() {
|
|
|
|
if !seen.contains(var_id) {
|
|
|
|
if let Some(variable) = working_set.get_variable_if_possible(*var_id) {
|
|
|
|
if variable.mutable {
|
|
|
|
return Err(ParseError::CaptureOfMutableVar(*span));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
results
|
2022-11-10 09:21:49 +01:00
|
|
|
};
|
|
|
|
seen_blocks.insert(*block_id, results.clone());
|
2022-11-11 07:51:08 +01:00
|
|
|
for (var_id, span) in results.into_iter() {
|
2022-11-10 09:21:49 +01:00
|
|
|
if !seen.contains(&var_id) {
|
2022-11-11 07:51:08 +01:00
|
|
|
output.push((var_id, span))
|
2022-11-10 09:21:49 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2021-10-25 22:04:23 +02:00
|
|
|
Expr::Block(block_id) => {
|
2022-05-17 00:49:59 +02:00
|
|
|
let block = working_set.get_block(*block_id);
|
2022-11-10 09:21:49 +01:00
|
|
|
// FIXME: is this correct?
|
2022-05-17 00:49:59 +02:00
|
|
|
let results = {
|
|
|
|
let mut seen = vec![];
|
2023-04-17 00:24:56 +02:00
|
|
|
let mut results = vec![];
|
|
|
|
discover_captures_in_closure(
|
|
|
|
working_set,
|
|
|
|
block,
|
|
|
|
&mut seen,
|
|
|
|
seen_blocks,
|
|
|
|
&mut results,
|
|
|
|
)?;
|
|
|
|
results
|
2022-05-17 00:49:59 +02:00
|
|
|
};
|
2023-04-17 00:24:56 +02:00
|
|
|
|
2022-05-17 00:49:59 +02:00
|
|
|
seen_blocks.insert(*block_id, results.clone());
|
2022-11-11 07:51:08 +01:00
|
|
|
for (var_id, span) in results.into_iter() {
|
2022-05-17 00:49:59 +02:00
|
|
|
if !seen.contains(&var_id) {
|
2022-11-11 07:51:08 +01:00
|
|
|
output.push((var_id, span))
|
2022-02-11 00:15:15 +01:00
|
|
|
}
|
|
|
|
}
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
2022-03-01 00:31:53 +01:00
|
|
|
Expr::Binary(_) => {}
|
2021-10-25 22:04:23 +02:00
|
|
|
Expr::Bool(_) => {}
|
|
|
|
Expr::Call(call) => {
|
2022-01-12 05:06:56 +01:00
|
|
|
let decl = working_set.get_decl(call.decl_id);
|
2022-02-11 00:15:15 +01:00
|
|
|
if let Some(block_id) = decl.get_block_id() {
|
|
|
|
match seen_blocks.get(&block_id) {
|
|
|
|
Some(capture_list) => {
|
2023-07-20 21:10:54 +02:00
|
|
|
// Push captures onto the outer closure that aren't created by that outer closure
|
|
|
|
for capture in capture_list {
|
|
|
|
if !seen.contains(&capture.0) {
|
|
|
|
output.push(*capture);
|
|
|
|
}
|
|
|
|
}
|
2022-02-11 00:15:15 +01:00
|
|
|
}
|
|
|
|
None => {
|
|
|
|
let block = working_set.get_block(block_id);
|
|
|
|
if !block.captures.is_empty() {
|
2023-07-20 21:10:54 +02:00
|
|
|
for capture in &block.captures {
|
|
|
|
if !seen.contains(capture) {
|
|
|
|
output.push((*capture, call.head));
|
|
|
|
}
|
|
|
|
}
|
2022-02-11 00:15:15 +01:00
|
|
|
} else {
|
2023-07-20 21:10:54 +02:00
|
|
|
let result = {
|
|
|
|
let mut seen = vec![];
|
|
|
|
seen_blocks.insert(block_id, output.clone());
|
|
|
|
|
|
|
|
let mut result = vec![];
|
|
|
|
discover_captures_in_closure(
|
|
|
|
working_set,
|
|
|
|
block,
|
|
|
|
&mut seen,
|
|
|
|
seen_blocks,
|
|
|
|
&mut result,
|
|
|
|
)?;
|
|
|
|
|
|
|
|
result
|
|
|
|
};
|
|
|
|
// Push captures onto the outer closure that aren't created by that outer closure
|
|
|
|
for capture in &result {
|
|
|
|
if !seen.contains(&capture.0) {
|
|
|
|
output.push(*capture);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-02-11 00:15:15 +01:00
|
|
|
seen_blocks.insert(block_id, result);
|
|
|
|
}
|
2022-01-21 17:39:55 +01:00
|
|
|
}
|
|
|
|
}
|
2022-01-12 05:06:56 +01:00
|
|
|
}
|
|
|
|
|
Allow spreading arguments to commands (#11289)
<!--
if this PR closes one or more issues, you can automatically link the PR
with
them by using one of the [*linking
keywords*](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue#linking-a-pull-request-to-an-issue-using-a-keyword),
e.g.
- this PR should close #xxxx
- fixes #xxxx
you can also mention related issues, PRs or discussions!
-->
Finishes implementing https://github.com/nushell/nushell/issues/10598,
which asks for a spread operator in lists, in records, and when calling
commands.
# Description
<!--
Thank you for improving Nushell. Please, check our [contributing
guide](../CONTRIBUTING.md) and talk to the core team before making major
changes.
Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.
-->
This PR will allow spreading arguments to commands (both internal and
external). It will also deprecate spreading arguments automatically when
passing to external commands.
# User-Facing Changes
<!-- List of all changes that impact the user experience here. This
helps us keep track of breaking changes. -->
- Users will be able to use `...` to spread arguments to custom/builtin
commands that have rest parameters or allow unknown arguments, or to any
external command
- If a custom command doesn't have a rest parameter and it doesn't allow
unknown arguments either, the spread operator will not be allowed
- Passing lists to external commands without `...` will work for now but
will cause a deprecation warning saying that it'll stop working in 0.91
(is 2 versions enough time?)
Here's a function to help with demonstrating some behavior:
```nushell
> def foo [ a, b, c?, d?, ...rest ] { [$a $b $c $d $rest] | to nuon }
```
You can pass a list of arguments to fill in the `rest` parameter using
`...`:
```nushell
> foo 1 2 3 4 ...[5 6]
[1, 2, 3, 4, [5, 6]]
```
If you don't use `...`, the list `[5 6]` will be treated as a single
argument:
```nushell
> foo 1 2 3 4 [5 6] # Note the double [[]]
[1, 2, 3, 4, [[5, 6]]]
```
You can omit optional parameters before the spread arguments:
```nushell
> foo 1 2 3 ...[4 5] # d is omitted here
[1, 2, 3, null, [4, 5]]
```
If you have multiple lists, you can spread them all:
```nushell
> foo 1 2 3 ...[4 5] 6 7 ...[8] ...[]
[1, 2, 3, null, [4, 5, 6, 7, 8]]
```
Here's the kind of error you get when you try to spread arguments to a
command with no rest parameter:
![image](https://github.com/nushell/nushell/assets/45539777/93faceae-00eb-4e59-ac3f-17f98436e6e4)
And this is the warning you get when you pass a list to an external now
(without `...`):
![image](https://github.com/nushell/nushell/assets/45539777/d368f590-201e-49fb-8b20-68476ced415e)
# Tests + Formatting
<!--
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used` to
check that you're using the standard code style
- `cargo test --workspace` to check that all tests pass (on Windows make
sure to [enable developer
mode](https://learn.microsoft.com/en-us/windows/apps/get-started/developer-mode-features-and-debugging))
- `cargo run -- -c "use std testing; testing run-tests --path
crates/nu-std"` to run the tests for the standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
-->
Added tests to cover the following cases:
- Spreading arguments to a command that doesn't have a rest parameter
(unexpected spread argument error)
- Spreading arguments to a command that doesn't have a rest parameter
*but* there's also a missing positional argument (missing positional
error)
- Spreading arguments to a command that doesn't have a rest parameter
but does allow unknown arguments, such as `exec` (allowed)
- Spreading a list literal containing arguments of the wrong type (parse
error)
- Spreading a non-list value, both to internal and external commands
- Having named arguments in the middle of rest arguments
- `explain`ing a command call that spreads its arguments
# After Submitting
<!-- If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
-->
# Examples
Suppose you have multiple tables:
```nushell
let people = [[id name age]; [0 alice 100] [1 bob 200] [2 eve 300]]
let evil_twins = [[id name age]; [0 ecila 100] [-1 bob 200] [-2 eve 300]]
```
Maybe you often find yourself needing to merge multiple tables and want
a utility to do that. You could write a function like this:
```nushell
def merge_all [ ...tables ] { $tables | reduce { |it, acc| $acc | merge $it } }
```
Then you can use it like this:
```nushell
> merge_all ...([$people $evil_twins] | each { |$it| $it | select name age })
╭───┬───────┬─────╮
│ # │ name │ age │
├───┼───────┼─────┤
│ 0 │ ecila │ 100 │
│ 1 │ bob │ 200 │
│ 2 │ eve │ 300 │
╰───┴───────┴─────╯
```
Except they had duplicate columns, so now you first want to suffix every
column with a number to tell you which table the column came from. You
can make a command for that:
```nushell
def select_and_merge [ --cols: list<string>, ...tables ] {
let renamed_tables = $tables
| enumerate
| each { |it|
$it.item | select $cols | rename ...($cols | each { |col| $col + ($it.index | into string) })
};
merge_all ...$renamed_tables
}
```
And call it like this:
```nushell
> select_and_merge --cols [name age] $people $evil_twins
╭───┬───────┬──────┬───────┬──────╮
│ # │ name0 │ age0 │ name1 │ age1 │
├───┼───────┼──────┼───────┼──────┤
│ 0 │ alice │ 100 │ ecila │ 100 │
│ 1 │ bob │ 200 │ bob │ 200 │
│ 2 │ eve │ 300 │ eve │ 300 │
╰───┴───────┴──────┴───────┴──────╯
```
---
Suppose someone's made a command to search for APT packages:
```nushell
# The main command
def search-pkgs [
--install # Whether to install any packages it finds
log_level: int # Pretend it's a good idea to make this a required positional parameter
exclude?: list<string> # Packages to exclude
repositories?: list<string> # Which repositories to look in (searches in all if not given)
...pkgs # Package names to search for
] {
{ install: $install, log_level: $log_level, exclude: ($exclude | to nuon), repositories: ($repositories | to nuon), pkgs: ($pkgs | to nuon) }
}
```
It has a lot of parameters to configure it, so you might make your own
helper commands to wrap around it for specific cases. Here's one
example:
```nushell
# Only look for packages locally
def search-pkgs-local [
--install # Whether to install any packages it finds
log_level: int
exclude?: list<string> # Packages to exclude
...pkgs # Package names to search for
] {
# All required and optional positional parameters are given
search-pkgs --install=$install $log_level [] ["<local URI or something>"] ...$pkgs
}
```
And you can run it like this:
```nushell
> search-pkgs-local --install=false 5 ...["python2.7" "vim"]
╭──────────────┬──────────────────────────────╮
│ install │ false │
│ log_level │ 5 │
│ exclude │ [] │
│ repositories │ ["<local URI or something>"] │
│ pkgs │ ["python2.7", vim] │
╰──────────────┴──────────────────────────────╯
```
One thing I realized when writing this was that if we decide to not
allow passing optional arguments using the spread operator, then you can
(mis?)use the spread operator to skip optional parameters. Here, I
didn't want to give `exclude` explicitly, so I used a spread operator to
pass the packages to install. Without it, I would've needed to do
`search-pkgs-local --install=false 5 [] "python2.7" "vim"` (explicitly
pass `[]` (or `null`, in the general case) to `exclude`). There are
probably more idiomatic ways to do this, but I just thought it was
something interesting.
If you're a virologist of the [xkcd](https://xkcd.com/350/) kind,
another helper command you might make is this:
```nushell
# Install any packages it finds
def live-dangerously [ ...pkgs ] {
# One optional argument was given (exclude), while another was not (repositories)
search-pkgs 0 [] ...$pkgs --install # Flags can go after spread arguments
}
```
Running it:
```nushell
> live-dangerously "git" "*vi*" # *vi* because I don't feel like typing out vim and neovim
╭──────────────┬─────────────╮
│ install │ true │
│ log_level │ 0 │
│ exclude │ [] │
│ repositories │ null │
│ pkgs │ [git, *vi*] │
╰──────────────┴─────────────╯
```
Here's an example that uses the spread operator more than once within
the same command call:
```nushell
let extras = [ chrome firefox python java git ]
def search-pkgs-curated [ ...pkgs ] {
(search-pkgs
1
[emacs]
["example.com", "foo.com"]
vim # A must for everyone!
...($pkgs | filter { |p| not ($p | str contains "*") }) # Remove packages with globs
python # Good tool to have
...$extras
--install=false
python3) # I forget, did I already put Python in extras?
}
```
Running it:
```nushell
> search-pkgs-curated "git" "*vi*"
╭──────────────┬───────────────────────────────────────────────────────────────────╮
│ install │ false │
│ log_level │ 1 │
│ exclude │ [emacs] │
│ repositories │ [example.com, foo.com] │
│ pkgs │ [vim, git, python, chrome, firefox, python, java, git, "python3"] │
╰──────────────┴───────────────────────────────────────────────────────────────────╯
```
2023-12-28 08:43:20 +01:00
|
|
|
for arg in &call.arguments {
|
|
|
|
match arg {
|
|
|
|
Argument::Named(named) => {
|
|
|
|
if let Some(arg) = &named.2 {
|
|
|
|
discover_captures_in_expr(working_set, arg, seen, seen_blocks, output)?;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Argument::Positional(expr)
|
|
|
|
| Argument::Unknown(expr)
|
|
|
|
| Argument::Spread(expr) => {
|
|
|
|
discover_captures_in_expr(working_set, expr, seen, seen_blocks, output)?;
|
|
|
|
}
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Expr::CellPath(_) => {}
|
2022-02-24 03:02:48 +01:00
|
|
|
Expr::DateTime(_) => {}
|
2023-12-28 08:43:20 +01:00
|
|
|
Expr::ExternalCall(head, args, _) => {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_expr(working_set, head, seen, seen_blocks, output)?;
|
2022-01-13 09:17:45 +01:00
|
|
|
|
2023-12-28 08:43:20 +01:00
|
|
|
for ExternalArgument::Regular(expr) | ExternalArgument::Spread(expr) in args {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_expr(working_set, expr, seen, seen_blocks, output)?;
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
|
|
|
}
|
do not attempt to glob expand if the file path is wrapped in quotes (#11569)
# Description
Fixes: #11455
### For arguments annotated with `:path`/`:directory`/`:glob`
To fix the issue, we need a way to know at runtime whether a path was
originally quoted. So the information needs to be added at several
levels:
* parse time (from user input to expression): we add the quoted
information to `Expr::Filepath`, `Expr::Directory`, and
`Expr::GlobPattern`
* eval time: when converting from `Expr::Filepath`, `Expr::Directory`, or
`Expr::GlobPattern` to `Value::String` at runtime, we don't auto-expand
the path if it's quoted
### For `ls`
`ls` is a special case, because it accepts a `String` as a pattern and
builds the `glob` expression inside the command itself.
So the idea behind the change is to introduce a special SyntaxShape for
`ls`, `SyntaxShape::LsGlobPattern`, which makes it easier to track whether
the pattern was originally quoted, and again we don't auto-expand the path.
Then, when constructing a glob pattern inside `ls`, we check whether the
input pattern is quoted; if so, we escape the input pattern, so we can run
`ls a[123]b` because the pattern is already escaped.
Finally, to make this check possible, we also introduce a new value type,
`Value::QuotedString`, to distinguish it from `Value::String`. It is used
to build an enum called `NuPath`, which is ultimately consumed by the `ls`
function; `ls` learns from `NuPath` whether the user input was quoted.
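Purely as an illustration of that escaping step (a standalone sketch with a
hand-rolled `escape_glob` helper, assuming `*`, `?`, `[`, and `]` are the glob
metacharacters; the real code inside `ls` may differ):
```rust
// Wrap each glob metacharacter in a character class so the globber treats
// the quoted user input as a literal file name.
fn escape_glob(literal: &str) -> String {
    let mut escaped = String::with_capacity(literal.len());
    for c in literal.chars() {
        if matches!(c, '*' | '?' | '[' | ']') {
            escaped.push('[');
            escaped.push(c);
            escaped.push(']');
        } else {
            escaped.push(c);
        }
    }
    escaped
}

fn main() {
    // A quoted `ls "[uwu]"` should match the literal file name `[uwu]`.
    assert_eq!(escape_glob("[uwu]"), "[[]uwu[]]");
    println!("{}", escape_glob("[uwu]"));
}
```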
# User-Facing Changes
It actually contains several changes:
### For arguments annotated with `:path`/`:directory`/`:glob`
#### Before
```nushell
> def foo [p: path] { echo $p }; print (foo "~/a"); print (foo '~/a')
/home/windsoilder/a
/home/windsoilder/a
> def foo [p: directory] { echo $p }; print (foo "~/a"); print (foo '~/a')
/home/windsoilder/a
/home/windsoilder/a
> def foo [p: glob] { echo $p }; print (foo "~/a"); print (foo '~/a')
/home/windsoilder/a
/home/windsoilder/a
```
#### After
```nushell
> def foo [p: path] { echo $p }; print (foo "~/a"); print (foo '~/a')
~/a
~/a
> def foo [p: directory] { echo $p }; print (foo "~/a"); print (foo '~/a')
~/a
~/a
> def foo [p: glob] { echo $p }; print (foo "~/a"); print (foo '~/a')
~/a
~/a
```
### For ls command
`touch '[uwu]'`
#### Before
```
❯ ls -D "[uwu]"
Error: × No matches found for [uwu]
╭─[entry #6:1:1]
1 │ ls -D "[uwu]"
· ───┬───
· ╰── Pattern, file or folder not found
╰────
help: no matches found
```
#### After
```
❯ ls -D "[uwu]"
╭───┬───────┬──────┬──────┬──────────╮
│ # │ name │ type │ size │ modified │
├───┼───────┼──────┼──────┼──────────┤
│ 0 │ [uwu] │ file │ 0 B │ now │
╰───┴───────┴──────┴──────┴──────────╯
```
# Tests + Formatting
Done
# After Submitting
NaN
2024-01-21 16:22:25 +01:00
|
|
|
Expr::Filepath(_, _) => {}
|
|
|
|
Expr::Directory(_, _) => {}
|
2021-10-25 22:04:23 +02:00
|
|
|
Expr::Float(_) => {}
|
|
|
|
Expr::FullCellPath(cell_path) => {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_expr(working_set, &cell_path.head, seen, seen_blocks, output)?;
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
2021-11-16 00:16:06 +01:00
|
|
|
Expr::ImportPattern(_) => {}
|
2022-09-04 17:36:42 +02:00
|
|
|
Expr::Overlay(_) => {}
|
2021-10-25 22:04:23 +02:00
|
|
|
Expr::Garbage => {}
|
2021-12-20 02:05:33 +01:00
|
|
|
Expr::Nothing => {}
|
2024-01-21 16:22:25 +01:00
|
|
|
Expr::GlobPattern(_, _) => {}
|
2021-10-25 22:04:23 +02:00
|
|
|
Expr::Int(_) => {}
|
|
|
|
Expr::Keyword(_, _, expr) => {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_expr(working_set, expr, seen, seen_blocks, output)?;
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
|
|
|
Expr::List(exprs) => {
|
|
|
|
for expr in exprs {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_expr(working_set, expr, seen, seen_blocks, output)?;
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
Expr::Operator(_) => {}
|
|
|
|
Expr::Range(expr1, expr2, expr3, _) => {
|
|
|
|
if let Some(expr) = expr1 {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_expr(working_set, expr, seen, seen_blocks, output)?;
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
|
|
|
if let Some(expr) = expr2 {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_expr(working_set, expr, seen, seen_blocks, output)?;
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
|
|
|
if let Some(expr) = expr3 {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_expr(working_set, expr, seen, seen_blocks, output)?;
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
|
|
|
}
|
Spread operator in record literals (#11144)
Goes towards implementing #10598, which asks for a spread operator in
lists, in records, and when calling commands (continuation of #11006,
which only implements it in lists)
# Description
This PR is for adding a spread operator that can be used when building
records. Additional functionality can be added later.
Changes:
- Previously, the `Expr::Record` variant held `(Expression, Expression)`
pairs. It now holds instances of an enum `RecordItem` (the name isn't
amazing) that allows either a key-value mapping or a spread operator.
- `...` will be treated as the spread operator when it appears before
`$`, `{`, or `(` inside records (no whitespace allowed in between) (not
implemented yet)
- The error message for duplicate columns now includes the column name
itself, because if two spread records are involved in such an error, you
can't tell which field was duplicated from the spans alone
`...` will still be treated as a normal string outside records, and even
in records, it is not treated as a spread operator when not followed
immediately by a `$`, `{`, or `(`.
# User-Facing Changes
Users will be able to use `...` when building records.
```
> let rec = { x: 1, ...{ a: 2 } }
> $rec
╭───┬───╮
│ x │ 1 │
│ a │ 2 │
╰───┴───╯
> { foo: bar, ...$rec, baz: blah }
╭─────┬──────╮
│ foo │ bar │
│ x │ 1 │
│ a │ 2 │
│ baz │ blah │
╰─────┴──────╯
```
If you want to update a field of a record, you'll have to use `merge`
instead:
```
> { ...$rec, x: 5 }
Error: nu::shell::column_defined_twice
× Record field or table column used twice: x
╭─[entry #2:1:1]
1 │ { ...$rec, x: 5 }
· ──┬─ ┬
· │ ╰── field redefined here
· ╰── field first defined here
╰────
> $rec | merge { x: 5 }
╭───┬───╮
│ x │ 5 │
│ a │ 2 │
╰───┴───╯
```
# Tests + Formatting
# After Submitting
2023-11-29 18:31:31 +01:00
|
|
|
Expr::Record(items) => {
|
|
|
|
for item in items {
|
|
|
|
match item {
|
|
|
|
RecordItem::Pair(field_name, field_value) => {
|
|
|
|
discover_captures_in_expr(
|
|
|
|
working_set,
|
|
|
|
field_name,
|
|
|
|
seen,
|
|
|
|
seen_blocks,
|
|
|
|
output,
|
|
|
|
)?;
|
|
|
|
discover_captures_in_expr(
|
|
|
|
working_set,
|
|
|
|
field_value,
|
|
|
|
seen,
|
|
|
|
seen_blocks,
|
|
|
|
output,
|
|
|
|
)?;
|
|
|
|
}
|
|
|
|
RecordItem::Spread(_, record) => {
|
|
|
|
discover_captures_in_expr(working_set, record, seen, seen_blocks, output)?;
|
|
|
|
}
|
|
|
|
}
|
2021-11-11 00:14:00 +01:00
|
|
|
}
|
|
|
|
}
|
2022-01-12 05:06:56 +01:00
|
|
|
Expr::Signature(sig) => {
|
|
|
|
// Something with a declaration, similar to a var decl, will introduce more VarIds into the stack at eval
|
|
|
|
for pos in &sig.required_positional {
|
|
|
|
if let Some(var_id) = pos.var_id {
|
|
|
|
seen.push(var_id);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
for pos in &sig.optional_positional {
|
|
|
|
if let Some(var_id) = pos.var_id {
|
|
|
|
seen.push(var_id);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
if let Some(rest) = &sig.rest_positional {
|
|
|
|
if let Some(var_id) = rest.var_id {
|
|
|
|
seen.push(var_id);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
for named in &sig.named {
|
|
|
|
if let Some(var_id) = named.var_id {
|
|
|
|
seen.push(var_id);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2021-10-25 22:04:23 +02:00
|
|
|
Expr::String(_) => {}
|
2021-12-25 21:50:02 +01:00
|
|
|
Expr::StringInterpolation(exprs) => {
|
|
|
|
for expr in exprs {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_expr(working_set, expr, seen, seen_blocks, output)?;
|
2021-12-25 21:50:02 +01:00
|
|
|
}
|
|
|
|
}
|
2023-03-24 02:52:01 +01:00
|
|
|
Expr::MatchBlock(match_block) => {
|
|
|
|
for match_ in match_block {
|
2023-03-24 10:50:23 +01:00
|
|
|
discover_captures_in_pattern(&match_.0, seen);
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_expr(working_set, &match_.1, seen, seen_blocks, output)?;
|
2023-03-24 02:52:01 +01:00
|
|
|
}
|
|
|
|
}
|
2021-11-26 04:49:03 +01:00
|
|
|
Expr::RowCondition(block_id) | Expr::Subexpression(block_id) => {
|
2022-05-17 00:49:59 +02:00
|
|
|
let block = working_set.get_block(*block_id);
|
2023-04-17 00:24:56 +02:00
|
|
|
|
2022-05-17 00:49:59 +02:00
|
|
|
let results = {
|
2023-04-17 00:24:56 +02:00
|
|
|
let mut results = vec![];
|
2022-05-17 00:49:59 +02:00
|
|
|
let mut seen = vec![];
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_closure(
|
|
|
|
working_set,
|
|
|
|
block,
|
|
|
|
&mut seen,
|
|
|
|
seen_blocks,
|
|
|
|
&mut results,
|
|
|
|
)?;
|
|
|
|
results
|
2022-05-17 00:49:59 +02:00
|
|
|
};
|
2023-04-18 10:19:08 +02:00
|
|
|
|
2022-05-17 00:49:59 +02:00
|
|
|
seen_blocks.insert(*block_id, results.clone());
|
2022-11-11 07:51:08 +01:00
|
|
|
for (var_id, span) in results.into_iter() {
|
2022-05-17 00:49:59 +02:00
|
|
|
if !seen.contains(&var_id) {
|
2022-11-11 07:51:08 +01:00
|
|
|
output.push((var_id, span))
|
2022-02-11 13:37:10 +01:00
|
|
|
}
|
|
|
|
}
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
|
|
|
Expr::Table(headers, values) => {
|
|
|
|
for header in headers {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_expr(working_set, header, seen, seen_blocks, output)?;
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
|
|
|
for row in values {
|
|
|
|
for cell in row {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_expr(working_set, cell, seen, seen_blocks, output)?;
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Expr::ValueWithUnit(expr, _) => {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_expr(working_set, expr, seen, seen_blocks, output)?;
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
|
|
|
Expr::Var(var_id) => {
|
2023-10-19 18:41:38 +02:00
|
|
|
if (*var_id > ENV_VARIABLE_ID || *var_id == IN_VARIABLE_ID) && !seen.contains(var_id) {
|
2022-11-11 07:51:08 +01:00
|
|
|
output.push((*var_id, expr.span));
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
Expr::VarDecl(var_id) => {
|
|
|
|
seen.push(*var_id);
|
|
|
|
}
|
2023-11-22 22:10:08 +01:00
|
|
|
Expr::Spread(expr) => {
|
|
|
|
discover_captures_in_expr(working_set, expr, seen, seen_blocks, output)?;
|
|
|
|
}
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
2023-04-17 00:24:56 +02:00
|
|
|
Ok(())
|
2021-10-25 22:04:23 +02:00
|
|
|
}
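As an aside, the essence of this capture-discovery pass can be modeled in a
few lines. The sketch below uses hypothetical, heavily simplified types (not
Nushell's): variables declared in the current scope go into `seen`, any
variable used without being declared there is reported as a capture, and a
closure gets its own `seen` list whose remaining free variables bubble up to
the enclosing scope unless that scope already declares them:
```rust
// Hypothetical, heavily simplified expression type (not Nushell's).
enum Expr {
    Int(i64),
    Var(usize),
    Let(usize, Box<Expr>),
    Closure(Vec<Expr>),
    Seq(Vec<Expr>),
}

// `seen` holds variables declared in the current scope; anything used but
// not declared here is recorded in `output` as a capture.
fn discover(expr: &Expr, seen: &mut Vec<usize>, output: &mut Vec<usize>) {
    match expr {
        Expr::Int(_) => {}
        Expr::Var(id) => {
            if !seen.contains(id) {
                output.push(*id);
            }
        }
        Expr::Let(id, rhs) => {
            discover(rhs, seen, output);
            seen.push(*id);
        }
        Expr::Closure(body) => {
            // The closure gets its own `seen` list; whatever it still needs
            // from outside becomes a capture of the outer scope too, unless
            // the outer scope already declares it.
            let mut inner_seen = Vec::new();
            let mut inner_caps = Vec::new();
            for e in body {
                discover(e, &mut inner_seen, &mut inner_caps);
            }
            for id in inner_caps {
                if !seen.contains(&id) {
                    output.push(id);
                }
            }
        }
        Expr::Seq(items) => {
            for e in items {
                discover(e, seen, output);
            }
        }
    }
}

fn main() {
    // Roughly: `let x = 1; {|| $x + $y }`. `x` is declared locally, so only
    // `y` escapes as a capture of this block.
    let block = Expr::Seq(vec![
        Expr::Let(0, Box::new(Expr::Int(1))),
        Expr::Closure(vec![Expr::Var(0), Expr::Var(1)]),
    ]);
    let mut seen = Vec::new();
    let mut captures = Vec::new();
    discover(&block, &mut seen, &mut captures);
    println!("captures: {captures:?}"); // [1], i.e. only `y`
}
```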
|
|
|
|
|
2022-11-18 22:46:48 +01:00
|
|
|
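// Wraps every expression held by a pipeline element in a synthetic `collect`
// call (via `wrap_expr_with_collect` below), preserving the element's variant and spans.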
fn wrap_element_with_collect(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
element: &PipelineElement,
|
|
|
|
) -> PipelineElement {
|
|
|
|
match element {
|
2022-11-22 19:26:13 +01:00
|
|
|
PipelineElement::Expression(span, expression) => {
|
|
|
|
PipelineElement::Expression(*span, wrap_expr_with_collect(working_set, expression))
|
2022-11-18 22:46:48 +01:00
|
|
|
}
|
2023-11-27 14:52:39 +01:00
|
|
|
PipelineElement::Redirection(span, redirection, expression, is_append_mode) => {
|
2022-11-22 19:26:13 +01:00
|
|
|
PipelineElement::Redirection(
|
|
|
|
*span,
|
|
|
|
redirection.clone(),
|
|
|
|
wrap_expr_with_collect(working_set, expression),
|
2023-11-27 14:52:39 +01:00
|
|
|
*is_append_mode,
|
2022-11-22 19:26:13 +01:00
|
|
|
)
|
2022-11-18 22:46:48 +01:00
|
|
|
}
|
Support redirect `err` and `out` to different streams (#7685)
# Description
Closes: #7364
# User-Facing Changes
Given the following shell script:
```bash
x=$(printf '=%.0s' {1..100})
echo $x
echo $x 1>&2
```
It supports the following command:
```
bash test.sh out> out.txt err> err.txt
```
Then both `out.txt` and `err.txt` will contain `=` repeated 100 times.
## About the change
The core idea is that during lite-parsing we introduce a new variant,
`LiteElement::SeparateRedirection`, whenever we meet two Redirection
tokens (which are generated by the `lex` function).
While converting the lite block into a block,
`LiteElement::SeparateRedirection` is converted to
`PipelineElement::SeparateRedirection`.
Then, during block evaluation, when we encounter
`PipelineElement::SeparateRedirection`, we invoke the `save` command with
the `--stderr` argument to achieve this behavior.
## What happened internally?
Take the following command as an example:
```
^ls out> out.txt err> err.txt
```
The lex parsing result (`Tokens`) is unchanged, but `LiteBlock` and
`Block` are changed after this PR.
### LiteBlock before
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(None, LiteCommand { comments: [], parts: [Span { start: 39041, end: 39044 }] }),
// actually the span of first Redirection is wrong too..
Redirection(Span { start: 39058, end: 39062 }, Stdout, LiteCommand { comments: [], parts: [Span { start: 39050, end: 39057 }] }),
Redirection(Span { start: 39058, end: 39062 }, Stderr, LiteCommand { comments: [], parts: [Span { start: 39063, end: 39070 }] })
]
}]
}
```
### LiteBlock after
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(
None,
LiteCommand { comments: [], parts: [Span { start: 38525, end: 38528 }] }),
// new one! two Redirection merged into one SeparateRedirection.
SeparateRedirection {
out: (Span { start: 38529, end: 38533 }, LiteCommand { comments: [], parts: [Span { start: 38534, end: 38541 }] }),
err: (Span { start: 38542, end: 38546 }, LiteCommand { comments: [], parts: [Span { start: 38547, end: 38554 }] })
}
]
}]
}
```
### Block before
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 39042, end: 39044 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 39041, end: 39044 },
ty: Any, custom_completion: None
}),
Redirection(Span { start: 39058, end: 39062 }, Stdout, Expression { expr: String("out.txt"), span: Span { start: 39050, end: 39057 }, ty: String, custom_completion: None }),
Redirection(Span { start: 39058, end: 39062 }, Stderr, Expression { expr: String("err.txt"), span: Span { start: 39063, end: 39070 }, ty: String, custom_completion: None })] }
```
### Block after
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 38526, end: 38528 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 38525, end: 38528 },
ty: Any,
custom_completion: None
}),
// new one! SeparateRedirection
SeparateRedirection {
out: (Span { start: 38529, end: 38533 }, Expression { expr: String("out.txt"), span: Span { start: 38534, end: 38541 }, ty: String, custom_completion: None }),
err: (Span { start: 38542, end: 38546 }, Expression { expr: String("err.txt"), span: Span { start: 38547, end: 38554 }, ty: String, custom_completion: None })
}
]
}
```
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-01-12 10:22:30 +01:00
|
|
|
PipelineElement::SeparateRedirection {
|
2023-11-27 14:52:39 +01:00
|
|
|
out: (out_span, out_exp, out_append_mode),
|
|
|
|
err: (err_span, err_exp, err_append_mode),
|
2023-01-12 10:22:30 +01:00
|
|
|
} => PipelineElement::SeparateRedirection {
|
2023-11-27 14:52:39 +01:00
|
|
|
out: (
|
|
|
|
*out_span,
|
|
|
|
wrap_expr_with_collect(working_set, out_exp),
|
|
|
|
*out_append_mode,
|
|
|
|
),
|
|
|
|
err: (
|
|
|
|
*err_span,
|
|
|
|
wrap_expr_with_collect(working_set, err_exp),
|
|
|
|
*err_append_mode,
|
|
|
|
),
|
2023-01-12 10:22:30 +01:00
|
|
|
},
|
Avoid blocking when `o+e>` redirects too much stderr message (#8784)
# Description
Fixes: #8565
An earlier PR, #7240, tried to address the issue, but it works in the
wrong way.
After this change, `o+e>` no longer redirects all stdout output followed by
all stderr output; it behaves more like bash does.
# User-Facing Changes
For the given python code:
```python
# test.py
import sys
print('aa'*300, flush=True)
print('bb'*999999, file=sys.stderr, flush=True)
print('cc'*300, flush=True)
```
Running `python test.py out+err> a.txt` shouldn't hang nushell, and
`a.txt` keeps the output in the same order.
## About the change
The core idea is that during lite-parsing we introduce a new variant,
`LiteElement::SameTargetRedirection`, whenever we meet an `out+err>`
redirection token (which is generated by the `lex` function).
While converting the lite block into a block,
`LiteElement::SameTargetRedirection` is converted to
`PipelineElement::SameTargetRedirection`.
Then, during block evaluation, when we encounter
`PipelineElement::SameTargetRedirection`, we invoke `run-external` with the
`--redirect-combine` flag and pipe the result into the `save` command.
## What happened internally?
Take the following command as an example:
`^ls o+e> log.txt`
The lex parsing result (`Tokens`) is unchanged, but `LiteBlock` and
`Block` are changed after this PR.
### LiteBlock before
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(None, LiteCommand { comments: [], parts: [Span { start: 39041, end: 39044 }] }),
// actually the span of first Redirection is wrong too..
Redirection(Span { start: 39058, end: 39062 }, StdoutAndStderr, LiteCommand { comments: [], parts: [Span { start: 39050, end: 39057 }] }),
]
}]
}
```
### LiteBlock after
```rust
LiteBlock {
block: [
LitePipeline {
commands: [
SameTargetRedirection {
cmd: (None, LiteCommand { comments: [], parts: [Span { start: 147945, end: 147948}]}),
redirection: (Span { start: 147949, end: 147957 }, LiteCommand { comments: [], parts: [Span { start: 147958, end: 147965 }]})
}
]
}
]
}
```
### Block before
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 39042, end: 39044 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 39041, end: 39044 },
ty: Any, custom_completion: None
}),
Redirection(Span { start: 39058, end: 39062 }, StdoutAndStderr, Expression { expr: String("out.txt"), span: Span { start: 39050, end: 39057 }, ty: String, custom_completion: None })] }
```
### Block after
```rust
Pipeline {
elements: [
SameTargetRedirection {
cmd: (None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 147946, end: 147948 }, ty: String, custom_completion: None}, [], false),
span: Span { start: 147945, end: 147948},
ty: Any, custom_completion: None
}),
redirection: (Span { start: 147949, end: 147957}, Expression {expr: String("log.txt"), span: Span { start: 147958, end: 147965 },ty: String,custom_completion: None}
}
]
}
```
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
- `cargo run -- crates/nu-utils/standard_library/tests.nu` to run the
tests for the standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-05-18 00:47:03 +02:00
|
|
|
PipelineElement::SameTargetRedirection {
|
|
|
|
cmd: (cmd_span, cmd_exp),
|
2023-11-27 14:52:39 +01:00
|
|
|
redirection: (redirect_span, redirect_exp, is_append_mode),
|
2023-05-18 00:47:03 +02:00
|
|
|
} => PipelineElement::SameTargetRedirection {
|
|
|
|
cmd: (*cmd_span, wrap_expr_with_collect(working_set, cmd_exp)),
|
|
|
|
redirection: (
|
|
|
|
*redirect_span,
|
|
|
|
wrap_expr_with_collect(working_set, redirect_exp),
|
2023-11-27 14:52:39 +01:00
|
|
|
*is_append_mode,
|
2023-05-18 00:47:03 +02:00
|
|
|
),
|
|
|
|
},
|
2022-12-13 04:36:13 +01:00
|
|
|
PipelineElement::And(span, expression) => {
|
|
|
|
PipelineElement::And(*span, wrap_expr_with_collect(working_set, expression))
|
|
|
|
}
|
2022-11-22 19:26:13 +01:00
|
|
|
PipelineElement::Or(span, expression) => {
|
|
|
|
PipelineElement::Or(*span, wrap_expr_with_collect(working_set, expression))
|
2022-11-18 22:46:48 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-11-08 07:21:24 +01:00
|
|
|
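// Rewrites `expr` as a call roughly equivalent to `collect --keep-env { |$in| <expr> }`:
// the original expression becomes the body of a closure whose single required
// positional parameter is `$in`, and that closure is passed to `collect`.
// If no `collect` declaration is in scope, a garbage expression is returned instead.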
fn wrap_expr_with_collect(working_set: &mut StateWorkingSet, expr: &Expression) -> Expression {
|
|
|
|
let span = expr.span;
|
|
|
|
|
2023-07-13 21:05:03 +02:00
|
|
|
if let Some(decl_id) = working_set.find_decl(b"collect") {
|
2021-11-08 07:21:24 +01:00
|
|
|
let mut output = vec![];
|
|
|
|
|
2023-01-28 18:55:29 +01:00
|
|
|
let var_id = IN_VARIABLE_ID;
|
2021-11-08 07:21:24 +01:00
|
|
|
let mut signature = Signature::new("");
|
|
|
|
signature.required_positional.push(PositionalArg {
|
|
|
|
var_id: Some(var_id),
|
2021-12-30 04:26:40 +01:00
|
|
|
name: "$in".into(),
|
2021-11-08 07:21:24 +01:00
|
|
|
desc: String::new(),
|
|
|
|
shape: SyntaxShape::Any,
|
2022-03-07 21:08:56 +01:00
|
|
|
default_value: None,
|
2021-11-08 07:21:24 +01:00
|
|
|
});
|
|
|
|
|
2022-02-11 00:15:15 +01:00
|
|
|
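// Build a block containing a single pipeline with just the original expression;
// it becomes the closure body handed to `collect`.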
let block = Block {
|
2023-12-26 18:46:49 +01:00
|
|
|
pipelines: vec![Pipeline::from_vec(vec![expr.clone()])],
|
2021-11-08 07:21:24 +01:00
|
|
|
signature: Box::new(signature),
|
|
|
|
..Default::default()
|
|
|
|
};
|
|
|
|
|
|
|
|
let block_id = working_set.add_block(block);
|
|
|
|
|
2022-04-09 04:55:02 +02:00
|
|
|
output.push(Argument::Positional(Expression {
|
2022-11-10 09:21:49 +01:00
|
|
|
expr: Expr::Closure(block_id),
|
2021-11-08 07:21:24 +01:00
|
|
|
span,
|
2022-04-07 06:34:09 +02:00
|
|
|
ty: Type::Any,
|
2021-11-08 07:21:24 +01:00
|
|
|
custom_completion: None,
|
2022-04-09 04:55:02 +02:00
|
|
|
}));
|
2021-11-08 07:21:24 +01:00
|
|
|
|
2022-10-13 11:04:34 +02:00
|
|
|
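// Pass the `--keep-env` flag so that environment changes made by the wrapped
// expression are preserved.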
output.push(Argument::Named((
|
|
|
|
Spanned {
|
|
|
|
item: "keep-env".to_string(),
|
|
|
|
span: Span::new(0, 0),
|
|
|
|
},
|
|
|
|
None,
|
|
|
|
None,
|
|
|
|
)));
|
|
|
|
|
2021-12-30 04:26:40 +01:00
|
|
|
// The containing, synthetic call to `collect`.
|
|
|
|
// We don't want to give it a real span, as that would confuse flattening;
|
|
|
|
// the args are where the real span information comes from
|
2021-11-08 07:21:24 +01:00
|
|
|
Expression {
|
|
|
|
expr: Expr::Call(Box::new(Call {
|
2021-12-30 04:26:40 +01:00
|
|
|
head: Span::new(0, 0),
|
2022-04-09 04:55:02 +02:00
|
|
|
arguments: output,
|
2021-11-08 07:21:24 +01:00
|
|
|
decl_id,
|
2022-02-21 23:22:21 +01:00
|
|
|
redirect_stdout: true,
|
|
|
|
redirect_stderr: false,
|
2023-04-05 18:56:48 +02:00
|
|
|
parser_info: HashMap::new(),
|
2021-11-08 07:21:24 +01:00
|
|
|
})),
|
|
|
|
span,
|
2023-07-27 20:26:28 +02:00
|
|
|
ty: Type::Any,
|
2021-11-08 07:21:24 +01:00
|
|
|
custom_completion: None,
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
Expression::garbage(span)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-09-06 22:41:30 +02:00
|
|
|
// Parses a vector of u8 to create an AST Block. If a file name is given, then
|
|
|
|
// the name is stored in the working set. When parsing a source without a file
|
|
|
|
// name, the source bytes are stored under the name "source"
|
|
|
|
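// A minimal usage sketch (illustrative only; assumes an existing `EngineState`
// value named `engine_state` and that `StateWorkingSet::new` takes a reference to it):
//
//     let mut working_set = StateWorkingSet::new(&engine_state);
//     let block = parse(&mut working_set, Some("example.nu"), b"ls | length", false);
//     // Parse errors, if any, accumulate on `working_set` rather than being returned.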
pub fn parse(
|
2021-09-02 10:25:22 +02:00
|
|
|
working_set: &mut StateWorkingSet,
|
2021-09-06 22:41:30 +02:00
|
|
|
fname: Option<&str>,
|
2021-09-02 10:25:22 +02:00
|
|
|
contents: &[u8],
|
|
|
|
scoped: bool,
|
2023-04-07 02:35:45 +02:00
|
|
|
) -> Block {
|
2021-09-06 22:41:30 +02:00
|
|
|
let name = match fname {
|
2023-04-12 19:36:29 +02:00
|
|
|
Some(fname) => {
|
|
|
|
// use the canonical name for this filename
|
|
|
|
nu_path::expand_to_real_path(fname)
|
|
|
|
.to_string_lossy()
|
|
|
|
.to_string()
|
|
|
|
}
|
2021-09-06 22:41:30 +02:00
|
|
|
None => "source".to_string(),
|
|
|
|
};
|
2021-07-01 02:01:04 +02:00
|
|
|
|
2023-04-09 22:55:47 +02:00
|
|
|
let file_id = working_set.add_file(name, contents);
|
|
|
|
let new_span = working_set.get_span_for_file(file_id);
|
2021-07-01 02:01:04 +02:00
|
|
|
|
2023-04-21 21:00:33 +02:00
|
|
|
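// If this exact source span was parsed before, reuse the cached block;
// otherwise lex the contents and parse them as a fresh block.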
let previously_parsed_block = working_set.find_block_by_span(new_span);
|
|
|
|
|
|
|
|
let mut output = {
|
|
|
|
if let Some(block) = previously_parsed_block {
|
|
|
|
return block;
|
|
|
|
} else {
|
|
|
|
let (output, err) = lex(contents, new_span.start, &[], &[], false);
|
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err)
|
|
|
|
}
|
2021-07-01 02:01:04 +02:00
|
|
|
|
2023-04-21 21:00:33 +02:00
|
|
|
parse_block(working_set, &output, new_span, scoped, false)
|
|
|
|
}
|
|
|
|
};
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2022-02-11 00:15:15 +01:00
|
|
|
let mut seen = vec![];
|
|
|
|
let mut seen_blocks = HashMap::new();
|
|
|
|
|
2023-04-17 00:24:56 +02:00
|
|
|
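// Discover captures for the freshly parsed top-level block and store them on `output`.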
let mut captures = vec![];
|
|
|
|
match discover_captures_in_closure(
|
|
|
|
working_set,
|
|
|
|
&output,
|
|
|
|
&mut seen,
|
|
|
|
&mut seen_blocks,
|
|
|
|
&mut captures,
|
|
|
|
) {
|
|
|
|
Ok(_) => output.captures = captures.into_iter().map(|(var_id, _)| var_id).collect(),
|
2023-04-07 02:35:45 +02:00
|
|
|
Err(err) => working_set.error(err),
|
2022-11-11 07:51:08 +01:00
|
|
|
}
|
2022-02-11 00:15:15 +01:00
|
|
|
|
2022-02-11 13:37:10 +01:00
|
|
|
// Also check other blocks that might have been imported
|
2023-04-07 02:35:45 +02:00
|
|
|
let mut errors = vec![];
|
2022-02-11 13:37:10 +01:00
|
|
|
for (block_idx, block) in working_set.delta.blocks.iter().enumerate() {
|
|
|
|
let block_id = block_idx + working_set.permanent_state.num_blocks();
|
|
|
|
|
|
|
|
if !seen_blocks.contains_key(&block_id) {
|
2023-04-17 00:24:56 +02:00
|
|
|
let mut captures = vec![];
|
|
|
|
|
|
|
|
match discover_captures_in_closure(
|
|
|
|
working_set,
|
|
|
|
block,
|
|
|
|
&mut seen,
|
|
|
|
&mut seen_blocks,
|
|
|
|
&mut captures,
|
|
|
|
) {
|
|
|
|
Ok(_) => {
|
2022-11-11 07:51:08 +01:00
|
|
|
seen_blocks.insert(block_id, captures);
|
|
|
|
}
|
2023-04-07 02:35:45 +02:00
|
|
|
Err(err) => {
|
|
|
|
errors.push(err);
|
|
|
|
}
|
2022-11-11 07:51:08 +01:00
|
|
|
}
|
2022-02-11 13:37:10 +01:00
|
|
|
}
|
|
|
|
}
|
2023-04-07 02:35:45 +02:00
|
|
|
for err in errors {
|
|
|
|
working_set.error(err)
|
|
|
|
}
|
2022-02-11 13:37:10 +01:00
|
|
|
|
2022-02-11 00:15:15 +01:00
|
|
|
for (block_id, captures) in seen_blocks.into_iter() {
|
|
|
|
// In theory, we should only be updating captures where we have new information
|
|
|
|
// the only place where this is possible would be blocks that are newly created
|
|
|
|
// by our working set delta. If we ever tried to modify the permanent state, we'd
|
|
|
|
// panic (again, in theory, this shouldn't be possible)
|
|
|
|
let block = working_set.get_block(block_id);
|
|
|
|
let block_captures_empty = block.captures.is_empty();
|
|
|
|
if !captures.is_empty() && block_captures_empty {
|
|
|
|
let block = working_set.get_block_mut(block_id);
|
2022-11-11 07:51:08 +01:00
|
|
|
block.captures = captures.into_iter().map(|(var_id, _)| var_id).collect();
|
2022-02-11 00:15:15 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
output
|
2021-06-30 03:42:56 +02:00
|
|
|
}
|