2021-09-02 10:25:22 +02:00
|
|
|
use crate::{
|
2022-12-21 23:21:03 +01:00
|
|
|
eval::{eval_constant, value_as_string},
|
2023-03-24 12:54:06 +01:00
|
|
|
lex::{lex, lex_signature},
|
2022-12-22 12:41:44 +01:00
|
|
|
lite_parser::{lite_parse, LiteCommand, LiteElement},
|
|
|
|
parse_mut,
|
2023-03-24 02:52:01 +01:00
|
|
|
parse_patterns::{parse_match_pattern, parse_pattern},
|
2021-09-02 10:25:22 +02:00
|
|
|
type_check::{math_result_type, type_compatible},
|
2023-04-07 02:35:45 +02:00
|
|
|
Token, TokenContents,
|
2021-08-17 01:00:00 +02:00
|
|
|
};
|
2021-07-02 03:42:25 +02:00
|
|
|
|
2023-04-05 18:56:48 +02:00
|
|
|
use nu_engine::DIR_VAR_PARSER_INFO;
|
2021-09-02 03:29:43 +02:00
|
|
|
use nu_protocol::{
|
2021-09-04 23:52:57 +02:00
|
|
|
ast::{
|
2022-11-11 07:51:08 +01:00
|
|
|
Argument, Assignment, Bits, Block, Boolean, Call, CellPath, Comparison, Expr, Expression,
|
2023-03-24 10:50:23 +01:00
|
|
|
FullCellPath, ImportPattern, ImportPatternHead, ImportPatternMember, MatchPattern, Math,
|
|
|
|
Operator, PathMember, Pattern, Pipeline, PipelineElement, RangeInclusion, RangeOperator,
|
2021-09-04 23:52:57 +02:00
|
|
|
},
|
2021-09-02 20:21:37 +02:00
|
|
|
engine::StateWorkingSet,
|
2023-05-02 17:17:14 +02:00
|
|
|
span, BlockId, DidYouMean, Flag, ParseError, PositionalArg, Signature, Span, Spanned,
|
|
|
|
SyntaxShape, Type, Unit, VarId, ENV_VARIABLE_ID, IN_VARIABLE_ID,
|
2021-09-02 03:29:43 +02:00
|
|
|
};
|
2021-07-23 23:19:30 +02:00
|
|
|
|
2021-09-26 20:39:19 +02:00
|
|
|
use crate::parse_keywords::{
|
2023-04-05 18:56:48 +02:00
|
|
|
find_dirs_var, is_unaliasable_parser_keyword, parse_alias, parse_def, parse_def_predecl,
|
2023-03-10 22:20:31 +01:00
|
|
|
parse_export_in_block, parse_extern, parse_for, parse_hide, parse_keyword, parse_let_or_const,
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_module, parse_overlay_hide, parse_overlay_new, parse_overlay_use, parse_source,
|
|
|
|
parse_use, parse_where, parse_where_expr, LIB_DIRS_VAR,
|
2021-09-02 03:29:43 +02:00
|
|
|
};
|
2021-07-23 23:19:30 +02:00
|
|
|
|
2022-06-17 20:11:48 +02:00
|
|
|
use itertools::Itertools;
|
2022-01-01 22:42:50 +01:00
|
|
|
use log::trace;
|
2022-03-01 00:31:53 +01:00
|
|
|
use std::{
|
|
|
|
collections::{HashMap, HashSet},
|
|
|
|
num::ParseIntError,
|
2023-04-10 23:52:51 +02:00
|
|
|
str,
|
2022-03-01 00:31:53 +01:00
|
|
|
};
|
2021-11-30 07:14:05 +01:00
|
|
|
|
2021-11-02 21:56:00 +01:00
|
|
|
#[cfg(feature = "plugin")]
|
2021-12-03 15:29:55 +01:00
|
|
|
use crate::parse_keywords::parse_register;
|
2021-11-02 21:56:00 +01:00
|
|
|
|
2021-09-26 20:39:19 +02:00
|
|
|
/// Create a garbage (unparseable) expression covering `span`.
///
/// Used as a placeholder so the parser can record an error and keep going
/// instead of aborting.
pub fn garbage(span: Span) -> Expression {
    Expression::garbage(span)
}
|
|
|
|
|
2022-02-15 20:31:14 +01:00
|
|
|
/// Create a pipeline containing a single garbage expression that covers the
/// union of all of `spans`; used for error recovery at the pipeline level.
pub fn garbage_pipeline(spans: &[Span]) -> Pipeline {
    Pipeline::from_vec(vec![garbage(span(spans))])
}
|
|
|
|
|
2021-07-01 03:31:02 +02:00
|
|
|
/// Returns true when byte `b` may appear inside an identifier.
///
/// A byte qualifies as long as it is not one of the characters that begin
/// another syntactic construct (cell paths, lists, subexpressions, records)
/// or an operator symbol.
fn is_identifier_byte(b: u8) -> bool {
    !matches!(
        b,
        b'.' | b'['
            | b'('
            | b'{'
            | b'+'
            | b'-'
            | b'*'
            | b'^'
            | b'/'
            | b'='
            | b'!'
            | b'<'
            | b'>'
            | b'&'
            | b'|'
    )
}
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
/// Heuristically decide whether the text at `span` should be parsed as a math
/// (value/operator) expression rather than a command call.
///
/// After a few cheap checks (keywords, leading characters), it speculatively
/// runs a series of value parsers — number, filesize, duration, datetime,
/// binary, range — and returns true for the first one that succeeds. Errors
/// produced by failed speculative attempts are rolled back with `truncate`,
/// so the caller's error state is left unchanged.
pub fn is_math_expression_like(working_set: &mut StateWorkingSet, span: Span) -> bool {
    let bytes = working_set.get_span_contents(span);
    if bytes.is_empty() {
        return false;
    }

    // Literal keywords and expression-starting keywords parse as expressions.
    if bytes == b"true"
        || bytes == b"false"
        || bytes == b"null"
        || bytes == b"not"
        || bytes == b"if"
        || bytes == b"match"
    {
        return true;
    }

    let b = bytes[0];

    // Leading characters that unambiguously begin a value expression:
    // subexpression, record/closure, list, variable, string, or a negative
    // number / range.
    if b == b'(' || b == b'{' || b == b'[' || b == b'$' || b == b'"' || b == b'\'' || b == b'-' {
        return true;
    }

    // Remember the error count so speculative parses below can be undone.
    let starting_error_count = working_set.parse_errors.len();

    // Number
    parse_number(working_set, span);
    if working_set.parse_errors.len() == starting_error_count {
        return true;
    }
    working_set.parse_errors.truncate(starting_error_count);

    // Filesize
    parse_filesize(working_set, span);
    if working_set.parse_errors.len() == starting_error_count {
        return true;
    }
    working_set.parse_errors.truncate(starting_error_count);

    // Duration
    parse_duration(working_set, span);
    if working_set.parse_errors.len() == starting_error_count {
        return true;
    }
    working_set.parse_errors.truncate(starting_error_count);

    // Datetime
    parse_datetime(working_set, span);
    if working_set.parse_errors.len() == starting_error_count {
        return true;
    }
    working_set.parse_errors.truncate(starting_error_count);

    // Binary literal
    parse_binary(working_set, span);
    if working_set.parse_errors.len() == starting_error_count {
        return true;
    }
    working_set.parse_errors.truncate(starting_error_count);

    // Range
    parse_range(working_set, span);

    if working_set.parse_errors.len() == starting_error_count {
        return true;
    }
    working_set.parse_errors.truncate(starting_error_count);

    // Nothing matched: not a math expression.
    false
}
|
|
|
|
|
2021-07-01 03:31:02 +02:00
|
|
|
fn is_identifier(bytes: &[u8]) -> bool {
|
|
|
|
bytes.iter().all(|x| is_identifier_byte(*x))
|
|
|
|
}
|
|
|
|
|
2023-03-24 02:52:01 +01:00
|
|
|
pub fn is_variable(bytes: &[u8]) -> bool {
|
2021-07-01 03:31:02 +02:00
|
|
|
if bytes.len() > 1 && bytes[0] == b'$' {
|
|
|
|
is_identifier(&bytes[1..])
|
|
|
|
} else {
|
|
|
|
is_identifier(bytes)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-11-14 20:40:26 +01:00
|
|
|
/// Strip one pair of matching surrounding quotes (double, single, or
/// backtick) from `bytes`, returning the inner slice. Unquoted or mismatched
/// input is returned unchanged.
pub fn trim_quotes(bytes: &[u8]) -> &[u8] {
    // A wrapper quote only counts when the slice is longer than one byte,
    // so a lone quote character is not "trimmed" into an empty slice.
    let wrapped_by =
        |q: u8| bytes.len() > 1 && bytes.first() == Some(&q) && bytes.last() == Some(&q);

    if wrapped_by(b'"') || wrapped_by(b'\'') || wrapped_by(b'`') {
        &bytes[1..bytes.len() - 1]
    } else {
        bytes
    }
}
|
|
|
|
|
2022-05-03 02:37:38 +02:00
|
|
|
/// Strip one pair of matching surrounding quotes (double, single, or
/// backtick) from `s`, returning the inner substring. Unquoted or mismatched
/// input is returned unchanged.
pub fn trim_quotes_str(s: &str) -> &str {
    // Require more than one character so a lone quote is left alone.
    if s.len() > 1 {
        for quote in ['"', '\'', '`'] {
            if s.starts_with(quote) && s.ends_with(quote) {
                // Quotes are ASCII, so these byte offsets are valid char
                // boundaries.
                return &s[1..s.len() - 1];
            }
        }
    }
    s
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
pub fn check_call(working_set: &mut StateWorkingSet, command: Span, sig: &Signature, call: &Call) {
|
2021-10-13 19:53:27 +02:00
|
|
|
// Allow the call to pass if they pass in the help flag
|
2022-04-09 07:17:48 +02:00
|
|
|
if call.named_iter().any(|(n, _, _)| n.item == "help") {
|
2023-04-07 02:35:45 +02:00
|
|
|
return;
|
2021-10-13 19:53:27 +02:00
|
|
|
}
|
|
|
|
|
2022-04-09 04:55:02 +02:00
|
|
|
if call.positional_len() < sig.required_positional.len() {
|
2021-12-27 20:13:52 +01:00
|
|
|
// Comparing the types of all signature positional arguments against the parsed
|
|
|
|
// expressions found in the call. If one type is not found then it could be assumed
|
|
|
|
// that that positional argument is missing from the parsed call
|
|
|
|
for argument in &sig.required_positional {
|
2022-04-09 04:55:02 +02:00
|
|
|
let found = call.positional_iter().fold(false, |ac, expr| {
|
2022-02-14 18:33:47 +01:00
|
|
|
if argument.shape.to_type() == expr.ty || argument.shape == SyntaxShape::Any {
|
2021-12-27 20:13:52 +01:00
|
|
|
true
|
|
|
|
} else {
|
|
|
|
ac
|
|
|
|
}
|
|
|
|
});
|
|
|
|
if !found {
|
2022-04-09 04:55:02 +02:00
|
|
|
if let Some(last) = call.positional_iter().last() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::MissingPositional(
|
2022-01-04 00:14:33 +01:00
|
|
|
argument.name.clone(),
|
2022-12-03 10:44:12 +01:00
|
|
|
Span::new(last.span.end, last.span.end),
|
2022-01-04 00:14:33 +01:00
|
|
|
sig.call_signature(),
|
|
|
|
));
|
2023-04-07 02:35:45 +02:00
|
|
|
return;
|
2022-01-04 00:14:33 +01:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::MissingPositional(
|
2022-01-04 00:14:33 +01:00
|
|
|
argument.name.clone(),
|
2022-12-03 10:44:12 +01:00
|
|
|
Span::new(command.end, command.end),
|
2022-01-04 00:14:33 +01:00
|
|
|
sig.call_signature(),
|
|
|
|
));
|
2023-04-07 02:35:45 +02:00
|
|
|
return;
|
2022-01-04 00:14:33 +01:00
|
|
|
}
|
2021-12-27 20:13:52 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-04-09 04:55:02 +02:00
|
|
|
let missing = &sig.required_positional[call.positional_len()];
|
|
|
|
if let Some(last) = call.positional_iter().last() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::MissingPositional(
|
2022-01-04 00:14:33 +01:00
|
|
|
missing.name.clone(),
|
2022-12-03 10:44:12 +01:00
|
|
|
Span::new(last.span.end, last.span.end),
|
2022-01-04 00:14:33 +01:00
|
|
|
sig.call_signature(),
|
|
|
|
))
|
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::MissingPositional(
|
2022-01-04 00:14:33 +01:00
|
|
|
missing.name.clone(),
|
2022-12-03 10:44:12 +01:00
|
|
|
Span::new(command.end, command.end),
|
2022-01-04 00:14:33 +01:00
|
|
|
sig.call_signature(),
|
|
|
|
))
|
|
|
|
}
|
2021-07-02 04:22:54 +02:00
|
|
|
} else {
|
|
|
|
for req_flag in sig.named.iter().filter(|x| x.required) {
|
2022-04-09 07:17:48 +02:00
|
|
|
if call.named_iter().all(|(n, _, _)| n.item != req_flag.long) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::MissingRequiredFlag(
|
2021-07-02 04:22:54 +02:00
|
|
|
req_flag.long.clone(),
|
|
|
|
command,
|
|
|
|
));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
pub fn check_name<'a>(working_set: &mut StateWorkingSet, spans: &'a [Span]) -> Option<&'a Span> {
|
2022-08-22 23:19:47 +02:00
|
|
|
let command_len = if !spans.is_empty() {
|
|
|
|
if working_set.get_span_contents(spans[0]) == b"export" {
|
|
|
|
2
|
|
|
|
} else {
|
|
|
|
1
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
return None;
|
|
|
|
};
|
|
|
|
|
2021-09-13 21:59:11 +02:00
|
|
|
if spans.len() == 1 {
|
|
|
|
None
|
2022-08-22 23:19:47 +02:00
|
|
|
} else if spans.len() < command_len + 3 {
|
|
|
|
if working_set.get_span_contents(spans[command_len]) == b"=" {
|
|
|
|
let name =
|
|
|
|
String::from_utf8_lossy(working_set.get_span_contents(span(&spans[..command_len])));
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::AssignmentMismatch(
|
|
|
|
format!("{name} missing name"),
|
|
|
|
"missing name".into(),
|
|
|
|
spans[command_len],
|
|
|
|
));
|
|
|
|
Some(&spans[command_len])
|
2021-09-13 21:59:11 +02:00
|
|
|
} else {
|
|
|
|
None
|
|
|
|
}
|
2022-08-22 23:19:47 +02:00
|
|
|
} else if working_set.get_span_contents(spans[command_len + 1]) != b"=" {
|
|
|
|
let name =
|
|
|
|
String::from_utf8_lossy(working_set.get_span_contents(span(&spans[..command_len])));
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::AssignmentMismatch(
|
|
|
|
format!("{name} missing sign"),
|
|
|
|
"missing equal sign".into(),
|
|
|
|
spans[command_len + 1],
|
|
|
|
));
|
|
|
|
Some(&spans[command_len + 1])
|
2021-09-10 09:28:43 +02:00
|
|
|
} else {
|
|
|
|
None
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
/// Parse a single argument of an external command call.
///
/// Arguments beginning with `$` or `(` are parsed as dollar/subexpressions,
/// `[` as a list; everything else becomes a string literal (double-quoted
/// strings are unescaped here, since the eval stage does not re-process
/// them).
fn parse_external_arg(working_set: &mut StateWorkingSet, span: Span) -> Expression {
    let contents = working_set.get_span_contents(span);

    if contents.starts_with(b"$") || contents.starts_with(b"(") {
        parse_dollar_expr(working_set, span)
    } else if contents.starts_with(b"[") {
        parse_list_expression(working_set, span, &SyntaxShape::Any)
    } else {
        // Eval stage trims the quotes, so we don't have to do the same thing when parsing.
        let contents = if contents.starts_with(b"\"") {
            // Process escape sequences; any unescape error is recorded but
            // parsing continues with the best-effort result.
            let (contents, err) = unescape_string(contents, span);
            if let Some(err) = err {
                working_set.error(err)
            }
            String::from_utf8_lossy(&contents).to_string()
        } else {
            String::from_utf8_lossy(contents).to_string()
        };

        Expression {
            expr: Expr::String(contents),
            span,
            ty: Type::String,
            custom_completion: None,
        }
    }
}
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
/// Parse a call to an external (non-nushell) command.
///
/// `spans[0]` is the command head; any leading `^` (which forces an external
/// call) is stripped from its span. The remaining spans are parsed as
/// external arguments. Returns an `Expr::ExternalCall` expression covering
/// all of `spans`, typed `Any`. Parse errors are accumulated on `working_set`.
pub fn parse_external_call(
    working_set: &mut StateWorkingSet,
    spans: &[Span],
    is_subexpression: bool,
) -> Expression {
    trace!("parse external");

    let mut args = vec![];

    let head_contents = working_set.get_span_contents(spans[0]);

    // `^cmd` syntax: drop the caret so the head span covers only the command.
    let head_span = if head_contents.starts_with(b"^") {
        Span::new(spans[0].start + 1, spans[0].end)
    } else {
        spans[0]
    };

    let head_contents = working_set.get_span_contents(head_span).to_vec();

    // A head starting with `$` or `(` is itself an expression (variable or
    // subexpression) that evaluates to the command name.
    let head = if head_contents.starts_with(b"$") || head_contents.starts_with(b"(") {
        // the expression is inside external_call, so it's a subexpression
        let arg = parse_expression(working_set, &[head_span], true);
        Box::new(arg)
    } else {
        // Plain head: unquote/unescape it into a string-literal expression.
        let (contents, err) = unescape_unquote_string(&head_contents, head_span);
        if let Some(err) = err {
            working_set.error(err)
        }

        Box::new(Expression {
            expr: Expr::String(contents),
            span: head_span,
            ty: Type::String,
            custom_completion: None,
        })
    };

    // Every span after the head becomes one external argument.
    for span in &spans[1..] {
        let arg = parse_external_arg(working_set, *span);
        args.push(arg);
    }

    Expression {
        expr: Expr::ExternalCall(head, args, is_subexpression),
        span: span(spans),
        ty: Type::Any,
        custom_completion: None,
    }
}
|
2021-07-01 02:01:04 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
/// Parse a long flag (`--name` or `--name=value`) at `spans[*spans_idx]`.
///
/// Returns `(flag_name, flag_argument)`:
/// - `(None, None)` when the span does not start with `--`;
/// - `(Some(name), Some(arg))` when the flag takes an argument and one was
///   supplied (either after `=` or in the following span — in the latter
///   case `spans_idx` is advanced past it);
/// - `(Some(name), None)` for a bare flag, or after reporting an error
///   (unknown flag, missing argument, non-UTF-8 flag text) on `working_set`.
fn parse_long_flag(
    working_set: &mut StateWorkingSet,
    spans: &[Span],
    spans_idx: &mut usize,
    sig: &Signature,
) -> (Option<Spanned<String>>, Option<Expression>) {
    let arg_span = spans[*spans_idx];
    let arg_contents = working_set.get_span_contents(arg_span);

    if arg_contents.starts_with(b"--") {
        // FIXME: only use the first flag you find?
        // Split on `=` to separate `--name` from an inline argument.
        let split: Vec<_> = arg_contents.split(|x| *x == b'=').collect();
        let long_name = String::from_utf8(split[0].into());
        if let Ok(long_name) = long_name {
            // Strip the leading `--`.
            let long_name = long_name[2..].to_string();
            if let Some(flag) = sig.get_long_flag(&long_name) {
                if let Some(arg_shape) = &flag.arg {
                    if split.len() > 1 {
                        // and we also have the argument
                        let long_name_len = long_name.len();
                        let mut span = arg_span;
                        span.start += long_name_len + 3; //offset by long flag and '='

                        let arg = parse_value(working_set, span, arg_shape);

                        (
                            Some(Spanned {
                                item: long_name,
                                // Span of `--name` only (flag name + `--`).
                                span: Span::new(arg_span.start, arg_span.start + long_name_len + 2),
                            }),
                            Some(arg),
                        )
                    } else if let Some(arg) = spans.get(*spans_idx + 1) {
                        // Argument supplied as the next span: `--name value`.
                        let arg = parse_value(working_set, *arg, arg_shape);

                        // Consume the argument span as well.
                        *spans_idx += 1;
                        (
                            Some(Spanned {
                                item: long_name,
                                span: arg_span,
                            }),
                            Some(arg),
                        )
                    } else {
                        // Flag requires an argument but none was given.
                        working_set.error(ParseError::MissingFlagParam(
                            arg_shape.to_string(),
                            arg_span,
                        ));
                        (
                            Some(Spanned {
                                item: long_name,
                                span: arg_span,
                            }),
                            None,
                        )
                    }
                } else {
                    // A flag with no argument
                    (
                        Some(Spanned {
                            item: long_name,
                            span: arg_span,
                        }),
                        None,
                    )
                }
            } else {
                // Flag name not found in the signature.
                working_set.error(ParseError::UnknownFlag(
                    sig.name.clone(),
                    long_name.clone(),
                    arg_span,
                    sig.clone().formatted_flags(),
                ));
                (
                    Some(Spanned {
                        item: long_name.clone(),
                        span: arg_span,
                    }),
                    None,
                )
            }
        } else {
            // Flag text is not valid UTF-8; report and return a placeholder name.
            working_set.error(ParseError::NonUtf8(arg_span));
            (
                Some(Spanned {
                    item: "--".into(),
                    span: arg_span,
                }),
                None,
            )
        }
    } else {
        // Not a long flag at all.
        (None, None)
    }
}
|
2021-07-08 22:29:00 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
fn parse_short_flags(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
spans: &[Span],
|
|
|
|
spans_idx: &mut usize,
|
|
|
|
positional_idx: usize,
|
|
|
|
sig: &Signature,
|
2023-04-07 02:35:45 +02:00
|
|
|
) -> Option<Vec<Flag>> {
|
2021-09-02 10:25:22 +02:00
|
|
|
let arg_span = spans[*spans_idx];
|
|
|
|
|
|
|
|
let arg_contents = working_set.get_span_contents(arg_span);
|
|
|
|
|
2023-04-10 23:52:51 +02:00
|
|
|
if let Ok(arg_contents_uft8_ref) = str::from_utf8(arg_contents) {
|
|
|
|
if arg_contents_uft8_ref.starts_with('-') && arg_contents_uft8_ref.len() > 1 {
|
|
|
|
let short_flags = &arg_contents_uft8_ref[1..];
|
2023-04-15 10:24:51 +02:00
|
|
|
let num_chars = short_flags.chars().count();
|
2023-04-10 23:52:51 +02:00
|
|
|
let mut found_short_flags = vec![];
|
|
|
|
let mut unmatched_short_flags = vec![];
|
2023-04-15 10:24:51 +02:00
|
|
|
for (offset, short_flag) in short_flags.char_indices() {
|
2023-04-10 23:52:51 +02:00
|
|
|
let short_flag_span = Span::new(
|
2023-04-15 10:24:51 +02:00
|
|
|
arg_span.start + 1 + offset,
|
|
|
|
arg_span.start + 1 + offset + short_flag.len_utf8(),
|
2023-04-10 23:52:51 +02:00
|
|
|
);
|
2023-04-15 10:24:51 +02:00
|
|
|
if let Some(flag) = sig.get_short_flag(short_flag) {
|
|
|
|
// Allow args in short flag batches as long as it is the last flag.
|
|
|
|
if flag.arg.is_some() && offset < num_chars - 1 {
|
|
|
|
working_set
|
|
|
|
.error(ParseError::OnlyLastFlagInBatchCanTakeArg(short_flag_span));
|
2023-04-10 23:52:51 +02:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
found_short_flags.push(flag);
|
|
|
|
} else {
|
|
|
|
unmatched_short_flags.push(short_flag_span);
|
2021-07-08 22:29:00 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-10 23:52:51 +02:00
|
|
|
if found_short_flags.is_empty() {
|
|
|
|
let arg_contents = working_set.get_span_contents(arg_span);
|
|
|
|
|
|
|
|
// check to see if we have a negative number
|
|
|
|
if let Some(positional) = sig.get_positional(positional_idx) {
|
|
|
|
if positional.shape == SyntaxShape::Int
|
|
|
|
|| positional.shape == SyntaxShape::Number
|
|
|
|
{
|
|
|
|
if String::from_utf8_lossy(arg_contents).parse::<f64>().is_ok() {
|
|
|
|
return None;
|
|
|
|
} else if let Some(first) = unmatched_short_flags.first() {
|
|
|
|
let contents = working_set.get_span_contents(*first);
|
|
|
|
working_set.error(ParseError::UnknownFlag(
|
|
|
|
sig.name.clone(),
|
|
|
|
format!("-{}", String::from_utf8_lossy(contents)),
|
|
|
|
*first,
|
|
|
|
sig.clone().formatted_flags(),
|
|
|
|
));
|
|
|
|
}
|
2021-07-08 22:29:00 +02:00
|
|
|
} else if let Some(first) = unmatched_short_flags.first() {
|
2021-09-21 06:03:06 +02:00
|
|
|
let contents = working_set.get_span_contents(*first);
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownFlag(
|
2021-09-21 06:03:06 +02:00
|
|
|
sig.name.clone(),
|
2021-11-13 01:42:13 +01:00
|
|
|
format!("-{}", String::from_utf8_lossy(contents)),
|
2021-09-21 06:03:06 +02:00
|
|
|
*first,
|
2022-12-13 13:45:33 +01:00
|
|
|
sig.clone().formatted_flags(),
|
2023-04-07 02:35:45 +02:00
|
|
|
));
|
|
|
|
}
|
|
|
|
} else if let Some(first) = unmatched_short_flags.first() {
|
|
|
|
let contents = working_set.get_span_contents(*first);
|
|
|
|
working_set.error(ParseError::UnknownFlag(
|
2021-09-21 06:03:06 +02:00
|
|
|
sig.name.clone(),
|
2021-11-13 01:42:13 +01:00
|
|
|
format!("-{}", String::from_utf8_lossy(contents)),
|
2021-09-21 06:03:06 +02:00
|
|
|
*first,
|
2022-12-13 13:45:33 +01:00
|
|
|
sig.clone().formatted_flags(),
|
2023-04-07 02:35:45 +02:00
|
|
|
));
|
|
|
|
}
|
2023-04-10 23:52:51 +02:00
|
|
|
} else if !unmatched_short_flags.is_empty() {
|
|
|
|
if let Some(first) = unmatched_short_flags.first() {
|
|
|
|
let contents = working_set.get_span_contents(*first);
|
|
|
|
working_set.error(ParseError::UnknownFlag(
|
|
|
|
sig.name.clone(),
|
|
|
|
format!("-{}", String::from_utf8_lossy(contents)),
|
|
|
|
*first,
|
|
|
|
sig.clone().formatted_flags(),
|
|
|
|
));
|
|
|
|
}
|
2021-07-08 22:29:00 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2023-04-10 23:52:51 +02:00
|
|
|
Some(found_short_flags)
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
2023-04-10 23:52:51 +02:00
|
|
|
working_set.error(ParseError::NonUtf8(arg_span));
|
2023-04-07 02:35:45 +02:00
|
|
|
None
|
2021-07-08 22:29:00 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-08 22:29:00 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
fn first_kw_idx(
|
|
|
|
working_set: &StateWorkingSet,
|
|
|
|
signature: &Signature,
|
|
|
|
spans: &[Span],
|
|
|
|
spans_idx: usize,
|
|
|
|
positional_idx: usize,
|
|
|
|
) -> (Option<usize>, usize) {
|
|
|
|
for idx in (positional_idx + 1)..signature.num_positionals() {
|
|
|
|
if let Some(PositionalArg {
|
|
|
|
shape: SyntaxShape::Keyword(kw, ..),
|
|
|
|
..
|
|
|
|
}) = signature.get_positional(idx)
|
|
|
|
{
|
|
|
|
#[allow(clippy::needless_range_loop)]
|
|
|
|
for span_idx in spans_idx..spans.len() {
|
|
|
|
let contents = working_set.get_span_contents(spans[span_idx]);
|
2021-08-26 23:48:27 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if contents == kw {
|
|
|
|
return (Some(idx), span_idx);
|
2021-08-26 23:48:27 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
(None, spans.len())
|
|
|
|
}
|
2021-08-26 23:48:27 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
fn calculate_end_span(
|
|
|
|
working_set: &StateWorkingSet,
|
|
|
|
signature: &Signature,
|
|
|
|
spans: &[Span],
|
|
|
|
spans_idx: usize,
|
|
|
|
positional_idx: usize,
|
|
|
|
) -> usize {
|
|
|
|
if signature.rest_positional.is_some() {
|
|
|
|
spans.len()
|
|
|
|
} else {
|
|
|
|
let (kw_pos, kw_idx) =
|
|
|
|
first_kw_idx(working_set, signature, spans, spans_idx, positional_idx);
|
2021-08-26 23:48:27 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if let Some(kw_pos) = kw_pos {
|
|
|
|
// We found a keyword. Keywords, once found, create a guidepost to
|
|
|
|
// show us where the positionals will lay into the arguments. Because they're
|
|
|
|
// keywords, they get to set this by being present
|
2021-08-26 23:48:27 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let positionals_between = kw_pos - positional_idx - 1;
|
|
|
|
if positionals_between > (kw_idx - spans_idx) {
|
|
|
|
kw_idx
|
2021-08-26 23:48:27 +02:00
|
|
|
} else {
|
2021-09-02 10:25:22 +02:00
|
|
|
kw_idx - positionals_between
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
// Make space for the remaining require positionals, if we can
|
2022-02-14 18:33:47 +01:00
|
|
|
if signature.num_positionals_after(positional_idx) == 0 {
|
|
|
|
spans.len()
|
|
|
|
} else if positional_idx < signature.required_positional.len()
|
2021-09-02 10:25:22 +02:00
|
|
|
&& spans.len() > (signature.required_positional.len() - positional_idx)
|
|
|
|
{
|
|
|
|
spans.len() - (signature.required_positional.len() - positional_idx - 1)
|
|
|
|
} else {
|
2021-09-04 09:59:38 +02:00
|
|
|
spans_idx + 1
|
2021-07-24 07:57:17 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-24 07:57:17 +02:00
|
|
|
|
2021-12-15 23:56:12 +01:00
|
|
|
/// Parse a value whose shape may consume more than one span: variables with
/// optional types, row conditions, math expressions, `OneOf` alternatives,
/// full expressions, and `keyword <arg>` pairs. All other shapes fall through
/// to single-span `parse_value`.
///
/// `spans_idx` is advanced past whatever this call consumes. Errors are
/// accumulated on `working_set` rather than returned.
pub fn parse_multispan_value(
    working_set: &mut StateWorkingSet,
    spans: &[Span],
    spans_idx: &mut usize,
    shape: &SyntaxShape,
) -> Expression {
    match shape {
        SyntaxShape::VarWithOptType => {
            trace!("parsing: var with opt type");

            parse_var_with_opt_type(working_set, spans, spans_idx, false)
        }
        SyntaxShape::RowCondition => {
            trace!("parsing: row condition");
            // Row conditions consume all remaining spans.
            let arg = parse_row_condition(working_set, &spans[*spans_idx..]);
            *spans_idx = spans.len() - 1;

            arg
        }
        SyntaxShape::MathExpression => {
            trace!("parsing: math expression");

            // Math expressions also consume all remaining spans.
            let arg = parse_math_expression(working_set, &spans[*spans_idx..], None);
            *spans_idx = spans.len() - 1;

            arg
        }
        SyntaxShape::OneOf(shapes) => {
            // handle for `if` command.
            //let block_then_exp = shapes.as_slice() == [SyntaxShape::Block, SyntaxShape::Expression];
            // Try each accepted shape in order; the first one that parses
            // without adding new errors wins. An `Expected` error is
            // rolled back so the next shape can be tried.
            for shape in shapes.iter() {
                let starting_error_count = working_set.parse_errors.len();
                let s = parse_multispan_value(working_set, spans, spans_idx, shape);

                if starting_error_count == working_set.parse_errors.len() {
                    return s;
                } else if let Some(ParseError::Expected(..)) = working_set.parse_errors.last() {
                    working_set.parse_errors.truncate(starting_error_count);
                    continue;
                }
                // `if` is parsing block first and then expression.
                // when we're writing something like `else if $a`, parsing as a
                // block will result to error(because it's not a block)
                //
                // If parse as a expression also failed, user is more likely concerned
                // about expression failure rather than "expect block failure"".

                // FIXME FIXME FIXME
                // if block_then_exp {
                //     match &err {
                //         Some(ParseError::Expected(expected, _)) => {
                //             if expected.starts_with("block") {
                //                 err = e
                //             }
                //         }
                //         _ => err = err.or(e),
                //     }
                // } else {
                //     err = err.or(e)
                // }
            }
            let span = spans[*spans_idx];

            // No alternative matched; report only if nothing more specific
            // was already recorded.
            if working_set.parse_errors.is_empty() {
                working_set.error(ParseError::Expected(
                    format!("one of a list of accepted shapes: {shapes:?}"),
                    span,
                ));
            }

            Expression::garbage(span)
        }
        SyntaxShape::Expression => {
            trace!("parsing: expression");

            // is it subexpression?
            // Not sure, but let's make it not, so the behavior is the same as previous version of nushell.
            let arg = parse_expression(working_set, &spans[*spans_idx..], false);
            *spans_idx = spans.len() - 1;

            arg
        }
        SyntaxShape::Keyword(keyword, arg) => {
            trace!(
                "parsing: keyword({}) {:?}",
                String::from_utf8_lossy(keyword),
                arg
            );
            let arg_span = spans[*spans_idx];

            let arg_contents = working_set.get_span_contents(arg_span);

            if arg_contents != keyword {
                // When keywords mismatch, this is a strong indicator of something going wrong.
                // We won't often override the current error, but as this is a strong indicator
                // go ahead and override the current error and tell the user about the missing
                // keyword/literal.
                working_set.error(ParseError::ExpectedKeyword(
                    String::from_utf8_lossy(keyword).into(),
                    arg_span,
                ))
            }

            *spans_idx += 1;
            if *spans_idx >= spans.len() {
                // The keyword was the last span: its argument is missing.
                working_set.error(ParseError::KeywordMissingArgument(
                    arg.to_string(),
                    String::from_utf8_lossy(keyword).into(),
                    Span::new(spans[*spans_idx - 1].end, spans[*spans_idx - 1].end),
                ));
                return Expression {
                    expr: Expr::Keyword(
                        keyword.clone(),
                        spans[*spans_idx - 1],
                        Box::new(Expression::garbage(arg_span)),
                    ),
                    span: arg_span,
                    ty: Type::Any,
                    custom_completion: None,
                };
            }
            let keyword_span = spans[*spans_idx - 1];
            // Parse the keyword's argument (may itself span multiple spans).
            let expr = parse_multispan_value(working_set, spans, spans_idx, arg);
            let ty = expr.ty.clone();

            Expression {
                expr: Expr::Keyword(keyword.clone(), keyword_span, Box::new(expr)),
                span: arg_span,
                ty,
                custom_completion: None,
            }
        }
        _ => {
            // All other cases are single-span values
            let arg_span = spans[*spans_idx];

            parse_value(working_set, arg_span, shape)
        }
    }
}
|
2021-07-08 23:16:25 +02:00
|
|
|
|
2022-06-12 21:18:00 +02:00
|
|
|
/// Result of parsing a call to an internal (declared) command.
pub struct ParsedInternalCall {
    /// The parsed call with its arguments attached.
    pub call: Box<Call>,
    /// The command's output type, taken from its signature.
    pub output: Type,
}
|
|
|
|
|
2023-04-05 18:56:48 +02:00
|
|
|
fn attach_parser_info_builtin(working_set: &StateWorkingSet, name: &str, call: &mut Call) {
|
|
|
|
match name {
|
|
|
|
"use" | "overlay use" | "source-env" | "nu-check" => {
|
|
|
|
if let Some(var_id) = find_dirs_var(working_set, LIB_DIRS_VAR) {
|
|
|
|
call.set_parser_info(
|
|
|
|
DIR_VAR_PARSER_INFO.to_owned(),
|
|
|
|
Expression {
|
|
|
|
expr: Expr::Var(var_id),
|
|
|
|
span: call.head,
|
|
|
|
ty: Type::Any,
|
|
|
|
custom_completion: None,
|
|
|
|
},
|
|
|
|
);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
_ => {}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
pub fn parse_internal_call(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
command_span: Span,
|
|
|
|
spans: &[Span],
|
|
|
|
decl_id: usize,
|
2022-06-12 21:18:00 +02:00
|
|
|
) -> ParsedInternalCall {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("parsing: internal call (decl id: {})", decl_id);
|
|
|
|
|
2022-02-04 03:01:45 +01:00
|
|
|
let mut call = Call::new(command_span);
|
2021-09-02 10:25:22 +02:00
|
|
|
call.decl_id = decl_id;
|
|
|
|
call.head = command_span;
|
2021-07-08 08:19:38 +02:00
|
|
|
|
2022-06-10 17:59:35 +02:00
|
|
|
let decl = working_set.get_decl(decl_id);
|
|
|
|
let signature = decl.signature();
|
2022-06-25 23:23:56 +02:00
|
|
|
let output = signature.output_type.clone();
|
2022-06-12 21:18:00 +02:00
|
|
|
|
2023-04-05 18:56:48 +02:00
|
|
|
if decl.is_builtin() {
|
|
|
|
attach_parser_info_builtin(working_set, decl.name(), &mut call);
|
|
|
|
}
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
// The index into the positional parameter in the definition
|
|
|
|
let mut positional_idx = 0;
|
2021-07-08 08:19:38 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
// The index into the spans of argument data given to parse
|
|
|
|
// Starting at the first argument
|
|
|
|
let mut spans_idx = 0;
|
2021-07-02 00:40:08 +02:00
|
|
|
|
Re-implement aliases (#8123)
# Description
This PR adds an alternative alias implementation. Old aliases still work
but you need to use `old-alias` instead of `alias`.
Instead of replacing spans in the original code and re-parsing, which
proved to be extremely error-prone and a constant source of panics, the
new implementation creates a new command that references the old
command. Consider the new alias defined as `alias ll = ls -l`. The
parser creates a new command called `ll` and remembers that it is
actually a `ls` command called with the `-l` flag. Then, when the parser
sees the `ll` command, it will translate it to `ls -l` and passes to it
any parameters that were passed to the call to `ll`. It works quite
similar to how known externals defined with `extern` are implemented.
The new alias implementation should work the same way as the old
aliases, including exporting from modules, referencing both known and
unknown externals. It seems to preserve custom completions and pipeline
metadata. It is quite robust in most cases but there are some rough
edges (see later).
Fixes https://github.com/nushell/nushell/issues/7648,
https://github.com/nushell/nushell/issues/8026,
https://github.com/nushell/nushell/issues/7512,
https://github.com/nushell/nushell/issues/5780,
https://github.com/nushell/nushell/issues/7754
No effect: https://github.com/nushell/nushell/issues/8122 (we might
revisit the completions code after this PR)
Should use custom command instead:
https://github.com/nushell/nushell/issues/6048
# User-Facing Changes
Since aliases are now basically commands, it has some new implications:
1. `alias spam = "spam"` (requires command call)
* **workaround**: use `alias spam = echo "spam"`
2. `def foo [] { 'foo' }; alias foo = ls -l` (foo defined more than
once)
* **workaround**: use different name (commands also have this
limitation)
3. `alias ls = (ls | sort-by type name -i)`
* **workaround**: Use custom command. _The common issue with this is
that it is currently not easy to pass flags through custom commands and
command referencing itself will lead to stack overflow. Both of these
issues are meant to be addressed._
4. TODO: Help messages, `which` command, `$nu.scope.aliases`, etc.
* Should we treat the aliases as commands or should they be separated
from regular commands?
5. Needs better error message and syntax highlight for recursed alias
(`alias f = f`)
6. Can't create alias with the same name as existing command (`alias ls
= ls -a`)
* Might be possible to add support for it (not 100% sure)
7. Standalone `alias` doesn't list aliases anymore
8. Can't alias parser keywords (e.g., stuff like `alias ou = overlay
use` won't work)
* TODO: Needs a better error message when attempting to do so
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-02-27 08:44:05 +01:00
|
|
|
if let Some(alias) = decl.as_alias() {
|
|
|
|
if let Expression {
|
|
|
|
expr: Expr::Call(wrapped_call),
|
|
|
|
..
|
|
|
|
} = &alias.wrapped_call
|
|
|
|
{
|
|
|
|
// Replace this command's call with the aliased call, but keep the alias name
|
|
|
|
call = *wrapped_call.clone();
|
|
|
|
call.head = command_span;
|
|
|
|
// Skip positionals passed to aliased call
|
|
|
|
positional_idx = call.positional_len();
|
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownState(
|
|
|
|
"Alias does not point to internal call.".to_string(),
|
|
|
|
command_span,
|
|
|
|
));
|
Re-implement aliases (#8123)
# Description
This PR adds an alternative alias implementation. Old aliases still work
but you need to use `old-alias` instead of `alias`.
Instead of replacing spans in the original code and re-parsing, which
proved to be extremely error-prone and a constant source of panics, the
new implementation creates a new command that references the old
command. Consider the new alias defined as `alias ll = ls -l`. The
parser creates a new command called `ll` and remembers that it is
actually a `ls` command called with the `-l` flag. Then, when the parser
sees the `ll` command, it will translate it to `ls -l` and passes to it
any parameters that were passed to the call to `ll`. It works quite
similar to how known externals defined with `extern` are implemented.
The new alias implementation should work the same way as the old
aliases, including exporting from modules, referencing both known and
unknown externals. It seems to preserve custom completions and pipeline
metadata. It is quite robust in most cases but there are some rough
edges (see later).
Fixes https://github.com/nushell/nushell/issues/7648,
https://github.com/nushell/nushell/issues/8026,
https://github.com/nushell/nushell/issues/7512,
https://github.com/nushell/nushell/issues/5780,
https://github.com/nushell/nushell/issues/7754
No effect: https://github.com/nushell/nushell/issues/8122 (we might
revisit the completions code after this PR)
Should use custom command instead:
https://github.com/nushell/nushell/issues/6048
# User-Facing Changes
Since aliases are now basically commands, it has some new implications:
1. `alias spam = "spam"` (requires command call)
* **workaround**: use `alias spam = echo "spam"`
2. `def foo [] { 'foo' }; alias foo = ls -l` (foo defined more than
once)
* **workaround**: use different name (commands also have this
limitation)
3. `alias ls = (ls | sort-by type name -i)`
* **workaround**: Use custom command. _The common issue with this is
that it is currently not easy to pass flags through custom commands and
command referencing itself will lead to stack overflow. Both of these
issues are meant to be addressed._
4. TODO: Help messages, `which` command, `$nu.scope.aliases`, etc.
* Should we treat the aliases as commands or should they be separated
from regular commands?
5. Needs better error message and syntax highlight for recursed alias
(`alias f = f`)
6. Can't create alias with the same name as existing command (`alias ls
= ls -a`)
* Might be possible to add support for it (not 100% sure)
7. Standalone `alias` doesn't list aliases anymore
8. Can't alias parser keywords (e.g., stuff like `alias ou = overlay
use` won't work)
* TODO: Needs a better error message when attempting to do so
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-02-27 08:44:05 +01:00
|
|
|
return ParsedInternalCall {
|
|
|
|
call: Box::new(call),
|
|
|
|
output: Type::Any,
|
|
|
|
};
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
working_set.type_scope.add_type(output.clone());
|
|
|
|
|
|
|
|
if signature.creates_scope {
|
|
|
|
working_set.enter_scope();
|
|
|
|
}
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
while spans_idx < spans.len() {
|
|
|
|
let arg_span = spans[spans_idx];
|
2021-07-08 23:16:25 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
let starting_error_count = working_set.parse_errors.len();
|
2021-09-02 10:25:22 +02:00
|
|
|
// Check if we're on a long flag, if so, parse
|
2023-04-07 20:09:38 +02:00
|
|
|
let (long_name, arg) = parse_long_flag(working_set, spans, &mut spans_idx, &signature);
|
2022-12-21 23:33:26 +01:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if let Some(long_name) = long_name {
|
|
|
|
// We found a long flag, like --bar
|
2023-04-07 02:35:45 +02:00
|
|
|
if working_set.parse_errors[starting_error_count..]
|
|
|
|
.iter()
|
|
|
|
.any(|x| matches!(x, ParseError::UnknownFlag(_, _, _, _)))
|
2022-12-21 23:33:26 +01:00
|
|
|
&& signature.allows_unknown_args
|
|
|
|
{
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.parse_errors.truncate(starting_error_count);
|
2023-04-07 20:09:38 +02:00
|
|
|
let arg = parse_value(working_set, arg_span, &SyntaxShape::Any);
|
2022-12-21 23:33:26 +01:00
|
|
|
|
|
|
|
call.add_unknown(arg);
|
|
|
|
} else {
|
|
|
|
call.add_named((long_name, None, arg));
|
|
|
|
}
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
spans_idx += 1;
|
|
|
|
continue;
|
|
|
|
}
|
2021-07-02 00:40:08 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
let starting_error_count = working_set.parse_errors.len();
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
// Check if we're on a short flag or group of short flags, if so, parse
|
2023-04-07 02:35:45 +02:00
|
|
|
let short_flags = parse_short_flags(
|
2021-09-02 10:25:22 +02:00
|
|
|
working_set,
|
|
|
|
spans,
|
|
|
|
&mut spans_idx,
|
|
|
|
positional_idx,
|
|
|
|
&signature,
|
|
|
|
);
|
2021-07-08 22:29:00 +02:00
|
|
|
|
2022-07-17 14:46:40 +02:00
|
|
|
if let Some(mut short_flags) = short_flags {
|
|
|
|
if short_flags.is_empty() {
|
2022-12-21 23:33:26 +01:00
|
|
|
// workaround for completions (PR #6067)
|
2022-07-17 14:46:40 +02:00
|
|
|
short_flags.push(Flag {
|
|
|
|
long: "".to_string(),
|
|
|
|
short: Some('a'),
|
|
|
|
arg: None,
|
|
|
|
required: false,
|
|
|
|
desc: "".to_string(),
|
|
|
|
var_id: None,
|
|
|
|
default_value: None,
|
|
|
|
})
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
if working_set.parse_errors[starting_error_count..]
|
|
|
|
.iter()
|
|
|
|
.any(|x| matches!(x, ParseError::UnknownFlag(_, _, _, _)))
|
2022-12-21 23:33:26 +01:00
|
|
|
&& signature.allows_unknown_args
|
|
|
|
{
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.parse_errors.truncate(starting_error_count);
|
2023-04-07 20:09:38 +02:00
|
|
|
let arg = parse_value(working_set, arg_span, &SyntaxShape::Any);
|
2022-12-21 23:33:26 +01:00
|
|
|
|
|
|
|
call.add_unknown(arg);
|
|
|
|
} else {
|
|
|
|
for flag in short_flags {
|
|
|
|
if let Some(arg_shape) = flag.arg {
|
|
|
|
if let Some(arg) = spans.get(spans_idx + 1) {
|
2023-04-07 20:09:38 +02:00
|
|
|
let arg = parse_value(working_set, *arg, &arg_shape);
|
2022-12-21 23:33:26 +01:00
|
|
|
|
|
|
|
if flag.long.is_empty() {
|
|
|
|
if let Some(short) = flag.short {
|
|
|
|
call.add_named((
|
|
|
|
Spanned {
|
|
|
|
item: String::new(),
|
|
|
|
span: spans[spans_idx],
|
|
|
|
},
|
|
|
|
Some(Spanned {
|
|
|
|
item: short.to_string(),
|
|
|
|
span: spans[spans_idx],
|
|
|
|
}),
|
|
|
|
Some(arg),
|
|
|
|
));
|
|
|
|
}
|
|
|
|
} else {
|
2022-04-09 07:17:48 +02:00
|
|
|
call.add_named((
|
|
|
|
Spanned {
|
2022-12-21 23:33:26 +01:00
|
|
|
item: flag.long.clone(),
|
2022-04-09 07:17:48 +02:00
|
|
|
span: spans[spans_idx],
|
|
|
|
},
|
2022-12-21 23:33:26 +01:00
|
|
|
None,
|
2022-04-09 07:17:48 +02:00
|
|
|
Some(arg),
|
|
|
|
));
|
|
|
|
}
|
2022-12-21 23:33:26 +01:00
|
|
|
spans_idx += 1;
|
2022-04-09 07:17:48 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::MissingFlagParam(
|
|
|
|
arg_shape.to_string(),
|
|
|
|
arg_span,
|
|
|
|
))
|
2022-12-21 23:33:26 +01:00
|
|
|
}
|
|
|
|
} else if flag.long.is_empty() {
|
|
|
|
if let Some(short) = flag.short {
|
2022-04-09 07:17:48 +02:00
|
|
|
call.add_named((
|
|
|
|
Spanned {
|
2022-12-21 23:33:26 +01:00
|
|
|
item: String::new(),
|
2022-04-09 07:17:48 +02:00
|
|
|
span: spans[spans_idx],
|
|
|
|
},
|
2022-12-21 23:33:26 +01:00
|
|
|
Some(Spanned {
|
|
|
|
item: short.to_string(),
|
|
|
|
span: spans[spans_idx],
|
|
|
|
}),
|
2022-04-09 07:17:48 +02:00
|
|
|
None,
|
|
|
|
));
|
|
|
|
}
|
2021-07-08 08:19:38 +02:00
|
|
|
} else {
|
2022-04-09 07:17:48 +02:00
|
|
|
call.add_named((
|
|
|
|
Spanned {
|
2022-12-21 23:33:26 +01:00
|
|
|
item: flag.long.clone(),
|
2022-04-09 07:17:48 +02:00
|
|
|
span: spans[spans_idx],
|
|
|
|
},
|
2022-12-21 23:33:26 +01:00
|
|
|
None,
|
2022-04-09 07:17:48 +02:00
|
|
|
None,
|
|
|
|
));
|
|
|
|
}
|
2021-07-08 08:19:38 +02:00
|
|
|
}
|
2021-07-08 22:29:00 +02:00
|
|
|
}
|
2022-12-21 23:33:26 +01:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
spans_idx += 1;
|
|
|
|
continue;
|
|
|
|
}
|
2021-07-08 22:29:00 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
// Parse a positional arg if there is one
|
|
|
|
if let Some(positional) = signature.get_positional(positional_idx) {
|
|
|
|
let end = calculate_end_span(working_set, &signature, spans, spans_idx, positional_idx);
|
2021-07-17 01:22:01 +02:00
|
|
|
|
2022-02-14 18:33:47 +01:00
|
|
|
let end = if spans.len() > spans_idx && end == spans_idx {
|
|
|
|
end + 1
|
|
|
|
} else {
|
|
|
|
end
|
|
|
|
};
|
|
|
|
|
|
|
|
if spans[..end].is_empty() || spans_idx == end {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::MissingPositional(
|
|
|
|
positional.name.clone(),
|
|
|
|
Span::new(spans[spans_idx].end, spans[spans_idx].end),
|
|
|
|
signature.call_signature(),
|
|
|
|
));
|
2021-12-27 21:04:48 +01:00
|
|
|
positional_idx += 1;
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
let arg = parse_multispan_value(
|
2021-09-02 10:25:22 +02:00
|
|
|
working_set,
|
|
|
|
&spans[..end],
|
|
|
|
&mut spans_idx,
|
|
|
|
&positional.shape,
|
|
|
|
);
|
2021-07-23 23:46:55 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let arg = if !type_compatible(&positional.shape.to_type(), &arg.ty) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::TypeMismatch(
|
|
|
|
positional.shape.to_type(),
|
|
|
|
arg.ty,
|
|
|
|
arg.span,
|
|
|
|
));
|
2023-03-28 23:23:10 +02:00
|
|
|
Expression::garbage(arg.span)
|
2021-07-08 08:19:38 +02:00
|
|
|
} else {
|
2021-09-02 10:25:22 +02:00
|
|
|
arg
|
|
|
|
};
|
2022-04-09 04:55:02 +02:00
|
|
|
call.add_positional(arg);
|
2021-09-02 10:25:22 +02:00
|
|
|
positional_idx += 1;
|
2022-12-21 23:33:26 +01:00
|
|
|
} else if signature.allows_unknown_args {
|
2023-04-07 20:09:38 +02:00
|
|
|
let arg = parse_value(working_set, arg_span, &SyntaxShape::Any);
|
2022-12-21 23:33:26 +01:00
|
|
|
|
|
|
|
call.add_unknown(arg);
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
2022-04-09 04:55:02 +02:00
|
|
|
call.add_positional(Expression::garbage(arg_span));
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::ExtraPositional(
|
|
|
|
signature.call_signature(),
|
|
|
|
arg_span,
|
|
|
|
))
|
2021-07-08 08:19:38 +02:00
|
|
|
}
|
2021-07-02 00:40:08 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
spans_idx += 1;
|
2021-07-08 08:19:38 +02:00
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
check_call(working_set, command_span, &signature, &call);
|
2021-07-31 07:20:40 +02:00
|
|
|
|
2021-10-09 18:10:46 +02:00
|
|
|
if signature.creates_scope {
|
|
|
|
working_set.exit_scope();
|
|
|
|
}
|
|
|
|
|
2022-06-12 21:18:00 +02:00
|
|
|
ParsedInternalCall {
|
|
|
|
call: Box::new(call),
|
|
|
|
output,
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-31 07:20:40 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
pub fn parse_call(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
spans: &[Span],
|
2021-12-19 08:46:13 +01:00
|
|
|
head: Span,
|
Make external command substitution works friendly(like fish shell, trailing ending newlines) (#7156)
# Description
As titled: when executing an external subcommand, automatically trim
trailing newlines, like fish shell does.
And if the command is executed directly like: `cat tmp`, the result
won't change.
Fixes: #6816
Fixes: #3980
Note that although nushell works correctly by directly substituting the
output of an external command into a variable (or other places like
string interpolation), it's not friendly to users, who almost always
want to use `str trim` to trim the trailing newline; I think that's why
fish shell does this automatically.
If the pr is ok, as a result, no more `str trim -r` is required when
user is writing scripts which using external commands.
# User-Facing Changes
Before:
<img width="523" alt="img"
src="https://user-images.githubusercontent.com/22256154/202468810-86b04dbb-c147-459a-96a5-e0095eeaab3d.png">
After:
<img width="505" alt="img"
src="https://user-images.githubusercontent.com/22256154/202468599-7b537488-3d6b-458e-9d75-d85780826db0.png">
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace --features=extra -- -D warnings -D
clippy::unwrap_used -A clippy::needless_collect` to check that you're
using the standard code style
- `cargo test --workspace --features=extra` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2022-11-23 04:51:57 +01:00
|
|
|
is_subexpression: bool,
|
2023-04-07 02:35:45 +02:00
|
|
|
) -> Expression {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("parsing: call");
|
|
|
|
|
2021-10-29 22:50:28 +02:00
|
|
|
if spans.is_empty() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownState(
|
|
|
|
"Encountered command with zero spans".into(),
|
|
|
|
span(spans),
|
|
|
|
));
|
|
|
|
return garbage(head);
|
2021-10-27 23:52:59 +02:00
|
|
|
}
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let mut pos = 0;
|
|
|
|
let cmd_start = pos;
|
2021-10-29 22:50:28 +02:00
|
|
|
let mut name_spans = vec![];
|
2022-01-10 03:52:01 +01:00
|
|
|
let mut name = vec![];
|
2021-08-17 01:00:00 +02:00
|
|
|
|
2021-10-29 22:50:28 +02:00
|
|
|
for word_span in spans[cmd_start..].iter() {
|
|
|
|
// Find the longest group of words that could form a command
|
2022-04-07 04:01:31 +02:00
|
|
|
|
2021-10-29 22:50:28 +02:00
|
|
|
name_spans.push(*word_span);
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2022-01-10 03:52:01 +01:00
|
|
|
let name_part = working_set.get_span_contents(*word_span);
|
|
|
|
if name.is_empty() {
|
|
|
|
name.extend(name_part);
|
|
|
|
} else {
|
|
|
|
name.push(b' ');
|
|
|
|
name.extend(name_part);
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2021-10-29 22:50:28 +02:00
|
|
|
pos += 1;
|
|
|
|
}
|
|
|
|
|
2022-06-12 21:18:00 +02:00
|
|
|
let input = working_set.type_scope.get_previous();
|
2022-06-10 17:59:35 +02:00
|
|
|
let mut maybe_decl_id = working_set.find_decl(&name, input);
|
2021-10-29 22:50:28 +02:00
|
|
|
|
|
|
|
while maybe_decl_id.is_none() {
|
|
|
|
// Find the longest command match
|
|
|
|
if name_spans.len() <= 1 {
|
|
|
|
// Keep the first word even if it does not match -- could be external command
|
|
|
|
break;
|
2021-06-30 03:42:56 +02:00
|
|
|
}
|
2021-09-11 14:07:19 +02:00
|
|
|
|
2021-10-29 22:50:28 +02:00
|
|
|
name_spans.pop();
|
|
|
|
pos -= 1;
|
|
|
|
|
2022-01-10 03:52:01 +01:00
|
|
|
let mut name = vec![];
|
|
|
|
for name_span in &name_spans {
|
|
|
|
let name_part = working_set.get_span_contents(*name_span);
|
|
|
|
if name.is_empty() {
|
|
|
|
name.extend(name_part);
|
|
|
|
} else {
|
|
|
|
name.push(b' ');
|
|
|
|
name.extend(name_part);
|
|
|
|
}
|
|
|
|
}
|
2022-06-10 17:59:35 +02:00
|
|
|
maybe_decl_id = working_set.find_decl(&name, input);
|
2021-10-29 22:50:28 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
if let Some(decl_id) = maybe_decl_id {
|
2021-09-11 14:07:19 +02:00
|
|
|
// Before the internal parsing we check if there is no let or alias declarations
|
|
|
|
// that are missing their name, e.g.: let = 1 or alias = 2
|
|
|
|
if spans.len() > 1 {
|
|
|
|
let test_equal = working_set.get_span_contents(spans[1]);
|
|
|
|
|
2021-09-11 14:16:40 +02:00
|
|
|
if test_equal == [b'='] {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("incomplete statement");
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownState(
|
|
|
|
"Incomplete statement".into(),
|
|
|
|
span(spans),
|
|
|
|
));
|
|
|
|
return garbage(span(spans));
|
2021-09-11 14:07:19 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
Re-implement aliases (#8123)
# Description
This PR adds an alternative alias implementation. Old aliases still work
but you need to use `old-alias` instead of `alias`.
Instead of replacing spans in the original code and re-parsing, which
proved to be extremely error-prone and a constant source of panics, the
new implementation creates a new command that references the old
command. Consider the new alias defined as `alias ll = ls -l`. The
parser creates a new command called `ll` and remembers that it is
actually a `ls` command called with the `-l` flag. Then, when the parser
sees the `ll` command, it will translate it to `ls -l` and passes to it
any parameters that were passed to the call to `ll`. It works quite
similar to how known externals defined with `extern` are implemented.
The new alias implementation should work the same way as the old
aliases, including exporting from modules, referencing both known and
unknown externals. It seems to preserve custom completions and pipeline
metadata. It is quite robust in most cases but there are some rough
edges (see later).
Fixes https://github.com/nushell/nushell/issues/7648,
https://github.com/nushell/nushell/issues/8026,
https://github.com/nushell/nushell/issues/7512,
https://github.com/nushell/nushell/issues/5780,
https://github.com/nushell/nushell/issues/7754
No effect: https://github.com/nushell/nushell/issues/8122 (we might
revisit the completions code after this PR)
Should use custom command instead:
https://github.com/nushell/nushell/issues/6048
# User-Facing Changes
Since aliases are now basically commands, it has some new implications:
1. `alias spam = "spam"` (requires command call)
* **workaround**: use `alias spam = echo "spam"`
2. `def foo [] { 'foo' }; alias foo = ls -l` (foo defined more than
once)
* **workaround**: use different name (commands also have this
limitation)
3. `alias ls = (ls | sort-by type name -i)`
* **workaround**: Use custom command. _The common issue with this is
that it is currently not easy to pass flags through custom commands and
command referencing itself will lead to stack overflow. Both of these
issues are meant to be addressed._
4. TODO: Help messages, `which` command, `$nu.scope.aliases`, etc.
* Should we treat the aliases as commands or should they be separated
from regular commands?
5. Needs better error message and syntax highlight for recursed alias
(`alias f = f`)
6. Can't create alias with the same name as existing command (`alias ls
= ls -a`)
* Might be possible to add support for it (not 100% sure)
7. Standalone `alias` doesn't list aliases anymore
8. Can't alias parser keywords (e.g., stuff like `alias ou = overlay
use` won't work)
* TODO: Needs a better error message when attempting to do so
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-02-27 08:44:05 +01:00
|
|
|
// TODO: Try to remove the clone
|
|
|
|
let decl = working_set.get_decl(decl_id).clone();
|
2022-01-01 22:42:50 +01:00
|
|
|
|
Re-implement aliases (#8123)
# Description
This PR adds an alternative alias implementation. Old aliases still work
but you need to use `old-alias` instead of `alias`.
Instead of replacing spans in the original code and re-parsing, which
proved to be extremely error-prone and a constant source of panics, the
new implementation creates a new command that references the old
command. Consider the new alias defined as `alias ll = ls -l`. The
parser creates a new command called `ll` and remembers that it is
actually a `ls` command called with the `-l` flag. Then, when the parser
sees the `ll` command, it will translate it to `ls -l` and passes to it
any parameters that were passed to the call to `ll`. It works quite
similar to how known externals defined with `extern` are implemented.
The new alias implementation should work the same way as the old
aliases, including exporting from modules, referencing both known and
unknown externals. It seems to preserve custom completions and pipeline
metadata. It is quite robust in most cases but there are some rough
edges (see later).
Fixes https://github.com/nushell/nushell/issues/7648,
https://github.com/nushell/nushell/issues/8026,
https://github.com/nushell/nushell/issues/7512,
https://github.com/nushell/nushell/issues/5780,
https://github.com/nushell/nushell/issues/7754
No effect: https://github.com/nushell/nushell/issues/8122 (we might
revisit the completions code after this PR)
Should use custom command instead:
https://github.com/nushell/nushell/issues/6048
# User-Facing Changes
Since aliases are now basically commands, it has some new implications:
1. `alias spam = "spam"` (requires command call)
* **workaround**: use `alias spam = echo "spam"`
2. `def foo [] { 'foo' }; alias foo = ls -l` (foo defined more than
once)
* **workaround**: use different name (commands also have this
limitation)
3. `alias ls = (ls | sort-by type name -i)`
* **workaround**: Use custom command. _The common issue with this is
that it is currently not easy to pass flags through custom commands and
command referencing itself will lead to stack overflow. Both of these
issues are meant to be addressed._
4. TODO: Help messages, `which` command, `$nu.scope.aliases`, etc.
* Should we treat the aliases as commands or should they be separated
from regular commands?
5. Needs better error message and syntax highlight for recursed alias
(`alias f = f`)
6. Can't create alias with the same name as existing command (`alias ls
= ls -a`)
* Might be possible to add support for it (not 100% sure)
7. Standalone `alias` doesn't list aliases anymore
8. Can't alias parser keywords (e.g., stuff like `alias ou = overlay
use` won't work)
* TODO: Needs a better error message when attempting to do so
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-02-27 08:44:05 +01:00
|
|
|
let parsed_call = if let Some(alias) = decl.as_alias() {
|
|
|
|
if let Expression {
|
|
|
|
expr: Expr::ExternalCall(head, args, is_subexpression),
|
|
|
|
span: _,
|
|
|
|
ty,
|
|
|
|
custom_completion,
|
|
|
|
} = &alias.wrapped_call
|
|
|
|
{
|
|
|
|
trace!("parsing: alias of external call");
|
|
|
|
|
|
|
|
let mut final_args = args.clone();
|
|
|
|
|
|
|
|
for arg_span in spans.iter().skip(1) {
|
2023-04-07 20:09:38 +02:00
|
|
|
let arg = parse_external_arg(working_set, *arg_span);
|
Re-implement aliases (#8123)
# Description
This PR adds an alternative alias implementation. Old aliases still work
but you need to use `old-alias` instead of `alias`.
Instead of replacing spans in the original code and re-parsing, which
proved to be extremely error-prone and a constant source of panics, the
new implementation creates a new command that references the old
command. Consider the new alias defined as `alias ll = ls -l`. The
parser creates a new command called `ll` and remembers that it is
actually a `ls` command called with the `-l` flag. Then, when the parser
sees the `ll` command, it will translate it to `ls -l` and passes to it
any parameters that were passed to the call to `ll`. It works quite
similar to how known externals defined with `extern` are implemented.
The new alias implementation should work the same way as the old
aliases, including exporting from modules, referencing both known and
unknown externals. It seems to preserve custom completions and pipeline
metadata. It is quite robust in most cases but there are some rough
edges (see later).
Fixes https://github.com/nushell/nushell/issues/7648,
https://github.com/nushell/nushell/issues/8026,
https://github.com/nushell/nushell/issues/7512,
https://github.com/nushell/nushell/issues/5780,
https://github.com/nushell/nushell/issues/7754
No effect: https://github.com/nushell/nushell/issues/8122 (we might
revisit the completions code after this PR)
Should use custom command instead:
https://github.com/nushell/nushell/issues/6048
# User-Facing Changes
Since aliases are now basically commands, it has some new implications:
1. `alias spam = "spam"` (requires command call)
* **workaround**: use `alias spam = echo "spam"`
2. `def foo [] { 'foo' }; alias foo = ls -l` (foo defined more than
once)
* **workaround**: use different name (commands also have this
limitation)
3. `alias ls = (ls | sort-by type name -i)`
* **workaround**: Use custom command. _The common issue with this is
that it is currently not easy to pass flags through custom commands and
command referencing itself will lead to stack overflow. Both of these
issues are meant to be addressed._
4. TODO: Help messages, `which` command, `$nu.scope.aliases`, etc.
* Should we treat the aliases as commands or should they be separated
from regular commands?
5. Needs better error message and syntax highlight for recursed alias
(`alias f = f`)
6. Can't create alias with the same name as existing command (`alias ls
= ls -a`)
* Might be possible to add support for it (not 100% sure)
7. Standalone `alias` doesn't list aliases anymore
8. Can't alias parser keywords (e.g., stuff like `alias ou = overlay
use` won't work)
* TODO: Needs a better error message when attempting to do so
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-02-27 08:44:05 +01:00
|
|
|
final_args.push(arg);
|
|
|
|
}
|
|
|
|
|
|
|
|
let mut head = head.clone();
|
|
|
|
head.span = spans[0]; // replacing the spans preserves syntax highlighting
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
return Expression {
|
|
|
|
expr: Expr::ExternalCall(head, final_args, *is_subexpression),
|
|
|
|
span: span(spans),
|
|
|
|
ty: ty.clone(),
|
|
|
|
custom_completion: *custom_completion,
|
|
|
|
};
|
Re-implement aliases (#8123)
# Description
This PR adds an alternative alias implementation. Old aliases still work
but you need to use `old-alias` instead of `alias`.
Instead of replacing spans in the original code and re-parsing, which
proved to be extremely error-prone and a constant source of panics, the
new implementation creates a new command that references the old
command. Consider the new alias defined as `alias ll = ls -l`. The
parser creates a new command called `ll` and remembers that it is
actually a `ls` command called with the `-l` flag. Then, when the parser
sees the `ll` command, it will translate it to `ls -l` and passes to it
any parameters that were passed to the call to `ll`. It works quite
similar to how known externals defined with `extern` are implemented.
The new alias implementation should work the same way as the old
aliases, including exporting from modules, referencing both known and
unknown externals. It seems to preserve custom completions and pipeline
metadata. It is quite robust in most cases but there are some rough
edges (see later).
Fixes https://github.com/nushell/nushell/issues/7648,
https://github.com/nushell/nushell/issues/8026,
https://github.com/nushell/nushell/issues/7512,
https://github.com/nushell/nushell/issues/5780,
https://github.com/nushell/nushell/issues/7754
No effect: https://github.com/nushell/nushell/issues/8122 (we might
revisit the completions code after this PR)
Should use custom command instead:
https://github.com/nushell/nushell/issues/6048
# User-Facing Changes
Since aliases are now basically commands, it has some new implications:
1. `alias spam = "spam"` (requires command call)
* **workaround**: use `alias spam = echo "spam"`
2. `def foo [] { 'foo' }; alias foo = ls -l` (foo defined more than
once)
* **workaround**: use different name (commands also have this
limitation)
3. `alias ls = (ls | sort-by type name -i)`
* **workaround**: Use custom command. _The common issue with this is
that it is currently not easy to pass flags through custom commands and
command referencing itself will lead to stack overflow. Both of these
issues are meant to be addressed._
4. TODO: Help messages, `which` command, `$nu.scope.aliases`, etc.
* Should we treat the aliases as commands or should they be separated
from regular commands?
5. Needs better error message and syntax highlight for recursed alias
(`alias f = f`)
6. Can't create alias with the same name as existing command (`alias ls
= ls -a`)
* Might be possible to add support for it (not 100% sure)
7. Standalone `alias` doesn't list aliases anymore
8. Can't alias parser keywords (e.g., stuff like `alias ou = overlay
use` won't work)
* TODO: Needs a better error message when attempting to do so
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-02-27 08:44:05 +01:00
|
|
|
} else {
|
|
|
|
trace!("parsing: alias of internal call");
|
|
|
|
parse_internal_call(
|
|
|
|
working_set,
|
|
|
|
span(&spans[cmd_start..pos]),
|
|
|
|
&spans[pos..],
|
|
|
|
decl_id,
|
|
|
|
)
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
trace!("parsing: internal call");
|
|
|
|
parse_internal_call(
|
|
|
|
working_set,
|
|
|
|
span(&spans[cmd_start..pos]),
|
|
|
|
&spans[pos..],
|
|
|
|
decl_id,
|
|
|
|
)
|
|
|
|
};
|
2022-06-12 21:18:00 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::Call(parsed_call.call),
|
|
|
|
span: span(spans),
|
|
|
|
ty: parsed_call.output,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
2021-09-12 14:48:19 +02:00
|
|
|
// We might be parsing left-unbounded range ("..10")
|
|
|
|
let bytes = working_set.get_span_contents(spans[0]);
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("parsing: range {:?} ", bytes);
|
2022-08-11 18:54:54 +02:00
|
|
|
if let (Some(b'.'), Some(b'.')) = (bytes.first(), bytes.get(1)) {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("-- found leading range indicator");
|
2023-04-07 02:35:45 +02:00
|
|
|
let starting_error_count = working_set.parse_errors.len();
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
let range_expr = parse_range(working_set, spans[0]);
|
2023-04-07 02:35:45 +02:00
|
|
|
if working_set.parse_errors.len() == starting_error_count {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("-- successfully parsed range");
|
2023-04-07 02:35:45 +02:00
|
|
|
return range_expr;
|
2021-09-12 14:48:19 +02:00
|
|
|
}
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.parse_errors.truncate(starting_error_count);
|
2021-09-12 14:48:19 +02:00
|
|
|
}
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("parsing: external call");
|
2021-10-29 22:50:28 +02:00
|
|
|
|
|
|
|
// Otherwise, try external command
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_external_call(working_set, spans, is_subexpression)
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
pub fn parse_binary(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
|
|
|
let contents = working_set.get_span_contents(span);
|
|
|
|
if contents.starts_with(b"0x[") {
|
|
|
|
parse_binary_with_base(working_set, span, 16, 2, b"0x[", b"]")
|
|
|
|
} else if contents.starts_with(b"0o[") {
|
|
|
|
parse_binary_with_base(working_set, span, 8, 3, b"0o[", b"]")
|
|
|
|
} else {
|
|
|
|
parse_binary_with_base(working_set, span, 2, 8, b"0b[", b"]")
|
2022-03-01 00:31:53 +01:00
|
|
|
}
|
2022-04-11 09:58:57 +02:00
|
|
|
}
|
2022-03-01 00:31:53 +01:00
|
|
|
|
2022-04-11 09:58:57 +02:00
|
|
|
/// Decode one binary literal of the form `<prefix>digits<suffix>` (e.g.
/// `0x[ff aa]`) into an `Expr::Binary` expression.
///
/// * `base` — numeric radix of the digits (2, 8, or 16).
/// * `min_digits_per_byte` — how many digits encode one output byte; digit
///   strings are left-padded with `'0'` up to a multiple of this.
/// * `prefix`/`suffix` — the literal delimiters that must bracket the token.
///
/// Any malformed input reports a parse error on `working_set` and returns a
/// garbage expression covering `span`.
fn parse_binary_with_base(
    working_set: &mut StateWorkingSet,
    span: Span,
    base: u32,
    min_digits_per_byte: usize,
    prefix: &[u8],
    suffix: &[u8],
) -> Expression {
    let token = working_set.get_span_contents(span);

    // Both delimiters must be present; otherwise fall through to the
    // "expected binary" error at the bottom.
    if let Some(token) = token.strip_prefix(prefix) {
        if let Some(token) = token.strip_suffix(suffix) {
            // Lex the interior so digit groups may be separated by commas,
            // newlines, or whitespace; spans stay anchored in the real file
            // (hence the `span.start + prefix.len()` offset).
            let (lexed, err) = lex(
                token,
                span.start + prefix.len(),
                &[b',', b'\r', b'\n'],
                &[],
                true,
            );
            if let Some(err) = err {
                working_set.error(err);
            }

            // Concatenate all digit items into one contiguous digit string.
            let mut binary_value = vec![];
            for token in lexed {
                match token.contents {
                    TokenContents::Item => {
                        let contents = working_set.get_span_contents(token.span);

                        binary_value.extend_from_slice(contents);
                    }
                    // Pipes/redirections cannot appear inside a binary
                    // literal — reject the whole token.
                    TokenContents::Pipe
                    | TokenContents::PipePipe
                    | TokenContents::OutGreaterThan
                    | TokenContents::ErrGreaterThan
                    | TokenContents::OutErrGreaterThan => {
                        working_set.error(ParseError::Expected("binary".into(), span));
                        return garbage(span);
                    }
                    // Separators and comments are simply skipped.
                    TokenContents::Comment | TokenContents::Semicolon | TokenContents::Eol => {}
                }
            }

            // Left-pad with '0' so the digit count is an exact multiple of
            // the per-byte group size (e.g. `0b[1]` becomes `00000001`).
            let required_padding = (min_digits_per_byte - binary_value.len() % min_digits_per_byte)
                % min_digits_per_byte;

            if required_padding != 0 {
                binary_value = {
                    let mut tail = binary_value;
                    let mut binary_value: Vec<u8> = vec![b'0'; required_padding];
                    binary_value.append(&mut tail);
                    binary_value
                };
            }

            let str = String::from_utf8_lossy(&binary_value).to_string();

            match decode_with_base(&str, base, min_digits_per_byte) {
                Ok(v) => {
                    return Expression {
                        expr: Expr::Binary(v),
                        span,
                        ty: Type::Binary,
                        custom_completion: None,
                    }
                }
                Err(x) => {
                    // A digit group failed `u8::from_str_radix` — surface the
                    // underlying ParseIntError text to the user.
                    working_set.error(ParseError::IncorrectValue(
                        "not a binary value".into(),
                        span,
                        x.to_string(),
                    ));
                    return garbage(span);
                }
            }
        }
    }

    working_set.error(ParseError::Expected("binary".into(), span));
    garbage(span)
}
|
|
|
|
|
2022-04-11 09:58:57 +02:00
|
|
|
/// Decode a digit string into bytes by splitting it into groups of
/// `digits_per_byte` characters and parsing each group in `base`.
///
/// Returns the first `ParseIntError` encountered if any group is not a valid
/// digit sequence for the radix.
fn decode_with_base(s: &str, base: u32, digits_per_byte: usize) -> Result<Vec<u8>, ParseIntError> {
    // Group by characters (not raw bytes) so the grouping matches what the
    // caller counted when padding the digit string.
    let digits: Vec<char> = s.chars().collect();

    digits
        .chunks(digits_per_byte)
        .map(|group| {
            let group: String = group.iter().collect();
            u8::from_str_radix(&group, base)
        })
        .collect()
}
|
|
|
|
|
2023-01-15 16:03:57 +01:00
|
|
|
/// Remove `'_'` digit separators (e.g. `1_000_000`) from a raw token,
/// converting it to a `String` lossily along the way.
fn strip_underscores(token: &[u8]) -> String {
    let text = String::from_utf8_lossy(token);
    let mut cleaned = String::with_capacity(text.len());
    for ch in text.chars() {
        if ch != '_' {
            cleaned.push(ch);
        }
    }
    cleaned
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
/// Parse an integer literal: decimal, or radix-prefixed `0b`/`0o`/`0x`.
/// Underscore digit separators (e.g. `1_000`) are stripped first.
///
/// Reports `Expected("int")` (a soft error, allowing callers to try other
/// shapes) for tokens that don't look like ints at all, but a hard
/// `InvalidLiteral` when a radix prefix is present and the digits are bad.
pub fn parse_int(working_set: &mut StateWorkingSet, span: Span) -> Expression {
    let token = working_set.get_span_contents(span);

    // Parse `token` (radix prefix already removed) in the given radix.
    // Failure here is a hard InvalidLiteral, not a soft "expected", because
    // the prefix made the integer intent unambiguous.
    fn extract_int(
        working_set: &mut StateWorkingSet,
        token: &str,
        span: Span,
        radix: u32,
    ) -> Expression {
        if let Ok(num) = i64::from_str_radix(token, radix) {
            Expression {
                expr: Expr::Int(num),
                span,
                ty: Type::Int,
                custom_completion: None,
            }
        } else {
            working_set.error(ParseError::InvalidLiteral(
                format!("invalid digits for radix {}", radix),
                "int".into(),
                span,
            ));

            garbage(span)
        }
    }

    let token = strip_underscores(token);

    if token.is_empty() {
        working_set.error(ParseError::Expected("int".into(), span));
        return garbage(span);
    }

    // NOTE(review): a negated prefixed literal such as `-0x10` does not match
    // any strip_prefix below (the leading '-' blocks it) and the decimal
    // parse also fails, so it ends as Expected("int") — confirm intended.
    if let Some(num) = token.strip_prefix("0b") {
        extract_int(working_set, num, span, 2)
    } else if let Some(num) = token.strip_prefix("0o") {
        extract_int(working_set, num, span, 8)
    } else if let Some(num) = token.strip_prefix("0x") {
        extract_int(working_set, num, span, 16)
    } else if let Ok(num) = token.parse::<i64>() {
        // Plain decimal literal.
        Expression {
            expr: Expr::Int(num),
            span,
            ty: Type::Int,
            custom_completion: None,
        }
    } else {
        // Soft error: the caller (e.g. parse_number) may roll this back and
        // try the float shape instead.
        working_set.error(ParseError::Expected("int".into(), span));
        garbage(span)
    }
}
|
2021-07-01 02:01:04 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
pub fn parse_float(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
|
|
|
let token = working_set.get_span_contents(span);
|
2023-01-15 16:03:57 +01:00
|
|
|
let token = strip_underscores(token);
|
|
|
|
|
|
|
|
if let Ok(x) = token.parse::<f64>() {
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::Float(x),
|
|
|
|
span,
|
|
|
|
ty: Type::Float,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("float".into(), span));
|
|
|
|
|
|
|
|
garbage(span)
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
}
|
2021-07-30 05:26:06 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
/// Parse a number literal, trying the int shape first and then float.
///
/// Soft `Expected` errors from a failed attempt are rolled back (by
/// truncating `parse_errors` to its starting length) before trying the next
/// shape; if both shapes fail, a single `Expected("number")` is reported.
pub fn parse_number(working_set: &mut StateWorkingSet, span: Span) -> Expression {
    let starting_error_count = working_set.parse_errors.len();

    let result = parse_int(working_set, span);
    if starting_error_count == working_set.parse_errors.len() {
        return result;
    } else if !matches!(
        working_set.parse_errors.last(),
        Some(ParseError::Expected(_, _))
    ) {
        // parse_int raised a hard error (e.g. InvalidLiteral for bad radix
        // digits): keep it in place rather than rolling back, so the float
        // attempt below cannot silently mask it.
        // NOTE(review): if the float parse also fails, the truncate further
        // down discards this hard error too — confirm that is intended.
    } else {
        // Soft "Expected int": discard it and try the float shape.
        working_set.parse_errors.truncate(starting_error_count);
    }

    let result = parse_float(working_set, span);

    if starting_error_count == working_set.parse_errors.len() {
        return result;
    }
    working_set.parse_errors.truncate(starting_error_count);

    working_set.error(ParseError::Expected("number".into(), span));
    garbage(span)
}
|
2021-07-30 05:26:06 +02:00
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
pub fn parse_range(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("parsing: range");
|
|
|
|
|
2021-09-11 13:13:04 +02:00
|
|
|
// Range follows the following syntax: [<from>][<next_operator><next>]<range_operator>[<to>]
|
|
|
|
// where <next_operator> is ".."
|
2023-04-07 13:40:05 +02:00
|
|
|
// and <range_operator> is "..", "..=" or "..<"
|
2021-09-04 23:52:57 +02:00
|
|
|
// and one of the <from> or <to> bounds must be present (just '..' is not allowed since it
|
|
|
|
// looks like parent directory)
|
Syntax errors for string and int (#7952)
# Description
Added a few syntax errors in ints and strings, changed parser to stop
and show that error rather than continue trying to parse those tokens as
some other shape. However, I don't see how to push this direction much
further, and most of the classic confusing errors can't be changed.
Flagged as WIP for the moment, but passes all checks and works better
than current release:
1. I have yet to figure out how to make these errors refer back to the
book, as I see some other errors do.
2. How to give syntax error when malformed int is first token in line?
Currently parsed as external command, user gets confusing error message.
3. Would like to be more strict with *decimal* int literals (lacking,
e.g, `0x' prefix). Need to tinker more with the order of parse shape
calls, currently, float is tried after int, so '1.4' has to be passed.
_(Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.)_
```bash
〉"\z"
Error:
╭─[entry #3:1:1]
1 │ "\z"
· ─┬─
· ╰── Syntax error in string, unrecognized character after escape '\'.
╰────
```
Canonic presentation of a syntax error.
```bash
〉" \u{01ffbogus}"
Error:
× Invalid syntax
╭─[entry #2:1:1]
1 │ " \u{01ffbogus}"
· ───────┬──────
· ╰── Syntax error in string, expecting 1 to 6 hex digits in unicode escape '\u{X...}', max value 10FFFF.
╰────
```
Malformed unicode escape in string, flagged as error.
String parse can be opinionated, it's the last shape tried.
```bash
〉0x22bogus
Error: nu::shell::external_command (link)
× External command failed
╭─[entry #4:1:1]
1 │ 0x22bogus
· ────┬────
· ╰── executable was not found
╰────
help: No such file or directory (os error 2)
```
A *correct* number in first token would be evaluated, but an *incorrect*
one is treated as external command? Confusing to users.
```bash
〉0 + 0x22bogus
Error:
× Invalid syntax
╭─[entry #5:1:1]
1 │ 0 + 0x22bogus
· ────┬────
· ╰── Syntax error in int, invalid digits in radix 16 int.
╰────
```
Can give syntax error if token is unambiguously int literal. e.g has 0b
or 0x prefix, could not be a float.
```bash
〉0 + 098bogus
Error: nu::parser::unsupported_operation (link)
× Types mismatched for operation.
╭─[entry #6:1:1]
1 │ 0 + 098bogus
· ┬ ┬ ────┬───
· │ │ ╰── string
· │ ╰── doesn't support these values.
· ╰── int
╰────
help: Change int or string to be the right types and try again.
```
But *decimal* literal (no prefix) can't be too strict. Parser is going
to try float later. So '1.4' must be passed.
# User-Facing Changes
First and foremost, more specific error messages for typos in string and
int literals. Probably improves interactive user experience.
But a script that was causing and then checking for specific error might
notice a different error message.
_(List of all changes that impact the user experience here. This helps
us keep track of breaking changes.)_
# Tests + Formatting
Added (positive and negative unit tests in `cargo test -p nu-parser`.
Didn't add integration tests.
Make sure you've run and fixed any issues with these commands:
- [x] `cargo fmt --all -- --check` to check standard code formatting
(`cargo fmt --all` applies these changes)
- [x] `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- [x] `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
---------
Co-authored-by: Stefan Holderbach <sholderbach@users.noreply.github.com>
2023-02-13 17:09:50 +01:00
|
|
|
//bugbug range cannot be [..] because that looks like parent directory
|
2021-09-04 23:52:57 +02:00
|
|
|
|
|
|
|
let contents = working_set.get_span_contents(span);
|
2022-02-24 13:58:53 +01:00
|
|
|
|
2021-09-04 23:52:57 +02:00
|
|
|
let token = if let Ok(s) = String::from_utf8(contents.into()) {
|
|
|
|
s
|
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::NonUtf8(span));
|
|
|
|
return garbage(span);
|
2021-09-04 23:52:57 +02:00
|
|
|
};
|
|
|
|
|
2022-02-24 13:58:53 +01:00
|
|
|
if !token.contains("..") {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"at least one range bound set".into(),
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2022-02-24 13:58:53 +01:00
|
|
|
}
|
|
|
|
|
2021-09-04 23:52:57 +02:00
|
|
|
// First, figure out what exact operators are used and determine their positions
|
|
|
|
let dotdot_pos: Vec<_> = token.match_indices("..").map(|(pos, _)| pos).collect();
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
let (next_op_pos, range_op_pos) = match dotdot_pos.len() {
|
|
|
|
1 => (None, dotdot_pos[0]),
|
|
|
|
2 => (Some(dotdot_pos[0]), dotdot_pos[1]),
|
|
|
|
_ => {
|
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"one range operator ('..' or '..<') and optionally one next operator ('..')".into(),
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
|
|
|
}
|
|
|
|
};
|
2021-09-04 23:52:57 +02:00
|
|
|
|
2021-09-11 13:13:04 +02:00
|
|
|
let (inclusion, range_op_str, range_op_span) = if let Some(pos) = token.find("..<") {
|
2021-09-04 23:52:57 +02:00
|
|
|
if pos == range_op_pos {
|
|
|
|
let op_str = "..<";
|
|
|
|
let op_span = Span::new(
|
|
|
|
span.start + range_op_pos,
|
|
|
|
span.start + range_op_pos + op_str.len(),
|
|
|
|
);
|
2021-09-11 13:13:04 +02:00
|
|
|
(RangeInclusion::RightExclusive, "..<", op_span)
|
2021-09-04 23:52:57 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"inclusive operator preceding second range bound".into(),
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2021-09-04 23:52:57 +02:00
|
|
|
}
|
|
|
|
} else {
|
2023-04-07 13:40:05 +02:00
|
|
|
let op_str = if token.contains("..=") { "..=" } else { ".." };
|
2021-09-04 23:52:57 +02:00
|
|
|
let op_span = Span::new(
|
|
|
|
span.start + range_op_pos,
|
|
|
|
span.start + range_op_pos + op_str.len(),
|
|
|
|
);
|
2023-04-07 13:40:05 +02:00
|
|
|
(RangeInclusion::Inclusive, op_str, op_span)
|
2021-09-04 23:52:57 +02:00
|
|
|
};
|
|
|
|
|
2021-09-11 13:13:04 +02:00
|
|
|
// Now, based on the operator positions, figure out where the bounds & next are located and
|
2021-09-04 23:52:57 +02:00
|
|
|
// parse them
|
2021-10-12 19:44:23 +02:00
|
|
|
// TODO: Actually parse the next number in the range
|
2021-09-04 23:52:57 +02:00
|
|
|
let from = if token.starts_with("..") {
|
2021-09-11 13:13:04 +02:00
|
|
|
// token starts with either next operator, or range operator -- we don't care which one
|
2021-09-04 23:52:57 +02:00
|
|
|
None
|
|
|
|
} else {
|
|
|
|
let from_span = Span::new(span.start, span.start + dotdot_pos[0]);
|
2023-04-07 02:35:45 +02:00
|
|
|
Some(Box::new(parse_value(
|
2022-03-18 20:03:57 +01:00
|
|
|
working_set,
|
|
|
|
from_span,
|
|
|
|
&SyntaxShape::Number,
|
2023-04-07 02:35:45 +02:00
|
|
|
)))
|
2021-09-04 23:52:57 +02:00
|
|
|
};
|
|
|
|
|
|
|
|
let to = if token.ends_with(range_op_str) {
|
|
|
|
None
|
|
|
|
} else {
|
|
|
|
let to_span = Span::new(range_op_span.end, span.end);
|
2023-04-07 02:35:45 +02:00
|
|
|
Some(Box::new(parse_value(
|
2022-03-18 20:03:57 +01:00
|
|
|
working_set,
|
|
|
|
to_span,
|
|
|
|
&SyntaxShape::Number,
|
2023-04-07 02:35:45 +02:00
|
|
|
)))
|
2021-09-04 23:52:57 +02:00
|
|
|
};
|
|
|
|
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("-- from: {:?} to: {:?}", from, to);
|
|
|
|
|
2021-09-04 23:52:57 +02:00
|
|
|
if let (None, None) = (&from, &to) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"at least one range bound set".into(),
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2021-09-04 23:52:57 +02:00
|
|
|
}
|
|
|
|
|
2021-09-11 13:13:04 +02:00
|
|
|
let (next, next_op_span) = if let Some(pos) = next_op_pos {
|
|
|
|
let next_op_span = Span::new(span.start + pos, span.start + pos + "..".len());
|
|
|
|
let next_span = Span::new(next_op_span.end, range_op_span.start);
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
(
|
|
|
|
Some(Box::new(parse_value(
|
|
|
|
working_set,
|
|
|
|
next_span,
|
|
|
|
&SyntaxShape::Number,
|
|
|
|
))),
|
|
|
|
next_op_span,
|
|
|
|
)
|
2021-09-11 13:13:04 +02:00
|
|
|
} else {
|
2021-12-19 08:46:13 +01:00
|
|
|
(None, span)
|
2021-09-11 13:13:04 +02:00
|
|
|
};
|
|
|
|
|
|
|
|
let range_op = RangeOperator {
|
|
|
|
inclusion,
|
|
|
|
span: range_op_span,
|
|
|
|
next_op_span,
|
|
|
|
};
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::Range(from, next, to, range_op),
|
|
|
|
span,
|
|
|
|
ty: Type::Range,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-09-04 23:52:57 +02:00
|
|
|
}
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
pub(crate) fn parse_dollar_expr(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2022-02-24 13:58:53 +01:00
|
|
|
trace!("parsing: dollar expression");
|
2021-09-02 10:25:22 +02:00
|
|
|
let contents = working_set.get_span_contents(span);
|
2021-07-30 05:26:06 +02:00
|
|
|
|
2021-12-25 21:50:02 +01:00
|
|
|
if contents.starts_with(b"$\"") || contents.starts_with(b"$'") {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_string_interpolation(working_set, span)
|
2023-03-17 03:19:41 +01:00
|
|
|
} else if contents.starts_with(b"$.") {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_simple_cell_path(working_set, Span::new(span.start + 2, span.end))
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
let starting_error_count = working_set.parse_errors.len();
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
let expr = parse_range(working_set, span);
|
2023-04-07 02:35:45 +02:00
|
|
|
if starting_error_count == working_set.parse_errors.len() {
|
|
|
|
expr
|
|
|
|
} else {
|
|
|
|
working_set.parse_errors.truncate(starting_error_count);
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_full_cell_path(working_set, None, span)
|
2023-04-07 02:35:45 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
}
|
2021-07-30 05:26:06 +02:00
|
|
|
|
2023-03-16 04:06:43 +01:00
|
|
|
pub fn parse_paren_expr(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
span: Span,
|
2023-03-16 21:08:41 +01:00
|
|
|
shape: &SyntaxShape,
|
2023-04-07 02:35:45 +02:00
|
|
|
) -> Expression {
|
|
|
|
let starting_error_count = working_set.parse_errors.len();
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
let expr = parse_range(working_set, span);
|
2023-04-07 02:35:45 +02:00
|
|
|
|
|
|
|
if starting_error_count == working_set.parse_errors.len() {
|
|
|
|
expr
|
2023-03-16 04:06:43 +01:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.parse_errors.truncate(starting_error_count);
|
|
|
|
|
|
|
|
if matches!(shape, SyntaxShape::Signature) {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_signature(working_set, span)
|
2023-04-07 02:35:45 +02:00
|
|
|
} else {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_full_cell_path(working_set, None, span)
|
2023-04-07 02:35:45 +02:00
|
|
|
}
|
2023-03-16 04:06:43 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn parse_brace_expr(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
span: Span,
|
|
|
|
shape: &SyntaxShape,
|
2023-04-07 02:35:45 +02:00
|
|
|
) -> Expression {
|
2023-03-16 04:06:43 +01:00
|
|
|
// Try to detect what kind of value we're about to parse
|
|
|
|
// FIXME: In the future, we should work over the token stream so we only have to do this once
|
|
|
|
// before parsing begins
|
|
|
|
|
|
|
|
// FIXME: we're still using the shape because we rely on it to know how to handle syntax where
|
|
|
|
// the parse is ambiguous. We'll need to update the parts of the grammar where this is ambiguous
|
|
|
|
// and then revisit the parsing.
|
|
|
|
|
2023-03-17 03:19:23 +01:00
|
|
|
if span.end <= (span.start + 1) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
format!("non-block value: {shape}"),
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return Expression::garbage(span);
|
2023-03-17 03:19:23 +01:00
|
|
|
}
|
|
|
|
|
2023-03-16 04:06:43 +01:00
|
|
|
let bytes = working_set.get_span_contents(Span::new(span.start + 1, span.end - 1));
|
|
|
|
let (tokens, _) = lex(bytes, span.start + 1, &[b'\r', b'\n', b'\t'], &[b':'], true);
|
|
|
|
|
|
|
|
let second_token = tokens
|
|
|
|
.get(0)
|
|
|
|
.map(|token| working_set.get_span_contents(token.span));
|
|
|
|
|
|
|
|
let second_token_contents = tokens.get(0).map(|token| token.contents);
|
|
|
|
|
|
|
|
let third_token = tokens
|
|
|
|
.get(1)
|
|
|
|
.map(|token| working_set.get_span_contents(token.span));
|
|
|
|
|
|
|
|
if matches!(second_token, None) {
|
|
|
|
// If we're empty, that means an empty record or closure
|
2023-04-11 19:21:52 +02:00
|
|
|
if matches!(shape, SyntaxShape::Closure(_)) {
|
|
|
|
parse_closure_expression(working_set, shape, span)
|
2023-03-16 04:06:43 +01:00
|
|
|
} else if matches!(shape, SyntaxShape::Block) {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_block_expression(working_set, span)
|
2023-03-24 02:52:01 +01:00
|
|
|
} else if matches!(shape, SyntaxShape::MatchBlock) {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_match_block_expression(working_set, span)
|
2023-03-16 04:06:43 +01:00
|
|
|
} else {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_record(working_set, span)
|
2023-03-16 04:06:43 +01:00
|
|
|
}
|
|
|
|
} else if matches!(second_token_contents, Some(TokenContents::Pipe))
|
|
|
|
|| matches!(second_token_contents, Some(TokenContents::PipePipe))
|
|
|
|
{
|
2023-04-11 19:21:52 +02:00
|
|
|
parse_closure_expression(working_set, shape, span)
|
2023-03-16 04:06:43 +01:00
|
|
|
} else if matches!(third_token, Some(b":")) {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_full_cell_path(working_set, None, span)
|
2023-04-11 19:21:52 +02:00
|
|
|
} else if matches!(shape, SyntaxShape::Closure(_)) || matches!(shape, SyntaxShape::Any) {
|
|
|
|
parse_closure_expression(working_set, shape, span)
|
2023-03-16 04:06:43 +01:00
|
|
|
} else if matches!(shape, SyntaxShape::Block) {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_block_expression(working_set, span)
|
2023-03-24 02:52:01 +01:00
|
|
|
} else if matches!(shape, SyntaxShape::MatchBlock) {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_match_block_expression(working_set, span)
|
2023-03-16 04:06:43 +01:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
format!("non-block value: {shape}"),
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
|
|
|
|
Expression::garbage(span)
|
2023-03-16 04:06:43 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
/// Parse a string interpolation such as `$"hello (1 + 2)"` or `$'...'`
/// into an `Expr::StringInterpolation` holding alternating string and
/// subexpression parts.
///
/// Runs a byte-level state machine over the span: in `String` mode bytes
/// accumulate into a literal part until an unescaped `(` switches to
/// `Expression` mode; in `Expression` mode a delimiter stack tracks nested
/// quotes/backticks/parens until the matching `)` closes the subexpression,
/// which is then parsed via `parse_full_cell_path`.
pub fn parse_string_interpolation(working_set: &mut StateWorkingSet, span: Span) -> Expression {
    #[derive(PartialEq, Eq, Debug)]
    enum InterpolationMode {
        String,
        Expression,
    }

    let contents = working_set.get_span_contents(span);

    // Whether the interpolation is double-quoted ($"..."), which enables
    // backslash escape processing on the literal parts.
    let mut double_quote = false;

    // Strip the `$"`/`$'` prefix and matching trailing quote (if present)
    // to find the interior span to scan.
    let (start, end) = if contents.starts_with(b"$\"") {
        double_quote = true;
        let end = if contents.ends_with(b"\"") && contents.len() > 2 {
            span.end - 1
        } else {
            span.end
        };
        (span.start + 2, end)
    } else if contents.starts_with(b"$'") {
        let end = if contents.ends_with(b"'") && contents.len() > 2 {
            span.end - 1
        } else {
            span.end
        };
        (span.start + 2, end)
    } else {
        // No recognized prefix: scan the whole span as-is.
        (span.start, span.end)
    };

    let inner_span = Span::new(start, end);
    let contents = working_set.get_span_contents(inner_span).to_vec();

    // Collected string/subexpression parts, in order.
    let mut output = vec![];
    let mut mode = InterpolationMode::String;
    // Start offset (absolute) of the part currently being accumulated.
    let mut token_start = start;
    // Open delimiters inside the current subexpression (quotes, backticks,
    // and `)` entries for nested parens).
    let mut delimiter_stack = vec![];

    // Run length of backslashes immediately before the current byte; used
    // to decide whether a `(` is escaped in double-quoted mode.
    let mut consecutive_backslashes: usize = 0;

    // Absolute position of the byte being examined.
    let mut b = start;

    while b != end {
        let current_byte = contents[b - start];

        if mode == InterpolationMode::String {
            let preceding_consecutive_backslashes = consecutive_backslashes;

            let is_backslash = current_byte == b'\\';
            consecutive_backslashes = if is_backslash {
                preceding_consecutive_backslashes + 1
            } else {
                0
            };

            // An unescaped `(` (even backslash count, or single-quoted mode
            // where escapes don't apply) starts a subexpression.
            if current_byte == b'(' && (!double_quote || preceding_consecutive_backslashes % 2 == 0)
            {
                mode = InterpolationMode::Expression;
                if token_start < b {
                    // Flush the literal accumulated so far as a string part.
                    let span = Span::new(token_start, b);
                    let str_contents = working_set.get_span_contents(span);

                    // Only double-quoted interpolations process escapes.
                    let (str_contents, err) = if double_quote {
                        unescape_string(str_contents, span)
                    } else {
                        (str_contents.to_vec(), None)
                    };
                    if let Some(err) = err {
                        working_set.error(err);
                    }

                    output.push(Expression {
                        expr: Expr::String(String::from_utf8_lossy(&str_contents).to_string()),
                        span,
                        ty: Type::String,
                        custom_completion: None,
                    });
                    token_start = b;
                }
            }
        }

        if mode == InterpolationMode::Expression {
            let byte = current_byte;
            // Inside a quoted region, only the matching close quote pops;
            // all other bytes (including parens) are inert.
            if let Some(b'\'') = delimiter_stack.last() {
                if byte == b'\'' {
                    delimiter_stack.pop();
                }
            } else if let Some(b'"') = delimiter_stack.last() {
                if byte == b'"' {
                    delimiter_stack.pop();
                }
            } else if let Some(b'`') = delimiter_stack.last() {
                if byte == b'`' {
                    delimiter_stack.pop();
                }
            } else if byte == b'\'' {
                delimiter_stack.push(b'\'')
            } else if byte == b'"' {
                delimiter_stack.push(b'"');
            } else if byte == b'`' {
                delimiter_stack.push(b'`')
            } else if byte == b'(' {
                // Push the expected closer for a nested paren.
                delimiter_stack.push(b')');
            } else if byte == b')' {
                if let Some(b')') = delimiter_stack.last() {
                    delimiter_stack.pop();
                }
                // Stack empty: this `)` closes the subexpression opened by
                // the `(` that put us in Expression mode.
                if delimiter_stack.is_empty() {
                    mode = InterpolationMode::String;

                    if token_start < b {
                        // Parse the `(...)` span (inclusive of parens) as a
                        // full cell path subexpression.
                        let span = Span::new(token_start, b + 1);

                        let expr = parse_full_cell_path(working_set, None, span);
                        output.push(expr);
                    }

                    token_start = b + 1;
                    // Re-examine this byte in String mode (harmless) rather
                    // than advancing past it here.
                    continue;
                }
            }
        }
        b += 1;
    }

    // Flush whatever part was still being accumulated when input ended.
    match mode {
        InterpolationMode::String => {
            if token_start < end {
                let span = Span::new(token_start, end);
                let str_contents = working_set.get_span_contents(span);

                let (str_contents, err) = if double_quote {
                    unescape_string(str_contents, span)
                } else {
                    (str_contents.to_vec(), None)
                };
                if let Some(err) = err {
                    working_set.error(err);
                }

                output.push(Expression {
                    expr: Expr::String(String::from_utf8_lossy(&str_contents).to_string()),
                    span,
                    ty: Type::String,
                    custom_completion: None,
                });
            }
        }
        InterpolationMode::Expression => {
            // Unterminated subexpression: parse what we have anyway so the
            // error surfaces from the subexpression parser.
            if token_start < end {
                let span = Span::new(token_start, end);

                let expr = parse_full_cell_path(working_set, None, span);
                output.push(expr);
            }
        }
    }

    Expression {
        expr: Expr::StringInterpolation(output),
        span,
        ty: Type::String,
        custom_completion: None,
    }
}
|
2021-07-24 07:57:17 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
pub fn parse_variable_expr(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2021-09-02 10:25:22 +02:00
|
|
|
let contents = working_set.get_span_contents(span);
|
|
|
|
|
2022-03-19 20:12:10 +01:00
|
|
|
if contents == b"$nothing" {
|
2023-04-07 02:35:45 +02:00
|
|
|
return Expression {
|
|
|
|
expr: Expr::Nothing,
|
|
|
|
span,
|
|
|
|
ty: Type::Nothing,
|
|
|
|
custom_completion: None,
|
|
|
|
};
|
2021-10-29 20:15:17 +02:00
|
|
|
} else if contents == b"$nu" {
|
2023-04-07 02:35:45 +02:00
|
|
|
return Expression {
|
|
|
|
expr: Expr::Var(nu_protocol::NU_VARIABLE_ID),
|
|
|
|
span,
|
|
|
|
ty: Type::Any,
|
|
|
|
custom_completion: None,
|
|
|
|
};
|
2021-11-08 07:21:24 +01:00
|
|
|
} else if contents == b"$in" {
|
2023-04-07 02:35:45 +02:00
|
|
|
return Expression {
|
|
|
|
expr: Expr::Var(nu_protocol::IN_VARIABLE_ID),
|
|
|
|
span,
|
|
|
|
ty: Type::Any,
|
|
|
|
custom_completion: None,
|
|
|
|
};
|
2022-01-04 22:34:42 +01:00
|
|
|
} else if contents == b"$env" {
|
2023-04-07 02:35:45 +02:00
|
|
|
return Expression {
|
|
|
|
expr: Expr::Var(nu_protocol::ENV_VARIABLE_ID),
|
|
|
|
span,
|
|
|
|
ty: Type::Any,
|
|
|
|
custom_completion: None,
|
|
|
|
};
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
|
2023-05-02 17:17:14 +02:00
|
|
|
if let Some(id) = parse_variable(working_set, span) {
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::Var(id),
|
|
|
|
span,
|
|
|
|
ty: working_set.get_variable(id).ty.clone(),
|
|
|
|
custom_completion: None,
|
2021-07-02 09:15:30 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
2023-05-02 17:17:14 +02:00
|
|
|
let ws = &*working_set;
|
|
|
|
let suggestion = DidYouMean::new(&ws.list_variables(), ws.get_span_contents(span));
|
|
|
|
working_set.error(ParseError::VariableNotFound(suggestion, span));
|
2023-04-07 02:35:45 +02:00
|
|
|
garbage(span)
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
}
|
2021-07-02 09:15:30 +02:00
|
|
|
|
2021-10-02 04:59:11 +02:00
|
|
|
/// Parse a sequence of already-lexed tokens into the members of a cell
/// path (e.g. `.foo.1?.bar`).
///
/// `expect_dot` controls whether the first token must be a `.` separator
/// (true when the head was parsed separately) or may immediately be a path
/// member. On any unexpected token, an error is recorded and the members
/// parsed so far are returned.
pub fn parse_cell_path(
    working_set: &mut StateWorkingSet,
    tokens: impl Iterator<Item = Token>,
    expect_dot: bool,
) -> Vec<PathMember> {
    enum TokenType {
        Dot,           // .
        QuestionOrDot, // ? or .
        PathMember,    // an int or string, like `1` or `foo`
    }

    // Parsing a cell path is essentially a state machine, and this is the state
    let mut expected_token = if expect_dot {
        TokenType::Dot
    } else {
        TokenType::PathMember
    };

    // Accumulated path members; returned even on early error exit.
    let mut tail = vec![];

    for path_element in tokens {
        let bytes = working_set.get_span_contents(path_element.span);

        match expected_token {
            TokenType::Dot => {
                // Only a bare `.` is accepted here.
                if bytes.len() != 1 || bytes[0] != b'.' {
                    working_set.error(ParseError::Expected('.'.into(), path_element.span));
                    return tail;
                }
                expected_token = TokenType::PathMember;
            }
            TokenType::QuestionOrDot => {
                if bytes.len() == 1 && bytes[0] == b'.' {
                    expected_token = TokenType::PathMember;
                } else if bytes.len() == 1 && bytes[0] == b'?' {
                    // `?` marks the member just parsed as optional.
                    if let Some(last) = tail.last_mut() {
                        match last {
                            PathMember::String {
                                ref mut optional, ..
                            } => *optional = true,
                            PathMember::Int {
                                ref mut optional, ..
                            } => *optional = true,
                        }
                    }
                    expected_token = TokenType::Dot;
                } else {
                    working_set.error(ParseError::Expected(". or ?".into(), path_element.span));
                    return tail;
                }
            }
            TokenType::PathMember => {
                // Speculatively parse as an int; roll back any errors it
                // records so a failed int attempt can fall through to string.
                let starting_error_count = working_set.parse_errors.len();

                let expr = parse_int(working_set, path_element.span);
                working_set.parse_errors.truncate(starting_error_count);

                match expr {
                    Expression {
                        expr: Expr::Int(val),
                        span,
                        ..
                    } => tail.push(PathMember::Int {
                        val: val as usize,
                        span,
                        optional: false,
                    }),
                    _ => {
                        // Not an int — treat the member as a string key.
                        let result = parse_string(working_set, path_element.span);
                        match result {
                            Expression {
                                expr: Expr::String(string),
                                span,
                                ..
                            } => {
                                tail.push(PathMember::String {
                                    val: string,
                                    span,
                                    optional: false,
                                });
                            }
                            _ => {
                                working_set.error(ParseError::Expected(
                                    "string".into(),
                                    path_element.span,
                                ));
                                return tail;
                            }
                        }
                    }
                }
                expected_token = TokenType::QuestionOrDot;
            }
        }
    }

    tail
}
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
pub fn parse_simple_cell_path(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2023-03-17 03:19:41 +01:00
|
|
|
let source = working_set.get_span_contents(span);
|
|
|
|
|
|
|
|
let (tokens, err) = lex(source, span.start, &[b'\n', b'\r'], &[b'.', b'?'], true);
|
2023-04-07 02:35:45 +02:00
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err)
|
|
|
|
}
|
2023-03-17 03:19:41 +01:00
|
|
|
|
|
|
|
let tokens = tokens.into_iter().peekable();
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
let cell_path = parse_cell_path(working_set, tokens, false);
|
2023-03-17 03:19:41 +01:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::CellPath(CellPath { members: cell_path }),
|
|
|
|
span,
|
|
|
|
ty: Type::CellPath,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2023-03-17 03:19:41 +01:00
|
|
|
}
|
|
|
|
|
2021-09-26 20:39:19 +02:00
|
|
|
/// Parse a full cell path: a head expression followed by an optional
/// member chain, e.g. `(ls).name`, `[1 2 3].0`, `{a: 1}.a`, `$x.foo`.
///
/// The head may be a parenthesized subexpression, a table literal, a
/// record literal, a `$variable`, or — when `implicit_head` is given — an
/// implicit variable reference (used e.g. for `$.`-style access).
pub fn parse_full_cell_path(
    working_set: &mut StateWorkingSet,
    implicit_head: Option<VarId>,
    span: Span,
) -> Expression {
    trace!("parsing: full cell path");
    // The resulting expression covers the whole original span, even though
    // `span` is shadowed below while parsing the head.
    let full_cell_span = span;
    let source = working_set.get_span_contents(span);

    // `.` and `?` become their own tokens so the member chain is splittable.
    let (tokens, err) = lex(source, span.start, &[b'\n', b'\r'], &[b'.', b'?'], true);
    if let Some(err) = err {
        working_set.error(err)
    }

    let mut tokens = tokens.into_iter().peekable();
    if let Some(head) = tokens.peek() {
        let bytes = working_set.get_span_contents(head.span);
        // Parse the head; `expect_dot` tells the tail parser whether a `.`
        // must separate the head from the first member.
        let (head, expect_dot) = if bytes.starts_with(b"(") {
            trace!("parsing: paren-head of full cell path");

            let head_span = head.span;
            let mut start = head.span.start;
            let mut end = head.span.end;

            // Trim the surrounding parens from the inner span.
            if bytes.starts_with(b"(") {
                start += 1;
            }
            if bytes.ends_with(b")") {
                end -= 1;
            } else {
                working_set.error(ParseError::Unclosed(")".into(), Span::new(end, end)));
            }

            let span = Span::new(start, end);

            let source = working_set.get_span_contents(span);

            let (output, err) = lex(source, span.start, &[b'\n', b'\r'], &[], true);
            if let Some(err) = err {
                working_set.error(err)
            }

            // Creating a Type scope to parse the new block. This will keep track of
            // the previous input type found in that block
            let output = parse_block(working_set, &output, span, true, true);
            working_set
                .type_scope
                .add_type(working_set.type_scope.get_last_output());

            // The subexpression's type: for a trailing binary-op expression
            // take its own type, otherwise the last output recorded in the
            // type scope.
            let ty = output
                .pipelines
                .last()
                .and_then(|Pipeline { elements, .. }| elements.last())
                .map(|element| match element {
                    PipelineElement::Expression(_, expr)
                        if matches!(
                            expr,
                            Expression {
                                expr: Expr::BinaryOp(..),
                                ..
                            }
                        ) =>
                    {
                        expr.ty.clone()
                    }
                    _ => working_set.type_scope.get_last_output(),
                })
                .unwrap_or_else(|| working_set.type_scope.get_last_output());

            let block_id = working_set.add_block(output);
            // Consume the head token we peeked at.
            tokens.next();

            (
                Expression {
                    expr: Expr::Subexpression(block_id),
                    span: head_span,
                    ty,
                    custom_completion: None,
                },
                true,
            )
        } else if bytes.starts_with(b"[") {
            trace!("parsing: table head of full cell path");

            let output = parse_table_expression(working_set, head.span);

            tokens.next();

            (output, true)
        } else if bytes.starts_with(b"{") {
            trace!("parsing: record head of full cell path");
            let output = parse_record(working_set, head.span);

            tokens.next();

            (output, true)
        } else if bytes.starts_with(b"$") {
            trace!("parsing: $variable head of full cell path");

            let out = parse_variable_expr(working_set, head.span);

            tokens.next();

            (out, true)
        } else if let Some(var_id) = implicit_head {
            trace!("parsing: implicit head of full cell path");
            // Synthesized head: the first token is already a member, so no
            // leading dot is expected (note: head token is NOT consumed).
            (
                Expression {
                    expr: Expr::Var(var_id),
                    span: head.span,
                    ty: Type::Any,
                    custom_completion: None,
                },
                false,
            )
        } else {
            working_set.error(ParseError::Mismatch(
                "variable or subexpression".into(),
                String::from_utf8_lossy(bytes).to_string(),
                span,
            ));
            return garbage(span);
        };

        let tail = parse_cell_path(working_set, tokens, expect_dot);

        Expression {
            // FIXME: Get the type of the data at the tail using follow_cell_path() (or something)
            ty: if !tail.is_empty() {
                // Until the aforementioned fix is implemented, this is necessary to allow mutable list upserts
                // such as $a.1 = 2 to work correctly.
                Type::Any
            } else {
                head.ty.clone()
            },
            expr: Expr::FullCellPath(Box::new(FullCellPath { head, tail })),
            span: full_cell_span,
            custom_completion: None,
        }
    } else {
        // Nothing to parse at all.
        garbage(span)
    }
}
|
2021-07-02 09:15:30 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
pub fn parse_directory(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2022-04-22 22:18:51 +02:00
|
|
|
let bytes = working_set.get_span_contents(span);
|
2022-05-01 20:37:20 +02:00
|
|
|
let (token, err) = unescape_unquote_string(bytes, span);
|
2022-04-22 22:18:51 +02:00
|
|
|
trace!("parsing: directory");
|
|
|
|
|
2022-05-01 20:37:20 +02:00
|
|
|
if err.is_none() {
|
2022-04-22 22:18:51 +02:00
|
|
|
trace!("-- found {}", token);
|
2023-04-07 02:35:45 +02:00
|
|
|
|
|
|
|
Expression {
|
|
|
|
expr: Expr::Directory(token),
|
|
|
|
span,
|
|
|
|
ty: Type::String,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2022-04-22 22:18:51 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("directory".into(), span));
|
|
|
|
|
|
|
|
garbage(span)
|
2022-04-22 22:18:51 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
pub fn parse_filepath(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2021-10-04 21:21:31 +02:00
|
|
|
let bytes = working_set.get_span_contents(span);
|
2022-05-01 20:37:20 +02:00
|
|
|
let (token, err) = unescape_unquote_string(bytes, span);
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("parsing: filepath");
|
2021-10-04 21:21:31 +02:00
|
|
|
|
2022-05-01 20:37:20 +02:00
|
|
|
if err.is_none() {
|
2022-01-16 14:55:56 +01:00
|
|
|
trace!("-- found {}", token);
|
2023-04-07 02:35:45 +02:00
|
|
|
|
|
|
|
Expression {
|
|
|
|
expr: Expr::Filepath(token),
|
|
|
|
span,
|
|
|
|
ty: Type::String,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-10-04 21:21:31 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("filepath".into(), span));
|
|
|
|
|
|
|
|
garbage(span)
|
2021-10-04 21:21:31 +02:00
|
|
|
}
|
|
|
|
}
|
2022-02-24 03:02:48 +01:00
|
|
|
/// Parse a datetime type, eg '2022-02-02'
|
2023-04-07 02:35:45 +02:00
|
|
|
pub fn parse_datetime(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2022-02-24 03:02:48 +01:00
|
|
|
trace!("parsing: datetime");
|
|
|
|
|
|
|
|
let bytes = working_set.get_span_contents(span);
|
2022-02-24 13:58:53 +01:00
|
|
|
|
2023-04-18 10:19:08 +02:00
|
|
|
if bytes.len() < 5
|
|
|
|
|| !bytes[0].is_ascii_digit()
|
|
|
|
|| !bytes[1].is_ascii_digit()
|
|
|
|
|| !bytes[2].is_ascii_digit()
|
|
|
|
|| !bytes[3].is_ascii_digit()
|
|
|
|
{
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("datetime".into(), span));
|
|
|
|
return garbage(span);
|
2022-02-24 13:58:53 +01:00
|
|
|
}
|
|
|
|
|
2022-02-24 03:02:48 +01:00
|
|
|
let token = String::from_utf8_lossy(bytes).to_string();
|
|
|
|
|
|
|
|
if let Ok(datetime) = chrono::DateTime::parse_from_rfc3339(&token) {
|
2023-04-07 02:35:45 +02:00
|
|
|
return Expression {
|
|
|
|
expr: Expr::DateTime(datetime),
|
|
|
|
span,
|
|
|
|
ty: Type::Date,
|
|
|
|
custom_completion: None,
|
|
|
|
};
|
2022-02-24 03:02:48 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
// Just the date
|
2022-02-24 13:58:53 +01:00
|
|
|
let just_date = token.clone() + "T00:00:00+00:00";
|
|
|
|
if let Ok(datetime) = chrono::DateTime::parse_from_rfc3339(&just_date) {
|
2023-04-07 02:35:45 +02:00
|
|
|
return Expression {
|
|
|
|
expr: Expr::DateTime(datetime),
|
|
|
|
span,
|
|
|
|
ty: Type::Date,
|
|
|
|
custom_completion: None,
|
|
|
|
};
|
2022-02-24 03:02:48 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
// Date and time, assume UTC
|
2022-02-24 13:58:53 +01:00
|
|
|
let datetime = token + "+00:00";
|
|
|
|
if let Ok(datetime) = chrono::DateTime::parse_from_rfc3339(&datetime) {
|
2023-04-07 02:35:45 +02:00
|
|
|
return Expression {
|
|
|
|
expr: Expr::DateTime(datetime),
|
|
|
|
span,
|
|
|
|
ty: Type::Date,
|
|
|
|
custom_completion: None,
|
|
|
|
};
|
2022-02-24 03:02:48 +01:00
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("datetime".into(), span));
|
|
|
|
|
|
|
|
garbage(span)
|
2022-02-24 03:02:48 +01:00
|
|
|
}
|
|
|
|
|
2021-10-05 04:27:39 +02:00
|
|
|
/// Parse a duration type, eg '10day'
|
2023-04-07 02:35:45 +02:00
|
|
|
pub fn parse_duration(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("parsing: duration");
|
|
|
|
|
2022-03-03 14:16:04 +01:00
|
|
|
let bytes = working_set.get_span_contents(span);
|
|
|
|
|
2023-05-18 01:54:35 +02:00
|
|
|
match parse_unit_value(bytes, span, DURATION_UNIT_GROUPS, Type::Duration, |x| x) {
|
|
|
|
Some(Ok(expr)) => expr,
|
|
|
|
Some(Err(mk_err_for)) => {
|
|
|
|
working_set.error(mk_err_for("duration"));
|
|
|
|
garbage(span)
|
|
|
|
}
|
2023-04-07 02:35:45 +02:00
|
|
|
None => {
|
|
|
|
working_set.error(ParseError::Expected(
|
Syntax errors for string and int (#7952)
# Description
Added a few syntax errors in ints and strings, changed parser to stop
and show that error rather than continue trying to parse those tokens as
some other shape. However, I don't see how to push this direction much
further, and most of the classic confusing errors can't be changed.
Flagged as WIP for the moment, but passes all checks and works better
than current release:
1. I have yet to figure out how to make these errors refer back to the
book, as I see some other errors do.
2. How to give syntax error when malformed int is first token in line?
Currently parsed as external command, user gets confusing error message.
3. Would like to be more strict with *decimal* int literals (lacking,
e.g, `0x' prefix). Need to tinker more with the order of parse shape
calls, currently, float is tried after int, so '1.4' has to be passed.
_(Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.)_
```bash
〉"\z"
Error:
╭─[entry #3:1:1]
1 │ "\z"
· ─┬─
· ╰── Syntax error in string, unrecognized character after escape '\'.
╰────
```
Canonic presentation of a syntax error.
```bash
〉" \u{01ffbogus}"
Error:
× Invalid syntax
╭─[entry #2:1:1]
1 │ " \u{01ffbogus}"
· ───────┬──────
· ╰── Syntax error in string, expecting 1 to 6 hex digits in unicode escape '\u{X...}', max value 10FFFF.
╰────
```
Malformed unicode escape in string, flagged as error.
String parse can be opinionated, it's the last shape tried.
```bash
〉0x22bogus
Error: nu::shell::external_command (link)
× External command failed
╭─[entry #4:1:1]
1 │ 0x22bogus
· ────┬────
· ╰── executable was not found
╰────
help: No such file or directory (os error 2)
```
A *correct* number in first token would be evaluated, but an *incorrect*
one is treated as external command? Confusing to users.
```bash
〉0 + 0x22bogus
Error:
× Invalid syntax
╭─[entry #5:1:1]
1 │ 0 + 0x22bogus
· ────┬────
· ╰── Syntax error in int, invalid digits in radix 16 int.
╰────
```
Can give syntax error if token is unambiguously int literal. e.g has 0b
or 0x prefix, could not be a float.
```bash
〉0 + 098bogus
Error: nu::parser::unsupported_operation (link)
× Types mismatched for operation.
╭─[entry #6:1:1]
1 │ 0 + 098bogus
· ┬ ┬ ────┬───
· │ │ ╰── string
· │ ╰── doesn't support these values.
· ╰── int
╰────
help: Change int or string to be the right types and try again.
```
But *decimal* literal (no prefix) can't be too strict. Parser is going
to try float later. So '1.4' must be passed.
# User-Facing Changes
First and foremost, more specific error messages for typos in string and
int literals. Probably improves interactive user experience.
But a script that was causing and then checking for specific error might
notice a different error message.
_(List of all changes that impact the user experience here. This helps
us keep track of breaking changes.)_
# Tests + Formatting
Added (positive and negative unit tests in `cargo test -p nu-parser`.
Didn't add integration tests.
Make sure you've run and fixed any issues with these commands:
- [x] `cargo fmt --all -- --check` to check standard code formatting
(`cargo fmt --all` applies these changes)
- [x] `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- [x] `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
---------
Co-authored-by: Stefan Holderbach <sholderbach@users.noreply.github.com>
2023-02-13 17:09:50 +01:00
|
|
|
"duration with valid units".into(),
|
2022-03-03 14:16:04 +01:00
|
|
|
span,
|
2023-04-07 02:35:45 +02:00
|
|
|
));
|
2023-05-18 01:54:35 +02:00
|
|
|
garbage(span)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2023-04-07 02:35:45 +02:00
|
|
|
|
2023-05-18 01:54:35 +02:00
|
|
|
/// Parse a unit type, eg '10kb'
|
|
|
|
pub fn parse_filesize(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
|
|
|
trace!("parsing: filesize");
|
|
|
|
|
|
|
|
let bytes = working_set.get_span_contents(span);
|
|
|
|
|
|
|
|
match parse_unit_value(bytes, span, FILESIZE_UNIT_GROUPS, Type::Filesize, |x| {
|
|
|
|
x.to_uppercase()
|
|
|
|
}) {
|
|
|
|
Some(Ok(expr)) => expr,
|
|
|
|
Some(Err(mk_err_for)) => {
|
|
|
|
working_set.error(mk_err_for("filesize"));
|
2023-04-07 02:35:45 +02:00
|
|
|
garbage(span)
|
|
|
|
}
|
2023-05-18 01:54:35 +02:00
|
|
|
None => {
|
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"filesize with valid units".into(),
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
garbage(span)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Outcome of a matched unit parse: either the finished expression, or a
/// deferred error constructor. The constructor is handed the unit kind name
/// (e.g. "duration", "filesize") by the caller so one parser can produce
/// correctly-worded errors for every unit family.
type ParseUnitResult<'res> = Result<Expression, Box<dyn Fn(&'res str) -> ParseError>>;
/// One recognized unit: (unit, its textual suffix, optional (base unit,
/// factor) pair used to normalize the parsed value into the base unit).
type UnitGroup<'unit> = (Unit, &'unit str, Option<(Unit, i64)>);
|
|
|
|
|
|
|
|
/// Parse a number-plus-unit token (e.g. `10kb`, `-1.5hr`) against a unit table.
///
/// Returns:
/// - `None` when the token does not start with a digit (or `-` followed by a
///   digit), or when no suffix in `unit_groups` matches -- the caller should
///   try other shapes.
/// - `Some(Err(mk_err))` when a suffix matched but the numeric part is not a
///   number; `mk_err` is later given the unit kind name (e.g. "duration") to
///   build the final `ParseError`.
/// - `Some(Ok(expr))` on success: an `Expr::ValueWithUnit` expression of type `ty`.
///
/// `transform` is applied to the whole token before suffix matching
/// (e.g. uppercasing for filesizes).
pub fn parse_unit_value<'res>(
    bytes: &[u8],
    span: Span,
    unit_groups: &[UnitGroup],
    ty: Type,
    transform: fn(String) -> String,
) -> Option<ParseUnitResult<'res>> {
    // Must begin with a digit, or a '-' immediately followed by a digit.
    if bytes.len() < 2
        || !(bytes[0].is_ascii_digit() || (bytes[0] == b'-' && bytes[1].is_ascii_digit()))
    {
        return None;
    }

    let value = transform(String::from_utf8_lossy(bytes).into());

    // First table entry whose suffix matches wins, so entry order matters
    // where suffixes overlap (e.g. "KB" is listed before "B").
    if let Some((unit, name, convert)) = unit_groups.iter().find(|x| value.ends_with(x.1)) {
        let lhs_len = value.len() - name.len();
        // Numeric part of the token, with '_' digit separators removed.
        let lhs = strip_underscores(value[..lhs_len].as_bytes());
        let lhs_span = Span::new(span.start, span.start + lhs_len);
        let unit_span = Span::new(span.start + lhs_len, span.end);

        // Split into fractional and integral halves so values like 1.5kb can
        // be converted via the integer factor below.
        let (decimal_part, number_part) = modf(match lhs.parse::<f64>() {
            Ok(it) => it,
            Err(_) => {
                // Defer error construction: only the caller knows the unit
                // kind name to put in the message.
                let mk_err = move |name| {
                    ParseError::LabeledError(
                        format!("{name} value must be a number"),
                        "not a number".into(),
                        lhs_span,
                    )
                };
                return Some(Err(Box::new(mk_err)));
            }
        });

        // Normalize to the base unit when a conversion factor is supplied
        // (e.g. 1kb -> 1000 with Unit::Byte); otherwise keep the matched unit.
        let (num, unit) = match convert {
            Some(convert_to) => (
                ((number_part * convert_to.1 as f64) + (decimal_part * convert_to.1 as f64)) as i64,
                convert_to.0,
            ),
            None => (number_part as i64, *unit),
        };

        trace!("-- found {} {:?}", num, unit);
        let expr = Expression {
            expr: Expr::ValueWithUnit(
                Box::new(Expression {
                    expr: Expr::Int(num),
                    span: lhs_span,
                    ty: Type::Number,
                    custom_completion: None,
                }),
                Spanned {
                    item: unit,
                    span: unit_span,
                },
            ),
            span,
            ty,
            custom_completion: None,
        };

        Some(Ok(expr))
    } else {
        None
    }
}
|
|
|
|
|
2023-05-18 01:54:35 +02:00
|
|
|
/// Filesize suffix table for `parse_unit_value`.
///
/// Suffixes are uppercase because `parse_filesize` uppercases the token
/// before matching. "B" is listed last so that the longer suffixes
/// (KB, KIB, ...) match first in the table's first-match scan.
/// Decimal (KB = 1000 B) and binary (KIB = 1024 B) prefixes convert down to
/// the next smaller unit in their family.
pub const FILESIZE_UNIT_GROUPS: &[UnitGroup] = &[
    (Unit::Kilobyte, "KB", Some((Unit::Byte, 1000))),
    (Unit::Megabyte, "MB", Some((Unit::Kilobyte, 1000))),
    (Unit::Gigabyte, "GB", Some((Unit::Megabyte, 1000))),
    (Unit::Terabyte, "TB", Some((Unit::Gigabyte, 1000))),
    (Unit::Petabyte, "PB", Some((Unit::Terabyte, 1000))),
    (Unit::Exabyte, "EB", Some((Unit::Petabyte, 1000))),
    (Unit::Zettabyte, "ZB", Some((Unit::Exabyte, 1000))),
    (Unit::Kibibyte, "KIB", Some((Unit::Byte, 1024))),
    (Unit::Mebibyte, "MIB", Some((Unit::Kibibyte, 1024))),
    (Unit::Gibibyte, "GIB", Some((Unit::Mebibyte, 1024))),
    (Unit::Tebibyte, "TIB", Some((Unit::Gibibyte, 1024))),
    (Unit::Pebibyte, "PIB", Some((Unit::Tebibyte, 1024))),
    (Unit::Exbibyte, "EIB", Some((Unit::Pebibyte, 1024))),
    (Unit::Zebibyte, "ZIB", Some((Unit::Exbibyte, 1024))),
    (Unit::Byte, "B", None),
];
|
|
|
|
|
|
|
|
/// Duration suffix table for `parse_unit_value`.
///
/// Matching is case-sensitive (`parse_duration` applies the identity
/// transform). Two Unicode spellings of the micro sign are accepted for
/// microseconds in addition to the ASCII "us".
pub const DURATION_UNIT_GROUPS: &[UnitGroup] = &[
    (Unit::Nanosecond, "ns", None),
    (Unit::Microsecond, "us", Some((Unit::Nanosecond, 1000))),
    (
        // µ Micro Sign
        Unit::Microsecond,
        "\u{00B5}s",
        Some((Unit::Nanosecond, 1000)),
    ),
    (
        // μ Greek small letter Mu
        Unit::Microsecond,
        "\u{03BC}s",
        Some((Unit::Nanosecond, 1000)),
    ),
    (Unit::Millisecond, "ms", Some((Unit::Microsecond, 1000))),
    (Unit::Second, "sec", Some((Unit::Millisecond, 1000))),
    (Unit::Minute, "min", Some((Unit::Second, 60))),
    (Unit::Hour, "hr", Some((Unit::Minute, 60))),
    // 1440 = 24 * 60: days convert straight to minutes.
    (Unit::Day, "day", Some((Unit::Minute, 1440))),
    (Unit::Week, "wk", Some((Unit::Day, 7))),
];
|
|
|
|
|
2022-09-29 20:24:17 +02:00
|
|
|
// Borrowed from libm at https://github.com/rust-lang/libm/blob/master/src/math/modf.rs
/// Split `x` into `(fractional part, integral part)`, both carrying the sign
/// of `x`. Infinities yield a signed-zero fraction; NaN propagates into both
/// halves. Implemented on the raw bits so no rounding occurs.
fn modf(x: f64) -> (f64, f64) {
    let mut bits = x.to_bits();
    // Unbiased binary exponent of x.
    let exp = ((bits >> 52 & 0x7ff) as i32) - 0x3ff;
    // Just the sign bit, used to build correctly-signed zeros.
    let signed_zero = bits & (1 << 63);

    // Exponent so large that every significand bit is integral.
    if exp >= 52 {
        // NaN: return it in both positions.
        if exp == 0x400 && (bits << 12) != 0 {
            return (x, x);
        }
        return (f64::from_bits(signed_zero), x);
    }

    // Magnitude below 1.0: the value is entirely fractional.
    if exp < 0 {
        return (x, f64::from_bits(signed_zero));
    }

    // Significand bits that encode the fraction for this exponent.
    let frac_mask = ((!0u64) >> 12) >> exp;
    if bits & frac_mask == 0 {
        // Already an integer: fraction is a signed zero.
        return (f64::from_bits(signed_zero), x);
    }

    // Clear the fraction bits to truncate toward zero, then recover the
    // fraction by subtraction (exact, since int_part has no low bits set).
    bits &= !frac_mask;
    let int_part = f64::from_bits(bits);
    (x - int_part, int_part)
}
|
2021-10-05 04:27:39 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
pub fn parse_glob_pattern(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2021-10-04 21:21:31 +02:00
|
|
|
let bytes = working_set.get_span_contents(span);
|
2022-05-01 20:37:20 +02:00
|
|
|
let (token, err) = unescape_unquote_string(bytes, span);
|
|
|
|
trace!("parsing: glob pattern");
|
2021-10-04 21:21:31 +02:00
|
|
|
|
2022-05-01 20:37:20 +02:00
|
|
|
if err.is_none() {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("-- found {}", token);
|
2023-04-07 02:35:45 +02:00
|
|
|
|
|
|
|
Expression {
|
|
|
|
expr: Expr::GlobPattern(token),
|
|
|
|
span,
|
|
|
|
ty: Type::String,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-10-04 21:21:31 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("glob pattern string".into(), span));
|
|
|
|
|
|
|
|
garbage(span)
|
2021-10-04 21:21:31 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-03-03 19:14:03 +01:00
|
|
|
pub fn unescape_string(bytes: &[u8], span: Span) -> (Vec<u8>, Option<ParseError>) {
|
|
|
|
let mut output = Vec::new();
|
2023-04-07 02:35:45 +02:00
|
|
|
let mut error = None;
|
2022-03-03 19:14:03 +01:00
|
|
|
|
|
|
|
let mut idx = 0;
|
|
|
|
|
2023-04-18 10:19:08 +02:00
|
|
|
if !bytes.contains(&b'\\') {
|
|
|
|
return (bytes.to_vec(), None);
|
|
|
|
}
|
|
|
|
|
2023-01-28 21:25:53 +01:00
|
|
|
'us_loop: while idx < bytes.len() {
|
2022-03-03 19:14:03 +01:00
|
|
|
if bytes[idx] == b'\\' {
|
|
|
|
// We're in an escape
|
|
|
|
idx += 1;
|
|
|
|
|
|
|
|
match bytes.get(idx) {
|
|
|
|
Some(b'"') => {
|
|
|
|
output.push(b'"');
|
|
|
|
idx += 1;
|
|
|
|
}
|
|
|
|
Some(b'\'') => {
|
|
|
|
output.push(b'\'');
|
|
|
|
idx += 1;
|
|
|
|
}
|
|
|
|
Some(b'\\') => {
|
|
|
|
output.push(b'\\');
|
|
|
|
idx += 1;
|
|
|
|
}
|
|
|
|
Some(b'/') => {
|
|
|
|
output.push(b'/');
|
|
|
|
idx += 1;
|
|
|
|
}
|
2022-03-07 23:39:16 +01:00
|
|
|
Some(b'(') => {
|
|
|
|
output.push(b'(');
|
|
|
|
idx += 1;
|
|
|
|
}
|
|
|
|
Some(b')') => {
|
|
|
|
output.push(b')');
|
|
|
|
idx += 1;
|
|
|
|
}
|
|
|
|
Some(b'{') => {
|
|
|
|
output.push(b'{');
|
|
|
|
idx += 1;
|
|
|
|
}
|
|
|
|
Some(b'}') => {
|
|
|
|
output.push(b'}');
|
|
|
|
idx += 1;
|
|
|
|
}
|
|
|
|
Some(b'$') => {
|
|
|
|
output.push(b'$');
|
|
|
|
idx += 1;
|
|
|
|
}
|
|
|
|
Some(b'^') => {
|
|
|
|
output.push(b'^');
|
|
|
|
idx += 1;
|
|
|
|
}
|
|
|
|
Some(b'#') => {
|
|
|
|
output.push(b'#');
|
|
|
|
idx += 1;
|
|
|
|
}
|
|
|
|
Some(b'|') => {
|
|
|
|
output.push(b'|');
|
|
|
|
idx += 1;
|
|
|
|
}
|
|
|
|
Some(b'~') => {
|
|
|
|
output.push(b'~');
|
|
|
|
idx += 1;
|
|
|
|
}
|
|
|
|
Some(b'a') => {
|
|
|
|
output.push(0x7);
|
|
|
|
idx += 1;
|
|
|
|
}
|
2022-03-03 19:14:03 +01:00
|
|
|
Some(b'b') => {
|
|
|
|
output.push(0x8);
|
|
|
|
idx += 1;
|
|
|
|
}
|
2022-03-07 23:39:16 +01:00
|
|
|
Some(b'e') => {
|
|
|
|
output.push(0x1b);
|
|
|
|
idx += 1;
|
|
|
|
}
|
2022-03-03 19:14:03 +01:00
|
|
|
Some(b'f') => {
|
|
|
|
output.push(0xc);
|
|
|
|
idx += 1;
|
|
|
|
}
|
|
|
|
Some(b'n') => {
|
|
|
|
output.push(b'\n');
|
|
|
|
idx += 1;
|
|
|
|
}
|
|
|
|
Some(b'r') => {
|
|
|
|
output.push(b'\r');
|
|
|
|
idx += 1;
|
|
|
|
}
|
|
|
|
Some(b't') => {
|
|
|
|
output.push(b'\t');
|
|
|
|
idx += 1;
|
|
|
|
}
|
|
|
|
Some(b'u') => {
|
2023-01-28 21:25:53 +01:00
|
|
|
let mut digits = String::with_capacity(10);
|
|
|
|
let mut cur_idx = idx + 1; // index of first beyond current end of token
|
|
|
|
|
|
|
|
if let Some(b'{') = bytes.get(idx + 1) {
|
|
|
|
cur_idx = idx + 2;
|
|
|
|
loop {
|
|
|
|
match bytes.get(cur_idx) {
|
|
|
|
Some(b'}') => {
|
|
|
|
cur_idx += 1;
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
Some(c) => {
|
|
|
|
digits.push(*c as char);
|
|
|
|
cur_idx += 1;
|
|
|
|
}
|
|
|
|
_ => {
|
2023-04-07 02:35:45 +02:00
|
|
|
error = error.or(Some(ParseError::InvalidLiteral(
|
Syntax errors for string and int (#7952)
# Description
Added a few syntax errors in ints and strings, changed parser to stop
and show that error rather than continue trying to parse those tokens as
some other shape. However, I don't see how to push this direction much
further, and most of the classic confusing errors can't be changed.
Flagged as WIP for the moment, but passes all checks and works better
than current release:
1. I have yet to figure out how to make these errors refer back to the
book, as I see some other errors do.
2. How to give syntax error when malformed int is first token in line?
Currently parsed as external command, user gets confusing error message.
3. Would like to be more strict with *decimal* int literals (lacking,
e.g, `0x' prefix). Need to tinker more with the order of parse shape
calls, currently, float is tried after int, so '1.4' has to be passed.
_(Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.)_
```bash
〉"\z"
Error:
╭─[entry #3:1:1]
1 │ "\z"
· ─┬─
· ╰── Syntax error in string, unrecognized character after escape '\'.
╰────
```
Canonic presentation of a syntax error.
```bash
〉" \u{01ffbogus}"
Error:
× Invalid syntax
╭─[entry #2:1:1]
1 │ " \u{01ffbogus}"
· ───────┬──────
· ╰── Syntax error in string, expecting 1 to 6 hex digits in unicode escape '\u{X...}', max value 10FFFF.
╰────
```
Malformed unicode escape in string, flagged as error.
String parse can be opinionated, it's the last shape tried.
```bash
〉0x22bogus
Error: nu::shell::external_command (link)
× External command failed
╭─[entry #4:1:1]
1 │ 0x22bogus
· ────┬────
· ╰── executable was not found
╰────
help: No such file or directory (os error 2)
```
A *correct* number in first token would be evaluated, but an *incorrect*
one is treated as external command? Confusing to users.
```bash
〉0 + 0x22bogus
Error:
× Invalid syntax
╭─[entry #5:1:1]
1 │ 0 + 0x22bogus
· ────┬────
· ╰── Syntax error in int, invalid digits in radix 16 int.
╰────
```
Can give syntax error if token is unambiguously int literal. e.g has 0b
or 0x prefix, could not be a float.
```bash
〉0 + 098bogus
Error: nu::parser::unsupported_operation (link)
× Types mismatched for operation.
╭─[entry #6:1:1]
1 │ 0 + 098bogus
· ┬ ┬ ────┬───
· │ │ ╰── string
· │ ╰── doesn't support these values.
· ╰── int
╰────
help: Change int or string to be the right types and try again.
```
But *decimal* literal (no prefix) can't be too strict. Parser is going
to try float later. So '1.4' must be passed.
# User-Facing Changes
First and foremost, more specific error messages for typos in string and
int literals. Probably improves interactive user experience.
But a script that was causing and then checking for specific error might
notice a different error message.
_(List of all changes that impact the user experience here. This helps
us keep track of breaking changes.)_
# Tests + Formatting
Added (positive and negative unit tests in `cargo test -p nu-parser`.
Didn't add integration tests.
Make sure you've run and fixed any issues with these commands:
- [x] `cargo fmt --all -- --check` to check standard code formatting
(`cargo fmt --all` applies these changes)
- [x] `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- [x] `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
---------
Co-authored-by: Stefan Holderbach <sholderbach@users.noreply.github.com>
2023-02-13 17:09:50 +01:00
|
|
|
"missing '}' for unicode escape '\\u{X...}'".into(),
|
|
|
|
"string".into(),
|
2023-01-28 21:25:53 +01:00
|
|
|
Span::new(span.start + idx, span.end),
|
2023-04-07 02:35:45 +02:00
|
|
|
)));
|
2023-01-28 21:25:53 +01:00
|
|
|
break 'us_loop;
|
2022-03-03 19:14:03 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2023-01-28 21:25:53 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
if (1..=6).contains(&digits.len()) {
|
|
|
|
let int = u32::from_str_radix(&digits, 16);
|
|
|
|
|
|
|
|
if let Ok(int) = int {
|
|
|
|
if int <= 0x10ffff {
|
|
|
|
let result = char::from_u32(int);
|
|
|
|
|
|
|
|
if let Some(result) = result {
|
|
|
|
let mut buffer = vec![0; 4];
|
|
|
|
let result = result.encode_utf8(&mut buffer);
|
|
|
|
|
|
|
|
for elem in result.bytes() {
|
|
|
|
output.push(elem);
|
|
|
|
}
|
|
|
|
|
|
|
|
idx = cur_idx;
|
|
|
|
continue 'us_loop;
|
|
|
|
}
|
|
|
|
}
|
2022-03-03 19:14:03 +01:00
|
|
|
}
|
|
|
|
}
|
2023-01-28 21:25:53 +01:00
|
|
|
// fall through -- escape not accepted above, must be error.
|
2023-04-07 02:35:45 +02:00
|
|
|
error = error.or(Some(ParseError::InvalidLiteral(
|
2023-03-10 22:20:31 +01:00
|
|
|
"invalid unicode escape '\\u{X...}', must be 1-6 hex digits, max value 10FFFF".into(),
|
|
|
|
"string".into(),
|
|
|
|
Span::new(span.start + idx, span.end),
|
2023-04-07 02:35:45 +02:00
|
|
|
)));
|
2023-01-28 21:25:53 +01:00
|
|
|
break 'us_loop;
|
2022-03-03 19:14:03 +01:00
|
|
|
}
|
2023-01-28 21:25:53 +01:00
|
|
|
|
2022-03-03 19:14:03 +01:00
|
|
|
_ => {
|
2023-04-07 02:35:45 +02:00
|
|
|
error = error.or(Some(ParseError::InvalidLiteral(
|
Syntax errors for string and int (#7952)
# Description
Added a few syntax errors in ints and strings, changed parser to stop
and show that error rather than continue trying to parse those tokens as
some other shape. However, I don't see how to push this direction much
further, and most of the classic confusing errors can't be changed.
Flagged as WIP for the moment, but passes all checks and works better
than current release:
1. I have yet to figure out how to make these errors refer back to the
book, as I see some other errors do.
2. How to give syntax error when malformed int is first token in line?
Currently parsed as external command, user gets confusing error message.
3. Would like to be more strict with *decimal* int literals (lacking,
e.g, `0x' prefix). Need to tinker more with the order of parse shape
calls, currently, float is tried after int, so '1.4' has to be passed.
_(Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.)_
```bash
〉"\z"
Error:
╭─[entry #3:1:1]
1 │ "\z"
· ─┬─
· ╰── Syntax error in string, unrecognized character after escape '\'.
╰────
```
Canonic presentation of a syntax error.
```bash
〉" \u{01ffbogus}"
Error:
× Invalid syntax
╭─[entry #2:1:1]
1 │ " \u{01ffbogus}"
· ───────┬──────
· ╰── Syntax error in string, expecting 1 to 6 hex digits in unicode escape '\u{X...}', max value 10FFFF.
╰────
```
Malformed unicode escape in string, flagged as error.
String parse can be opinionated, it's the last shape tried.
```bash
〉0x22bogus
Error: nu::shell::external_command (link)
× External command failed
╭─[entry #4:1:1]
1 │ 0x22bogus
· ────┬────
· ╰── executable was not found
╰────
help: No such file or directory (os error 2)
```
A *correct* number in first token would be evaluated, but an *incorrect*
one is treated as external command? Confusing to users.
```bash
〉0 + 0x22bogus
Error:
× Invalid syntax
╭─[entry #5:1:1]
1 │ 0 + 0x22bogus
· ────┬────
· ╰── Syntax error in int, invalid digits in radix 16 int.
╰────
```
Can give syntax error if token is unambiguously int literal. e.g has 0b
or 0x prefix, could not be a float.
```bash
〉0 + 098bogus
Error: nu::parser::unsupported_operation (link)
× Types mismatched for operation.
╭─[entry #6:1:1]
1 │ 0 + 098bogus
· ┬ ┬ ────┬───
· │ │ ╰── string
· │ ╰── doesn't support these values.
· ╰── int
╰────
help: Change int or string to be the right types and try again.
```
But *decimal* literal (no prefix) can't be too strict. Parser is going
to try float later. So '1.4' must be passed.
# User-Facing Changes
First and foremost, more specific error messages for typos in string and
int literals. Probably improves interactive user experience.
But a script that was causing and then checking for specific error might
notice a different error message.
_(List of all changes that impact the user experience here. This helps
us keep track of breaking changes.)_
# Tests + Formatting
Added (positive and negative unit tests in `cargo test -p nu-parser`.
Didn't add integration tests.
Make sure you've run and fixed any issues with these commands:
- [x] `cargo fmt --all -- --check` to check standard code formatting
(`cargo fmt --all` applies these changes)
- [x] `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- [x] `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
---------
Co-authored-by: Stefan Holderbach <sholderbach@users.noreply.github.com>
2023-02-13 17:09:50 +01:00
|
|
|
"unrecognized escape after '\\'".into(),
|
|
|
|
"string".into(),
|
2022-12-03 10:44:12 +01:00
|
|
|
Span::new(span.start + idx, span.end),
|
2023-04-07 02:35:45 +02:00
|
|
|
)));
|
2023-01-28 21:25:53 +01:00
|
|
|
break 'us_loop;
|
2022-03-03 19:14:03 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
output.push(bytes[idx]);
|
|
|
|
idx += 1;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
(output, error)
|
2022-03-03 19:14:03 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
pub fn unescape_unquote_string(bytes: &[u8], span: Span) -> (String, Option<ParseError>) {
|
|
|
|
if bytes.starts_with(b"\"") {
|
|
|
|
// Needs unescaping
|
|
|
|
let bytes = trim_quotes(bytes);
|
|
|
|
|
|
|
|
let (bytes, err) = unescape_string(bytes, span);
|
|
|
|
|
|
|
|
if let Ok(token) = String::from_utf8(bytes) {
|
|
|
|
(token, err)
|
|
|
|
} else {
|
|
|
|
(
|
|
|
|
String::new(),
|
|
|
|
Some(ParseError::Expected("string".into(), span)),
|
|
|
|
)
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
let bytes = trim_quotes(bytes);
|
|
|
|
|
|
|
|
if let Ok(token) = String::from_utf8(bytes.into()) {
|
|
|
|
(token, None)
|
|
|
|
} else {
|
|
|
|
(
|
|
|
|
String::new(),
|
|
|
|
Some(ParseError::Expected("string".into(), span)),
|
|
|
|
)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
pub fn parse_string(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("parsing: string");
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let bytes = working_set.get_span_contents(span);
|
2022-01-19 15:58:12 +01:00
|
|
|
|
2023-03-10 21:26:14 +01:00
|
|
|
if bytes.is_empty() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("String".into(), span));
|
|
|
|
return Expression::garbage(span);
|
2023-03-10 21:26:14 +01:00
|
|
|
}
|
|
|
|
|
2022-04-26 01:44:44 +02:00
|
|
|
// Check for bare word interpolation
|
|
|
|
if bytes[0] != b'\'' && bytes[0] != b'"' && bytes[0] != b'`' && bytes.contains(&b'(') {
|
2023-04-07 20:09:38 +02:00
|
|
|
return parse_string_interpolation(working_set, span);
|
2022-04-26 01:44:44 +02:00
|
|
|
}
|
|
|
|
|
2022-03-03 19:14:03 +01:00
|
|
|
let (s, err) = unescape_unquote_string(bytes, span);
|
2023-04-07 02:35:45 +02:00
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err);
|
|
|
|
}
|
2021-07-16 22:26:40 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::String(s),
|
|
|
|
span,
|
|
|
|
ty: Type::String,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-02 09:15:30 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
pub fn parse_string_strict(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("parsing: string, with required delimiters");
|
|
|
|
|
2021-11-04 03:32:35 +01:00
|
|
|
let bytes = working_set.get_span_contents(span);
|
2022-01-19 15:58:12 +01:00
|
|
|
|
|
|
|
// Check for unbalanced quotes:
|
2022-03-24 17:57:03 +01:00
|
|
|
{
|
|
|
|
let bytes = if bytes.starts_with(b"$") {
|
|
|
|
&bytes[1..]
|
|
|
|
} else {
|
|
|
|
bytes
|
|
|
|
};
|
|
|
|
if bytes.starts_with(b"\"") && (bytes.len() == 1 || !bytes.ends_with(b"\"")) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Unclosed("\"".into(), span));
|
|
|
|
return garbage(span);
|
2022-03-24 17:57:03 +01:00
|
|
|
}
|
|
|
|
if bytes.starts_with(b"\'") && (bytes.len() == 1 || !bytes.ends_with(b"\'")) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Unclosed("\'".into(), span));
|
|
|
|
return garbage(span);
|
2022-03-24 17:57:03 +01:00
|
|
|
}
|
2022-01-19 15:58:12 +01:00
|
|
|
}
|
|
|
|
|
2021-11-04 03:32:35 +01:00
|
|
|
let (bytes, quoted) = if (bytes.starts_with(b"\"") && bytes.ends_with(b"\"") && bytes.len() > 1)
|
|
|
|
|| (bytes.starts_with(b"\'") && bytes.ends_with(b"\'") && bytes.len() > 1)
|
|
|
|
{
|
|
|
|
(&bytes[1..(bytes.len() - 1)], true)
|
2022-03-24 17:57:03 +01:00
|
|
|
} else if (bytes.starts_with(b"$\"") && bytes.ends_with(b"\"") && bytes.len() > 2)
|
|
|
|
|| (bytes.starts_with(b"$\'") && bytes.ends_with(b"\'") && bytes.len() > 2)
|
|
|
|
{
|
|
|
|
(&bytes[2..(bytes.len() - 1)], true)
|
2021-11-04 03:32:35 +01:00
|
|
|
} else {
|
|
|
|
(bytes, false)
|
|
|
|
};
|
|
|
|
|
|
|
|
if let Ok(token) = String::from_utf8(bytes.into()) {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("-- found {}", token);
|
|
|
|
|
2021-11-04 03:32:35 +01:00
|
|
|
if quoted {
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::String(token),
|
|
|
|
span,
|
|
|
|
ty: Type::String,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-11-04 03:32:35 +01:00
|
|
|
} else if token.contains(' ') {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("string".into(), span));
|
|
|
|
|
|
|
|
garbage(span)
|
2021-11-04 03:32:35 +01:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::String(token),
|
|
|
|
span,
|
|
|
|
ty: Type::String,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-11-04 03:32:35 +01:00
|
|
|
}
|
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("string".into(), span));
|
|
|
|
garbage(span)
|
2021-11-04 03:32:35 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-10-12 19:44:23 +02:00
|
|
|
//TODO: Handle error case for unknown shapes
|
2021-09-02 10:25:22 +02:00
|
|
|
pub fn parse_shape_name(
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set: &mut StateWorkingSet,
|
2021-09-02 10:25:22 +02:00
|
|
|
bytes: &[u8],
|
|
|
|
span: Span,
|
2023-04-07 02:35:45 +02:00
|
|
|
) -> SyntaxShape {
|
2021-09-02 10:25:22 +02:00
|
|
|
let result = match bytes {
|
|
|
|
b"any" => SyntaxShape::Any,
|
2022-03-01 00:31:53 +01:00
|
|
|
b"binary" => SyntaxShape::Binary,
|
2022-11-10 09:21:49 +01:00
|
|
|
b"block" => SyntaxShape::Block, //FIXME: Blocks should have known output types
|
2022-12-13 17:46:22 +01:00
|
|
|
b"bool" => SyntaxShape::Boolean,
|
2021-09-07 00:02:24 +02:00
|
|
|
b"cell-path" => SyntaxShape::CellPath,
|
2022-12-13 17:46:22 +01:00
|
|
|
b"closure" => SyntaxShape::Closure(None), //FIXME: Blocks should have known output types
|
|
|
|
b"cond" => SyntaxShape::RowCondition,
|
|
|
|
// b"custom" => SyntaxShape::Custom(Box::new(SyntaxShape::Any), SyntaxShape::Int),
|
|
|
|
b"datetime" => SyntaxShape::DateTime,
|
2022-04-22 22:18:51 +02:00
|
|
|
b"directory" => SyntaxShape::Directory,
|
2022-12-13 17:46:22 +01:00
|
|
|
b"duration" => SyntaxShape::Duration,
|
|
|
|
b"error" => SyntaxShape::Error,
|
2022-01-12 16:59:07 +01:00
|
|
|
b"expr" => SyntaxShape::Expression,
|
2023-01-25 13:43:22 +01:00
|
|
|
b"float" | b"decimal" => SyntaxShape::Decimal,
|
2022-01-12 16:59:07 +01:00
|
|
|
b"filesize" => SyntaxShape::Filesize,
|
2022-12-13 17:46:22 +01:00
|
|
|
b"full-cell-path" => SyntaxShape::FullCellPath,
|
2021-09-02 10:25:22 +02:00
|
|
|
b"glob" => SyntaxShape::GlobPattern,
|
2022-01-12 16:59:07 +01:00
|
|
|
b"int" => SyntaxShape::Int,
|
2022-12-13 17:46:22 +01:00
|
|
|
b"import-pattern" => SyntaxShape::ImportPattern,
|
|
|
|
b"keyword" => SyntaxShape::Keyword(vec![], Box::new(SyntaxShape::Any)),
|
2023-04-07 02:35:45 +02:00
|
|
|
_ if bytes.starts_with(b"list") => parse_list_shape(working_set, bytes, span),
|
2021-09-02 10:25:22 +02:00
|
|
|
b"math" => SyntaxShape::MathExpression,
|
2022-12-13 17:46:22 +01:00
|
|
|
b"nothing" => SyntaxShape::Nothing,
|
2022-01-12 16:59:07 +01:00
|
|
|
b"number" => SyntaxShape::Number,
|
2022-12-13 17:46:22 +01:00
|
|
|
b"one-of" => SyntaxShape::OneOf(vec![]),
|
2022-01-12 16:59:07 +01:00
|
|
|
b"operator" => SyntaxShape::Operator,
|
2022-12-13 17:46:22 +01:00
|
|
|
b"path" => SyntaxShape::Filepath,
|
2022-01-12 16:59:07 +01:00
|
|
|
b"range" => SyntaxShape::Range,
|
allow records to have type annotations (#8914)
# Description
follow up to #8529
cleaned up version of #8892
- the original syntax is okay
```nu
def okay [rec: record] {}
```
- you can now add type annotations for fields if you know
them before hand
```nu
def okay [rec: record<name: string>] {}
```
- you can specify multiple fields
```nu
def okay [person: record<name: string age: int>] {}
# an optional comma is allowed
def okay [person: record<name: string, age: int>] {}
```
- if annotations are specified, any use of the command will be type
checked against the specified type
```nu
def unwrap [result: record<ok: bool, value: any>] {}
unwrap {ok: 2, value: "value"}
# errors with
Error: nu::parser::type_mismatch
× Type mismatch.
╭─[entry #4:1:1]
1 │ unwrap {ok: 2, value: "value"}
· ───────┬─────
· ╰── expected record<ok: bool, value: any>, found record<ok: int, value: string>
╰────
```
> here the error is in the `ok` field, since `any` is coerced into any
type
> as a result `unwrap {ok: true, value: "value"}` is okay
- the key must be a string, either quoted or unquoted
```nu
def err [rec: record<{}: list>] {}
# errors with
Error:
× `record` type annotations key not string
╭─[entry #7:1:1]
1 │ def unwrap [result: record<{}: bool, value: any>] {}
· ─┬
· ╰── must be a string
╰────
```
- a key doesn't have to have a type in which case it is assumed to be
`any`
```nu
def okay [person: record<name age>] {}
def okay [person: record<name: string age>] {}
```
- however, if you put a colon, you have to specify a type
```nu
def err [person: record<name: >] {}
# errors with
Error: nu::parser::parse_mismatch
× Parse mismatch during operation.
╭─[entry #12:1:1]
1 │ def unwrap [res: record<name: >] { $res }
· ┬
· ╰── expected type after colon
╰────
```
# User-Facing Changes
**[BREAKING CHANGES]**
- this change adds a field to `SyntaxShape::Record` so any plugins that
used it will have to update and include the field. though if you are
unsure of the type the record expects, `SyntaxShape::Record(vec![])`
will suffice
2023-04-26 15:16:55 +02:00
|
|
|
_ if bytes.starts_with(b"record") => parse_collection_shape(working_set, bytes, span),
|
2022-01-12 16:59:07 +01:00
|
|
|
b"signature" => SyntaxShape::Signature,
|
|
|
|
b"string" => SyntaxShape::String,
|
2022-03-31 10:11:03 +02:00
|
|
|
b"table" => SyntaxShape::Table,
|
2022-12-13 17:46:22 +01:00
|
|
|
b"variable" => SyntaxShape::Variable,
|
|
|
|
b"var-with-opt-type" => SyntaxShape::VarWithOptType,
|
2022-02-11 19:38:10 +01:00
|
|
|
_ => {
|
|
|
|
if bytes.contains(&b'@') {
|
2023-03-26 10:58:33 +02:00
|
|
|
let split: Vec<_> = bytes.split(|b| b == &b'@').collect();
|
|
|
|
|
|
|
|
let shape_span = Span::new(span.start, span.start + split[0].len());
|
|
|
|
let cmd_span = Span::new(span.start + split[0].len() + 1, span.end);
|
2023-04-07 02:35:45 +02:00
|
|
|
let shape = parse_shape_name(working_set, split[0], shape_span);
|
2023-03-26 10:58:33 +02:00
|
|
|
|
|
|
|
let command_name = trim_quotes(split[1]);
|
|
|
|
|
|
|
|
if command_name.is_empty() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("a command name".into(), cmd_span));
|
|
|
|
return SyntaxShape::Any;
|
2023-03-26 10:58:33 +02:00
|
|
|
}
|
2022-02-11 19:38:10 +01:00
|
|
|
|
2022-06-10 17:59:35 +02:00
|
|
|
let decl_id = working_set.find_decl(command_name, &Type::Any);
|
2022-03-10 08:49:02 +01:00
|
|
|
|
|
|
|
if let Some(decl_id) = decl_id {
|
2023-04-07 02:35:45 +02:00
|
|
|
return SyntaxShape::Custom(Box::new(shape), decl_id);
|
2022-03-10 08:49:02 +01:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownCommand(cmd_span));
|
|
|
|
return shape;
|
2022-03-10 08:49:02 +01:00
|
|
|
}
|
2022-02-11 19:38:10 +01:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownType(span));
|
|
|
|
return SyntaxShape::Any;
|
2022-02-11 19:38:10 +01:00
|
|
|
}
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
};
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
result
|
2023-03-24 12:54:06 +01:00
|
|
|
}
|
|
|
|
|
allow records to have type annotations (#8914)
# Description
follow up to #8529
cleaned up version of #8892
- the original syntax is okay
```nu
def okay [rec: record] {}
```
- you can now add type annotations for fields if you know
them before hand
```nu
def okay [rec: record<name: string>] {}
```
- you can specify multiple fields
```nu
def okay [person: record<name: string age: int>] {}
# an optional comma is allowed
def okay [person: record<name: string, age: int>] {}
```
- if annotations are specified, any use of the command will be type
checked against the specified type
```nu
def unwrap [result: record<ok: bool, value: any>] {}
unwrap {ok: 2, value: "value"}
# errors with
Error: nu::parser::type_mismatch
× Type mismatch.
╭─[entry #4:1:1]
1 │ unwrap {ok: 2, value: "value"}
· ───────┬─────
· ╰── expected record<ok: bool, value: any>, found record<ok: int, value: string>
╰────
```
> here the error is in the `ok` field, since `any` is coerced into any
type
> as a result `unwrap {ok: true, value: "value"}` is okay
- the key must be a string, either quoted or unquoted
```nu
def err [rec: record<{}: list>] {}
# errors with
Error:
× `record` type annotations key not string
╭─[entry #7:1:1]
1 │ def unwrap [result: record<{}: bool, value: any>] {}
· ─┬
· ╰── must be a string
╰────
```
- a key doesn't have to have a type in which case it is assumed to be
`any`
```nu
def okay [person: record<name age>] {}
def okay [person: record<name: string age>] {}
```
- however, if you put a colon, you have to specify a type
```nu
def err [person: record<name: >] {}
# errors with
Error: nu::parser::parse_mismatch
× Parse mismatch during operation.
╭─[entry #12:1:1]
1 │ def unwrap [res: record<name: >] { $res }
· ┬
· ╰── expected type after colon
╰────
```
# User-Facing Changes
**[BREAKING CHANGES]**
- this change adds a field to `SyntaxShape::Record` so any plugins that
used it will have to update and include the field. though if you are
unsure of the type the record expects, `SyntaxShape::Record(vec![])`
will suffice
2023-04-26 15:16:55 +02:00
|
|
|
/// Parse a collection type annotation such as `record` or
/// `record<name: string, age: int>` into a `SyntaxShape`.
///
/// `bytes` must start with `record` (asserted). Keys may be quoted or
/// unquoted strings; a key without a type (no `:` or a trailing key) defaults
/// to `SyntaxShape::Any`. Commas between entries are optional. On any error
/// the error is recorded in `working_set` and a degraded shape is returned so
/// parsing can continue.
// NOTE(review): `mk_shape` is currently always `SyntaxShape::Record`, but the
// `record<> or table<>` comment below suggests this helper is meant to be
// shared with `table` — confirm before generalizing.
fn parse_collection_shape(
    working_set: &mut StateWorkingSet,
    bytes: &[u8],
    span: Span,
) -> SyntaxShape {
    assert!(bytes.starts_with(b"record"));
    let name = "record";
    let mk_shape = SyntaxShape::Record;

    if bytes == name.as_bytes() {
        // Bare `record` with no annotation: untyped record.
        mk_shape(vec![])
    } else if bytes.starts_with(b"record<") {
        // Locate the span between `<` and `>`; errors already reported inside.
        let Some(inner_span) = prepare_inner_span(working_set, bytes, span, 7) else {
            return SyntaxShape::Any;
        };

        // record<> or table<>
        if inner_span.end - inner_span.start == 0 {
            return mk_shape(vec![]);
        }
        let source = working_set.get_span_contents(inner_span);
        // Re-lex the annotation body so `:` and `,` become separate tokens.
        let (tokens, err) = lex_signature(
            source,
            inner_span.start,
            &[b'\n', b'\r'],
            &[b':', b','],
            true,
        );

        if let Some(err) = err {
            working_set.error(err);
            // lexer errors cause issues with span overflows
            return mk_shape(vec![]);
        }

        let mut sig = vec![];
        let mut idx = 0;

        // Error reported whenever the token in key position is not a string.
        let key_error = |span| {
            ParseError::LabeledError(
                format!("`{name}` type annotations key not string"),
                "must be a string".into(),
                span,
            )
        };

        // Walk tokens as: key [":" type] [","] key ...
        while idx < tokens.len() {
            let TokenContents::Item = tokens[idx].contents else {
                working_set.error(key_error(tokens[idx].span));
                return mk_shape(vec![])
            };

            // Skip stray separator commas between entries.
            let key_bytes = working_set.get_span_contents(tokens[idx].span).to_vec();
            if key_bytes.first().copied() == Some(b',') {
                idx += 1;
                continue;
            }

            let Some(key) = parse_value(working_set, tokens[idx].span, &SyntaxShape::String).as_string() else {
                working_set.error(key_error(tokens[idx].span));
                return mk_shape(vec![]);
            };

            // we want to allow such an annotation
            // `record<name>` where the user leaves out the type
            if idx + 1 == tokens.len() {
                sig.push((key, SyntaxShape::Any));
                break;
            } else {
                idx += 1;
            }

            // Decide whether the token after the key is a `:`, a `,`, or the
            // next key.
            let maybe_colon = working_set.get_span_contents(tokens[idx].span).to_vec();
            match maybe_colon.as_slice() {
                b":" => {
                    if idx + 1 == tokens.len() {
                        // Trailing `key:` with nothing after the colon.
                        working_set.error(ParseError::Expected(
                            "type after colon".into(),
                            tokens[idx].span,
                        ));
                        break;
                    } else {
                        idx += 1;
                    }
                }
                // a key provided without a type
                b"," => {
                    idx += 1;
                    sig.push((key, SyntaxShape::Any));
                    continue;
                }
                // a key provided without a type
                _ => {
                    sig.push((key, SyntaxShape::Any));
                    continue;
                }
            }

            // `idx` now points at the type token for the current key.
            let shape_bytes = working_set.get_span_contents(tokens[idx].span).to_vec();
            let shape = parse_shape_name(working_set, &shape_bytes, tokens[idx].span);
            sig.push((key, shape));
            idx += 1;
        }

        mk_shape(sig)
    } else {
        // Starts with `record` but is neither bare nor `record<...>`.
        working_set.error(ParseError::UnknownType(span));

        SyntaxShape::Any
    }
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
/// Parse a list type annotation (`list`, `list<>`, or `list<shape>`) into a
/// `SyntaxShape::List`.
///
/// `bytes` must start with `list` (asserted). A missing or empty inner
/// annotation yields `list<any>`. On a malformed annotation an error is
/// recorded in `working_set` and a permissive shape is returned so parsing
/// can continue.
fn parse_list_shape(working_set: &mut StateWorkingSet, bytes: &[u8], span: Span) -> SyntaxShape {
    assert!(bytes.starts_with(b"list"));

    if bytes == b"list" {
        // Bare `list` with no annotation.
        SyntaxShape::List(Box::new(SyntaxShape::Any))
    } else if bytes.starts_with(b"list<") {
        // Locate the span between `<` and `>`; errors already reported inside.
        let Some(inner_span) = prepare_inner_span(working_set, bytes, span, 5) else {
            return SyntaxShape::Any;
        };

        let inner_text = String::from_utf8_lossy(working_set.get_span_contents(inner_span));
        // remove any extra whitespace, for example `list< string >` becomes `list<string>`
        // NOTE(review): trimming changes the bytes but `inner_span` still
        // covers the untrimmed range, so error spans for the inner shape may
        // be slightly off — confirm whether this matters for diagnostics.
        let inner_bytes = inner_text.trim().as_bytes().to_vec();

        // list<>
        if inner_bytes.is_empty() {
            SyntaxShape::List(Box::new(SyntaxShape::Any))
        } else {
            // Recurse: the inner annotation is itself a shape name
            // (e.g. `list<list<int>>`).
            let inner_sig = parse_shape_name(working_set, &inner_bytes, inner_span);

            SyntaxShape::List(Box::new(inner_sig))
        }
    } else {
        // Starts with `list` but is neither bare nor `list<...>`.
        working_set.error(ParseError::UnknownType(span));

        SyntaxShape::List(Box::new(SyntaxShape::Any))
    }
}
|
2021-07-16 23:55:12 +02:00
|
|
|
|
allow records to have type annotations (#8914)
# Description
follow up to #8529
cleaned up version of #8892
- the original syntax is okay
```nu
def okay [rec: record] {}
```
- you can now add type annotations for fields if you know
them before hand
```nu
def okay [rec: record<name: string>] {}
```
- you can specify multiple fields
```nu
def okay [person: record<name: string age: int>] {}
# an optional comma is allowed
def okay [person: record<name: string, age: int>] {}
```
- if annotations are specified, any use of the command will be type
checked against the specified type
```nu
def unwrap [result: record<ok: bool, value: any>] {}
unwrap {ok: 2, value: "value"}
# errors with
Error: nu::parser::type_mismatch
× Type mismatch.
╭─[entry #4:1:1]
1 │ unwrap {ok: 2, value: "value"}
· ───────┬─────
· ╰── expected record<ok: bool, value: any>, found record<ok: int, value: string>
╰────
```
> here the error is in the `ok` field, since `any` is coerced into any
type
> as a result `unwrap {ok: true, value: "value"}` is okay
- the key must be a string, either quoted or unquoted
```nu
def err [rec: record<{}: list>] {}
# errors with
Error:
× `record` type annotations key not string
╭─[entry #7:1:1]
1 │ def unwrap [result: record<{}: bool, value: any>] {}
· ─┬
· ╰── must be a string
╰────
```
- a key doesn't have to have a type in which case it is assumed to be
`any`
```nu
def okay [person: record<name age>] {}
def okay [person: record<name: string age>] {}
```
- however, if you put a colon, you have to specify a type
```nu
def err [person: record<name: >] {}
# errors with
Error: nu::parser::parse_mismatch
× Parse mismatch during operation.
╭─[entry #12:1:1]
1 │ def unwrap [res: record<name: >] { $res }
· ┬
· ╰── expected type after colon
╰────
```
# User-Facing Changes
**[BREAKING CHANGES]**
- this change adds a field to `SyntaxShape::Record` so any plugins that
used it will have to update and include the field. though if you are
unsure of the type the record expects, `SyntaxShape::Record(vec![])`
will suffice
2023-04-26 15:16:55 +02:00
|
|
|
fn prepare_inner_span(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
bytes: &[u8],
|
|
|
|
span: Span,
|
|
|
|
prefix_len: usize,
|
|
|
|
) -> Option<Span> {
|
|
|
|
let start = span.start + prefix_len;
|
|
|
|
|
|
|
|
if bytes.ends_with(b">") {
|
|
|
|
let end = span.end - 1;
|
|
|
|
Some(Span::new(start, end))
|
|
|
|
} else if bytes.contains(&b'>') {
|
|
|
|
let angle_start = bytes.split(|it| it == &b'>').collect::<Vec<_>>()[0].len() + 1;
|
|
|
|
let span = Span::new(span.start + angle_start, span.end);
|
|
|
|
|
|
|
|
working_set.error(ParseError::LabeledError(
|
|
|
|
"Extra characters in the parameter name".into(),
|
|
|
|
"extra characters".into(),
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
|
|
|
|
None
|
|
|
|
} else {
|
|
|
|
working_set.error(ParseError::Unclosed(">".into(), span));
|
|
|
|
None
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-09-04 09:59:38 +02:00
|
|
|
/// Map a type-name keyword (raw bytes from the source) to a concrete `Type`
/// used for variable declarations and signatures.
///
/// Unknown names fall through to `Type::Any` without reporting an error; the
/// working set is currently unused (hence the underscore prefix).
pub fn parse_type(_working_set: &StateWorkingSet, bytes: &[u8]) -> Type {
    match bytes {
        b"binary" => Type::Binary,
        b"block" => Type::Block,
        b"bool" => Type::Bool,
        b"cellpath" => Type::CellPath,
        b"closure" => Type::Closure,
        b"date" => Type::Date,
        b"duration" => Type::Duration,
        b"error" => Type::Error,
        b"filesize" => Type::Filesize,
        // `decimal` is accepted as an alias of `float`
        b"float" | b"decimal" => Type::Float,
        b"int" => Type::Int,
        b"list" => Type::List(Box::new(Type::Any)),
        b"number" => Type::Number,
        b"range" => Type::Range,
        b"record" => Type::Record(vec![]),
        b"string" => Type::String,
        b"table" => Type::Table(vec![]), //FIXME

        _ => Type::Any,
    }
}
|
2021-07-16 08:24:46 +02:00
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
/// Parse the spans following `use`/`hide` into an `ImportPattern` expression:
/// a module head plus optional member selectors (`name`, `*`, or
/// `[a, b, ...]`).
///
/// The head is const-evaluated so it can be an expression that resolves to a
/// module name. Glob (`*`) and list (`[...]`) members are only valid as the
/// last component; a violation records `WrongImportPattern` and returns the
/// pattern built so far. All errors are recorded in `working_set` and a
/// garbage/partial expression is returned so parsing can continue.
pub fn parse_import_pattern(working_set: &mut StateWorkingSet, spans: &[Span]) -> Expression {
    let Some(head_span) = spans.get(0) else {
        working_set.error(ParseError::WrongImportPattern(
            "needs at least one component of import pattern".to_string(),
            span(spans),
        ));
        return garbage(span(spans));
    };

    let head_expr = parse_value(working_set, *head_span, &SyntaxShape::Any);

    // Const-evaluate the head to a string and try to resolve it to a module
    // that is already known to the working set.
    let (maybe_module_id, head_name) = match eval_constant(working_set, &head_expr) {
        Ok(val) => match value_as_string(val, head_expr.span) {
            Ok(s) => (working_set.find_module(s.as_bytes()), s.into_bytes()),
            Err(err) => {
                working_set.error(err);
                return garbage(span(spans));
            }
        },
        Err(err) => {
            working_set.error(err);
            return garbage(span(spans));
        }
    };

    let mut import_pattern = ImportPattern {
        head: ImportPatternHead {
            name: head_name,
            id: maybe_module_id,
            span: *head_span,
        },
        members: vec![],
        hidden: HashSet::new(),
    };

    if spans.len() > 1 {
        // Set once a glob or list member is seen; anything after it is an
        // error because those selectors must be terminal.
        let mut leaf_member_span = None;

        for tail_span in spans[1..].iter() {
            if let Some(prev_span) = leaf_member_span {
                let what = if working_set.get_span_contents(prev_span) == b"*" {
                    "glob"
                } else {
                    "list"
                };
                working_set.error(ParseError::WrongImportPattern(
                    format!(
                        "{} member can be only at the end of an import pattern",
                        what
                    ),
                    prev_span,
                ));
                return Expression {
                    expr: Expr::ImportPattern(import_pattern),
                    span: prev_span,
                    ty: Type::List(Box::new(Type::String)),
                    custom_completion: None,
                };
            }

            let tail = working_set.get_span_contents(*tail_span);

            if tail == b"*" {
                // Glob member: import everything from the module.
                import_pattern
                    .members
                    .push(ImportPatternMember::Glob { span: *tail_span });

                leaf_member_span = Some(*tail_span);
            } else if tail.starts_with(b"[") {
                // List member: `[a, b, ...]` — parse as a list of strings.
                let result = parse_list_expression(working_set, *tail_span, &SyntaxShape::String);

                let mut output = vec![];

                if let Expression {
                    expr: Expr::List(list),
                    ..
                } = result
                {
                    for expr in list {
                        let contents = working_set.get_span_contents(expr.span);
                        output.push((trim_quotes(contents).to_vec(), expr.span));
                    }

                    import_pattern
                        .members
                        .push(ImportPatternMember::List { names: output });
                } else {
                    working_set.error(ParseError::ExportNotFound(result.span));
                    return Expression {
                        expr: Expr::ImportPattern(import_pattern),
                        span: span(spans),
                        ty: Type::List(Box::new(Type::String)),
                        custom_completion: None,
                    };
                }

                leaf_member_span = Some(*tail_span);
            } else {
                // Plain name member; strip any surrounding quotes.
                let tail = trim_quotes(tail);

                import_pattern.members.push(ImportPatternMember::Name {
                    name: tail.to_vec(),
                    span: *tail_span,
                });
            }
        }
    }

    // NOTE(review): for a single-component pattern `spans[1..]` is empty —
    // confirm `span(&[])` produces a sensible fallback span here rather than
    // panicking or pointing at nothing useful.
    Expression {
        expr: Expr::ImportPattern(import_pattern),
        span: span(&spans[1..]),
        ty: Type::List(Box::new(Type::String)),
        custom_completion: None,
    }
}
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
/// Parse a variable declaration that may carry a type annotation, e.g.
/// `x`, `x: int` (two spans: `x:` then `int`).
///
/// `spans_idx` is advanced past the type span when one is consumed, so the
/// caller continues after the whole declaration. `mutable` marks `mut`
/// declarations. On success returns an `Expr::VarDecl` whose `ty` is the
/// parsed (or defaulted `any`) type; on invalid names returns garbage after
/// recording an error.
pub fn parse_var_with_opt_type(
    working_set: &mut StateWorkingSet,
    spans: &[Span],
    spans_idx: &mut usize,
    mutable: bool,
) -> Expression {
    let bytes = working_set.get_span_contents(spans[*spans_idx]).to_vec();

    // Reject names containing whitespace or any quote character outright.
    if bytes.contains(&b' ')
        || bytes.contains(&b'"')
        || bytes.contains(&b'\'')
        || bytes.contains(&b'`')
    {
        working_set.error(ParseError::VariableNotValid(spans[*spans_idx]));
        return garbage(spans[*spans_idx]);
    }

    if bytes.ends_with(b":") {
        // We end with colon, so the next span should be the type
        if *spans_idx + 1 < spans.len() {
            *spans_idx += 1;
            let type_bytes = working_set.get_span_contents(spans[*spans_idx]);

            let ty = parse_type(working_set, type_bytes);

            // Strip the trailing `:` to get the bare variable name.
            let var_name = bytes[0..(bytes.len() - 1)].to_vec();

            if !is_variable(&var_name) {
                working_set.error(ParseError::Expected(
                    "valid variable name".into(),
                    spans[*spans_idx],
                ));
                return garbage(spans[*spans_idx]);
            }

            // `*spans_idx - 1` is the name span; the declaration's expression
            // span covers both name and type.
            let id = working_set.add_variable(var_name, spans[*spans_idx - 1], ty.clone(), mutable);

            Expression {
                expr: Expr::VarDecl(id),
                span: span(&spans[*spans_idx - 1..*spans_idx + 1]),
                ty,
                custom_completion: None,
            }
        } else {
            // Trailing colon but no type span follows: still declare the
            // variable (as `any`) so later references resolve, then report
            // the missing type.
            let var_name = bytes[0..(bytes.len() - 1)].to_vec();

            if !is_variable(&var_name) {
                working_set.error(ParseError::Expected(
                    "valid variable name".into(),
                    spans[*spans_idx],
                ));
                return garbage(spans[*spans_idx]);
            }

            let id = working_set.add_variable(var_name, spans[*spans_idx], Type::Any, mutable);

            working_set.error(ParseError::MissingType(spans[*spans_idx]));
            Expression {
                expr: Expr::VarDecl(id),
                span: spans[*spans_idx],
                ty: Type::Any,
                custom_completion: None,
            }
        }
    } else {
        // No annotation at all: plain `x` declares an `any` variable.
        let var_name = bytes;

        if !is_variable(&var_name) {
            working_set.error(ParseError::Expected(
                "valid variable name".into(),
                spans[*spans_idx],
            ));
            return garbage(spans[*spans_idx]);
        }

        let id = working_set.add_variable(
            var_name,
            span(&spans[*spans_idx..*spans_idx + 1]),
            Type::Any,
            mutable,
        );

        Expression {
            expr: Expr::VarDecl(id),
            span: span(&spans[*spans_idx..*spans_idx + 1]),
            ty: Type::Any,
            custom_completion: None,
        }
    }
}
|
2021-09-09 23:47:20 +02:00
|
|
|
|
|
|
|
pub fn expand_to_cell_path(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
expression: &mut Expression,
|
|
|
|
var_id: VarId,
|
|
|
|
) {
|
2022-12-10 18:23:24 +01:00
|
|
|
trace!("parsing: expanding to cell path");
|
2021-09-09 23:47:20 +02:00
|
|
|
if let Expression {
|
|
|
|
expr: Expr::String(_),
|
|
|
|
span,
|
|
|
|
..
|
|
|
|
} = expression
|
|
|
|
{
|
|
|
|
// Re-parse the string as if it were a cell-path
|
2023-04-07 20:09:38 +02:00
|
|
|
let new_expression = parse_full_cell_path(working_set, Some(var_id), *span);
|
2021-09-09 23:47:20 +02:00
|
|
|
|
|
|
|
*expression = new_expression;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
pub fn parse_row_condition(working_set: &mut StateWorkingSet, spans: &[Span]) -> Expression {
|
2022-11-11 07:51:08 +01:00
|
|
|
let var_id = working_set.add_variable(b"$it".to_vec(), span(spans), Type::Any, false);
|
2023-04-07 20:09:38 +02:00
|
|
|
let expression = parse_math_expression(working_set, spans, Some(var_id));
|
2021-09-09 23:47:20 +02:00
|
|
|
let span = span(spans);
|
2021-11-26 04:49:03 +01:00
|
|
|
|
|
|
|
let block_id = match expression.expr {
|
|
|
|
Expr::Block(block_id) => block_id,
|
2022-11-10 09:21:49 +01:00
|
|
|
Expr::Closure(block_id) => block_id,
|
2021-11-26 04:49:03 +01:00
|
|
|
_ => {
|
|
|
|
// We have an expression, so let's convert this into a block.
|
|
|
|
let mut block = Block::new();
|
|
|
|
let mut pipeline = Pipeline::new();
|
2022-11-18 22:46:48 +01:00
|
|
|
pipeline
|
|
|
|
.elements
|
2022-11-22 19:26:13 +01:00
|
|
|
.push(PipelineElement::Expression(None, expression));
|
2021-11-26 04:49:03 +01:00
|
|
|
|
2022-02-15 20:31:14 +01:00
|
|
|
block.pipelines.push(pipeline);
|
2021-11-26 04:49:03 +01:00
|
|
|
|
|
|
|
block.signature.required_positional.push(PositionalArg {
|
|
|
|
name: "$it".into(),
|
|
|
|
desc: "row condition".into(),
|
|
|
|
shape: SyntaxShape::Any,
|
|
|
|
var_id: Some(var_id),
|
2022-03-07 21:08:56 +01:00
|
|
|
default_value: None,
|
2021-11-26 04:49:03 +01:00
|
|
|
});
|
|
|
|
|
|
|
|
working_set.add_block(block)
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
ty: Type::Bool,
|
|
|
|
span,
|
|
|
|
expr: Expr::RowCondition(block_id),
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-16 23:55:12 +02:00
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
pub fn parse_signature(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2021-09-02 10:25:22 +02:00
|
|
|
let bytes = working_set.get_span_contents(span);
|
2021-07-16 23:55:12 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let mut start = span.start;
|
|
|
|
let mut end = span.end;
|
2021-07-16 23:55:12 +02:00
|
|
|
|
2022-08-30 06:17:10 +02:00
|
|
|
let mut has_paren = false;
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if bytes.starts_with(b"[") {
|
|
|
|
start += 1;
|
2022-08-30 06:17:10 +02:00
|
|
|
} else if bytes.starts_with(b"(") {
|
|
|
|
has_paren = true;
|
|
|
|
start += 1;
|
2021-10-11 22:58:38 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"[ or (".into(),
|
|
|
|
Span::new(start, start + 1),
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-10-11 22:58:38 +02:00
|
|
|
|
2022-08-30 06:17:10 +02:00
|
|
|
if (has_paren && bytes.ends_with(b")")) || (!has_paren && bytes.ends_with(b"]")) {
|
2021-09-02 10:25:22 +02:00
|
|
|
end -= 1;
|
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Unclosed("] or )".into(), Span::new(end, end)));
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-16 23:55:12 +02:00
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
let sig = parse_signature_helper(working_set, Span::new(start, end));
|
2021-09-06 01:16:27 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::Signature(sig),
|
|
|
|
span,
|
|
|
|
ty: Type::Signature,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-09-06 01:16:27 +02:00
|
|
|
}
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
pub fn parse_signature_helper(working_set: &mut StateWorkingSet, span: Span) -> Box<Signature> {
|
2022-03-07 21:08:56 +01:00
|
|
|
#[allow(clippy::enum_variant_names)]
|
2021-09-06 01:16:27 +02:00
|
|
|
enum ParseMode {
|
|
|
|
ArgMode,
|
2022-12-31 12:18:53 +01:00
|
|
|
AfterCommaArgMode,
|
2021-09-06 01:16:27 +02:00
|
|
|
TypeMode,
|
2022-03-07 21:08:56 +01:00
|
|
|
DefaultValueMode,
|
2021-09-06 01:16:27 +02:00
|
|
|
}
|
|
|
|
|
2022-03-07 21:08:56 +01:00
|
|
|
#[derive(Debug)]
|
2021-09-06 01:16:27 +02:00
|
|
|
enum Arg {
|
|
|
|
Positional(PositionalArg, bool), // bool - required
|
2022-03-07 17:44:27 +01:00
|
|
|
RestPositional(PositionalArg),
|
2021-09-06 01:16:27 +02:00
|
|
|
Flag(Flag),
|
|
|
|
}
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let source = working_set.get_span_contents(span);
|
2021-07-16 23:55:12 +02:00
|
|
|
|
2023-03-24 12:54:06 +01:00
|
|
|
let (output, err) = lex_signature(
|
2022-03-07 21:08:56 +01:00
|
|
|
source,
|
|
|
|
span.start,
|
2022-12-31 12:18:53 +01:00
|
|
|
&[b'\n', b'\r'],
|
|
|
|
&[b':', b'=', b','],
|
2022-03-07 21:08:56 +01:00
|
|
|
false,
|
|
|
|
);
|
2023-04-07 02:35:45 +02:00
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err);
|
|
|
|
}
|
2021-07-16 23:55:12 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let mut args: Vec<Arg> = vec![];
|
|
|
|
let mut parse_mode = ParseMode::ArgMode;
|
2021-07-16 23:55:12 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
for token in &output {
|
|
|
|
match token {
|
|
|
|
Token {
|
|
|
|
contents: crate::TokenContents::Item,
|
|
|
|
span,
|
|
|
|
} => {
|
|
|
|
let span = *span;
|
2023-04-07 02:35:45 +02:00
|
|
|
let contents = working_set.get_span_contents(span).to_vec();
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2022-12-28 00:00:44 +01:00
|
|
|
// The : symbol separates types
|
2021-09-02 10:25:22 +02:00
|
|
|
if contents == b":" {
|
|
|
|
match parse_mode {
|
|
|
|
ParseMode::ArgMode => {
|
|
|
|
parse_mode = ParseMode::TypeMode;
|
2021-07-16 23:55:12 +02:00
|
|
|
}
|
2022-12-31 12:18:53 +01:00
|
|
|
ParseMode::AfterCommaArgMode => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set
|
|
|
|
.error(ParseError::Expected("parameter or flag".into(), span));
|
2022-12-31 12:18:53 +01:00
|
|
|
}
|
2022-03-07 21:08:56 +01:00
|
|
|
ParseMode::TypeMode | ParseMode::DefaultValueMode => {
|
2021-09-02 10:25:22 +02:00
|
|
|
// We're seeing two types for the same thing for some reason, error
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("type".into(), span));
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
}
|
2022-12-28 00:00:44 +01:00
|
|
|
}
|
|
|
|
// The = symbol separates a variable from its default value
|
|
|
|
else if contents == b"=" {
|
2022-03-07 21:08:56 +01:00
|
|
|
match parse_mode {
|
|
|
|
ParseMode::ArgMode | ParseMode::TypeMode => {
|
|
|
|
parse_mode = ParseMode::DefaultValueMode;
|
|
|
|
}
|
2022-12-31 12:18:53 +01:00
|
|
|
ParseMode::AfterCommaArgMode => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set
|
|
|
|
.error(ParseError::Expected("parameter or flag".into(), span));
|
2022-12-31 12:18:53 +01:00
|
|
|
}
|
2022-03-07 21:08:56 +01:00
|
|
|
ParseMode::DefaultValueMode => {
|
|
|
|
// We're seeing two default values for some reason, error
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("default value".into(), span));
|
2022-03-07 21:08:56 +01:00
|
|
|
}
|
|
|
|
}
|
2022-12-31 12:18:53 +01:00
|
|
|
}
|
|
|
|
// The , symbol separates params only
|
|
|
|
else if contents == b"," {
|
|
|
|
match parse_mode {
|
|
|
|
ParseMode::ArgMode => parse_mode = ParseMode::AfterCommaArgMode,
|
|
|
|
ParseMode::AfterCommaArgMode => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set
|
|
|
|
.error(ParseError::Expected("parameter or flag".into(), span));
|
2022-12-31 12:18:53 +01:00
|
|
|
}
|
|
|
|
ParseMode::TypeMode => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("type".into(), span));
|
2022-12-31 12:18:53 +01:00
|
|
|
}
|
|
|
|
ParseMode::DefaultValueMode => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("default value".into(), span));
|
2022-12-31 12:18:53 +01:00
|
|
|
}
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
|
|
|
match parse_mode {
|
2022-12-31 12:18:53 +01:00
|
|
|
ParseMode::ArgMode | ParseMode::AfterCommaArgMode => {
|
|
|
|
// Long flag with optional short form following with no whitespace, e.g. --output, --age(-a)
|
2021-09-02 10:25:22 +02:00
|
|
|
if contents.starts_with(b"--") && contents.len() > 2 {
|
2022-12-28 00:00:44 +01:00
|
|
|
// Split the long flag from the short flag with the ( character as delimiter.
|
|
|
|
// The trailing ) is removed further down.
|
2021-09-02 10:25:22 +02:00
|
|
|
let flags: Vec<_> =
|
|
|
|
contents.split(|x| x == &b'(').map(|x| x.to_vec()).collect();
|
|
|
|
|
2021-10-13 19:53:27 +02:00
|
|
|
let long = String::from_utf8_lossy(&flags[0][2..]).to_string();
|
2022-07-27 04:08:54 +02:00
|
|
|
let mut variable_name = flags[0][2..].to_vec();
|
|
|
|
// Replace the '-' in a variable name with '_'
|
|
|
|
(0..variable_name.len()).for_each(|idx| {
|
|
|
|
if variable_name[idx] == b'-' {
|
|
|
|
variable_name[idx] = b'_';
|
|
|
|
}
|
|
|
|
});
|
|
|
|
|
|
|
|
if !is_variable(&variable_name) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"valid variable name for this long flag".into(),
|
|
|
|
span,
|
|
|
|
))
|
2022-07-27 04:08:54 +02:00
|
|
|
}
|
|
|
|
|
2022-03-09 10:42:19 +01:00
|
|
|
let var_id =
|
2022-11-11 07:51:08 +01:00
|
|
|
working_set.add_variable(variable_name, span, Type::Any, false);
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2022-12-28 00:00:44 +01:00
|
|
|
// If there's no short flag, exit now. Otherwise, parse it.
|
2021-09-02 10:25:22 +02:00
|
|
|
if flags.len() == 1 {
|
|
|
|
args.push(Arg::Flag(Flag {
|
|
|
|
arg: None,
|
|
|
|
desc: String::new(),
|
|
|
|
long,
|
|
|
|
short: None,
|
|
|
|
required: false,
|
|
|
|
var_id: Some(var_id),
|
2022-03-07 21:08:56 +01:00
|
|
|
default_value: None,
|
2021-09-02 10:25:22 +02:00
|
|
|
}));
|
2022-07-10 10:32:52 +02:00
|
|
|
} else if flags.len() >= 3 {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"only one short flag alternative".into(),
|
|
|
|
span,
|
|
|
|
));
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
|
|
|
let short_flag = &flags[1];
|
|
|
|
let short_flag = if !short_flag.starts_with(b"-")
|
|
|
|
|| !short_flag.ends_with(b")")
|
|
|
|
{
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"short flag alternative for the long flag".into(),
|
|
|
|
span,
|
|
|
|
));
|
2021-09-02 10:25:22 +02:00
|
|
|
short_flag
|
2021-07-17 20:52:50 +02:00
|
|
|
} else {
|
2022-12-28 00:00:44 +01:00
|
|
|
// Obtain the flag's name by removing the starting - and trailing )
|
2021-09-02 10:25:22 +02:00
|
|
|
&short_flag[1..(short_flag.len() - 1)]
|
|
|
|
};
|
2022-12-28 00:00:44 +01:00
|
|
|
// Note that it is currently possible to make a short flag with non-alphanumeric characters,
|
|
|
|
// like -).
|
2021-07-17 00:39:30 +02:00
|
|
|
|
|
|
|
let short_flag =
|
|
|
|
String::from_utf8_lossy(short_flag).to_string();
|
|
|
|
let chars: Vec<char> = short_flag.chars().collect();
|
2021-10-13 19:53:27 +02:00
|
|
|
let long = String::from_utf8_lossy(&flags[0][2..]).to_string();
|
2022-07-27 09:27:28 +02:00
|
|
|
let mut variable_name = flags[0][2..].to_vec();
|
|
|
|
|
|
|
|
(0..variable_name.len()).for_each(|idx| {
|
|
|
|
if variable_name[idx] == b'-' {
|
|
|
|
variable_name[idx] = b'_';
|
|
|
|
}
|
|
|
|
});
|
|
|
|
|
2022-07-27 04:08:54 +02:00
|
|
|
if !is_variable(&variable_name) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"valid variable name for this short flag".into(),
|
|
|
|
span,
|
|
|
|
))
|
2022-07-27 04:08:54 +02:00
|
|
|
}
|
|
|
|
|
2022-11-11 07:51:08 +01:00
|
|
|
let var_id = working_set.add_variable(
|
|
|
|
variable_name,
|
|
|
|
span,
|
|
|
|
Type::Any,
|
|
|
|
false,
|
|
|
|
);
|
2021-07-17 00:39:30 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if chars.len() == 1 {
|
2021-07-17 00:39:30 +02:00
|
|
|
args.push(Arg::Flag(Flag {
|
|
|
|
arg: None,
|
|
|
|
desc: String::new(),
|
2021-09-02 10:25:22 +02:00
|
|
|
long,
|
2021-07-17 00:39:30 +02:00
|
|
|
short: Some(chars[0]),
|
|
|
|
required: false,
|
2021-07-23 23:19:30 +02:00
|
|
|
var_id: Some(var_id),
|
2022-03-07 21:08:56 +01:00
|
|
|
default_value: None,
|
2021-07-17 00:39:30 +02:00
|
|
|
}));
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set
|
|
|
|
.error(ParseError::Expected("short flag".into(), span));
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
}
|
2022-12-31 12:18:53 +01:00
|
|
|
parse_mode = ParseMode::ArgMode;
|
2022-12-28 00:00:44 +01:00
|
|
|
}
|
|
|
|
// Mandatory short flag, e.g. -e (must be one character)
|
|
|
|
else if contents.starts_with(b"-") && contents.len() > 1 {
|
2021-09-02 10:25:22 +02:00
|
|
|
let short_flag = &contents[1..];
|
|
|
|
let short_flag = String::from_utf8_lossy(short_flag).to_string();
|
|
|
|
let chars: Vec<char> = short_flag.chars().collect();
|
|
|
|
|
|
|
|
if chars.len() > 1 {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set
|
|
|
|
.error(ParseError::Expected("short flag".into(), span));
|
2022-01-06 22:06:54 +01:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2022-01-06 22:06:54 +01:00
|
|
|
let mut encoded_var_name = vec![0u8; 4];
|
|
|
|
let len = chars[0].encode_utf8(&mut encoded_var_name).len();
|
|
|
|
let variable_name = encoded_var_name[0..len].to_vec();
|
2022-12-28 00:00:44 +01:00
|
|
|
|
2022-07-27 04:08:54 +02:00
|
|
|
if !is_variable(&variable_name) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"valid variable name for this short flag".into(),
|
|
|
|
span,
|
|
|
|
))
|
2022-07-27 04:08:54 +02:00
|
|
|
}
|
|
|
|
|
2022-03-09 10:42:19 +01:00
|
|
|
let var_id =
|
2022-11-11 07:51:08 +01:00
|
|
|
working_set.add_variable(variable_name, span, Type::Any, false);
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2022-01-06 22:06:54 +01:00
|
|
|
args.push(Arg::Flag(Flag {
|
|
|
|
arg: None,
|
|
|
|
desc: String::new(),
|
|
|
|
long: String::new(),
|
|
|
|
short: Some(chars[0]),
|
|
|
|
required: false,
|
|
|
|
var_id: Some(var_id),
|
2022-03-07 21:08:56 +01:00
|
|
|
default_value: None,
|
2022-01-06 22:06:54 +01:00
|
|
|
}));
|
2022-12-31 12:18:53 +01:00
|
|
|
parse_mode = ParseMode::ArgMode;
|
2022-12-28 00:00:44 +01:00
|
|
|
}
|
2022-12-31 12:18:53 +01:00
|
|
|
// Short flag alias for long flag, e.g. --b (-a)
|
|
|
|
// This is the same as the short flag in --b(-a)
|
2022-12-28 00:00:44 +01:00
|
|
|
else if contents.starts_with(b"(-") {
|
2022-12-31 12:18:53 +01:00
|
|
|
if matches!(parse_mode, ParseMode::AfterCommaArgMode) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"parameter or flag".into(),
|
|
|
|
span,
|
|
|
|
));
|
2022-12-31 12:18:53 +01:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
let short_flag = &contents[2..];
|
|
|
|
|
|
|
|
let short_flag = if !short_flag.ends_with(b")") {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set
|
|
|
|
.error(ParseError::Expected("short flag".into(), span));
|
2021-09-02 10:25:22 +02:00
|
|
|
short_flag
|
|
|
|
} else {
|
|
|
|
&short_flag[..(short_flag.len() - 1)]
|
|
|
|
};
|
2021-07-17 20:52:50 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let short_flag = String::from_utf8_lossy(short_flag).to_string();
|
|
|
|
let chars: Vec<char> = short_flag.chars().collect();
|
2021-07-17 20:52:50 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if chars.len() == 1 {
|
|
|
|
match args.last_mut() {
|
|
|
|
Some(Arg::Flag(flag)) => {
|
|
|
|
if flag.short.is_some() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"one short flag".into(),
|
|
|
|
span,
|
|
|
|
));
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
|
|
|
flag.short = Some(chars[0]);
|
2021-07-17 20:52:50 +02:00
|
|
|
}
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
_ => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"unknown flag".into(),
|
|
|
|
span,
|
|
|
|
));
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-17 20:52:50 +02:00
|
|
|
}
|
2021-07-30 00:56:51 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set
|
|
|
|
.error(ParseError::Expected("short flag".into(), span));
|
2021-07-16 23:55:12 +02:00
|
|
|
}
|
2022-12-28 00:00:44 +01:00
|
|
|
}
|
|
|
|
// Positional arg, optional
|
|
|
|
else if contents.ends_with(b"?") {
|
2021-09-02 10:25:22 +02:00
|
|
|
let contents: Vec<_> = contents[..(contents.len() - 1)].into();
|
|
|
|
let name = String::from_utf8_lossy(&contents).to_string();
|
|
|
|
|
2022-07-27 04:08:54 +02:00
|
|
|
if !is_variable(&contents) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"valid variable name for this optional parameter".into(),
|
|
|
|
span,
|
|
|
|
))
|
2022-07-27 04:08:54 +02:00
|
|
|
}
|
|
|
|
|
2022-11-11 07:51:08 +01:00
|
|
|
let var_id =
|
|
|
|
working_set.add_variable(contents, span, Type::Any, false);
|
2021-09-02 10:25:22 +02:00
|
|
|
|
|
|
|
args.push(Arg::Positional(
|
|
|
|
PositionalArg {
|
|
|
|
desc: String::new(),
|
|
|
|
name,
|
|
|
|
shape: SyntaxShape::Any,
|
|
|
|
var_id: Some(var_id),
|
2022-03-07 21:08:56 +01:00
|
|
|
default_value: None,
|
2021-09-02 10:25:22 +02:00
|
|
|
},
|
|
|
|
false,
|
2022-12-31 12:18:53 +01:00
|
|
|
));
|
|
|
|
parse_mode = ParseMode::ArgMode;
|
2022-12-28 00:00:44 +01:00
|
|
|
}
|
|
|
|
// Rest param
|
|
|
|
else if let Some(contents) = contents.strip_prefix(b"...") {
|
2021-09-07 05:37:02 +02:00
|
|
|
let name = String::from_utf8_lossy(contents).to_string();
|
|
|
|
let contents_vec: Vec<u8> = contents.to_vec();
|
2022-12-28 00:00:44 +01:00
|
|
|
|
2022-07-27 04:08:54 +02:00
|
|
|
if !is_variable(&contents_vec) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"valid variable name for this rest parameter".into(),
|
|
|
|
span,
|
|
|
|
))
|
2022-07-27 04:08:54 +02:00
|
|
|
}
|
2021-09-07 05:37:02 +02:00
|
|
|
|
2022-03-09 10:42:19 +01:00
|
|
|
let var_id =
|
2022-11-11 07:51:08 +01:00
|
|
|
working_set.add_variable(contents_vec, span, Type::Any, false);
|
2021-09-07 05:37:02 +02:00
|
|
|
|
2022-03-07 17:44:27 +01:00
|
|
|
args.push(Arg::RestPositional(PositionalArg {
|
|
|
|
desc: String::new(),
|
|
|
|
name,
|
|
|
|
shape: SyntaxShape::Any,
|
|
|
|
var_id: Some(var_id),
|
2022-03-07 21:08:56 +01:00
|
|
|
default_value: None,
|
2022-03-07 17:44:27 +01:00
|
|
|
}));
|
2022-12-31 12:18:53 +01:00
|
|
|
parse_mode = ParseMode::ArgMode;
|
2022-12-28 00:00:44 +01:00
|
|
|
}
|
|
|
|
// Normal param
|
|
|
|
else {
|
2023-04-07 02:35:45 +02:00
|
|
|
let name = String::from_utf8_lossy(&contents).to_string();
|
2021-09-02 10:25:22 +02:00
|
|
|
let contents_vec = contents.to_vec();
|
|
|
|
|
2022-07-27 04:08:54 +02:00
|
|
|
if !is_variable(&contents_vec) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"valid variable name for this parameter".into(),
|
|
|
|
span,
|
|
|
|
))
|
2022-07-27 04:08:54 +02:00
|
|
|
}
|
|
|
|
|
2022-03-09 10:42:19 +01:00
|
|
|
let var_id =
|
2022-11-11 07:51:08 +01:00
|
|
|
working_set.add_variable(contents_vec, span, Type::Any, false);
|
2021-09-02 10:25:22 +02:00
|
|
|
|
|
|
|
// Positional arg, required
|
|
|
|
args.push(Arg::Positional(
|
|
|
|
PositionalArg {
|
|
|
|
desc: String::new(),
|
|
|
|
name,
|
|
|
|
shape: SyntaxShape::Any,
|
|
|
|
var_id: Some(var_id),
|
2022-03-07 21:08:56 +01:00
|
|
|
default_value: None,
|
2021-09-02 10:25:22 +02:00
|
|
|
},
|
|
|
|
true,
|
2022-12-31 12:18:53 +01:00
|
|
|
));
|
|
|
|
parse_mode = ParseMode::ArgMode;
|
2021-07-16 23:55:12 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
ParseMode::TypeMode => {
|
|
|
|
if let Some(last) = args.last_mut() {
|
2023-04-07 02:35:45 +02:00
|
|
|
let syntax_shape = parse_shape_name(working_set, &contents, span);
|
2021-10-12 19:44:23 +02:00
|
|
|
//TODO check if we're replacing a custom parameter already
|
2021-09-02 10:25:22 +02:00
|
|
|
match last {
|
|
|
|
Arg::Positional(PositionalArg { shape, var_id, .. }, ..) => {
|
|
|
|
working_set.set_variable_type(var_id.expect("internal error: all custom parameters must have var_ids"), syntax_shape.to_type());
|
|
|
|
*shape = syntax_shape;
|
|
|
|
}
|
2022-03-07 17:44:27 +01:00
|
|
|
Arg::RestPositional(PositionalArg {
|
|
|
|
shape, var_id, ..
|
|
|
|
}) => {
|
|
|
|
working_set.set_variable_type(var_id.expect("internal error: all custom parameters must have var_ids"), syntax_shape.to_type());
|
|
|
|
*shape = syntax_shape;
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
Arg::Flag(Flag { arg, var_id, .. }) => {
|
2021-10-12 06:49:17 +02:00
|
|
|
// Flags with a boolean type are just present/not-present switches
|
|
|
|
if syntax_shape != SyntaxShape::Boolean {
|
|
|
|
working_set.set_variable_type(var_id.expect("internal error: all custom parameters must have var_ids"), syntax_shape.to_type());
|
|
|
|
*arg = Some(syntax_shape)
|
|
|
|
}
|
2021-07-16 23:55:12 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
parse_mode = ParseMode::ArgMode;
|
2021-07-16 23:55:12 +02:00
|
|
|
}
|
2022-03-07 21:08:56 +01:00
|
|
|
ParseMode::DefaultValueMode => {
|
|
|
|
if let Some(last) = args.last_mut() {
|
2023-04-07 20:09:38 +02:00
|
|
|
let expression = parse_value(working_set, span, &SyntaxShape::Any);
|
2022-03-07 21:08:56 +01:00
|
|
|
|
|
|
|
//TODO check if we're replacing a custom parameter already
|
|
|
|
match last {
|
|
|
|
Arg::Positional(
|
|
|
|
PositionalArg {
|
|
|
|
shape,
|
|
|
|
var_id,
|
|
|
|
default_value,
|
|
|
|
..
|
|
|
|
},
|
|
|
|
required,
|
|
|
|
) => {
|
|
|
|
let var_id = var_id.expect("internal error: all custom parameters must have var_ids");
|
2022-03-09 10:42:19 +01:00
|
|
|
let var_type = &working_set.get_variable(var_id).ty;
|
2022-03-07 21:08:56 +01:00
|
|
|
match var_type {
|
2022-04-07 06:34:09 +02:00
|
|
|
Type::Any => {
|
2022-03-07 21:08:56 +01:00
|
|
|
working_set.set_variable_type(
|
|
|
|
var_id,
|
|
|
|
expression.ty.clone(),
|
|
|
|
);
|
|
|
|
}
|
allow records to have type annotations (#8914)
# Description
follow up to #8529
cleaned up version of #8892
- the original syntax is okay
```nu
def okay [rec: record] {}
```
- you can now add type annotations for fields if you know
them before hand
```nu
def okay [rec: record<name: string>] {}
```
- you can specify multiple fields
```nu
def okay [person: record<name: string age: int>] {}
# an optional comma is allowed
def okay [person: record<name: string, age: int>] {}
```
- if annotations are specified, any use of the command will be type
checked against the specified type
```nu
def unwrap [result: record<ok: bool, value: any>] {}
unwrap {ok: 2, value: "value"}
# errors with
Error: nu::parser::type_mismatch
× Type mismatch.
╭─[entry #4:1:1]
1 │ unwrap {ok: 2, value: "value"}
· ───────┬─────
· ╰── expected record<ok: bool, value: any>, found record<ok: int, value: string>
╰────
```
> here the error is in the `ok` field, since `any` is coerced into any
type
> as a result `unwrap {ok: true, value: "value"}` is okay
- the key must be a string, either quoted or unquoted
```nu
def err [rec: record<{}: list>] {}
# errors with
Error:
× `record` type annotations key not string
╭─[entry #7:1:1]
1 │ def unwrap [result: record<{}: bool, value: any>] {}
· ─┬
· ╰── must be a string
╰────
```
- a key doesn't have to have a type in which case it is assumed to be
`any`
```nu
def okay [person: record<name age>] {}
def okay [person: record<name: string age>] {}
```
- however, if you put a colon, you have to specify a type
```nu
def err [person: record<name: >] {}
# errors with
Error: nu::parser::parse_mismatch
× Parse mismatch during operation.
╭─[entry #12:1:1]
1 │ def unwrap [res: record<name: >] { $res }
· ┬
· ╰── expected type after colon
╰────
```
# User-Facing Changes
**[BREAKING CHANGES]**
- this change adds a field to `SyntaxShape::Record` so any plugins that
used it will have to update and include the field. though if you are
unsure of the type the record expects, `SyntaxShape::Record(vec![])`
will suffice
2023-04-26 15:16:55 +02:00
|
|
|
_ => {
|
|
|
|
if !type_compatible(var_type, &expression.ty) {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(
|
|
|
|
ParseError::AssignmentMismatch(
|
2022-03-07 21:08:56 +01:00
|
|
|
"Default value wrong type".into(),
|
2023-04-07 02:35:45 +02:00
|
|
|
format!(
|
allow records to have type annotations (#8914)
# Description
follow up to #8529
cleaned up version of #8892
- the original syntax is okay
```nu
def okay [rec: record] {}
```
- you can now add type annotations for fields if you know
them before hand
```nu
def okay [rec: record<name: string>] {}
```
- you can specify multiple fields
```nu
def okay [person: record<name: string age: int>] {}
# an optional comma is allowed
def okay [person: record<name: string, age: int>] {}
```
- if annotations are specified, any use of the command will be type
checked against the specified type
```nu
def unwrap [result: record<ok: bool, value: any>] {}
unwrap {ok: 2, value: "value"}
# errors with
Error: nu::parser::type_mismatch
× Type mismatch.
╭─[entry #4:1:1]
1 │ unwrap {ok: 2, value: "value"}
· ───────┬─────
· ╰── expected record<ok: bool, value: any>, found record<ok: int, value: string>
╰────
```
> here the error is in the `ok` field, since `any` is coerced into any
type
> as a result `unwrap {ok: true, value: "value"}` is okay
- the key must be a string, either quoted or unquoted
```nu
def err [rec: record<{}: list>] {}
# errors with
Error:
× `record` type annotations key not string
╭─[entry #7:1:1]
1 │ def unwrap [result: record<{}: bool, value: any>] {}
· ─┬
· ╰── must be a string
╰────
```
- a key doesn't have to have a type in which case it is assumed to be
`any`
```nu
def okay [person: record<name age>] {}
def okay [person: record<name: string age>] {}
```
- however, if you put a colon, you have to specify a type
```nu
def err [person: record<name: >] {}
# errors with
Error: nu::parser::parse_mismatch
× Parse mismatch during operation.
╭─[entry #12:1:1]
1 │ def unwrap [res: record<name: >] { $res }
· ┬
· ╰── expected type after colon
╰────
```
# User-Facing Changes
**[BREAKING CHANGES]**
- this change adds a field to `SyntaxShape::Record` so any plugins that
used it will have to update and include the field. though if you are
unsure of the type the record expects, `SyntaxShape::Record(vec![])`
will suffice
2023-04-26 15:16:55 +02:00
|
|
|
"expected default value to be `{var_type}`"
|
2023-04-07 02:35:45 +02:00
|
|
|
),
|
2022-03-07 21:08:56 +01:00
|
|
|
expression.span,
|
2023-04-07 02:35:45 +02:00
|
|
|
),
|
|
|
|
)
|
2022-03-07 21:08:56 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2023-04-26 16:14:02 +02:00
|
|
|
|
|
|
|
*default_value = if let Ok(constant) =
|
|
|
|
eval_constant(working_set, &expression)
|
|
|
|
{
|
|
|
|
Some(constant)
|
|
|
|
} else {
|
|
|
|
working_set.error(ParseError::NonConstantDefaultValue(
|
|
|
|
expression.span,
|
|
|
|
));
|
|
|
|
None
|
|
|
|
};
|
|
|
|
|
2022-03-07 21:08:56 +01:00
|
|
|
*shape = expression.ty.to_shape();
|
|
|
|
*required = false;
|
|
|
|
}
|
|
|
|
Arg::RestPositional(..) => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::AssignmentMismatch(
|
|
|
|
"Rest parameter was given a default value".into(),
|
|
|
|
"can't have default value".into(),
|
|
|
|
expression.span,
|
|
|
|
))
|
2022-03-07 21:08:56 +01:00
|
|
|
}
|
|
|
|
Arg::Flag(Flag {
|
|
|
|
arg,
|
|
|
|
var_id,
|
|
|
|
default_value,
|
|
|
|
..
|
|
|
|
}) => {
|
2023-05-03 23:09:36 +02:00
|
|
|
let expression_span = expression.span;
|
|
|
|
|
|
|
|
*default_value = if let Ok(value) =
|
|
|
|
eval_constant(working_set, &expression)
|
|
|
|
{
|
|
|
|
Some(value)
|
|
|
|
} else {
|
|
|
|
working_set.error(ParseError::NonConstantDefaultValue(
|
|
|
|
expression_span,
|
|
|
|
));
|
|
|
|
None
|
|
|
|
};
|
|
|
|
|
2022-03-07 21:08:56 +01:00
|
|
|
let var_id = var_id.expect("internal error: all custom parameters must have var_ids");
|
2022-03-09 10:42:19 +01:00
|
|
|
let var_type = &working_set.get_variable(var_id).ty;
|
2022-03-07 21:08:56 +01:00
|
|
|
let expression_ty = expression.ty.clone();
|
|
|
|
|
|
|
|
// Flags with a boolean type are just present/not-present switches
|
|
|
|
if var_type != &Type::Bool {
|
|
|
|
match var_type {
|
2022-04-07 06:34:09 +02:00
|
|
|
Type::Any => {
|
2022-03-07 21:08:56 +01:00
|
|
|
*arg = Some(expression_ty.to_shape());
|
|
|
|
working_set
|
|
|
|
.set_variable_type(var_id, expression_ty);
|
|
|
|
}
|
|
|
|
t => {
|
|
|
|
if t != &expression_ty {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(
|
|
|
|
ParseError::AssignmentMismatch(
|
2022-12-31 12:18:53 +01:00
|
|
|
"Default value is the wrong type"
|
|
|
|
.into(),
|
|
|
|
format!(
|
allow records to have type annotations (#8914)
# Description
follow up to #8529
cleaned up version of #8892
- the original syntax is okay
```nu
def okay [rec: record] {}
```
- you can now add type annotations for fields if you know
them before hand
```nu
def okay [rec: record<name: string>] {}
```
- you can specify multiple fields
```nu
def okay [person: record<name: string age: int>] {}
# an optional comma is allowed
def okay [person: record<name: string, age: int>] {}
```
- if annotations are specified, any use of the command will be type
checked against the specified type
```nu
def unwrap [result: record<ok: bool, value: any>] {}
unwrap {ok: 2, value: "value"}
# errors with
Error: nu::parser::type_mismatch
× Type mismatch.
╭─[entry #4:1:1]
1 │ unwrap {ok: 2, value: "value"}
· ───────┬─────
· ╰── expected record<ok: bool, value: any>, found record<ok: int, value: string>
╰────
```
> here the error is in the `ok` field, since `any` is coerced into any
type
> as a result `unwrap {ok: true, value: "value"}` is okay
- the key must be a string, either quoted or unquoted
```nu
def err [rec: record<{}: list>] {}
# errors with
Error:
× `record` type annotations key not string
╭─[entry #7:1:1]
1 │ def unwrap [result: record<{}: bool, value: any>] {}
· ─┬
· ╰── must be a string
╰────
```
- a key doesn't have to have a type in which case it is assumed to be
`any`
```nu
def okay [person: record<name age>] {}
def okay [person: record<name: string age>] {}
```
- however, if you put a colon, you have to specify a type
```nu
def err [person: record<name: >] {}
# errors with
Error: nu::parser::parse_mismatch
× Parse mismatch during operation.
╭─[entry #12:1:1]
1 │ def unwrap [res: record<name: >] { $res }
· ┬
· ╰── expected type after colon
╰────
```
# User-Facing Changes
**[BREAKING CHANGES]**
- this change adds a field to `SyntaxShape::Record` so any plugins that
used it will have to update and include the field. though if you are
unsure of the type the record expects, `SyntaxShape::Record(vec![])`
will suffice
2023-04-26 15:16:55 +02:00
|
|
|
"expected default value to be `{t}`"
|
2022-12-31 12:18:53 +01:00
|
|
|
),
|
2022-03-07 21:08:56 +01:00
|
|
|
expression_span,
|
2023-04-07 02:35:45 +02:00
|
|
|
),
|
|
|
|
)
|
2022-03-07 21:08:56 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
parse_mode = ParseMode::ArgMode;
|
|
|
|
}
|
2021-07-16 23:55:12 +02:00
|
|
|
}
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
Token {
|
|
|
|
contents: crate::TokenContents::Comment,
|
|
|
|
span,
|
|
|
|
} => {
|
2022-12-03 10:44:12 +01:00
|
|
|
let contents = working_set.get_span_contents(Span::new(span.start + 1, span.end));
|
2021-09-02 10:25:22 +02:00
|
|
|
|
|
|
|
let mut contents = String::from_utf8_lossy(contents).to_string();
|
|
|
|
contents = contents.trim().into();
|
|
|
|
|
|
|
|
if let Some(last) = args.last_mut() {
|
|
|
|
match last {
|
|
|
|
Arg::Flag(flag) => {
|
|
|
|
if !flag.desc.is_empty() {
|
|
|
|
flag.desc.push('\n');
|
2021-07-17 00:31:36 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
flag.desc.push_str(&contents);
|
|
|
|
}
|
|
|
|
Arg::Positional(positional, ..) => {
|
|
|
|
if !positional.desc.is_empty() {
|
|
|
|
positional.desc.push('\n');
|
2021-07-17 00:31:36 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
positional.desc.push_str(&contents);
|
2021-07-17 00:31:36 +02:00
|
|
|
}
|
2022-03-07 17:44:27 +01:00
|
|
|
Arg::RestPositional(positional) => {
|
|
|
|
if !positional.desc.is_empty() {
|
|
|
|
positional.desc.push('\n');
|
|
|
|
}
|
|
|
|
positional.desc.push_str(&contents);
|
|
|
|
}
|
2021-07-17 00:31:36 +02:00
|
|
|
}
|
|
|
|
}
|
2021-07-16 23:55:12 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
_ => {}
|
2021-07-16 23:55:12 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-16 23:55:12 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let mut sig = Signature::new(String::new());
|
|
|
|
|
|
|
|
for arg in args {
|
|
|
|
match arg {
|
|
|
|
Arg::Positional(positional, required) => {
|
2021-09-07 05:37:02 +02:00
|
|
|
if required {
|
2022-03-07 21:08:56 +01:00
|
|
|
if !sig.optional_positional.is_empty() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::RequiredAfterOptional(
|
|
|
|
positional.name.clone(),
|
|
|
|
span,
|
|
|
|
))
|
2022-03-07 21:08:56 +01:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
sig.required_positional.push(positional)
|
|
|
|
} else {
|
|
|
|
sig.optional_positional.push(positional)
|
2021-07-17 00:53:45 +02:00
|
|
|
}
|
2021-07-16 23:55:12 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
Arg::Flag(flag) => sig.named.push(flag),
|
2022-03-07 17:44:27 +01:00
|
|
|
Arg::RestPositional(positional) => {
|
|
|
|
if positional.name.is_empty() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::RestNeedsName(span))
|
2022-03-07 17:44:27 +01:00
|
|
|
} else if sig.rest_positional.is_none() {
|
|
|
|
sig.rest_positional = Some(PositionalArg {
|
|
|
|
name: positional.name,
|
|
|
|
..positional
|
|
|
|
})
|
|
|
|
} else {
|
|
|
|
// Too many rest params
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::MultipleRestParams(span))
|
2022-03-07 17:44:27 +01:00
|
|
|
}
|
|
|
|
}
|
2021-07-16 23:55:12 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Box::new(sig)
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-08 09:49:17 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
pub fn parse_list_expression(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
span: Span,
|
|
|
|
element_shape: &SyntaxShape,
|
2023-04-07 02:35:45 +02:00
|
|
|
) -> Expression {
|
2021-09-02 10:25:22 +02:00
|
|
|
let bytes = working_set.get_span_contents(span);
|
2021-07-08 09:49:17 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let mut start = span.start;
|
|
|
|
let mut end = span.end;
|
2021-07-08 09:49:17 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if bytes.starts_with(b"[") {
|
|
|
|
start += 1;
|
|
|
|
}
|
|
|
|
if bytes.ends_with(b"]") {
|
|
|
|
end -= 1;
|
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Unclosed("]".into(), Span::new(end, end)));
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-08 09:49:17 +02:00
|
|
|
|
2022-12-03 10:44:12 +01:00
|
|
|
let inner_span = Span::new(start, end);
|
2022-01-03 04:18:23 +01:00
|
|
|
let source = working_set.get_span_contents(inner_span);
|
2021-07-08 09:49:17 +02:00
|
|
|
|
2022-01-03 04:18:23 +01:00
|
|
|
let (output, err) = lex(source, inner_span.start, &[b'\n', b'\r', b','], &[], true);
|
2023-04-07 02:35:45 +02:00
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err)
|
|
|
|
}
|
2021-07-08 09:49:17 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let (output, err) = lite_parse(&output);
|
2023-04-07 02:35:45 +02:00
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err)
|
|
|
|
}
|
2021-07-08 23:45:56 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let mut args = vec![];
|
2021-08-17 02:26:05 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let mut contained_type: Option<Type> = None;
|
2021-07-08 09:49:17 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if !output.block.is_empty() {
|
|
|
|
for arg in &output.block[0].commands {
|
|
|
|
let mut spans_idx = 0;
|
2021-07-08 23:45:56 +02:00
|
|
|
|
2022-11-22 19:26:13 +01:00
|
|
|
if let LiteElement::Command(_, command) = arg {
|
2022-11-18 22:46:48 +01:00
|
|
|
while spans_idx < command.parts.len() {
|
2023-04-07 02:35:45 +02:00
|
|
|
let arg = parse_multispan_value(
|
2022-11-18 22:46:48 +01:00
|
|
|
working_set,
|
|
|
|
&command.parts,
|
|
|
|
&mut spans_idx,
|
|
|
|
element_shape,
|
|
|
|
);
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2022-11-18 22:46:48 +01:00
|
|
|
if let Some(ref ctype) = contained_type {
|
|
|
|
if *ctype != arg.ty {
|
|
|
|
contained_type = Some(Type::Any);
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
contained_type = Some(arg.ty.clone());
|
2021-08-17 02:26:05 +02:00
|
|
|
}
|
|
|
|
|
2022-11-18 22:46:48 +01:00
|
|
|
args.push(arg);
|
2021-07-16 08:24:46 +02:00
|
|
|
|
2022-11-18 22:46:48 +01:00
|
|
|
spans_idx += 1;
|
|
|
|
}
|
2021-07-08 09:49:17 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::List(args),
|
|
|
|
span,
|
|
|
|
ty: Type::List(Box::new(if let Some(ty) = contained_type {
|
|
|
|
ty
|
|
|
|
} else {
|
|
|
|
Type::Any
|
|
|
|
})),
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Parse a `[[...]; [...] ...]`-style table literal into an expression.
///
/// The span is expected to be bracket-delimited. A missing opening `[` is
/// tolerated silently (the caller decides when to call this), but a missing
/// closing `]` is reported as an `Unclosed` error. All errors are accumulated
/// on `working_set` rather than aborting the parse.
///
/// Dispatch on the number of lite-parsed rows:
/// - 0 rows  -> empty list expression
/// - 1 row   -> plain list (no header/row separator present)
/// - 2+ rows -> table: the first row is the header list, the second row's
///   parts are the data rows, each parsed as a list and checked against the
///   header count.
pub fn parse_table_expression(
    working_set: &mut StateWorkingSet,
    original_span: Span,
) -> Expression {
    let bytes = working_set.get_span_contents(original_span);

    // Trim the surrounding brackets so only the interior is lexed.
    let mut start = original_span.start;
    let mut end = original_span.end;

    if bytes.starts_with(b"[") {
        start += 1;
    }
    if bytes.ends_with(b"]") {
        end -= 1;
    } else {
        working_set.error(ParseError::Unclosed("]".into(), Span::new(end, end)));
    }

    let inner_span = Span::new(start, end);

    let source = working_set.get_span_contents(inner_span);

    // Newlines/carriage returns/commas separate table rows/items here.
    let (output, err) = lex(source, start, &[b'\n', b'\r', b','], &[], true);
    if let Some(err) = err {
        working_set.error(err);
    }

    let (output, err) = lite_parse(&output);
    if let Some(err) = err {
        working_set.error(err);
    }

    match output.block.len() {
        // `[]` (or nothing inside the brackets): an empty list.
        0 => Expression {
            expr: Expr::List(vec![]),
            span: original_span,
            ty: Type::List(Box::new(Type::Any)),
            custom_completion: None,
        },
        1 => {
            // List
            parse_list_expression(working_set, original_span, &SyntaxShape::Any)
        }
        _ => {
            // Two or more rows: treat the first row as the header list and
            // the second row's parts as the data rows. Any redirection-like
            // lite element still carries a command we can read the parts from.
            match &output.block[0].commands[0] {
                LiteElement::Command(_, command)
                | LiteElement::Redirection(_, _, command)
                | LiteElement::SeparateRedirection {
                    out: (_, command), ..
                }
                | LiteElement::SameTargetRedirection {
                    cmd: (_, command), ..
                } => {
                    let mut table_headers = vec![];

                    let headers =
                        parse_list_expression(working_set, command.parts[0], &SyntaxShape::Any);

                    // Only a literal list is accepted as the header row;
                    // anything else leaves `table_headers` empty.
                    if let Expression {
                        expr: Expr::List(headers),
                        ..
                    } = headers
                    {
                        table_headers = headers;
                    }

                    match &output.block[1].commands[0] {
                        LiteElement::Command(_, command)
                        | LiteElement::Redirection(_, _, command)
                        | LiteElement::SeparateRedirection {
                            out: (_, command), ..
                        }
                        | LiteElement::SameTargetRedirection {
                            cmd: (_, command), ..
                        } => {
                            let mut rows = vec![];
                            for part in &command.parts {
                                let values =
                                    parse_list_expression(working_set, *part, &SyntaxShape::Any);
                                if let Expression {
                                    expr: Expr::List(values),
                                    span,
                                    ..
                                } = values
                                {
                                    // Each data row must match the header
                                    // width; report missing or extra columns
                                    // but keep the row either way... except
                                    // note the row is only pushed inside this
                                    // `if let`, so non-list rows are dropped.
                                    match values.len().cmp(&table_headers.len()) {
                                        std::cmp::Ordering::Less => working_set.error(
                                            ParseError::MissingColumns(table_headers.len(), span),
                                        ),
                                        std::cmp::Ordering::Equal => {}
                                        std::cmp::Ordering::Greater => {
                                            working_set.error(ParseError::ExtraColumns(
                                                table_headers.len(),
                                                // Point at the first value
                                                // past the header width.
                                                values[table_headers.len()].span,
                                            ))
                                        }
                                    }

                                    rows.push(values);
                                }
                            }

                            Expression {
                                expr: Expr::Table(table_headers, rows),
                                span: original_span,
                                ty: Type::Table(vec![]), //FIXME
                                custom_completion: None,
                            }
                        }
                    }
                }
            }
        }
    }
}
|
2021-07-02 09:32:30 +02:00
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
pub fn parse_block_expression(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("parsing: block expression");
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let bytes = working_set.get_span_contents(span);
|
2021-07-02 09:32:30 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let mut start = span.start;
|
|
|
|
let mut end = span.end;
|
2021-07-02 09:32:30 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if bytes.starts_with(b"{") {
|
|
|
|
start += 1;
|
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("block".into(), span));
|
|
|
|
return garbage(span);
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
if bytes.ends_with(b"}") {
|
|
|
|
end -= 1;
|
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Unclosed("}".into(), Span::new(end, end)));
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-02 09:32:30 +02:00
|
|
|
|
2022-12-03 10:44:12 +01:00
|
|
|
let inner_span = Span::new(start, end);
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2022-01-03 06:21:26 +01:00
|
|
|
let source = working_set.get_span_contents(inner_span);
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2022-01-22 19:24:47 +01:00
|
|
|
let (output, err) = lex(source, start, &[], &[], false);
|
2023-04-07 02:35:45 +02:00
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err);
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
|
2021-09-06 04:20:02 +02:00
|
|
|
working_set.enter_scope();
|
2021-09-08 00:00:20 +02:00
|
|
|
|
2022-11-10 09:21:49 +01:00
|
|
|
// Check to see if we have parameters
|
|
|
|
let (signature, amt_to_skip): (Option<(Box<Signature>, Span)>, usize) = match output.first() {
|
|
|
|
Some(Token {
|
|
|
|
contents: TokenContents::Pipe,
|
|
|
|
span,
|
|
|
|
}) => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
"block but found closure".into(),
|
|
|
|
*span,
|
|
|
|
));
|
2022-11-10 09:21:49 +01:00
|
|
|
(None, 0)
|
|
|
|
}
|
|
|
|
_ => (None, 0),
|
|
|
|
};
|
|
|
|
|
2023-04-21 21:00:33 +02:00
|
|
|
let mut output = parse_block(working_set, &output[amt_to_skip..], span, false, false);
|
2022-11-10 09:21:49 +01:00
|
|
|
|
|
|
|
if let Some(signature) = signature {
|
|
|
|
output.signature = signature.0;
|
|
|
|
} else if let Some(last) = working_set.delta.scope.last() {
|
|
|
|
// FIXME: this only supports the top $it. Is this sufficient?
|
|
|
|
|
|
|
|
if let Some(var_id) = last.get_var(b"$it") {
|
|
|
|
let mut signature = Signature::new("");
|
|
|
|
signature.required_positional.push(PositionalArg {
|
|
|
|
var_id: Some(*var_id),
|
|
|
|
name: "$it".into(),
|
|
|
|
desc: String::new(),
|
|
|
|
shape: SyntaxShape::Any,
|
|
|
|
default_value: None,
|
|
|
|
});
|
|
|
|
output.signature = Box::new(signature);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
output.span = Some(span);
|
|
|
|
|
|
|
|
working_set.exit_scope();
|
|
|
|
|
|
|
|
let block_id = working_set.add_block(output);
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::Block(block_id),
|
|
|
|
span,
|
|
|
|
ty: Type::Block,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2022-11-10 09:21:49 +01:00
|
|
|
}
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
pub fn parse_match_block_expression(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2023-03-24 02:52:01 +01:00
|
|
|
let bytes = working_set.get_span_contents(span);
|
|
|
|
|
|
|
|
let mut start = span.start;
|
|
|
|
let mut end = span.end;
|
|
|
|
|
|
|
|
if bytes.starts_with(b"{") {
|
|
|
|
start += 1;
|
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("closure".into(), span));
|
|
|
|
return garbage(span);
|
2023-03-24 02:52:01 +01:00
|
|
|
}
|
|
|
|
if bytes.ends_with(b"}") {
|
|
|
|
end -= 1;
|
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Unclosed("}".into(), Span::new(end, end)));
|
2023-03-24 02:52:01 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
let inner_span = Span::new(start, end);
|
|
|
|
|
|
|
|
let source = working_set.get_span_contents(inner_span);
|
|
|
|
|
2023-03-27 00:31:57 +02:00
|
|
|
let (output, err) = lex(source, start, &[b' ', b'\r', b'\n', b',', b'|'], &[], false);
|
2023-04-07 02:35:45 +02:00
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err);
|
|
|
|
}
|
2023-03-24 02:52:01 +01:00
|
|
|
|
|
|
|
let mut position = 0;
|
|
|
|
|
|
|
|
let mut output_matches = vec![];
|
|
|
|
|
|
|
|
while position < output.len() {
|
|
|
|
// Each match gets its own scope
|
|
|
|
|
|
|
|
working_set.enter_scope();
|
|
|
|
|
|
|
|
// First parse the pattern
|
2023-04-07 02:35:45 +02:00
|
|
|
let mut pattern = parse_pattern(working_set, output[position].span);
|
2023-03-24 02:52:01 +01:00
|
|
|
|
|
|
|
position += 1;
|
|
|
|
|
|
|
|
if position >= output.len() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Mismatch(
|
2023-03-24 02:52:01 +01:00
|
|
|
"=>".into(),
|
|
|
|
"end of input".into(),
|
|
|
|
Span::new(output[position - 1].span.end, output[position - 1].span.end),
|
2023-04-07 02:35:45 +02:00
|
|
|
));
|
2023-03-24 02:52:01 +01:00
|
|
|
|
|
|
|
working_set.exit_scope();
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2023-03-27 00:31:57 +02:00
|
|
|
// Multiple patterns connected by '|'
|
|
|
|
let mut connector = working_set.get_span_contents(output[position].span);
|
|
|
|
if connector == b"|" && position < output.len() {
|
|
|
|
let mut or_pattern = vec![pattern];
|
|
|
|
|
|
|
|
while connector == b"|" && position < output.len() {
|
|
|
|
connector = b"";
|
|
|
|
|
|
|
|
position += 1;
|
|
|
|
|
|
|
|
if position >= output.len() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Mismatch(
|
2023-03-27 00:31:57 +02:00
|
|
|
"pattern".into(),
|
|
|
|
"end of input".into(),
|
|
|
|
Span::new(output[position - 1].span.end, output[position - 1].span.end),
|
2023-04-07 02:35:45 +02:00
|
|
|
));
|
2023-03-27 00:31:57 +02:00
|
|
|
|
|
|
|
working_set.exit_scope();
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
let pattern = parse_pattern(working_set, output[position].span);
|
2023-03-27 00:31:57 +02:00
|
|
|
or_pattern.push(pattern);
|
|
|
|
|
|
|
|
position += 1;
|
|
|
|
if position >= output.len() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Mismatch(
|
2023-03-27 00:31:57 +02:00
|
|
|
"=>".into(),
|
|
|
|
"end of input".into(),
|
|
|
|
Span::new(output[position - 1].span.end, output[position - 1].span.end),
|
2023-04-07 02:35:45 +02:00
|
|
|
));
|
2023-03-27 00:31:57 +02:00
|
|
|
|
|
|
|
working_set.exit_scope();
|
|
|
|
break;
|
|
|
|
} else {
|
|
|
|
connector = working_set.get_span_contents(output[position].span);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
let start = or_pattern
|
|
|
|
.first()
|
|
|
|
.expect("internal error: unexpected state of or-pattern")
|
|
|
|
.span
|
|
|
|
.start;
|
|
|
|
let end = or_pattern
|
|
|
|
.last()
|
|
|
|
.expect("internal error: unexpected state of or-pattern")
|
|
|
|
.span
|
|
|
|
.end;
|
|
|
|
|
|
|
|
pattern = MatchPattern {
|
|
|
|
pattern: Pattern::Or(or_pattern),
|
|
|
|
span: Span::new(start, end),
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Then the `=>` arrow
|
|
|
|
if connector != b"=>" {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Mismatch(
|
2023-03-24 02:52:01 +01:00
|
|
|
"=>".into(),
|
|
|
|
"end of input".into(),
|
|
|
|
Span::new(output[position - 1].span.end, output[position - 1].span.end),
|
2023-04-07 02:35:45 +02:00
|
|
|
));
|
2023-03-27 00:31:57 +02:00
|
|
|
} else {
|
|
|
|
position += 1;
|
2023-03-24 02:52:01 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
// Finally, the value/expression/block that we will run to produce the result
|
|
|
|
if position >= output.len() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Mismatch(
|
2023-03-24 02:52:01 +01:00
|
|
|
"match result".into(),
|
|
|
|
"end of input".into(),
|
|
|
|
Span::new(output[position - 1].span.end, output[position - 1].span.end),
|
2023-04-07 02:35:45 +02:00
|
|
|
));
|
2023-03-24 02:52:01 +01:00
|
|
|
|
|
|
|
working_set.exit_scope();
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
let result = parse_multispan_value(
|
2023-03-24 02:52:01 +01:00
|
|
|
working_set,
|
|
|
|
&[output[position].span],
|
|
|
|
&mut 0,
|
|
|
|
&SyntaxShape::OneOf(vec![SyntaxShape::Block, SyntaxShape::Expression]),
|
|
|
|
);
|
|
|
|
position += 1;
|
|
|
|
working_set.exit_scope();
|
|
|
|
|
|
|
|
output_matches.push((pattern, result));
|
|
|
|
}
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::MatchBlock(output_matches),
|
|
|
|
span,
|
|
|
|
ty: Type::Any,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2023-03-24 02:52:01 +01:00
|
|
|
}
|
|
|
|
|
2022-11-10 09:21:49 +01:00
|
|
|
pub fn parse_closure_expression(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
shape: &SyntaxShape,
|
|
|
|
span: Span,
|
2023-04-07 02:35:45 +02:00
|
|
|
) -> Expression {
|
2022-11-10 09:21:49 +01:00
|
|
|
trace!("parsing: closure expression");
|
|
|
|
|
|
|
|
let bytes = working_set.get_span_contents(span);
|
|
|
|
|
|
|
|
let mut start = span.start;
|
|
|
|
let mut end = span.end;
|
|
|
|
|
|
|
|
if bytes.starts_with(b"{") {
|
|
|
|
start += 1;
|
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("closure".into(), span));
|
|
|
|
return garbage(span);
|
2022-11-10 09:21:49 +01:00
|
|
|
}
|
|
|
|
if bytes.ends_with(b"}") {
|
|
|
|
end -= 1;
|
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Unclosed("}".into(), Span::new(end, end)));
|
2022-11-10 09:21:49 +01:00
|
|
|
}
|
|
|
|
|
2022-12-03 10:44:12 +01:00
|
|
|
let inner_span = Span::new(start, end);
|
2022-11-10 09:21:49 +01:00
|
|
|
|
|
|
|
let source = working_set.get_span_contents(inner_span);
|
|
|
|
|
|
|
|
let (output, err) = lex(source, start, &[], &[], false);
|
2023-04-07 02:35:45 +02:00
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err);
|
|
|
|
}
|
2022-11-10 09:21:49 +01:00
|
|
|
|
|
|
|
working_set.enter_scope();
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
// Check to see if we have parameters
|
2022-02-17 12:40:24 +01:00
|
|
|
let (signature, amt_to_skip): (Option<(Box<Signature>, Span)>, usize) = match output.first() {
|
2021-09-02 10:25:22 +02:00
|
|
|
Some(Token {
|
|
|
|
contents: TokenContents::Pipe,
|
2021-09-06 01:16:27 +02:00
|
|
|
span,
|
|
|
|
}) => {
|
|
|
|
// We've found a parameter list
|
|
|
|
let start_point = span.start;
|
|
|
|
let mut token_iter = output.iter().enumerate().skip(1);
|
|
|
|
let mut end_span = None;
|
|
|
|
let mut amt_to_skip = 1;
|
|
|
|
|
|
|
|
for token in &mut token_iter {
|
|
|
|
if let Token {
|
2021-09-02 10:25:22 +02:00
|
|
|
contents: TokenContents::Pipe,
|
2021-09-06 01:16:27 +02:00
|
|
|
span,
|
|
|
|
} = token.1
|
|
|
|
{
|
|
|
|
end_span = Some(span);
|
|
|
|
amt_to_skip = token.0;
|
|
|
|
break;
|
2021-08-25 21:29:36 +02:00
|
|
|
}
|
|
|
|
}
|
2021-09-06 01:16:27 +02:00
|
|
|
|
|
|
|
let end_point = if let Some(span) = end_span {
|
|
|
|
span.end
|
|
|
|
} else {
|
|
|
|
end
|
|
|
|
};
|
|
|
|
|
2022-12-03 10:44:12 +01:00
|
|
|
let signature_span = Span::new(start_point, end_point);
|
2023-04-07 20:09:38 +02:00
|
|
|
let signature = parse_signature_helper(working_set, signature_span);
|
2021-09-06 01:16:27 +02:00
|
|
|
|
2022-02-17 12:40:24 +01:00
|
|
|
(Some((signature, signature_span)), amt_to_skip)
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2022-11-10 09:21:49 +01:00
|
|
|
Some(Token {
|
2022-12-08 00:02:11 +01:00
|
|
|
contents: TokenContents::PipePipe,
|
2022-11-10 09:21:49 +01:00
|
|
|
span,
|
2022-12-08 00:02:11 +01:00
|
|
|
}) => (
|
|
|
|
Some((Box::new(Signature::new("closure".to_string())), *span)),
|
|
|
|
1,
|
|
|
|
),
|
2023-04-11 19:21:52 +02:00
|
|
|
_ => (None, 0),
|
2021-09-02 10:25:22 +02:00
|
|
|
};
|
2021-08-25 21:29:36 +02:00
|
|
|
|
2022-01-08 01:40:40 +01:00
|
|
|
// TODO: Finish this
|
2022-11-10 09:21:49 +01:00
|
|
|
if let SyntaxShape::Closure(Some(v)) = shape {
|
2022-02-17 12:40:24 +01:00
|
|
|
if let Some((sig, sig_span)) = &signature {
|
2022-04-08 21:57:27 +02:00
|
|
|
if sig.num_positionals() > v.len() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
|
|
|
format!(
|
|
|
|
"{} closure parameter{}",
|
|
|
|
v.len(),
|
|
|
|
if v.len() > 1 { "s" } else { "" }
|
|
|
|
),
|
|
|
|
*sig_span,
|
|
|
|
));
|
2022-02-17 12:40:24 +01:00
|
|
|
}
|
2021-09-13 09:54:13 +02:00
|
|
|
|
2022-02-17 12:40:24 +01:00
|
|
|
for (expected, PositionalArg { name, shape, .. }) in
|
|
|
|
v.iter().zip(sig.required_positional.iter())
|
|
|
|
{
|
|
|
|
if expected != shape && *shape != SyntaxShape::Any {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::ParameterMismatchType(
|
|
|
|
name.to_owned(),
|
|
|
|
expected.to_string(),
|
|
|
|
shape.to_string(),
|
|
|
|
*sig_span,
|
|
|
|
));
|
2022-02-17 12:40:24 +01:00
|
|
|
}
|
|
|
|
}
|
2021-09-13 09:54:13 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-21 21:00:33 +02:00
|
|
|
let mut output = parse_block(working_set, &output[amt_to_skip..], span, false, false);
|
2021-07-02 09:32:30 +02:00
|
|
|
|
2021-09-06 01:16:27 +02:00
|
|
|
if let Some(signature) = signature {
|
2022-02-17 12:40:24 +01:00
|
|
|
output.signature = signature.0;
|
2021-09-06 04:20:02 +02:00
|
|
|
} else if let Some(last) = working_set.delta.scope.last() {
|
2021-10-12 19:44:23 +02:00
|
|
|
// FIXME: this only supports the top $it. Is this sufficient?
|
2021-09-13 09:31:11 +02:00
|
|
|
|
2021-09-06 04:20:02 +02:00
|
|
|
if let Some(var_id) = last.get_var(b"$it") {
|
|
|
|
let mut signature = Signature::new("");
|
|
|
|
signature.required_positional.push(PositionalArg {
|
|
|
|
var_id: Some(*var_id),
|
|
|
|
name: "$it".into(),
|
|
|
|
desc: String::new(),
|
|
|
|
shape: SyntaxShape::Any,
|
2022-03-07 21:08:56 +01:00
|
|
|
default_value: None,
|
2021-09-06 04:20:02 +02:00
|
|
|
});
|
|
|
|
output.signature = Box::new(signature);
|
|
|
|
}
|
2021-09-06 01:16:27 +02:00
|
|
|
}
|
|
|
|
|
2022-01-30 23:05:25 +01:00
|
|
|
output.span = Some(span);
|
2021-10-25 22:04:23 +02:00
|
|
|
|
2021-09-06 04:20:02 +02:00
|
|
|
working_set.exit_scope();
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let block_id = working_set.add_block(output);
|
2021-07-16 22:26:40 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::Closure(block_id),
|
|
|
|
span,
|
|
|
|
ty: Type::Closure,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-02 09:32:30 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
pub fn parse_value(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
span: Span,
|
|
|
|
shape: &SyntaxShape,
|
2023-04-07 02:35:45 +02:00
|
|
|
) -> Expression {
|
2023-04-07 21:01:12 +02:00
|
|
|
trace!("parsing: value: {}", shape);
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
let bytes = working_set.get_span_contents(span);
|
|
|
|
|
2022-02-24 13:58:53 +01:00
|
|
|
if bytes.is_empty() {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::IncompleteParser(span));
|
|
|
|
return garbage(span);
|
2022-02-24 13:58:53 +01:00
|
|
|
}
|
2023-04-07 02:35:45 +02:00
|
|
|
|
|
|
|
// Check for reserved keyword values
|
|
|
|
match bytes {
|
|
|
|
b"true" => {
|
|
|
|
if matches!(shape, SyntaxShape::Boolean) || matches!(shape, SyntaxShape::Any) {
|
|
|
|
return Expression {
|
|
|
|
expr: Expr::Bool(true),
|
|
|
|
span,
|
|
|
|
ty: Type::Bool,
|
|
|
|
custom_completion: None,
|
|
|
|
};
|
2022-03-03 19:14:03 +01:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("non-boolean value".into(), span));
|
|
|
|
return Expression::garbage(span);
|
2022-03-03 19:14:03 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
b"false" => {
|
|
|
|
if matches!(shape, SyntaxShape::Boolean) || matches!(shape, SyntaxShape::Any) {
|
2023-04-07 02:35:45 +02:00
|
|
|
return Expression {
|
|
|
|
expr: Expr::Bool(false),
|
|
|
|
span,
|
|
|
|
ty: Type::Bool,
|
|
|
|
custom_completion: None,
|
|
|
|
};
|
2022-03-03 19:14:03 +01:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("non-boolean value".into(), span));
|
|
|
|
return Expression::garbage(span);
|
2022-03-03 19:14:03 +01:00
|
|
|
}
|
2022-03-03 01:55:03 +01:00
|
|
|
}
|
2022-03-03 19:14:03 +01:00
|
|
|
b"null" => {
|
2023-04-07 02:35:45 +02:00
|
|
|
return Expression {
|
|
|
|
expr: Expr::Nothing,
|
|
|
|
span,
|
|
|
|
ty: Type::Nothing,
|
|
|
|
custom_completion: None,
|
|
|
|
};
|
2022-03-03 01:55:03 +01:00
|
|
|
}
|
Require that values that look like numbers parse as numberlike (#8635)
# Description
Require that any value that looks like it might be a number (starts with
a digit, or a '-' + digit, or a '+' + digits, or a special form float
like `-inf`, `inf`, or `NaN`) must now be treated as a number-like
value. Number-like syntax can only parse into number-like values.
Number-like values include: durations, ints, floats, ranges, filesizes,
binary data, etc.
# User-Facing Changes
BREAKING CHANGE
BREAKING CHANGE
BREAKING CHANGE
BREAKING CHANGE
BREAKING CHANGE
BREAKING CHANGE
BREAKING CHANGE
BREAKING CHANGE
Just making sure we see this for release notes 😅
This breaks any and all numberlike values that were treated as strings
before. Example, we used to allow `3,` as a bare word. Anything like
this would now require quotes or backticks to be treated as a string or
bare word, respectively.
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-03-28 08:31:38 +02:00
|
|
|
b"-inf" | b"inf" | b"NaN" => {
|
2023-04-11 19:21:31 +02:00
|
|
|
return parse_float(working_set, span);
|
Require that values that look like numbers parse as numberlike (#8635)
# Description
Require that any value that looks like it might be a number (starts with
a digit, or a '-' + digit, or a '+' + digits, or a special form float
like `-inf`, `inf`, or `NaN`) must now be treated as a number-like
value. Number-like syntax can only parse into number-like values.
Number-like values include: durations, ints, floats, ranges, filesizes,
binary data, etc.
# User-Facing Changes
BREAKING CHANGE
BREAKING CHANGE
BREAKING CHANGE
BREAKING CHANGE
BREAKING CHANGE
BREAKING CHANGE
BREAKING CHANGE
BREAKING CHANGE
Just making sure we see this for release notes 😅
This breaks any and all numberlike values that were treated as strings
before. Example, we used to allow `3,` as a bare word. Anything like
this would now require quotes or backticks to be treated as a string or
bare word, respectively.
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-03-28 08:31:38 +02:00
|
|
|
}
|
2022-03-03 19:14:03 +01:00
|
|
|
_ => {}
|
2022-03-03 01:55:03 +01:00
|
|
|
}
|
2022-03-03 19:14:03 +01:00
|
|
|
|
2023-03-24 02:52:01 +01:00
|
|
|
if matches!(shape, SyntaxShape::MatchPattern) {
|
|
|
|
return parse_match_pattern(working_set, span);
|
|
|
|
}
|
|
|
|
|
2022-02-24 13:58:53 +01:00
|
|
|
match bytes[0] {
|
2023-04-07 20:09:38 +02:00
|
|
|
b'$' => return parse_dollar_expr(working_set, span),
|
|
|
|
b'(' => return parse_paren_expr(working_set, span, shape),
|
|
|
|
b'{' => return parse_brace_expr(working_set, span, shape),
|
2022-02-24 13:58:53 +01:00
|
|
|
b'[' => match shape {
|
2021-09-02 10:25:22 +02:00
|
|
|
SyntaxShape::Any
|
|
|
|
| SyntaxShape::List(_)
|
|
|
|
| SyntaxShape::Table
|
|
|
|
| SyntaxShape::Signature => {}
|
|
|
|
_ => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("non-[] value".into(), span));
|
|
|
|
return Expression::garbage(span);
|
2021-07-08 23:45:56 +02:00
|
|
|
}
|
2022-02-24 13:58:53 +01:00
|
|
|
},
|
|
|
|
_ => {}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-01 03:31:02 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
match shape {
|
2021-09-14 06:59:46 +02:00
|
|
|
SyntaxShape::Custom(shape, custom_completion) => {
|
2023-04-07 20:09:38 +02:00
|
|
|
let mut expression = parse_value(working_set, span, shape);
|
2022-03-10 08:49:02 +01:00
|
|
|
expression.custom_completion = Some(*custom_completion);
|
2023-04-07 02:35:45 +02:00
|
|
|
expression
|
2021-09-14 06:59:46 +02:00
|
|
|
}
|
2023-04-11 19:21:31 +02:00
|
|
|
SyntaxShape::Number => parse_number(working_set, span),
|
|
|
|
SyntaxShape::Decimal => parse_float(working_set, span),
|
|
|
|
SyntaxShape::Int => parse_int(working_set, span),
|
|
|
|
SyntaxShape::Duration => parse_duration(working_set, span),
|
|
|
|
SyntaxShape::DateTime => parse_datetime(working_set, span),
|
|
|
|
SyntaxShape::Filesize => parse_filesize(working_set, span),
|
2023-04-07 20:09:38 +02:00
|
|
|
SyntaxShape::Range => parse_range(working_set, span),
|
2021-10-04 21:21:31 +02:00
|
|
|
SyntaxShape::Filepath => parse_filepath(working_set, span),
|
2022-04-22 22:18:51 +02:00
|
|
|
SyntaxShape::Directory => parse_directory(working_set, span),
|
2021-10-04 21:21:31 +02:00
|
|
|
SyntaxShape::GlobPattern => parse_glob_pattern(working_set, span),
|
2023-04-07 20:09:38 +02:00
|
|
|
SyntaxShape::String => parse_string(working_set, span),
|
2022-03-01 00:31:53 +01:00
|
|
|
SyntaxShape::Binary => parse_binary(working_set, span),
|
2023-03-24 02:52:01 +01:00
|
|
|
SyntaxShape::MatchPattern => parse_match_pattern(working_set, span),
|
2021-09-02 10:25:22 +02:00
|
|
|
SyntaxShape::Signature => {
|
|
|
|
if bytes.starts_with(b"[") {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_signature(working_set, span)
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("signature".into(), span));
|
|
|
|
|
|
|
|
Expression::garbage(span)
|
2021-07-09 08:23:20 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
SyntaxShape::List(elem) => {
|
|
|
|
if bytes.starts_with(b"[") {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_list_expression(working_set, span, elem)
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("list".into(), span));
|
|
|
|
|
|
|
|
Expression::garbage(span)
|
2021-07-08 09:49:17 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
SyntaxShape::Table => {
|
|
|
|
if bytes.starts_with(b"[") {
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_table_expression(working_set, span)
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("table".into(), span));
|
|
|
|
|
|
|
|
Expression::garbage(span)
|
2021-07-02 08:44:37 +02:00
|
|
|
}
|
2021-07-01 02:01:04 +02:00
|
|
|
}
|
2023-04-07 20:09:38 +02:00
|
|
|
SyntaxShape::CellPath => parse_simple_cell_path(working_set, span),
|
2021-10-12 06:49:17 +02:00
|
|
|
SyntaxShape::Boolean => {
|
|
|
|
// Redundant, though we catch bad boolean parses here
|
2022-03-03 01:55:03 +01:00
|
|
|
if bytes == b"true" || bytes == b"false" {
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::Bool(true),
|
|
|
|
span,
|
|
|
|
ty: Type::Bool,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-10-12 06:49:17 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("bool".into(), span));
|
|
|
|
|
|
|
|
Expression::garbage(span)
|
2021-10-12 06:49:17 +02:00
|
|
|
}
|
|
|
|
}
|
Syntax errors for string and int (#7952)
# Description
Added a few syntax errors in ints and strings, changed parser to stop
and show that error rather than continue trying to parse those tokens as
some other shape. However, I don't see how to push this direction much
further, and most of the classic confusing errors can't be changed.
Flagged as WIP for the moment, but passes all checks and works better
than current release:
1. I have yet to figure out how to make these errors refer back to the
book, as I see some other errors do.
2. How to give syntax error when malformed int is first token in line?
Currently parsed as external command, user gets confusing error message.
3. Would like to be more strict with *decimal* int literals (lacking,
e.g, `0x' prefix). Need to tinker more with the order of parse shape
calls, currently, float is tried after int, so '1.4' has to be passed.
_(Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.)_
```bash
〉"\z"
Error:
╭─[entry #3:1:1]
1 │ "\z"
· ─┬─
· ╰── Syntax error in string, unrecognized character after escape '\'.
╰────
```
Canonic presentation of a syntax error.
```bash
〉" \u{01ffbogus}"
Error:
× Invalid syntax
╭─[entry #2:1:1]
1 │ " \u{01ffbogus}"
· ───────┬──────
· ╰── Syntax error in string, expecting 1 to 6 hex digits in unicode escape '\u{X...}', max value 10FFFF.
╰────
```
Malformed unicode escape in string, flagged as error.
String parse can be opinionated, it's the last shape tried.
```bash
〉0x22bogus
Error: nu::shell::external_command (link)
× External command failed
╭─[entry #4:1:1]
1 │ 0x22bogus
· ────┬────
· ╰── executable was not found
╰────
help: No such file or directory (os error 2)
```
A *correct* number in first token would be evaluated, but an *incorrect*
one is treated as external command? Confusing to users.
```bash
〉0 + 0x22bogus
Error:
× Invalid syntax
╭─[entry #5:1:1]
1 │ 0 + 0x22bogus
· ────┬────
· ╰── Syntax error in int, invalid digits in radix 16 int.
╰────
```
Can give syntax error if token is unambiguously int literal. e.g has 0b
or 0x prefix, could not be a float.
```bash
〉0 + 098bogus
Error: nu::parser::unsupported_operation (link)
× Types mismatched for operation.
╭─[entry #6:1:1]
1 │ 0 + 098bogus
· ┬ ┬ ────┬───
· │ │ ╰── string
· │ ╰── doesn't support these values.
· ╰── int
╰────
help: Change int or string to be the right types and try again.
```
But *decimal* literal (no prefix) can't be too strict. Parser is going
to try float later. So '1.4' must be passed.
# User-Facing Changes
First and foremost, more specific error messages for typos in string and
int literals. Probably improves interactive user experience.
But a script that was causing and then checking for specific error might
notice a different error message.
_(List of all changes that impact the user experience here. This helps
us keep track of breaking changes.)_
# Tests + Formatting
Added (positive and negative unit tests in `cargo test -p nu-parser`.
Didn't add integration tests.
Make sure you've run and fixed any issues with these commands:
- [x] `cargo fmt --all -- --check` to check standard code formatting
(`cargo fmt --all` applies these changes)
- [x] `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- [x] `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
---------
Co-authored-by: Stefan Holderbach <sholderbach@users.noreply.github.com>
2023-02-13 17:09:50 +01:00
|
|
|
|
|
|
|
// Be sure to return ParseError::Expected(..) if invoked for one of these shapes, but lex
|
|
|
|
// stream doesn't start with '{'} -- parsing in SyntaxShape::Any arm depends on this error variant.
|
allow records to have type annotations (#8914)
# Description
follow up to #8529
cleaned up version of #8892
- the original syntax is okay
```nu
def okay [rec: record] {}
```
- you can now add type annotations for fields if you know
them before hand
```nu
def okay [rec: record<name: string>] {}
```
- you can specify multiple fields
```nu
def okay [person: record<name: string age: int>] {}
# an optional comma is allowed
def okay [person: record<name: string, age: int>] {}
```
- if annotations are specified, any use of the command will be type
checked against the specified type
```nu
def unwrap [result: record<ok: bool, value: any>] {}
unwrap {ok: 2, value: "value"}
# errors with
Error: nu::parser::type_mismatch
× Type mismatch.
╭─[entry #4:1:1]
1 │ unwrap {ok: 2, value: "value"}
· ───────┬─────
· ╰── expected record<ok: bool, value: any>, found record<ok: int, value: string>
╰────
```
> here the error is in the `ok` field, since `any` is coerced into any
type
> as a result `unwrap {ok: true, value: "value"}` is okay
- the key must be a string, either quoted or unquoted
```nu
def err [rec: record<{}: list>] {}
# errors with
Error:
× `record` type annotations key not string
╭─[entry #7:1:1]
1 │ def unwrap [result: record<{}: bool, value: any>] {}
· ─┬
· ╰── must be a string
╰────
```
- a key doesn't have to have a type in which case it is assumed to be
`any`
```nu
def okay [person: record<name age>] {}
def okay [person: record<name: string age>] {}
```
- however, if you put a colon, you have to specify a type
```nu
def err [person: record<name: >] {}
# errors with
Error: nu::parser::parse_mismatch
× Parse mismatch during operation.
╭─[entry #12:1:1]
1 │ def unwrap [res: record<name: >] { $res }
· ┬
· ╰── expected type after colon
╰────
```
# User-Facing Changes
**[BREAKING CHANGES]**
- this change adds a field to `SyntaxShape::Record` so any plugins that
used it will have to update and include the field. though if you are
unsure of the type the record expects, `SyntaxShape::Record(vec![])`
will suffice
2023-04-26 15:16:55 +02:00
|
|
|
SyntaxShape::Block | SyntaxShape::Closure(..) | SyntaxShape::Record(_) => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected(
|
Syntax errors for string and int (#7952)
# Description
Added a few syntax errors in ints and strings, changed parser to stop
and show that error rather than continue trying to parse those tokens as
some other shape. However, I don't see how to push this direction much
further, and most of the classic confusing errors can't be changed.
Flagged as WIP for the moment, but passes all checks and works better
than current release:
1. I have yet to figure out how to make these errors refer back to the
book, as I see some other errors do.
2. How to give syntax error when malformed int is first token in line?
Currently parsed as external command, user gets confusing error message.
3. Would like to be more strict with *decimal* int literals (lacking,
e.g, `0x' prefix). Need to tinker more with the order of parse shape
calls, currently, float is tried after int, so '1.4' has to be passed.
_(Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.)_
```bash
〉"\z"
Error:
╭─[entry #3:1:1]
1 │ "\z"
· ─┬─
· ╰── Syntax error in string, unrecognized character after escape '\'.
╰────
```
Canonic presentation of a syntax error.
```bash
〉" \u{01ffbogus}"
Error:
× Invalid syntax
╭─[entry #2:1:1]
1 │ " \u{01ffbogus}"
· ───────┬──────
· ╰── Syntax error in string, expecting 1 to 6 hex digits in unicode escape '\u{X...}', max value 10FFFF.
╰────
```
Malformed unicode escape in string, flagged as error.
String parse can be opinionated, it's the last shape tried.
```bash
〉0x22bogus
Error: nu::shell::external_command (link)
× External command failed
╭─[entry #4:1:1]
1 │ 0x22bogus
· ────┬────
· ╰── executable was not found
╰────
help: No such file or directory (os error 2)
```
A *correct* number in first token would be evaluated, but an *incorrect*
one is treated as external command? Confusing to users.
```bash
〉0 + 0x22bogus
Error:
× Invalid syntax
╭─[entry #5:1:1]
1 │ 0 + 0x22bogus
· ────┬────
· ╰── Syntax error in int, invalid digits in radix 16 int.
╰────
```
Can give syntax error if token is unambiguously int literal. e.g has 0b
or 0x prefix, could not be a float.
```bash
〉0 + 098bogus
Error: nu::parser::unsupported_operation (link)
× Types mismatched for operation.
╭─[entry #6:1:1]
1 │ 0 + 098bogus
· ┬ ┬ ────┬───
· │ │ ╰── string
· │ ╰── doesn't support these values.
· ╰── int
╰────
help: Change int or string to be the right types and try again.
```
But *decimal* literal (no prefix) can't be too strict. Parser is going
to try float later. So '1.4' must be passed.
# User-Facing Changes
First and foremost, more specific error messages for typos in string and
int literals. Probably improves interactive user experience.
But a script that was causing and then checking for specific error might
notice a different error message.
_(List of all changes that impact the user experience here. This helps
us keep track of breaking changes.)_
# Tests + Formatting
Added (positive and negative unit tests in `cargo test -p nu-parser`.
Didn't add integration tests.
Make sure you've run and fixed any issues with these commands:
- [x] `cargo fmt --all -- --check` to check standard code formatting
(`cargo fmt --all` applies these changes)
- [x] `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- [x] `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
---------
Co-authored-by: Stefan Holderbach <sholderbach@users.noreply.github.com>
2023-02-13 17:09:50 +01:00
|
|
|
"block, closure or record".into(),
|
|
|
|
span,
|
2023-04-07 02:35:45 +02:00
|
|
|
));
|
|
|
|
|
|
|
|
Expression::garbage(span)
|
|
|
|
}
|
Syntax errors for string and int (#7952)
# Description
Added a few syntax errors in ints and strings, changed parser to stop
and show that error rather than continue trying to parse those tokens as
some other shape. However, I don't see how to push this direction much
further, and most of the classic confusing errors can't be changed.
Flagged as WIP for the moment, but passes all checks and works better
than current release:
1. I have yet to figure out how to make these errors refer back to the
book, as I see some other errors do.
2. How to give syntax error when malformed int is first token in line?
Currently parsed as external command, user gets confusing error message.
3. Would like to be more strict with *decimal* int literals (lacking,
e.g, `0x' prefix). Need to tinker more with the order of parse shape
calls, currently, float is tried after int, so '1.4' has to be passed.
_(Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.)_
```bash
〉"\z"
Error:
╭─[entry #3:1:1]
1 │ "\z"
· ─┬─
· ╰── Syntax error in string, unrecognized character after escape '\'.
╰────
```
Canonic presentation of a syntax error.
```bash
〉" \u{01ffbogus}"
Error:
× Invalid syntax
╭─[entry #2:1:1]
1 │ " \u{01ffbogus}"
· ───────┬──────
· ╰── Syntax error in string, expecting 1 to 6 hex digits in unicode escape '\u{X...}', max value 10FFFF.
╰────
```
Malformed unicode escape in string, flagged as error.
String parse can be opinionated, it's the last shape tried.
```bash
〉0x22bogus
Error: nu::shell::external_command (link)
× External command failed
╭─[entry #4:1:1]
1 │ 0x22bogus
· ────┬────
· ╰── executable was not found
╰────
help: No such file or directory (os error 2)
```
A *correct* number in first token would be evaluated, but an *incorrect*
one is treated as external command? Confusing to users.
```bash
〉0 + 0x22bogus
Error:
× Invalid syntax
╭─[entry #5:1:1]
1 │ 0 + 0x22bogus
· ────┬────
· ╰── Syntax error in int, invalid digits in radix 16 int.
╰────
```
Can give syntax error if token is unambiguously int literal. e.g has 0b
or 0x prefix, could not be a float.
```bash
〉0 + 098bogus
Error: nu::parser::unsupported_operation (link)
× Types mismatched for operation.
╭─[entry #6:1:1]
1 │ 0 + 098bogus
· ┬ ┬ ────┬───
· │ │ ╰── string
· │ ╰── doesn't support these values.
· ╰── int
╰────
help: Change int or string to be the right types and try again.
```
But *decimal* literal (no prefix) can't be too strict. Parser is going
to try float later. So '1.4' must be passed.
# User-Facing Changes
First and foremost, more specific error messages for typos in string and
int literals. Probably improves interactive user experience.
But a script that was causing and then checking for specific error might
notice a different error message.
_(List of all changes that impact the user experience here. This helps
us keep track of breaking changes.)_
# Tests + Formatting
Added positive and negative unit tests in `cargo test -p nu-parser`.
Didn't add integration tests.
Make sure you've run and fixed any issues with these commands:
- [x] `cargo fmt --all -- --check` to check standard code formatting
(`cargo fmt --all` applies these changes)
- [x] `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- [x] `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
---------
Co-authored-by: Stefan Holderbach <sholderbach@users.noreply.github.com>
2023-02-13 17:09:50 +01:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
SyntaxShape::Any => {
|
2021-09-07 05:56:30 +02:00
|
|
|
if bytes.starts_with(b"[") {
|
2021-11-08 00:18:00 +01:00
|
|
|
//parse_value(working_set, span, &SyntaxShape::Table)
|
2023-04-07 20:09:38 +02:00
|
|
|
parse_full_cell_path(working_set, None, span)
|
2021-09-07 05:56:30 +02:00
|
|
|
} else {
|
Syntax errors for string and int (#7952)
# Description
Added a few syntax errors in ints and strings, changed parser to stop
and show that error rather than continue trying to parse those tokens as
some other shape. However, I don't see how to push this direction much
further, and most of the classic confusing errors can't be changed.
Flagged as WIP for the moment, but passes all checks and works better
than current release:
1. I have yet to figure out how to make these errors refer back to the
book, as I see some other errors do.
2. How to give syntax error when malformed int is first token in line?
Currently parsed as external command, user gets confusing error message.
3. Would like to be more strict with *decimal* int literals (lacking,
e.g, `0x' prefix). Need to tinker more with the order of parse shape
calls, currently, float is tried after int, so '1.4' has to be passed.
_(Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.)_
```bash
〉"\z"
Error:
╭─[entry #3:1:1]
1 │ "\z"
· ─┬─
· ╰── Syntax error in string, unrecognized character after escape '\'.
╰────
```
Canonic presentation of a syntax error.
```bash
〉" \u{01ffbogus}"
Error:
× Invalid syntax
╭─[entry #2:1:1]
1 │ " \u{01ffbogus}"
· ───────┬──────
· ╰── Syntax error in string, expecting 1 to 6 hex digits in unicode escape '\u{X...}', max value 10FFFF.
╰────
```
Malformed unicode escape in string, flagged as error.
String parse can be opinionated, it's the last shape tried.
```bash
〉0x22bogus
Error: nu::shell::external_command (link)
× External command failed
╭─[entry #4:1:1]
1 │ 0x22bogus
· ────┬────
· ╰── executable was not found
╰────
help: No such file or directory (os error 2)
```
A *correct* number in first token would be evaluated, but an *incorrect*
one is treated as external command? Confusing to users.
```bash
〉0 + 0x22bogus
Error:
× Invalid syntax
╭─[entry #5:1:1]
1 │ 0 + 0x22bogus
· ────┬────
· ╰── Syntax error in int, invalid digits in radix 16 int.
╰────
```
Can give syntax error if token is unambiguously int literal. e.g has 0b
or 0x prefix, could not be a float.
```bash
〉0 + 098bogus
Error: nu::parser::unsupported_operation (link)
× Types mismatched for operation.
╭─[entry #6:1:1]
1 │ 0 + 098bogus
· ┬ ┬ ────┬───
· │ │ ╰── string
· │ ╰── doesn't support these values.
· ╰── int
╰────
help: Change int or string to be the right types and try again.
```
But *decimal* literal (no prefix) can't be too strict. Parser is going
to try float later. So '1.4' must be passed.
# User-Facing Changes
First and foremost, more specific error messages for typos in string and
int literals. Probably improves interactive user experience.
But a script that was causing and then checking for specific error might
notice a different error message.
_(List of all changes that impact the user experience here. This helps
us keep track of breaking changes.)_
# Tests + Formatting
Added (positive and negative unit tests in `cargo test -p nu-parser`.
Didn't add integration tests.
Make sure you've run and fixed any issues with these commands:
- [x] `cargo fmt --all -- --check` to check standard code formatting
(`cargo fmt --all` applies these changes)
- [x] `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- [x] `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
---------
Co-authored-by: Stefan Holderbach <sholderbach@users.noreply.github.com>
2023-02-13 17:09:50 +01:00
|
|
|
let shapes = [
|
2023-04-11 19:21:31 +02:00
|
|
|
SyntaxShape::Binary,
|
|
|
|
SyntaxShape::Filesize,
|
|
|
|
SyntaxShape::Duration,
|
Syntax errors for string and int (#7952)
# Description
Added a few syntax errors in ints and strings, changed parser to stop
and show that error rather than continue trying to parse those tokens as
some other shape. However, I don't see how to push this direction much
further, and most of the classic confusing errors can't be changed.
Flagged as WIP for the moment, but passes all checks and works better
than current release:
1. I have yet to figure out how to make these errors refer back to the
book, as I see some other errors do.
2. How to give syntax error when malformed int is first token in line?
Currently parsed as external command, user gets confusing error message.
3. Would like to be more strict with *decimal* int literals (lacking,
e.g, `0x' prefix). Need to tinker more with the order of parse shape
calls, currently, float is tried after int, so '1.4' has to be passed.
_(Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.)_
```bash
〉"\z"
Error:
╭─[entry #3:1:1]
1 │ "\z"
· ─┬─
· ╰── Syntax error in string, unrecognized character after escape '\'.
╰────
```
Canonic presentation of a syntax error.
```bash
〉" \u{01ffbogus}"
Error:
× Invalid syntax
╭─[entry #2:1:1]
1 │ " \u{01ffbogus}"
· ───────┬──────
· ╰── Syntax error in string, expecting 1 to 6 hex digits in unicode escape '\u{X...}', max value 10FFFF.
╰────
```
Malformed unicode escape in string, flagged as error.
String parse can be opinionated, it's the last shape tried.
```bash
〉0x22bogus
Error: nu::shell::external_command (link)
× External command failed
╭─[entry #4:1:1]
1 │ 0x22bogus
· ────┬────
· ╰── executable was not found
╰────
help: No such file or directory (os error 2)
```
A *correct* number in first token would be evaluated, but an *incorrect*
one is treated as external command? Confusing to users.
```bash
〉0 + 0x22bogus
Error:
× Invalid syntax
╭─[entry #5:1:1]
1 │ 0 + 0x22bogus
· ────┬────
· ╰── Syntax error in int, invalid digits in radix 16 int.
╰────
```
Can give syntax error if token is unambiguously int literal. e.g has 0b
or 0x prefix, could not be a float.
```bash
〉0 + 098bogus
Error: nu::parser::unsupported_operation (link)
× Types mismatched for operation.
╭─[entry #6:1:1]
1 │ 0 + 098bogus
· ┬ ┬ ────┬───
· │ │ ╰── string
· │ ╰── doesn't support these values.
· ╰── int
╰────
help: Change int or string to be the right types and try again.
```
But *decimal* literal (no prefix) can't be too strict. Parser is going
to try float later. So '1.4' must be passed.
# User-Facing Changes
First and foremost, more specific error messages for typos in string and
int literals. Probably improves interactive user experience.
But a script that was causing and then checking for specific error might
notice a different error message.
_(List of all changes that impact the user experience here. This helps
us keep track of breaking changes.)_
# Tests + Formatting
Added (positive and negative unit tests in `cargo test -p nu-parser`.
Didn't add integration tests.
Make sure you've run and fixed any issues with these commands:
- [x] `cargo fmt --all -- --check` to check standard code formatting
(`cargo fmt --all` applies these changes)
- [x] `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- [x] `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
---------
Co-authored-by: Stefan Holderbach <sholderbach@users.noreply.github.com>
2023-02-13 17:09:50 +01:00
|
|
|
SyntaxShape::Range,
|
2023-04-11 19:21:31 +02:00
|
|
|
SyntaxShape::DateTime, //FIXME requires 3 failed conversion attempts before failing
|
allow records to have type annotations (#8914)
# Description
follow up to #8529
cleaned up version of #8892
- the original syntax is okay
```nu
def okay [rec: record] {}
```
- you can now add type annotations for fields if you know
them before hand
```nu
def okay [rec: record<name: string>] {}
```
- you can specify multiple fields
```nu
def okay [person: record<name: string age: int>] {}
# an optional comma is allowed
def okay [person: record<name: string, age: int>] {}
```
- if annotations are specified, any use of the command will be type
checked against the specified type
```nu
def unwrap [result: record<ok: bool, value: any>] {}
unwrap {ok: 2, value: "value"}
# errors with
Error: nu::parser::type_mismatch
× Type mismatch.
╭─[entry #4:1:1]
1 │ unwrap {ok: 2, value: "value"}
· ───────┬─────
· ╰── expected record<ok: bool, value: any>, found record<ok: int, value: string>
╰────
```
> here the error is in the `ok` field, since `any` is coerced into any
type
> as a result `unwrap {ok: true, value: "value"}` is okay
- the key must be a string, either quoted or unquoted
```nu
def err [rec: record<{}: list>] {}
# errors with
Error:
× `record` type annotations key not string
╭─[entry #7:1:1]
1 │ def unwrap [result: record<{}: bool, value: any>] {}
· ─┬
· ╰── must be a string
╰────
```
- a key doesn't have to have a type in which case it is assumed to be
`any`
```nu
def okay [person: record<name age>] {}
def okay [person: record<name: string age>] {}
```
- however, if you put a colon, you have to specify a type
```nu
def err [person: record<name: >] {}
# errors with
Error: nu::parser::parse_mismatch
× Parse mismatch during operation.
╭─[entry #12:1:1]
1 │ def unwrap [res: record<name: >] { $res }
· ┬
· ╰── expected type after colon
╰────
```
# User-Facing Changes
**[BREAKING CHANGES]**
- this change adds a field to `SyntaxShape::Record` so any plugins that
used it will have to update and include the field. though if you are
unsure of the type the record expects, `SyntaxShape::Record(vec![])`
will suffice
2023-04-26 15:16:55 +02:00
|
|
|
SyntaxShape::Record(vec![]),
|
Syntax errors for string and int (#7952)
# Description
Added a few syntax errors in ints and strings, changed parser to stop
and show that error rather than continue trying to parse those tokens as
some other shape. However, I don't see how to push this direction much
further, and most of the classic confusing errors can't be changed.
Flagged as WIP for the moment, but passes all checks and works better
than current release:
1. I have yet to figure out how to make these errors refer back to the
book, as I see some other errors do.
2. How to give syntax error when malformed int is first token in line?
Currently parsed as external command, user gets confusing error message.
3. Would like to be more strict with *decimal* int literals (lacking,
e.g, `0x' prefix). Need to tinker more with the order of parse shape
calls, currently, float is tried after int, so '1.4' has to be passed.
_(Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.)_
```bash
〉"\z"
Error:
╭─[entry #3:1:1]
1 │ "\z"
· ─┬─
· ╰── Syntax error in string, unrecognized character after escape '\'.
╰────
```
Canonic presentation of a syntax error.
```bash
〉" \u{01ffbogus}"
Error:
× Invalid syntax
╭─[entry #2:1:1]
1 │ " \u{01ffbogus}"
· ───────┬──────
· ╰── Syntax error in string, expecting 1 to 6 hex digits in unicode escape '\u{X...}', max value 10FFFF.
╰────
```
Malformed unicode escape in string, flagged as error.
String parse can be opinionated, it's the last shape tried.
```bash
〉0x22bogus
Error: nu::shell::external_command (link)
× External command failed
╭─[entry #4:1:1]
1 │ 0x22bogus
· ────┬────
· ╰── executable was not found
╰────
help: No such file or directory (os error 2)
```
A *correct* number in first token would be evaluated, but an *incorrect*
one is treated as external command? Confusing to users.
```bash
〉0 + 0x22bogus
Error:
× Invalid syntax
╭─[entry #5:1:1]
1 │ 0 + 0x22bogus
· ────┬────
· ╰── Syntax error in int, invalid digits in radix 16 int.
╰────
```
Can give syntax error if token is unambiguously int literal. e.g has 0b
or 0x prefix, could not be a float.
```bash
〉0 + 098bogus
Error: nu::parser::unsupported_operation (link)
× Types mismatched for operation.
╭─[entry #6:1:1]
1 │ 0 + 098bogus
· ┬ ┬ ────┬───
· │ │ ╰── string
· │ ╰── doesn't support these values.
· ╰── int
╰────
help: Change int or string to be the right types and try again.
```
But *decimal* literal (no prefix) can't be too strict. Parser is going
to try float later. So '1.4' must be passed.
# User-Facing Changes
First and foremost, more specific error messages for typos in string and
int literals. Probably improves interactive user experience.
But a script that was causing and then checking for specific error might
notice a different error message.
_(List of all changes that impact the user experience here. This helps
us keep track of breaking changes.)_
# Tests + Formatting
Added (positive and negative unit tests in `cargo test -p nu-parser`.
Didn't add integration tests.
Make sure you've run and fixed any issues with these commands:
- [x] `cargo fmt --all -- --check` to check standard code formatting
(`cargo fmt --all` applies these changes)
- [x] `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- [x] `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
---------
Co-authored-by: Stefan Holderbach <sholderbach@users.noreply.github.com>
2023-02-13 17:09:50 +01:00
|
|
|
SyntaxShape::Closure(None),
|
|
|
|
SyntaxShape::Block,
|
2023-04-11 19:21:31 +02:00
|
|
|
SyntaxShape::Int,
|
|
|
|
SyntaxShape::Number,
|
Syntax errors for string and int (#7952)
# Description
Added a few syntax errors in ints and strings, changed parser to stop
and show that error rather than continue trying to parse those tokens as
some other shape. However, I don't see how to push this direction much
further, and most of the classic confusing errors can't be changed.
Flagged as WIP for the moment, but passes all checks and works better
than current release:
1. I have yet to figure out how to make these errors refer back to the
book, as I see some other errors do.
2. How to give syntax error when malformed int is first token in line?
Currently parsed as external command, user gets confusing error message.
3. Would like to be more strict with *decimal* int literals (lacking,
e.g, `0x' prefix). Need to tinker more with the order of parse shape
calls, currently, float is tried after int, so '1.4' has to be passed.
_(Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.)_
```bash
〉"\z"
Error:
╭─[entry #3:1:1]
1 │ "\z"
· ─┬─
· ╰── Syntax error in string, unrecognized character after escape '\'.
╰────
```
Canonic presentation of a syntax error.
```bash
〉" \u{01ffbogus}"
Error:
× Invalid syntax
╭─[entry #2:1:1]
1 │ " \u{01ffbogus}"
· ───────┬──────
· ╰── Syntax error in string, expecting 1 to 6 hex digits in unicode escape '\u{X...}', max value 10FFFF.
╰────
```
Malformed unicode escape in string, flagged as error.
String parse can be opinionated, it's the last shape tried.
```bash
〉0x22bogus
Error: nu::shell::external_command (link)
× External command failed
╭─[entry #4:1:1]
1 │ 0x22bogus
· ────┬────
· ╰── executable was not found
╰────
help: No such file or directory (os error 2)
```
A *correct* number in first token would be evaluated, but an *incorrect*
one is treated as external command? Confusing to users.
```bash
〉0 + 0x22bogus
Error:
× Invalid syntax
╭─[entry #5:1:1]
1 │ 0 + 0x22bogus
· ────┬────
· ╰── Syntax error in int, invalid digits in radix 16 int.
╰────
```
Can give syntax error if token is unambiguously int literal. e.g has 0b
or 0x prefix, could not be a float.
```bash
〉0 + 098bogus
Error: nu::parser::unsupported_operation (link)
× Types mismatched for operation.
╭─[entry #6:1:1]
1 │ 0 + 098bogus
· ┬ ┬ ────┬───
· │ │ ╰── string
· │ ╰── doesn't support these values.
· ╰── int
╰────
help: Change int or string to be the right types and try again.
```
But *decimal* literal (no prefix) can't be too strict. Parser is going
to try float later. So '1.4' must be passed.
# User-Facing Changes
First and foremost, more specific error messages for typos in string and
int literals. Probably improves interactive user experience.
But a script that was causing and then checking for specific error might
notice a different error message.
_(List of all changes that impact the user experience here. This helps
us keep track of breaking changes.)_
# Tests + Formatting
Added (positive and negative unit tests in `cargo test -p nu-parser`.
Didn't add integration tests.
Make sure you've run and fixed any issues with these commands:
- [x] `cargo fmt --all -- --check` to check standard code formatting
(`cargo fmt --all` applies these changes)
- [x] `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- [x] `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
---------
Co-authored-by: Stefan Holderbach <sholderbach@users.noreply.github.com>
2023-02-13 17:09:50 +01:00
|
|
|
SyntaxShape::String,
|
|
|
|
];
|
2021-09-07 05:56:30 +02:00
|
|
|
for shape in shapes.iter() {
|
2023-04-07 02:35:45 +02:00
|
|
|
let starting_error_count = working_set.parse_errors.len();
|
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
let s = parse_value(working_set, span, shape);
|
2023-04-07 02:35:45 +02:00
|
|
|
|
|
|
|
if starting_error_count == working_set.parse_errors.len() {
|
|
|
|
return s;
|
|
|
|
} else {
|
2023-04-07 21:01:12 +02:00
|
|
|
match working_set.parse_errors.get(starting_error_count) {
|
2023-04-07 02:35:45 +02:00
|
|
|
Some(ParseError::Expected(_, _)) => {
|
|
|
|
working_set.parse_errors.truncate(starting_error_count);
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
_ => {
|
|
|
|
return s;
|
|
|
|
}
|
Syntax errors for string and int (#7952)
# Description
Added a few syntax errors in ints and strings, changed parser to stop
and show that error rather than continue trying to parse those tokens as
some other shape. However, I don't see how to push this direction much
further, and most of the classic confusing errors can't be changed.
Flagged as WIP for the moment, but passes all checks and works better
than current release:
1. I have yet to figure out how to make these errors refer back to the
book, as I see some other errors do.
2. How to give syntax error when malformed int is first token in line?
Currently parsed as external command, user gets confusing error message.
3. Would like to be more strict with *decimal* int literals (lacking,
e.g, `0x' prefix). Need to tinker more with the order of parse shape
calls, currently, float is tried after int, so '1.4' has to be passed.
_(Description of your pull request goes here. **Provide examples and/or
screenshots** if your changes affect the user experience.)_
```bash
〉"\z"
Error:
╭─[entry #3:1:1]
1 │ "\z"
· ─┬─
· ╰── Syntax error in string, unrecognized character after escape '\'.
╰────
```
Canonic presentation of a syntax error.
```bash
〉" \u{01ffbogus}"
Error:
× Invalid syntax
╭─[entry #2:1:1]
1 │ " \u{01ffbogus}"
· ───────┬──────
· ╰── Syntax error in string, expecting 1 to 6 hex digits in unicode escape '\u{X...}', max value 10FFFF.
╰────
```
Malformed unicode escape in string, flagged as error.
String parse can be opinionated, it's the last shape tried.
```bash
〉0x22bogus
Error: nu::shell::external_command (link)
× External command failed
╭─[entry #4:1:1]
1 │ 0x22bogus
· ────┬────
· ╰── executable was not found
╰────
help: No such file or directory (os error 2)
```
A *correct* number in first token would be evaluated, but an *incorrect*
one is treated as external command? Confusing to users.
```bash
〉0 + 0x22bogus
Error:
× Invalid syntax
╭─[entry #5:1:1]
1 │ 0 + 0x22bogus
· ────┬────
· ╰── Syntax error in int, invalid digits in radix 16 int.
╰────
```
Can give syntax error if token is unambiguously int literal. e.g has 0b
or 0x prefix, could not be a float.
```bash
〉0 + 098bogus
Error: nu::parser::unsupported_operation (link)
× Types mismatched for operation.
╭─[entry #6:1:1]
1 │ 0 + 098bogus
· ┬ ┬ ────┬───
· │ │ ╰── string
· │ ╰── doesn't support these values.
· ╰── int
╰────
help: Change int or string to be the right types and try again.
```
But *decimal* literal (no prefix) can't be too strict. Parser is going
to try float later. So '1.4' must be passed.
# User-Facing Changes
First and foremost, more specific error messages for typos in string and
int literals. Probably improves interactive user experience.
But a script that was causing and then checking for specific error might
notice a different error message.
_(List of all changes that impact the user experience here. This helps
us keep track of breaking changes.)_
# Tests + Formatting
Added (positive and negative unit tests in `cargo test -p nu-parser`.
Didn't add integration tests.
Make sure you've run and fixed any issues with these commands:
- [x] `cargo fmt --all -- --check` to check standard code formatting
(`cargo fmt --all` applies these changes)
- [x] `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- [x] `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
---------
Co-authored-by: Stefan Holderbach <sholderbach@users.noreply.github.com>
2023-02-13 17:09:50 +01:00
|
|
|
}
|
2021-09-07 05:56:30 +02:00
|
|
|
}
|
|
|
|
}
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("any shape".into(), span));
|
|
|
|
garbage(span)
|
2021-07-02 08:44:37 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2023-04-07 02:35:45 +02:00
|
|
|
x => {
|
|
|
|
working_set.error(ParseError::Expected(x.to_type().to_string(), span));
|
|
|
|
garbage(span)
|
|
|
|
}
|
2021-07-02 08:44:37 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-02 08:44:37 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
pub fn parse_operator(working_set: &mut StateWorkingSet, span: Span) -> Expression {
|
2021-09-02 10:25:22 +02:00
|
|
|
let contents = working_set.get_span_contents(span);
|
|
|
|
|
|
|
|
let operator = match contents {
|
2022-11-11 07:51:08 +01:00
|
|
|
b"=" => Operator::Assignment(Assignment::Assign),
|
2022-11-11 19:50:43 +01:00
|
|
|
b"+=" => Operator::Assignment(Assignment::PlusAssign),
|
2022-12-09 17:20:58 +01:00
|
|
|
b"++=" => Operator::Assignment(Assignment::AppendAssign),
|
2022-11-11 19:50:43 +01:00
|
|
|
b"-=" => Operator::Assignment(Assignment::MinusAssign),
|
|
|
|
b"*=" => Operator::Assignment(Assignment::MultiplyAssign),
|
|
|
|
b"/=" => Operator::Assignment(Assignment::DivideAssign),
|
2022-11-11 07:51:08 +01:00
|
|
|
b"==" => Operator::Comparison(Comparison::Equal),
|
|
|
|
b"!=" => Operator::Comparison(Comparison::NotEqual),
|
|
|
|
b"<" => Operator::Comparison(Comparison::LessThan),
|
|
|
|
b"<=" => Operator::Comparison(Comparison::LessThanOrEqual),
|
|
|
|
b">" => Operator::Comparison(Comparison::GreaterThan),
|
|
|
|
b">=" => Operator::Comparison(Comparison::GreaterThanOrEqual),
|
|
|
|
b"=~" => Operator::Comparison(Comparison::RegexMatch),
|
|
|
|
b"!~" => Operator::Comparison(Comparison::NotRegexMatch),
|
|
|
|
b"+" => Operator::Math(Math::Plus),
|
|
|
|
b"++" => Operator::Math(Math::Append),
|
|
|
|
b"-" => Operator::Math(Math::Minus),
|
|
|
|
b"*" => Operator::Math(Math::Multiply),
|
|
|
|
b"/" => Operator::Math(Math::Divide),
|
|
|
|
b"//" => Operator::Math(Math::FloorDivision),
|
|
|
|
b"in" => Operator::Comparison(Comparison::In),
|
|
|
|
b"not-in" => Operator::Comparison(Comparison::NotIn),
|
|
|
|
b"mod" => Operator::Math(Math::Modulo),
|
|
|
|
b"bit-or" => Operator::Bits(Bits::BitOr),
|
|
|
|
b"bit-xor" => Operator::Bits(Bits::BitXor),
|
|
|
|
b"bit-and" => Operator::Bits(Bits::BitAnd),
|
|
|
|
b"bit-shl" => Operator::Bits(Bits::ShiftLeft),
|
|
|
|
b"bit-shr" => Operator::Bits(Bits::ShiftRight),
|
|
|
|
b"starts-with" => Operator::Comparison(Comparison::StartsWith),
|
|
|
|
b"ends-with" => Operator::Comparison(Comparison::EndsWith),
|
2022-12-08 00:02:11 +01:00
|
|
|
b"and" => Operator::Boolean(Boolean::And),
|
|
|
|
b"or" => Operator::Boolean(Boolean::Or),
|
2022-11-26 17:02:37 +01:00
|
|
|
b"xor" => Operator::Boolean(Boolean::Xor),
|
2022-11-11 07:51:08 +01:00
|
|
|
b"**" => Operator::Math(Math::Pow),
|
2022-11-26 22:59:43 +01:00
|
|
|
// WARNING: not actual operators below! Error handling only
|
|
|
|
pow @ (b"^" | b"pow") => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownOperator(
|
|
|
|
match pow {
|
|
|
|
b"^" => "^",
|
|
|
|
b"pow" => "pow",
|
|
|
|
_ => unreachable!(),
|
|
|
|
},
|
|
|
|
"Use '**' for exponentiation or 'bit-xor' for bitwise XOR.",
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2022-11-26 22:59:43 +01:00
|
|
|
}
|
|
|
|
equality @ (b"is" | b"===") => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownOperator(
|
|
|
|
match equality {
|
|
|
|
b"is" => "is",
|
|
|
|
b"===" => "===",
|
|
|
|
_ => unreachable!(),
|
|
|
|
},
|
|
|
|
"Did you mean '=='?",
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2022-11-26 22:59:43 +01:00
|
|
|
}
|
|
|
|
b"contains" => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownOperator(
|
|
|
|
"contains",
|
|
|
|
"Did you mean '$string =~ $pattern' or '$element in $container'?",
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2022-11-26 22:59:43 +01:00
|
|
|
}
|
|
|
|
b"%" => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownOperator(
|
|
|
|
"%",
|
|
|
|
"Did you mean 'mod'?",
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2022-11-26 22:59:43 +01:00
|
|
|
}
|
2022-12-01 11:34:41 +01:00
|
|
|
b"&" => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownOperator(
|
|
|
|
"&",
|
|
|
|
"Did you mean 'bit-and'?",
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2022-12-01 11:34:41 +01:00
|
|
|
}
|
|
|
|
b"<<" => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownOperator(
|
|
|
|
"<<",
|
|
|
|
"Did you mean 'bit-shl'?",
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2022-12-01 11:34:41 +01:00
|
|
|
}
|
|
|
|
b">>" => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownOperator(
|
|
|
|
">>",
|
|
|
|
"Did you mean 'bit-shr'?",
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2022-12-01 11:34:41 +01:00
|
|
|
}
|
|
|
|
bits @ (b"bits-and" | b"bits-xor" | b"bits-or" | b"bits-shl" | b"bits-shr") => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::UnknownOperator(
|
|
|
|
match bits {
|
|
|
|
b"bits-and" => "bits-and",
|
|
|
|
b"bits-xor" => "bits-xor",
|
|
|
|
b"bits-or" => "bits-or",
|
|
|
|
b"bits-shl" => "bits-shl",
|
|
|
|
b"bits-shr" => "bits-shr",
|
|
|
|
_ => unreachable!(),
|
|
|
|
},
|
|
|
|
match bits {
|
|
|
|
b"bits-and" => "Did you mean 'bit-and'?",
|
|
|
|
b"bits-xor" => "Did you mean 'bit-xor'?",
|
|
|
|
b"bits-or" => "Did you mean 'bit-or'?",
|
|
|
|
b"bits-shl" => "Did you mean 'bit-shl'?",
|
|
|
|
b"bits-shr" => "Did you mean 'bit-shr'?",
|
|
|
|
_ => unreachable!(),
|
|
|
|
},
|
|
|
|
span,
|
|
|
|
));
|
|
|
|
return garbage(span);
|
2022-12-01 11:34:41 +01:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
_ => {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("operator".into(), span));
|
|
|
|
return garbage(span);
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
};
|
2021-07-02 08:44:37 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Expression {
|
|
|
|
expr: Expr::Operator(operator),
|
|
|
|
span,
|
|
|
|
ty: Type::Any,
|
|
|
|
custom_completion: None,
|
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-02 08:44:37 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
/// Parse `spans` as a math expression using operator-precedence parsing
/// (precedence climbing over an explicit expression stack).
///
/// * `spans` - the token spans, starting at the leftmost operand.
/// * `lhs_row_var_id` - when `Some`, left-hand operands are expanded into full
///   cell paths rooted at this variable via `expand_to_cell_path` (used for
///   row-condition contexts such as `where`).
///
/// Errors are reported through `working_set.error(...)`; on unrecoverable
/// input a garbage expression is returned so parsing can continue.
pub fn parse_math_expression(
    working_set: &mut StateWorkingSet,
    spans: &[Span],
    lhs_row_var_id: Option<VarId>,
) -> Expression {
    trace!("parsing: math expression");

    // As the expr_stack grows, we increase the required precedence to grow larger
    // If, at any time, the operator we're looking at is the same or lower precedence
    // of what is in the expression stack, we collapse the expression stack.
    //
    // This leads to an expression stack that grows under increasing precedence and collapses
    // under decreasing/sustained precedence
    //
    // The end result is a stack that we can fold into binary operations as right associations
    // safely.

    // Stack holds an alternating sequence: operand, operator, operand, ...
    let mut expr_stack: Vec<Expression> = vec![];

    let mut idx = 0;
    // Sentinel higher than any real operator precedence, so the first operator
    // never triggers a collapse.
    let mut last_prec = 1000000;

    let first_span = working_set.get_span_contents(spans[0]);

    // `if`/`match` at the head of a math expression: hand off to call parsing
    // (these keywords are parsed as commands, not as operands).
    if first_span == b"if" || first_span == b"match" {
        // If expression
        if spans.len() > 1 {
            return parse_call(working_set, spans, spans[0], false);
        } else {
            // Bare keyword with nothing after it.
            working_set.error(ParseError::Expected(
                "expression".into(),
                Span::new(spans[0].end, spans[0].end),
            ));
            return garbage(spans[0]);
        }
    } else if first_span == b"not" {
        // Unary `not`: recursively parse the remainder and wrap it.
        if spans.len() > 1 {
            let remainder = parse_math_expression(working_set, &spans[1..], lhs_row_var_id);
            return Expression {
                expr: Expr::UnaryNot(Box::new(remainder)),
                span: span(spans),
                ty: Type::Bool,
                custom_completion: None,
            };
        } else {
            working_set.error(ParseError::Expected(
                "expression".into(),
                Span::new(spans[0].end, spans[0].end),
            ));
            return garbage(spans[0]);
        }
    }

    let mut lhs = parse_value(working_set, spans[0], &SyntaxShape::Any);
    idx += 1;

    if idx >= spans.len() {
        // We already found the one part of our expression, so let's expand
        if let Some(row_var_id) = lhs_row_var_id {
            expand_to_cell_path(working_set, &mut lhs, row_var_id);
        }
    }

    expr_stack.push(lhs);

    while idx < spans.len() {
        let op = parse_operator(working_set, spans[idx]);

        let op_prec = op.precedence();

        idx += 1;

        if idx == spans.len() {
            // Handle broken math expr `1 +` etc
            working_set.error(ParseError::IncompleteMathExpression(spans[idx - 1]));

            // Push garbage operator + operand to keep the stack's
            // operand/operator alternation intact for the fold below.
            expr_stack.push(Expression::garbage(spans[idx - 1]));
            expr_stack.push(Expression::garbage(spans[idx - 1]));

            break;
        }

        let rhs = parse_value(working_set, spans[idx], &SyntaxShape::Any);

        // Current operator binds no tighter than what's stacked: collapse.
        while op_prec <= last_prec && expr_stack.len() > 1 {
            // Collapse the right associated operations first
            // so that we can get back to a stack with a lower precedence
            let mut rhs = expr_stack
                .pop()
                .expect("internal error: expression stack empty");
            let mut op = expr_stack
                .pop()
                .expect("internal error: expression stack empty");

            last_prec = op.precedence();

            if last_prec < op_prec {
                // Stacked operator binds looser than the incoming one:
                // undo the pops and stop collapsing.
                expr_stack.push(op);
                expr_stack.push(rhs);
                break;
            }

            let mut lhs = expr_stack
                .pop()
                .expect("internal error: expression stack empty");

            if let Some(row_var_id) = lhs_row_var_id {
                expand_to_cell_path(working_set, &mut lhs, row_var_id);
            }

            // Type-check the binary op; errors are recorded but parsing continues.
            let (result_ty, err) = math_result_type(working_set, &mut lhs, &mut op, &mut rhs);
            if let Some(err) = err {
                working_set.error(err);
            }

            let op_span = span(&[lhs.span, rhs.span]);
            expr_stack.push(Expression {
                expr: Expr::BinaryOp(Box::new(lhs), Box::new(op), Box::new(rhs)),
                span: op_span,
                ty: result_ty,
                custom_completion: None,
            });
        }
        expr_stack.push(op);
        expr_stack.push(rhs);

        last_prec = op_prec;

        idx += 1;
    }

    // Fold whatever remains on the stack into nested BinaryOp expressions
    // (right-to-left, preserving right association).
    while expr_stack.len() != 1 {
        let mut rhs = expr_stack
            .pop()
            .expect("internal error: expression stack empty");
        let mut op = expr_stack
            .pop()
            .expect("internal error: expression stack empty");
        let mut lhs = expr_stack
            .pop()
            .expect("internal error: expression stack empty");

        if let Some(row_var_id) = lhs_row_var_id {
            expand_to_cell_path(working_set, &mut lhs, row_var_id);
        }

        let (result_ty, err) = math_result_type(working_set, &mut lhs, &mut op, &mut rhs);
        if let Some(err) = err {
            working_set.error(err)
        }

        let binary_op_span = span(&[lhs.span, rhs.span]);
        expr_stack.push(Expression {
            expr: Expr::BinaryOp(Box::new(lhs), Box::new(op), Box::new(rhs)),
            span: binary_op_span,
            ty: result_ty,
            custom_completion: None,
        });
    }

    // Exactly one fully-folded expression remains.
    expr_stack
        .pop()
        .expect("internal error: expression stack empty")
}
|
|
|
|
|
|
|
|
/// Parse a general expression from `spans`.
///
/// First consumes any leading environment-shorthand assignments of the form
/// `FOO=bar cmd ...`; the remainder is parsed either as a math expression
/// (when it looks like one) or as a call, with special errors for parser
/// keywords that are not valid mid-pipeline. If shorthands were collected and
/// a `with-env` declaration exists, the result is wrapped in a `with-env`
/// call so the assignments are scoped to the expression.
pub fn parse_expression(
    working_set: &mut StateWorkingSet,
    spans: &[Span],
    is_subexpression: bool,
) -> Expression {
    trace!("parsing: expression");

    // `pos` is the index of the first span that is NOT an env shorthand.
    let mut pos = 0;
    // Collected (name, value) pairs from `FOO=bar` prefixes.
    let mut shorthand = vec![];

    while pos < spans.len() {
        // Check if there is any environment shorthand
        let name = working_set.get_span_contents(spans[pos]);

        // Split on the first '=' only; `FOO=a=b` keeps `a=b` as the value.
        let split = name.splitn(2, |x| *x == b'=');
        let split: Vec<_> = split.collect();
        // Exclude external-command carets (`^`), empty names, and the `..=`
        // range operator which would otherwise look like a shorthand.
        if !name.starts_with(b"^")
            && split.len() == 2
            && !split[0].is_empty()
            && !split[0].ends_with(b"..")
        // was range op ..=
        {
            // Byte offset of the value part within the span (past the '=').
            let point = split[0].len() + 1;

            // Snapshot the error count so a failed shorthand parse can be
            // rolled back and the span re-parsed as a normal expression.
            let starting_error_count = working_set.parse_errors.len();

            let lhs = parse_string_strict(
                working_set,
                Span::new(spans[pos].start, spans[pos].start + point - 1),
            );
            let rhs = if spans[pos].start + point < spans[pos].end {
                let rhs_span = Span::new(spans[pos].start + point, spans[pos].end);

                // A `$`-prefixed value is a dollar expression (variable,
                // subexpression, etc.); otherwise it's a plain string.
                if working_set.get_span_contents(rhs_span).starts_with(b"$") {
                    parse_dollar_expr(working_set, rhs_span)
                } else {
                    parse_string_strict(working_set, rhs_span)
                }
            } else {
                // `FOO=` with no value: empty string.
                Expression {
                    expr: Expr::String(String::new()),
                    span: Span::unknown(),
                    ty: Type::Nothing,
                    custom_completion: None,
                }
            };

            if starting_error_count == working_set.parse_errors.len() {
                shorthand.push((lhs, rhs));
                pos += 1;
            } else {
                // Shorthand parse failed; discard its errors and treat the
                // span as the start of the real expression instead.
                working_set.parse_errors.truncate(starting_error_count);
                break;
            }
        } else {
            break;
        }
    }

    if pos == spans.len() {
        // Only shorthands, no actual command/expression after them.
        working_set.error(ParseError::UnknownCommand(spans[0]));
        return garbage(span(spans));
    }

    let output = if is_math_expression_like(working_set, spans[pos]) {
        parse_math_expression(working_set, &spans[pos..], None)
    } else {
        let bytes = working_set.get_span_contents(spans[pos]).to_vec();

        // For now, check for special parses of certain keywords
        match bytes.as_slice() {
            // Parser keywords are invalid mid-pipeline: report the error but
            // still parse the call so downstream tooling gets an AST.
            b"def" | b"extern" | b"for" | b"module" | b"use" | b"source" | b"alias" | b"export"
            | b"hide" => {
                working_set.error(ParseError::BuiltinCommandInPipeline(
                    String::from_utf8(bytes)
                        .expect("builtin commands bytes should be able to convert to string"),
                    spans[0],
                ));

                parse_call(working_set, &spans[pos..], spans[0], is_subexpression)
            }
            b"let" | b"const" | b"mut" => {
                // Assignments can't appear mid-pipeline; the error message
                // names the variable and value spans when they exist.
                working_set.error(ParseError::AssignInPipeline(
                    String::from_utf8(bytes)
                        .expect("builtin commands bytes should be able to convert to string"),
                    String::from_utf8_lossy(match spans.len() {
                        1 | 2 | 3 => b"value",
                        _ => working_set.get_span_contents(spans[3]),
                    })
                    .to_string(),
                    String::from_utf8_lossy(match spans.len() {
                        1 => b"variable",
                        _ => working_set.get_span_contents(spans[1]),
                    })
                    .to_string(),
                    spans[0],
                ));
                parse_call(working_set, &spans[pos..], spans[0], is_subexpression)
            }
            b"overlay" => {
                if spans.len() > 1 && working_set.get_span_contents(spans[1]) == b"list" {
                    // whitelist 'overlay list'
                    parse_call(working_set, &spans[pos..], spans[0], is_subexpression)
                } else {
                    working_set.error(ParseError::BuiltinCommandInPipeline(
                        "overlay".into(),
                        spans[0],
                    ));

                    parse_call(working_set, &spans[pos..], spans[0], is_subexpression)
                }
            }
            b"where" => parse_where_expr(working_set, &spans[pos..]),
            #[cfg(feature = "plugin")]
            b"register" => {
                working_set.error(ParseError::BuiltinCommandInPipeline(
                    "plugin".into(),
                    spans[0],
                ));

                parse_call(working_set, &spans[pos..], spans[0], is_subexpression)
            }

            _ => parse_call(working_set, &spans[pos..], spans[0], is_subexpression),
        }
    };

    let with_env = working_set.find_decl(b"with-env", &Type::Any);

    if !shorthand.is_empty() {
        if let Some(decl_id) = with_env {
            // Wrap the parsed expression in `with-env [k v ...] { ... }` so
            // the shorthand assignments only apply to this expression.
            let mut block = Block::default();
            let ty = output.ty.clone();
            block.pipelines = vec![Pipeline::from_vec(vec![output])];

            let block_id = working_set.add_block(block);

            // Flatten (name, value) pairs into a single list argument.
            let mut env_vars = vec![];
            for sh in shorthand {
                env_vars.push(sh.0);
                env_vars.push(sh.1);
            }

            let arguments = vec![
                Argument::Positional(Expression {
                    expr: Expr::List(env_vars),
                    span: span(&spans[..pos]),
                    ty: Type::Any,
                    custom_completion: None,
                }),
                Argument::Positional(Expression {
                    expr: Expr::Closure(block_id),
                    span: span(&spans[pos..]),
                    ty: Type::Closure,
                    custom_completion: None,
                }),
            ];

            let expr = Expr::Call(Box::new(Call {
                head: Span::unknown(),
                decl_id,
                arguments,
                redirect_stdout: true,
                redirect_stderr: false,
                parser_info: HashMap::new(),
            }));

            Expression {
                expr,
                custom_completion: None,
                span: span(spans),
                ty,
            }
        } else {
            // No `with-env` available: silently drop the shorthands and
            // return the bare expression.
            output
        }
    } else {
        output
    }
}
|
2021-07-01 02:01:04 +02:00
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
pub fn parse_variable(working_set: &mut StateWorkingSet, span: Span) -> Option<VarId> {
|
2021-09-02 10:25:22 +02:00
|
|
|
let bytes = working_set.get_span_contents(span);
|
2021-07-01 02:01:04 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if is_variable(bytes) {
|
|
|
|
if let Some(var_id) = working_set.find_variable(bytes) {
|
2022-06-12 21:18:00 +02:00
|
|
|
let input = working_set.get_variable(var_id).ty.clone();
|
|
|
|
working_set.type_scope.add_type(input);
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
Some(var_id)
|
2021-07-01 02:01:04 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
None
|
2021-07-01 02:01:04 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
} else {
|
2023-04-07 02:35:45 +02:00
|
|
|
working_set.error(ParseError::Expected("valid variable name".into(), span));
|
|
|
|
|
|
|
|
None
|
2021-07-01 02:01:04 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-07-01 02:01:04 +02:00
|
|
|
|
2022-02-15 20:31:14 +01:00
|
|
|
/// Parse one lite command, dispatching to the dedicated parser for builtin
/// keywords (`def`, `let`, `use`, ...) or falling through to generic
/// expression parsing.
///
/// Before dispatching on the literal keyword, checks whether the command name
/// resolves to an alias of an aliasable parser keyword (the `overlay ...`
/// family) and, if so, applies that keyword's parse-time side effects.
pub fn parse_builtin_commands(
    working_set: &mut StateWorkingSet,
    lite_command: &LiteCommand,
    is_subexpression: bool,
) -> Pipeline {
    // Alias pre-pass: only relevant when the head is neither a math-like
    // expression nor an unaliasable parser keyword.
    if !is_math_expression_like(working_set, lite_command.parts[0])
        && !is_unaliasable_parser_keyword(working_set, &lite_command.parts)
    {
        let name = working_set.get_span_contents(lite_command.parts[0]);
        if let Some(decl_id) = working_set.find_decl(name, &Type::Any) {
            let cmd = working_set.get_decl(decl_id);
            if cmd.is_alias() {
                // Parse keywords that can be aliased. Note that we check for "unaliasable" keywords
                // because alias can have any name, therefore, we can't check for "aliasable" keywords.
                let call_expr = parse_call(
                    working_set,
                    &lite_command.parts,
                    lite_command.parts[0],
                    is_subexpression,
                );

                if let Expression {
                    expr: Expr::Call(call),
                    ..
                } = call_expr
                {
                    // Apply parse keyword side effects
                    let cmd = working_set.get_decl(call.decl_id);
                    match cmd.name() {
                        "overlay hide" => return parse_overlay_hide(working_set, call),
                        "overlay new" => return parse_overlay_new(working_set, call),
                        "overlay use" => return parse_overlay_use(working_set, call),
                        _ => { /* this alias is not a parser keyword */ }
                    }
                }
            }
        }
    }

    let name = working_set.get_span_contents(lite_command.parts[0]);

    // Dispatch on the raw keyword bytes; anything unrecognized is parsed as
    // a general expression/call.
    match name {
        b"def" | b"def-env" => parse_def(working_set, lite_command, None),
        b"extern" => parse_extern(working_set, lite_command, None),
        b"let" | b"const" => parse_let_or_const(working_set, &lite_command.parts),
        b"mut" => parse_mut(working_set, &lite_command.parts),
        b"for" => {
            let expr = parse_for(working_set, &lite_command.parts);
            Pipeline::from_vec(vec![expr])
        }
        b"alias" => parse_alias(working_set, lite_command, None),
        // `.0` drops the module id; only the pipeline is needed here.
        b"module" => parse_module(working_set, lite_command, None).0,
        b"use" => {
            let (pipeline, _) = parse_use(working_set, &lite_command.parts);
            pipeline
        }
        b"overlay" => parse_keyword(working_set, lite_command, is_subexpression),
        b"source" | b"source-env" => parse_source(working_set, &lite_command.parts),
        b"export" => parse_export_in_block(working_set, lite_command),
        b"hide" => parse_hide(working_set, &lite_command.parts),
        b"where" => parse_where(working_set, &lite_command.parts),
        #[cfg(feature = "plugin")]
        b"register" => parse_register(working_set, &lite_command.parts),
        _ => {
            let expr = parse_expression(working_set, &lite_command.parts, is_subexpression);

            Pipeline::from_vec(vec![expr])
        }
    }
}
|
2021-06-30 03:42:56 +02:00
|
|
|
|
2023-04-07 20:09:38 +02:00
|
|
|
/// Parse a record literal (`{key: value, ...}`) from the source at `span`.
///
/// The interior is re-lexed with `,`/newlines as separators and `:` as a
/// special token, then consumed as `field`, `:`, `value` triples. When every
/// field name is a plain string the result is typed `Type::Record(fields)`;
/// otherwise it falls back to `Type::Any`. Errors are recorded on the
/// working set; malformed input returns a garbage expression.
pub fn parse_record(working_set: &mut StateWorkingSet, span: Span) -> Expression {
    let bytes = working_set.get_span_contents(span);

    let mut start = span.start;
    let mut end = span.end;

    // Strip the surrounding braces; a missing `{` is fatal, a missing `}`
    // is reported but parsing continues on the remaining text.
    if bytes.starts_with(b"{") {
        start += 1;
    } else {
        working_set.error(ParseError::Expected(
            "{".into(),
            Span::new(start, start + 1),
        ));
        return garbage(span);
    }

    if bytes.ends_with(b"}") {
        end -= 1;
    } else {
        working_set.error(ParseError::Unclosed("}".into(), Span::new(end, end)));
    }

    let inner_span = Span::new(start, end);
    let source = working_set.get_span_contents(inner_span);

    // Re-lex the record body: entries split on newlines/commas, `:` special.
    let (tokens, err) = lex(source, start, &[b'\n', b'\r', b','], &[b':'], true);
    if let Some(err) = err {
        working_set.error(err);
    }

    // Collected (field, value) expression pairs.
    let mut output = vec![];
    let mut idx = 0;

    // `Some(...)` while all field names are statically known strings;
    // becomes `None` (=> Type::Any) once a dynamic field is seen.
    let mut field_types = Some(vec![]);
    while idx < tokens.len() {
        let field = parse_value(working_set, tokens[idx].span, &SyntaxShape::Any);

        idx += 1;
        if idx == tokens.len() {
            // Field name with no following `:`/value.
            working_set.error(ParseError::Expected("record".into(), span));
            return garbage(span);
        }
        let colon = working_set.get_span_contents(tokens[idx].span);
        idx += 1;
        // Either the separator wasn't `:` or there is no value token left.
        if idx == tokens.len() || colon != b":" {
            //FIXME: need better error
            working_set.error(ParseError::Expected("record".into(), span));
            return garbage(span);
        }
        let value = parse_value(working_set, tokens[idx].span, &SyntaxShape::Any);
        idx += 1;

        if let Some(field) = field.as_string() {
            if let Some(fields) = &mut field_types {
                fields.push((field, value.ty.clone()));
            }
        } else {
            // We can't properly see all the field types
            // so fall back to the Any type later
            field_types = None;
        }
        output.push((field, value));
    }

    Expression {
        expr: Expr::Record(output),
        span,
        ty: (if let Some(fields) = field_types {
            Type::Record(fields)
        } else {
            Type::Any
        }),
        custom_completion: None,
    }
}
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
pub fn parse_block(
|
|
|
|
working_set: &mut StateWorkingSet,
|
2022-11-18 22:46:48 +01:00
|
|
|
tokens: &[Token],
|
2023-04-21 21:00:33 +02:00
|
|
|
span: Span,
|
2021-09-02 10:25:22 +02:00
|
|
|
scoped: bool,
|
2022-04-08 23:41:05 +02:00
|
|
|
is_subexpression: bool,
|
2023-04-07 02:35:45 +02:00
|
|
|
) -> Block {
|
2022-11-18 22:46:48 +01:00
|
|
|
let (lite_block, err) = lite_parse(tokens);
|
2023-04-07 02:35:45 +02:00
|
|
|
if let Some(err) = err {
|
|
|
|
working_set.error(err);
|
|
|
|
}
|
2022-11-18 22:46:48 +01:00
|
|
|
|
2022-01-01 22:42:50 +01:00
|
|
|
trace!("parsing block: {:?}", lite_block);
|
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if scoped {
|
|
|
|
working_set.enter_scope();
|
|
|
|
}
|
2022-06-12 21:18:00 +02:00
|
|
|
working_set.type_scope.enter_scope();
|
2021-06-30 03:42:56 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
// Pre-declare any definition so that definitions
|
|
|
|
// that share the same block can see each other
|
|
|
|
for pipeline in &lite_block.block {
|
|
|
|
if pipeline.commands.len() == 1 {
|
2022-11-18 22:46:48 +01:00
|
|
|
match &pipeline.commands[0] {
|
Support redirect `err` and `out` to different streams (#7685)
# Description
Closes: #7364
# User-Facing Changes
Given the following shell script:
```bash
x=$(printf '=%.0s' {1..100})
echo $x
echo $x 1>&2
```
It supports the following command:
```
bash test.sh out> out.txt err> err.txt
```
Then both `out.txt` and `err.txt` will contain `=`(100 times)
## About the change
The core idea is that when doing lite-parsing, introduce a new variant
`LiteElement::SeparateRedirection` if we meet two Redirection
token(which is generated by `lex` function),
During converting from lite block to block,
`LiteElement::SeparateRedirection` will be converted to
`PipelineElement::SeparateRedirection`.
Then in the block eval process, if we get
`PipelineElement::SeparateRedirection`, we invoke `save` command with
`--stderr` arguments to achieve our behavior.
## What happened internally?
Take the following command as example:
```
^ls out> out.txt err> err.txt
```
lex parsing result(`Tokens`) are not changed, but `LiteBlock` and
`Block` is changed after this pr.
### LiteBlock before
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(None, LiteCommand { comments: [], parts: [Span { start: 39041, end: 39044 }] }),
// actually the span of first Redirection is wrong too..
Redirection(Span { start: 39058, end: 39062 }, Stdout, LiteCommand { comments: [], parts: [Span { start: 39050, end: 39057 }] }),
Redirection(Span { start: 39058, end: 39062 }, Stderr, LiteCommand { comments: [], parts: [Span { start: 39063, end: 39070 }] })
]
}]
}
```
### LiteBlock after
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(
None,
LiteCommand { comments: [], parts: [Span { start: 38525, end: 38528 }] }),
// new one! two Redirection merged into one SeparateRedirection.
SeparateRedirection {
out: (Span { start: 38529, end: 38533 }, LiteCommand { comments: [], parts: [Span { start: 38534, end: 38541 }] }),
err: (Span { start: 38542, end: 38546 }, LiteCommand { comments: [], parts: [Span { start: 38547, end: 38554 }] })
}
]
}]
}
```
### Block before
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 39042, end: 39044 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 39041, end: 39044 },
ty: Any, custom_completion: None
}),
Redirection(Span { start: 39058, end: 39062 }, Stdout, Expression { expr: String("out.txt"), span: Span { start: 39050, end: 39057 }, ty: String, custom_completion: None }),
Redirection(Span { start: 39058, end: 39062 }, Stderr, Expression { expr: String("err.txt"), span: Span { start: 39063, end: 39070 }, ty: String, custom_completion: None })] }
```
### Block after
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 38526, end: 38528 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 38525, end: 38528 },
ty: Any,
custom_completion: None
}),
// new one! SeparateRedirection
SeparateRedirection {
out: (Span { start: 38529, end: 38533 }, Expression { expr: String("out.txt"), span: Span { start: 38534, end: 38541 }, ty: String, custom_completion: None }),
err: (Span { start: 38542, end: 38546 }, Expression { expr: String("err.txt"), span: Span { start: 38547, end: 38554 }, ty: String, custom_completion: None })
}
]
}
```
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-01-12 10:22:30 +01:00
|
|
|
LiteElement::Command(_, command)
|
|
|
|
| LiteElement::Redirection(_, _, command)
|
|
|
|
| LiteElement::SeparateRedirection {
|
|
|
|
out: (_, command), ..
|
Avoid blocking when `o+e>` redirects too much stderr message (#8784)
# Description
Fixes: #8565
Here is another pr #7240 tried to address the issue, but it works in a
wrong way.
After this change `o+e>` won't redirect all stdout message then stderr
message and it works more like how bash does.
# User-Facing Changes
For the given python code:
```python
# test.py
import sys
print('aa'*300, flush=True)
print('bb'*999999, file=sys.stderr, flush=True)
print('cc'*300, flush=True)
```
Running `python test.py out+err> a.txt` shouldn't hang nushell, and
`a.txt` keeps output in the same order
## About the change
The core idea is that when doing lite-parsing, introduce a new variant
`LiteElement::SameTargetRedirection` if we meet `out+err>` redirection
token(which is generated by lex function),
During converting from lite block to block,
LiteElement::SameTargetRedirection will be converted to
PipelineElement::SameTargetRedirection.
Then in the block eval process, if we get
PipelineElement::SameTargetRedirection, we'll invoke `run-external` with
`--redirect-combine` flag, then pipe the result into save command
## What happened internally?
Take the following command as example:
`^ls o+e> log.txt`
lex parsing result(`Tokens`) are not changed, but `LiteBlock` and
`Block` is changed after this pr.
### LiteBlock before
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(None, LiteCommand { comments: [], parts: [Span { start: 39041, end: 39044 }] }),
// actually the span of first Redirection is wrong too..
Redirection(Span { start: 39058, end: 39062 }, StdoutAndStderr, LiteCommand { comments: [], parts: [Span { start: 39050, end: 39057 }] }),
]
}]
}
```
### LiteBlock after
```rust
LiteBlock {
block: [
LitePipeline {
commands: [
SameTargetRedirection {
cmd: (None, LiteCommand { comments: [], parts: [Span { start: 147945, end: 147948}]}),
redirection: (Span { start: 147949, end: 147957 }, LiteCommand { comments: [], parts: [Span { start: 147958, end: 147965 }]})
}
]
}
]
}
```
### Block before
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 39042, end: 39044 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 39041, end: 39044 },
ty: Any, custom_completion: None
}),
Redirection(Span { start: 39058, end: 39062 }, StdoutAndStderr, Expression { expr: String("out.txt"), span: Span { start: 39050, end: 39057 }, ty: String, custom_completion: None })] }
```
### Block after
```rust
Pipeline {
elements: [
SameTargetRedirection {
cmd: (None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 147946, end: 147948 }, ty: String, custom_completion: None}, [], false),
span: Span { start: 147945, end: 147948},
ty: Any, custom_completion: None
}),
redirection: (Span { start: 147949, end: 147957}, Expression {expr: String("log.txt"), span: Span { start: 147958, end: 147965 },ty: String,custom_completion: None}
}
]
}
```
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
- `cargo run -- crates/nu-utils/standard_library/tests.nu` to run the
tests for the standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-05-18 00:47:03 +02:00
|
|
|
}
|
|
|
|
| LiteElement::SameTargetRedirection {
|
|
|
|
cmd: (_, command), ..
|
2023-04-07 20:09:38 +02:00
|
|
|
} => parse_def_predecl(working_set, &command.parts),
|
2021-10-01 22:16:27 +02:00
|
|
|
}
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
|
|
|
}
|
2021-07-01 02:01:04 +02:00
|
|
|
|
2023-04-18 10:19:08 +02:00
|
|
|
let mut block = Block::new_with_capacity(lite_block.block.len());
|
2022-11-18 22:46:48 +01:00
|
|
|
|
2023-04-18 10:19:08 +02:00
|
|
|
for (idx, pipeline) in lite_block.block.iter().enumerate() {
|
|
|
|
if pipeline.commands.len() > 1 {
|
|
|
|
let mut output = pipeline
|
|
|
|
.commands
|
|
|
|
.iter()
|
|
|
|
.map(|command| match command {
|
|
|
|
LiteElement::Command(span, command) => {
|
|
|
|
trace!("parsing: pipeline element: command");
|
|
|
|
let expr = parse_expression(working_set, &command.parts, is_subexpression);
|
|
|
|
working_set.type_scope.add_type(expr.ty.clone());
|
2022-11-18 22:46:48 +01:00
|
|
|
|
2023-04-18 10:19:08 +02:00
|
|
|
PipelineElement::Expression(*span, expr)
|
|
|
|
}
|
|
|
|
LiteElement::Redirection(span, redirection, command) => {
|
|
|
|
trace!("parsing: pipeline element: redirection");
|
|
|
|
let expr = parse_string(working_set, command.parts[0]);
|
|
|
|
|
|
|
|
working_set.type_scope.add_type(expr.ty.clone());
|
Support redirect `err` and `out` to different streams (#7685)
# Description
Closes: #7364
# User-Facing Changes
Given the following shell script:
```bash
x=$(printf '=%.0s' {1..100})
echo $x
echo $x 1>&2
```
It supports the following command:
```
bash test.sh out> out.txt err> err.txt
```
Then both `out.txt` and `err.txt` will contain `=`(100 times)
## About the change
The core idea is that when doing lite-parsing, introduce a new variant
`LiteElement::SeparateRedirection` if we meet two Redirection
token(which is generated by `lex` function),
During converting from lite block to block,
`LiteElement::SeparateRedirection` will be converted to
`PipelineElement::SeparateRedirection`.
Then in the block eval process, if we get
`PipelineElement::SeparateRedirection`, we invoke `save` command with
`--stderr` arguments to achieve our behavior.
## What happened internally?
Take the following command as example:
```
^ls out> out.txt err> err.txt
```
lex parsing result(`Tokens`) are not changed, but `LiteBlock` and
`Block` is changed after this pr.
### LiteBlock before
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(None, LiteCommand { comments: [], parts: [Span { start: 39041, end: 39044 }] }),
// actually the span of first Redirection is wrong too..
Redirection(Span { start: 39058, end: 39062 }, Stdout, LiteCommand { comments: [], parts: [Span { start: 39050, end: 39057 }] }),
Redirection(Span { start: 39058, end: 39062 }, Stderr, LiteCommand { comments: [], parts: [Span { start: 39063, end: 39070 }] })
]
}]
}
```
### LiteBlock after
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(
None,
LiteCommand { comments: [], parts: [Span { start: 38525, end: 38528 }] }),
// new one! two Redirection merged into one SeparateRedirection.
SeparateRedirection {
out: (Span { start: 38529, end: 38533 }, LiteCommand { comments: [], parts: [Span { start: 38534, end: 38541 }] }),
err: (Span { start: 38542, end: 38546 }, LiteCommand { comments: [], parts: [Span { start: 38547, end: 38554 }] })
}
]
}]
}
```
### Block before
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 39042, end: 39044 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 39041, end: 39044 },
ty: Any, custom_completion: None
}),
Redirection(Span { start: 39058, end: 39062 }, Stdout, Expression { expr: String("out.txt"), span: Span { start: 39050, end: 39057 }, ty: String, custom_completion: None }),
Redirection(Span { start: 39058, end: 39062 }, Stderr, Expression { expr: String("err.txt"), span: Span { start: 39063, end: 39070 }, ty: String, custom_completion: None })] }
```
### Block after
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 38526, end: 38528 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 38525, end: 38528 },
ty: Any,
custom_completion: None
}),
// new one! SeparateRedirection
SeparateRedirection {
out: (Span { start: 38529, end: 38533 }, Expression { expr: String("out.txt"), span: Span { start: 38534, end: 38541 }, ty: String, custom_completion: None }),
err: (Span { start: 38542, end: 38546 }, Expression { expr: String("err.txt"), span: Span { start: 38547, end: 38554 }, ty: String, custom_completion: None })
}
]
}
```
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-01-12 10:22:30 +01:00
|
|
|
|
2023-04-18 10:19:08 +02:00
|
|
|
PipelineElement::Redirection(*span, redirection.clone(), expr)
|
|
|
|
}
|
|
|
|
LiteElement::SeparateRedirection {
|
|
|
|
out: (out_span, out_command),
|
|
|
|
err: (err_span, err_command),
|
|
|
|
} => {
|
|
|
|
trace!("parsing: pipeline element: separate redirection");
|
|
|
|
let out_expr = parse_string(working_set, out_command.parts[0]);
|
Support redirect `err` and `out` to different streams (#7685)
# Description
Closes: #7364
# User-Facing Changes
Given the following shell script:
```bash
x=$(printf '=%.0s' {1..100})
echo $x
echo $x 1>&2
```
It supports the following command:
```
bash test.sh out> out.txt err> err.txt
```
Then both `out.txt` and `err.txt` will contain `=`(100 times)
## About the change
The core idea is that when doing lite-parsing, introduce a new variant
`LiteElement::SeparateRedirection` if we meet two Redirection
token(which is generated by `lex` function),
During converting from lite block to block,
`LiteElement::SeparateRedirection` will be converted to
`PipelineElement::SeparateRedirection`.
Then in the block eval process, if we get
`PipelineElement::SeparateRedirection`, we invoke `save` command with
`--stderr` arguments to achieve our behavior.
## What happened internally?
Take the following command as example:
```
^ls out> out.txt err> err.txt
```
lex parsing result(`Tokens`) are not changed, but `LiteBlock` and
`Block` is changed after this pr.
### LiteBlock before
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(None, LiteCommand { comments: [], parts: [Span { start: 39041, end: 39044 }] }),
// actually the span of first Redirection is wrong too..
Redirection(Span { start: 39058, end: 39062 }, Stdout, LiteCommand { comments: [], parts: [Span { start: 39050, end: 39057 }] }),
Redirection(Span { start: 39058, end: 39062 }, Stderr, LiteCommand { comments: [], parts: [Span { start: 39063, end: 39070 }] })
]
}]
}
```
### LiteBlock after
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(
None,
LiteCommand { comments: [], parts: [Span { start: 38525, end: 38528 }] }),
// new one! two Redirection merged into one SeparateRedirection.
SeparateRedirection {
out: (Span { start: 38529, end: 38533 }, LiteCommand { comments: [], parts: [Span { start: 38534, end: 38541 }] }),
err: (Span { start: 38542, end: 38546 }, LiteCommand { comments: [], parts: [Span { start: 38547, end: 38554 }] })
}
]
}]
}
```
### Block before
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 39042, end: 39044 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 39041, end: 39044 },
ty: Any, custom_completion: None
}),
Redirection(Span { start: 39058, end: 39062 }, Stdout, Expression { expr: String("out.txt"), span: Span { start: 39050, end: 39057 }, ty: String, custom_completion: None }),
Redirection(Span { start: 39058, end: 39062 }, Stderr, Expression { expr: String("err.txt"), span: Span { start: 39063, end: 39070 }, ty: String, custom_completion: None })] }
```
### Block after
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 38526, end: 38528 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 38525, end: 38528 },
ty: Any,
custom_completion: None
}),
// new one! SeparateRedirection
SeparateRedirection {
out: (Span { start: 38529, end: 38533 }, Expression { expr: String("out.txt"), span: Span { start: 38534, end: 38541 }, ty: String, custom_completion: None }),
err: (Span { start: 38542, end: 38546 }, Expression { expr: String("err.txt"), span: Span { start: 38547, end: 38554 }, ty: String, custom_completion: None })
}
]
}
```
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-01-12 10:22:30 +01:00
|
|
|
|
2023-04-18 10:19:08 +02:00
|
|
|
working_set.type_scope.add_type(out_expr.ty.clone());
|
Support redirect `err` and `out` to different streams (#7685)
# Description
Closes: #7364
# User-Facing Changes
Given the following shell script:
```bash
x=$(printf '=%.0s' {1..100})
echo $x
echo $x 1>&2
```
It supports the following command:
```
bash test.sh out> out.txt err> err.txt
```
Then both `out.txt` and `err.txt` will contain `=`(100 times)
## About the change
The core idea is that when doing lite-parsing, introduce a new variant
`LiteElement::SeparateRedirection` if we meet two Redirection
token(which is generated by `lex` function),
During converting from lite block to block,
`LiteElement::SeparateRedirection` will be converted to
`PipelineElement::SeparateRedirection`.
Then in the block eval process, if we get
`PipelineElement::SeparateRedirection`, we invoke `save` command with
`--stderr` arguments to achieve our behavior.
## What happened internally?
Take the following command as example:
```
^ls out> out.txt err> err.txt
```
lex parsing result(`Tokens`) are not changed, but `LiteBlock` and
`Block` is changed after this pr.
### LiteBlock before
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(None, LiteCommand { comments: [], parts: [Span { start: 39041, end: 39044 }] }),
// actually the span of first Redirection is wrong too..
Redirection(Span { start: 39058, end: 39062 }, Stdout, LiteCommand { comments: [], parts: [Span { start: 39050, end: 39057 }] }),
Redirection(Span { start: 39058, end: 39062 }, Stderr, LiteCommand { comments: [], parts: [Span { start: 39063, end: 39070 }] })
]
}]
}
```
### LiteBlock after
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(
None,
LiteCommand { comments: [], parts: [Span { start: 38525, end: 38528 }] }),
// new one! two Redirection merged into one SeparateRedirection.
SeparateRedirection {
out: (Span { start: 38529, end: 38533 }, LiteCommand { comments: [], parts: [Span { start: 38534, end: 38541 }] }),
err: (Span { start: 38542, end: 38546 }, LiteCommand { comments: [], parts: [Span { start: 38547, end: 38554 }] })
}
]
}]
}
```
### Block before
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 39042, end: 39044 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 39041, end: 39044 },
ty: Any, custom_completion: None
}),
Redirection(Span { start: 39058, end: 39062 }, Stdout, Expression { expr: String("out.txt"), span: Span { start: 39050, end: 39057 }, ty: String, custom_completion: None }),
Redirection(Span { start: 39058, end: 39062 }, Stderr, Expression { expr: String("err.txt"), span: Span { start: 39063, end: 39070 }, ty: String, custom_completion: None })] }
```
### Block after
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 38526, end: 38528 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 38525, end: 38528 },
ty: Any,
custom_completion: None
}),
// new one! SeparateRedirection
SeparateRedirection {
out: (Span { start: 38529, end: 38533 }, Expression { expr: String("out.txt"), span: Span { start: 38534, end: 38541 }, ty: String, custom_completion: None }),
err: (Span { start: 38542, end: 38546 }, Expression { expr: String("err.txt"), span: Span { start: 38547, end: 38554 }, ty: String, custom_completion: None })
}
]
}
```
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-01-12 10:22:30 +01:00
|
|
|
|
2023-04-18 10:19:08 +02:00
|
|
|
let err_expr = parse_string(working_set, err_command.parts[0]);
|
Support redirect `err` and `out` to different streams (#7685)
# Description
Closes: #7364
# User-Facing Changes
Given the following shell script:
```bash
x=$(printf '=%.0s' {1..100})
echo $x
echo $x 1>&2
```
It supports the following command:
```
bash test.sh out> out.txt err> err.txt
```
Then both `out.txt` and `err.txt` will contain `=`(100 times)
## About the change
The core idea is that when doing lite-parsing, introduce a new variant
`LiteElement::SeparateRedirection` if we meet two Redirection
token(which is generated by `lex` function),
During converting from lite block to block,
`LiteElement::SeparateRedirection` will be converted to
`PipelineElement::SeparateRedirection`.
Then in the block eval process, if we get
`PipelineElement::SeparateRedirection`, we invoke `save` command with
`--stderr` arguments to achieve our behavior.
## What happened internally?
Take the following command as example:
```
^ls out> out.txt err> err.txt
```
lex parsing result(`Tokens`) are not changed, but `LiteBlock` and
`Block` is changed after this pr.
### LiteBlock before
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(None, LiteCommand { comments: [], parts: [Span { start: 39041, end: 39044 }] }),
// actually the span of first Redirection is wrong too..
Redirection(Span { start: 39058, end: 39062 }, Stdout, LiteCommand { comments: [], parts: [Span { start: 39050, end: 39057 }] }),
Redirection(Span { start: 39058, end: 39062 }, Stderr, LiteCommand { comments: [], parts: [Span { start: 39063, end: 39070 }] })
]
}]
}
```
### LiteBlock after
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(
None,
LiteCommand { comments: [], parts: [Span { start: 38525, end: 38528 }] }),
// new one! two Redirection merged into one SeparateRedirection.
SeparateRedirection {
out: (Span { start: 38529, end: 38533 }, LiteCommand { comments: [], parts: [Span { start: 38534, end: 38541 }] }),
err: (Span { start: 38542, end: 38546 }, LiteCommand { comments: [], parts: [Span { start: 38547, end: 38554 }] })
}
]
}]
}
```
### Block before
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 39042, end: 39044 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 39041, end: 39044 },
ty: Any, custom_completion: None
}),
Redirection(Span { start: 39058, end: 39062 }, Stdout, Expression { expr: String("out.txt"), span: Span { start: 39050, end: 39057 }, ty: String, custom_completion: None }),
Redirection(Span { start: 39058, end: 39062 }, Stderr, Expression { expr: String("err.txt"), span: Span { start: 39063, end: 39070 }, ty: String, custom_completion: None })] }
```
### Block after
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 38526, end: 38528 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 38525, end: 38528 },
ty: Any,
custom_completion: None
}),
// new one! SeparateRedirection
SeparateRedirection {
out: (Span { start: 38529, end: 38533 }, Expression { expr: String("out.txt"), span: Span { start: 38534, end: 38541 }, ty: String, custom_completion: None }),
err: (Span { start: 38542, end: 38546 }, Expression { expr: String("err.txt"), span: Span { start: 38547, end: 38554 }, ty: String, custom_completion: None })
}
]
}
```
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-01-12 10:22:30 +01:00
|
|
|
|
2023-04-18 10:19:08 +02:00
|
|
|
working_set.type_scope.add_type(err_expr.ty.clone());
|
2021-09-10 09:28:43 +02:00
|
|
|
|
2023-04-18 10:19:08 +02:00
|
|
|
PipelineElement::SeparateRedirection {
|
|
|
|
out: (*out_span, out_expr),
|
|
|
|
err: (*err_span, err_expr),
|
2022-04-08 23:41:05 +02:00
|
|
|
}
|
|
|
|
}
|
Avoid blocking when `o+e>` redirects too much stderr message (#8784)
# Description
Fixes: #8565
Here is another pr #7240 tried to address the issue, but it works in a
wrong way.
After this change `o+e>` won't redirect all stdout message then stderr
message and it works more like how bash does.
# User-Facing Changes
For the given python code:
```python
# test.py
import sys
print('aa'*300, flush=True)
print('bb'*999999, file=sys.stderr, flush=True)
print('cc'*300, flush=True)
```
Running `python test.py out+err> a.txt` shouldn't hang nushell, and
`a.txt` keeps output in the same order
## About the change
The core idea is that when doing lite-parsing, introduce a new variant
`LiteElement::SameTargetRedirection` if we meet `out+err>` redirection
token(which is generated by lex function),
During converting from lite block to block,
LiteElement::SameTargetRedirection will be converted to
PipelineElement::SameTargetRedirection.
Then in the block eval process, if we get
PipelineElement::SameTargetRedirection, we'll invoke `run-external` with
`--redirect-combine` flag, then pipe the result into save command
## What happened internally?
Take the following command as example:
`^ls o+e> log.txt`
lex parsing result(`Tokens`) are not changed, but `LiteBlock` and
`Block` is changed after this pr.
### LiteBlock before
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(None, LiteCommand { comments: [], parts: [Span { start: 39041, end: 39044 }] }),
// actually the span of first Redirection is wrong too..
Redirection(Span { start: 39058, end: 39062 }, StdoutAndStderr, LiteCommand { comments: [], parts: [Span { start: 39050, end: 39057 }] }),
]
}]
}
```
### LiteBlock after
```rust
LiteBlock {
block: [
LitePipeline {
commands: [
SameTargetRedirection {
cmd: (None, LiteCommand { comments: [], parts: [Span { start: 147945, end: 147948}]}),
redirection: (Span { start: 147949, end: 147957 }, LiteCommand { comments: [], parts: [Span { start: 147958, end: 147965 }]})
}
]
}
]
}
```
### Block before
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 39042, end: 39044 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 39041, end: 39044 },
ty: Any, custom_completion: None
}),
Redirection(Span { start: 39058, end: 39062 }, StdoutAndStderr, Expression { expr: String("out.txt"), span: Span { start: 39050, end: 39057 }, ty: String, custom_completion: None })] }
```
### Block after
```rust
Pipeline {
elements: [
SameTargetRedirection {
cmd: (None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 147946, end: 147948 }, ty: String, custom_completion: None}, [], false),
span: Span { start: 147945, end: 147948},
ty: Any, custom_completion: None
}),
redirection: (Span { start: 147949, end: 147957}, Expression {expr: String("log.txt"), span: Span { start: 147958, end: 147965 },ty: String,custom_completion: None}
}
]
}
```
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
- `cargo run -- crates/nu-utils/standard_library/tests.nu` to run the
tests for the standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-05-18 00:47:03 +02:00
|
|
|
LiteElement::SameTargetRedirection {
|
|
|
|
cmd: (cmd_span, command),
|
|
|
|
redirection: (redirect_span, redirect_command),
|
|
|
|
} => {
|
|
|
|
trace!("parsing: pipeline element: same target redirection");
|
|
|
|
let expr = parse_expression(working_set, &command.parts, is_subexpression);
|
|
|
|
working_set.type_scope.add_type(expr.ty.clone());
|
|
|
|
let redirect_expr = parse_string(working_set, redirect_command.parts[0]);
|
|
|
|
working_set.type_scope.add_type(redirect_expr.ty.clone());
|
|
|
|
PipelineElement::SameTargetRedirection {
|
|
|
|
cmd: (*cmd_span, expr),
|
|
|
|
redirection: (*redirect_span, redirect_expr),
|
|
|
|
}
|
|
|
|
}
|
2023-04-18 10:19:08 +02:00
|
|
|
})
|
|
|
|
.collect::<Vec<PipelineElement>>();
|
|
|
|
|
|
|
|
if is_subexpression {
|
|
|
|
for element in output.iter_mut().skip(1) {
|
|
|
|
if element.has_in_variable(working_set) {
|
|
|
|
*element = wrap_element_with_collect(working_set, element);
|
2021-11-08 07:21:24 +01:00
|
|
|
}
|
|
|
|
}
|
2021-09-10 09:28:43 +02:00
|
|
|
} else {
|
2023-04-18 10:19:08 +02:00
|
|
|
for element in output.iter_mut() {
|
|
|
|
if element.has_in_variable(working_set) {
|
|
|
|
*element = wrap_element_with_collect(working_set, element);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2022-11-18 22:46:48 +01:00
|
|
|
|
2023-04-18 10:19:08 +02:00
|
|
|
block.pipelines.push(Pipeline { elements: output })
|
|
|
|
} else {
|
|
|
|
match &pipeline.commands[0] {
|
|
|
|
LiteElement::Command(_, command)
|
|
|
|
| LiteElement::Redirection(_, _, command)
|
|
|
|
| LiteElement::SeparateRedirection {
|
|
|
|
out: (_, command), ..
|
|
|
|
} => {
|
|
|
|
let mut pipeline =
|
|
|
|
parse_builtin_commands(working_set, command, is_subexpression);
|
|
|
|
|
|
|
|
if idx == 0 {
|
|
|
|
if let Some(let_decl_id) = working_set.find_decl(b"let", &Type::Any) {
|
|
|
|
if let Some(let_env_decl_id) =
|
|
|
|
working_set.find_decl(b"let-env", &Type::Any)
|
|
|
|
{
|
|
|
|
for element in pipeline.elements.iter_mut() {
|
|
|
|
if let PipelineElement::Expression(
|
|
|
|
_,
|
|
|
|
Expression {
|
|
|
|
expr: Expr::Call(call),
|
|
|
|
..
|
|
|
|
},
|
|
|
|
) = element
|
|
|
|
{
|
|
|
|
if call.decl_id == let_decl_id
|
|
|
|
|| call.decl_id == let_env_decl_id
|
2022-01-27 00:46:13 +01:00
|
|
|
{
|
2023-04-18 10:19:08 +02:00
|
|
|
// Do an expansion
|
|
|
|
if let Some(Expression {
|
|
|
|
expr: Expr::Keyword(_, _, expr),
|
|
|
|
..
|
|
|
|
}) = call.positional_iter_mut().nth(1)
|
2022-12-13 04:36:13 +01:00
|
|
|
{
|
2023-04-18 10:19:08 +02:00
|
|
|
if expr.has_in_variable(working_set) {
|
|
|
|
*expr = Box::new(wrap_expr_with_collect(
|
|
|
|
working_set,
|
|
|
|
expr,
|
|
|
|
));
|
2022-12-13 04:36:13 +01:00
|
|
|
}
|
2022-01-27 00:46:13 +01:00
|
|
|
}
|
2023-04-18 10:19:08 +02:00
|
|
|
continue;
|
2022-12-13 04:36:13 +01:00
|
|
|
} else if element.has_in_variable(working_set)
|
|
|
|
&& !is_subexpression
|
|
|
|
{
|
|
|
|
*element =
|
|
|
|
wrap_element_with_collect(working_set, element);
|
2022-01-27 00:46:13 +01:00
|
|
|
}
|
2023-04-18 10:19:08 +02:00
|
|
|
} else if element.has_in_variable(working_set)
|
|
|
|
&& !is_subexpression
|
|
|
|
{
|
|
|
|
*element = wrap_element_with_collect(working_set, element);
|
2022-01-27 00:46:13 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2022-12-13 04:36:13 +01:00
|
|
|
}
|
2023-04-18 10:19:08 +02:00
|
|
|
block.pipelines.push(pipeline)
|
2022-12-13 04:36:13 +01:00
|
|
|
}
|
Avoid blocking when `o+e>` redirects too much stderr message (#8784)
# Description
Fixes: #8565
Here is another pr #7240 tried to address the issue, but it works in a
wrong way.
After this change `o+e>` won't redirect all stdout message then stderr
message and it works more like how bash does.
# User-Facing Changes
For the given python code:
```python
# test.py
import sys
print('aa'*300, flush=True)
print('bb'*999999, file=sys.stderr, flush=True)
print('cc'*300, flush=True)
```
Running `python test.py out+err> a.txt` shouldn't hang nushell, and
`a.txt` keeps output in the same order
## About the change
The core idea is that when doing lite-parsing, introduce a new variant
`LiteElement::SameTargetRedirection` if we meet `out+err>` redirection
token(which is generated by lex function),
During converting from lite block to block,
LiteElement::SameTargetRedirection will be converted to
PipelineElement::SameTargetRedirection.
Then in the block eval process, if we get
PipelineElement::SameTargetRedirection, we'll invoke `run-external` with
`--redirect-combine` flag, then pipe the result into save command
## What happened internally?
Take the following command as example:
`^ls o+e> log.txt`
lex parsing result(`Tokens`) are not changed, but `LiteBlock` and
`Block` is changed after this pr.
### LiteBlock before
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(None, LiteCommand { comments: [], parts: [Span { start: 39041, end: 39044 }] }),
// actually the span of first Redirection is wrong too..
Redirection(Span { start: 39058, end: 39062 }, StdoutAndStderr, LiteCommand { comments: [], parts: [Span { start: 39050, end: 39057 }] }),
]
}]
}
```
### LiteBlock after
```rust
LiteBlock {
block: [
LitePipeline {
commands: [
SameTargetRedirection {
cmd: (None, LiteCommand { comments: [], parts: [Span { start: 147945, end: 147948}]}),
redirection: (Span { start: 147949, end: 147957 }, LiteCommand { comments: [], parts: [Span { start: 147958, end: 147965 }]})
}
]
}
]
}
```
### Block before
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 39042, end: 39044 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 39041, end: 39044 },
ty: Any, custom_completion: None
}),
Redirection(Span { start: 39058, end: 39062 }, StdoutAndStderr, Expression { expr: String("out.txt"), span: Span { start: 39050, end: 39057 }, ty: String, custom_completion: None })] }
```
### Block after
```rust
Pipeline {
elements: [
SameTargetRedirection {
cmd: (None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 147946, end: 147948 }, ty: String, custom_completion: None}, [], false),
span: Span { start: 147945, end: 147948},
ty: Any, custom_completion: None
}),
redirection: (Span { start: 147949, end: 147957}, Expression {expr: String("log.txt"), span: Span { start: 147958, end: 147965 },ty: String,custom_completion: None}
}
]
}
```
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
- `cargo run -- crates/nu-utils/standard_library/tests.nu` to run the
tests for the standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-05-18 00:47:03 +02:00
|
|
|
LiteElement::SameTargetRedirection {
|
|
|
|
cmd: (span, command),
|
|
|
|
redirection: (redirect_span, redirect_cmd),
|
|
|
|
} => {
|
|
|
|
trace!("parsing: pipeline element: same target redirection");
|
|
|
|
let expr = parse_expression(working_set, &command.parts, is_subexpression);
|
|
|
|
working_set.type_scope.add_type(expr.ty.clone());
|
|
|
|
|
|
|
|
let redirect_expr = parse_string(working_set, redirect_cmd.parts[0]);
|
|
|
|
|
|
|
|
working_set.type_scope.add_type(redirect_expr.ty.clone());
|
|
|
|
|
|
|
|
block.pipelines.push(Pipeline {
|
|
|
|
elements: vec![PipelineElement::SameTargetRedirection {
|
|
|
|
cmd: (*span, expr),
|
|
|
|
redirection: (*redirect_span, redirect_expr),
|
|
|
|
}],
|
|
|
|
})
|
|
|
|
}
|
2021-09-10 09:28:43 +02:00
|
|
|
}
|
2023-04-18 10:19:08 +02:00
|
|
|
}
|
|
|
|
}
|
2021-06-30 03:42:56 +02:00
|
|
|
|
2021-09-02 10:25:22 +02:00
|
|
|
if scoped {
|
|
|
|
working_set.exit_scope();
|
2021-06-30 03:42:56 +02:00
|
|
|
}
|
2022-06-12 21:18:00 +02:00
|
|
|
working_set.type_scope.exit_scope();
|
2021-06-30 03:42:56 +02:00
|
|
|
|
2023-04-21 21:00:33 +02:00
|
|
|
block.span = Some(span);
|
|
|
|
|
2023-04-07 02:35:45 +02:00
|
|
|
block
|
2021-09-02 10:25:22 +02:00
|
|
|
}
|
2021-06-30 03:42:56 +02:00
|
|
|
|
2022-11-10 09:21:49 +01:00
|
|
|
pub fn discover_captures_in_closure(
|
2021-10-25 22:04:23 +02:00
|
|
|
working_set: &StateWorkingSet,
|
|
|
|
block: &Block,
|
|
|
|
seen: &mut Vec<VarId>,
|
2022-11-11 07:51:08 +01:00
|
|
|
seen_blocks: &mut HashMap<BlockId, Vec<(VarId, Span)>>,
|
2023-04-17 00:24:56 +02:00
|
|
|
output: &mut Vec<(VarId, Span)>,
|
|
|
|
) -> Result<(), ParseError> {
|
2021-10-25 22:04:23 +02:00
|
|
|
for flag in &block.signature.named {
|
|
|
|
if let Some(var_id) = flag.var_id {
|
|
|
|
seen.push(var_id);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
for positional in &block.signature.required_positional {
|
|
|
|
if let Some(var_id) = positional.var_id {
|
|
|
|
seen.push(var_id);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
for positional in &block.signature.optional_positional {
|
|
|
|
if let Some(var_id) = positional.var_id {
|
|
|
|
seen.push(var_id);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
for positional in &block.signature.rest_positional {
|
|
|
|
if let Some(var_id) = positional.var_id {
|
|
|
|
seen.push(var_id);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-02-15 20:31:14 +01:00
|
|
|
for pipeline in &block.pipelines {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_pipeline(working_set, pipeline, seen, seen_blocks, output)?;
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
|
|
|
|
2023-04-17 00:24:56 +02:00
|
|
|
Ok(())
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
|
|
|
|
2022-02-11 00:15:15 +01:00
|
|
|
fn discover_captures_in_pipeline(
|
2021-10-25 22:04:23 +02:00
|
|
|
working_set: &StateWorkingSet,
|
|
|
|
pipeline: &Pipeline,
|
|
|
|
seen: &mut Vec<VarId>,
|
2022-11-11 07:51:08 +01:00
|
|
|
seen_blocks: &mut HashMap<BlockId, Vec<(VarId, Span)>>,
|
2023-04-17 00:24:56 +02:00
|
|
|
output: &mut Vec<(VarId, Span)>,
|
|
|
|
) -> Result<(), ParseError> {
|
2022-11-18 22:46:48 +01:00
|
|
|
for element in &pipeline.elements {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_pipeline_element(working_set, element, seen, seen_blocks, output)?;
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
|
|
|
|
2023-04-17 00:24:56 +02:00
|
|
|
Ok(())
|
2021-10-25 22:04:23 +02:00
|
|
|
}
|
|
|
|
|
2022-11-18 22:46:48 +01:00
|
|
|
// Closes over captured variables
|
|
|
|
pub fn discover_captures_in_pipeline_element(
|
|
|
|
working_set: &StateWorkingSet,
|
|
|
|
element: &PipelineElement,
|
|
|
|
seen: &mut Vec<VarId>,
|
|
|
|
seen_blocks: &mut HashMap<BlockId, Vec<(VarId, Span)>>,
|
2023-04-17 00:24:56 +02:00
|
|
|
output: &mut Vec<(VarId, Span)>,
|
|
|
|
) -> Result<(), ParseError> {
|
2022-11-18 22:46:48 +01:00
|
|
|
match element {
|
2022-11-22 19:26:13 +01:00
|
|
|
PipelineElement::Expression(_, expression)
|
|
|
|
| PipelineElement::Redirection(_, _, expression)
|
2022-12-13 04:36:13 +01:00
|
|
|
| PipelineElement::And(_, expression)
|
2022-11-22 19:26:13 +01:00
|
|
|
| PipelineElement::Or(_, expression) => {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_expr(working_set, expression, seen, seen_blocks, output)
|
2022-11-18 22:46:48 +01:00
|
|
|
}
|
Support redirect `err` and `out` to different streams (#7685)
# Description
Closes: #7364
# User-Facing Changes
Given the following shell script:
```bash
x=$(printf '=%.0s' {1..100})
echo $x
echo $x 1>&2
```
It supports the following command:
```
bash test.sh out> out.txt err> err.txt
```
Then both `out.txt` and `err.txt` will contain `=`(100 times)
## About the change
The core idea is that when doing lite-parsing, introduce a new variant
`LiteElement::SeparateRedirection` if we meet two Redirection
token(which is generated by `lex` function),
During converting from lite block to block,
`LiteElement::SeparateRedirection` will be converted to
`PipelineElement::SeparateRedirection`.
Then in the block eval process, if we get
`PipelineElement::SeparateRedirection`, we invoke `save` command with
`--stderr` arguments to acthive our behavior.
## What happened internally?
Take the following command as example:
```
^ls out> out.txt err> err.txt
```
lex parsing result(`Tokens`) are not changed, but `LiteBlock` and
`Block` is changed after this pr.
### LiteBlock before
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(None, LiteCommand { comments: [], parts: [Span { start: 39041, end: 39044 }] }),
// actually the span of first Redirection is wrong too..
Redirection(Span { start: 39058, end: 39062 }, Stdout, LiteCommand { comments: [], parts: [Span { start: 39050, end: 39057 }] }),
Redirection(Span { start: 39058, end: 39062 }, Stderr, LiteCommand { comments: [], parts: [Span { start: 39063, end: 39070 }] })
]
}]
}
```
### LiteBlock after
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(
None,
LiteCommand { comments: [], parts: [Span { start: 38525, end: 38528 }] }),
// new one! two Redirection merged into one SeparateRedirection.
SeparateRedirection {
out: (Span { start: 38529, end: 38533 }, LiteCommand { comments: [], parts: [Span { start: 38534, end: 38541 }] }),
err: (Span { start: 38542, end: 38546 }, LiteCommand { comments: [], parts: [Span { start: 38547, end: 38554 }] })
}
]
}]
}
```
### Block before
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 39042, end: 39044 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 39041, end: 39044 },
ty: Any, custom_completion: None
}),
Redirection(Span { start: 39058, end: 39062 }, Stdout, Expression { expr: String("out.txt"), span: Span { start: 39050, end: 39057 }, ty: String, custom_completion: None }),
Redirection(Span { start: 39058, end: 39062 }, Stderr, Expression { expr: String("err.txt"), span: Span { start: 39063, end: 39070 }, ty: String, custom_completion: None })] }
```
### Block after
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 38526, end: 38528 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 38525, end: 38528 },
ty: Any,
custom_completion: None
}),
// new one! SeparateRedirection
SeparateRedirection {
out: (Span { start: 38529, end: 38533 }, Expression { expr: String("out.txt"), span: Span { start: 38534, end: 38541 }, ty: String, custom_completion: None }),
err: (Span { start: 38542, end: 38546 }, Expression { expr: String("err.txt"), span: Span { start: 38547, end: 38554 }, ty: String, custom_completion: None })
}
]
}
```
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-01-12 10:22:30 +01:00
|
|
|
PipelineElement::SeparateRedirection {
|
|
|
|
out: (_, out_expr),
|
|
|
|
err: (_, err_expr),
|
|
|
|
} => {
|
2023-04-17 00:24:56 +02:00
|
|
|
discover_captures_in_expr(working_set, out_expr, seen, seen_blocks, output)?;
|
|
|
|
discover_captures_in_expr(working_set, err_expr, seen, seen_blocks, output)?;
|
|
|
|
Ok(())
|
Support redirect `err` and `out` to different streams (#7685)
# Description
Closes: #7364
# User-Facing Changes
Given the following shell script:
```bash
x=$(printf '=%.0s' {1..100})
echo $x
echo $x 1>&2
```
It supports the following command:
```
bash test.sh out> out.txt err> err.txt
```
Then both `out.txt` and `err.txt` will contain `=`(100 times)
## About the change
The core idea is that when doing lite-parsing, introduce a new variant
`LiteElement::SeparateRedirection` if we meet two Redirection
token(which is generated by `lex` function),
During converting from lite block to block,
`LiteElement::SeparateRedirection` will be converted to
`PipelineElement::SeparateRedirection`.
Then in the block eval process, if we get
`PipelineElement::SeparateRedirection`, we invoke `save` command with
`--stderr` arguments to acthive our behavior.
## What happened internally?
Take the following command as example:
```
^ls out> out.txt err> err.txt
```
lex parsing result(`Tokens`) are not changed, but `LiteBlock` and
`Block` is changed after this pr.
### LiteBlock before
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(None, LiteCommand { comments: [], parts: [Span { start: 39041, end: 39044 }] }),
// actually the span of first Redirection is wrong too..
Redirection(Span { start: 39058, end: 39062 }, Stdout, LiteCommand { comments: [], parts: [Span { start: 39050, end: 39057 }] }),
Redirection(Span { start: 39058, end: 39062 }, Stderr, LiteCommand { comments: [], parts: [Span { start: 39063, end: 39070 }] })
]
}]
}
```
### LiteBlock after
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(
None,
LiteCommand { comments: [], parts: [Span { start: 38525, end: 38528 }] }),
// new one! two Redirection merged into one SeparateRedirection.
SeparateRedirection {
out: (Span { start: 38529, end: 38533 }, LiteCommand { comments: [], parts: [Span { start: 38534, end: 38541 }] }),
err: (Span { start: 38542, end: 38546 }, LiteCommand { comments: [], parts: [Span { start: 38547, end: 38554 }] })
}
]
}]
}
```
### Block before
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 39042, end: 39044 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 39041, end: 39044 },
ty: Any, custom_completion: None
}),
Redirection(Span { start: 39058, end: 39062 }, Stdout, Expression { expr: String("out.txt"), span: Span { start: 39050, end: 39057 }, ty: String, custom_completion: None }),
Redirection(Span { start: 39058, end: 39062 }, Stderr, Expression { expr: String("err.txt"), span: Span { start: 39063, end: 39070 }, ty: String, custom_completion: None })] }
```
### Block after
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 38526, end: 38528 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 38525, end: 38528 },
ty: Any,
custom_completion: None
}),
// new one! SeparateRedirection
SeparateRedirection {
out: (Span { start: 38529, end: 38533 }, Expression { expr: String("out.txt"), span: Span { start: 38534, end: 38541 }, ty: String, custom_completion: None }),
err: (Span { start: 38542, end: 38546 }, Expression { expr: String("err.txt"), span: Span { start: 38547, end: 38554 }, ty: String, custom_completion: None })
}
]
}
```
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-01-12 10:22:30 +01:00
|
|
|
}
|
Avoid blocking when `o+e>` redirects too much stderr message (#8784)
# Description
Fixes: #8565
Here is another pr #7240 tried to address the issue, but it works in a
wrong way.
After this change `o+e>` won't redirect all stdout message then stderr
message and it works more like how bash does.
# User-Facing Changes
For the given python code:
```python
# test.py
import sys
print('aa'*300, flush=True)
print('bb'*999999, file=sys.stderr, flush=True)
print('cc'*300, flush=True)
```
Running `python test.py out+err> a.txt` shoudn't hang nushell, and
`a.txt` keeps output in the same order
## About the change
The core idea is that when doing lite-parsing, introduce a new variant
`LiteElement::SameTargetRedirection` if we meet `out+err>` redirection
token(which is generated by lex function),
During converting from lite block to block,
LiteElement::SameTargetRedirection will be converted to
PipelineElement::SameTargetRedirection.
Then in the block eval process, if we get
PipelineElement::SameTargetRedirection, we'll invoke `run-external` with
`--redirect-combine` flag, then pipe the result into save command
## What happened internally?
Take the following command as example:
`^ls o+e> log.txt`
lex parsing result(`Tokens`) are not changed, but `LiteBlock` and
`Block` is changed after this pr.
### LiteBlock before
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(None, LiteCommand { comments: [], parts: [Span { start: 39041, end: 39044 }] }),
// actually the span of first Redirection is wrong too..
Redirection(Span { start: 39058, end: 39062 }, StdoutAndStderr, LiteCommand { comments: [], parts: [Span { start: 39050, end: 39057 }] }),
]
}]
}
```
### LiteBlock after
```rust
LiteBlock {
block: [
LitePipeline {
commands: [
SameTargetRedirection {
cmd: (None, LiteCommand { comments: [], parts: [Span { start: 147945, end: 147948}]}),
redirection: (Span { start: 147949, end: 147957 }, LiteCommand { comments: [], parts: [Span { start: 147958, end: 147965 }]})
}
]
}
]
}
```
### Block before
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 39042, end: 39044 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 39041, end: 39044 },
ty: Any, custom_completion: None
}),
Redirection(Span { start: 39058, end: 39062 }, StdoutAndStderr, Expression { expr: String("out.txt"), span: Span { start: 39050, end: 39057 }, ty: String, custom_completion: None })] }
```
### Block after
```rust
Pipeline {
elements: [
SameTargetRedirection {
cmd: (None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 147946, end: 147948 }, ty: String, custom_completion: None}, [], false),
span: Span { start: 147945, end: 147948},
ty: Any, custom_completion: None
}),
redirection: (Span { start: 147949, end: 147957}, Expression {expr: String("log.txt"), span: Span { start: 147958, end: 147965 },ty: String,custom_completion: None}
}
]
}
```
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
- `cargo run -- crates/nu-utils/standard_library/tests.nu` to run the
tests for the standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-05-18 00:47:03 +02:00
|
|
|
PipelineElement::SameTargetRedirection {
|
|
|
|
cmd: (_, cmd_expr),
|
|
|
|
redirection: (_, redirect_expr),
|
|
|
|
} => {
|
|
|
|
discover_captures_in_expr(working_set, cmd_expr, seen, seen_blocks, output)?;
|
|
|
|
discover_captures_in_expr(working_set, redirect_expr, seen, seen_blocks, output)?;
|
|
|
|
Ok(())
|
|
|
|
}
|
2022-11-18 22:46:48 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-03-24 10:50:23 +01:00
|
|
|
pub fn discover_captures_in_pattern(pattern: &MatchPattern, seen: &mut Vec<VarId>) {
|
|
|
|
match &pattern.pattern {
|
|
|
|
Pattern::Variable(var_id) => seen.push(*var_id),
|
|
|
|
Pattern::List(items) => {
|
|
|
|
for item in items {
|
|
|
|
discover_captures_in_pattern(item, seen)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Pattern::Record(items) => {
|
|
|
|
for item in items {
|
|
|
|
discover_captures_in_pattern(&item.1, seen)
|
|
|
|
}
|
|
|
|
}
|
2023-03-27 00:31:57 +02:00
|
|
|
Pattern::Or(patterns) => {
|
|
|
|
for pattern in patterns {
|
|
|
|
discover_captures_in_pattern(pattern, seen)
|
|
|
|
}
|
|
|
|
}
|
2023-03-31 00:08:53 +02:00
|
|
|
Pattern::Rest(var_id) => seen.push(*var_id),
|
|
|
|
Pattern::Value(_) | Pattern::IgnoreValue | Pattern::IgnoreRest | Pattern::Garbage => {}
|
2023-03-24 10:50:23 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-11-11 07:51:08 +01:00
|
|
|
// Closes over captured variables
|
2022-02-11 00:15:15 +01:00
|
|
|
/// Recursively collect the variables an expression captures from enclosing
/// scopes.
///
/// `seen` holds variables already declared in the current scope chain (so
/// references to them are not captures); `seen_blocks` caches the capture
/// list computed for each nested block; captured `(VarId, Span)` pairs are
/// appended to `output`.
///
/// Returns an error if a closure would capture a mutable variable, which is
/// not allowed.
pub fn discover_captures_in_expr(
    working_set: &StateWorkingSet,
    expr: &Expression,
    seen: &mut Vec<VarId>,
    seen_blocks: &mut HashMap<BlockId, Vec<(VarId, Span)>>,
    output: &mut Vec<(VarId, Span)>,
) -> Result<(), ParseError> {
    match &expr.expr {
        Expr::BinaryOp(lhs, _, rhs) => {
            // Both operands may capture; the operator itself cannot.
            discover_captures_in_expr(working_set, lhs, seen, seen_blocks, output)?;
            discover_captures_in_expr(working_set, rhs, seen, seen_blocks, output)?;
        }
        Expr::UnaryNot(expr) => {
            discover_captures_in_expr(working_set, expr, seen, seen_blocks, output)?;
        }
        Expr::Closure(block_id) => {
            let block = working_set.get_block(*block_id);
            let results = {
                // Fresh `seen` list: the closure body declares its own scope.
                let mut seen = vec![];
                let mut results = vec![];

                discover_captures_in_closure(
                    working_set,
                    block,
                    &mut seen,
                    seen_blocks,
                    &mut results,
                )?;

                // Closures may not capture mutable variables; reject if any
                // captured variable (not locally declared) is mutable.
                for (var_id, span) in results.iter() {
                    if !seen.contains(var_id) {
                        if let Some(variable) = working_set.get_variable_if_possible(*var_id) {
                            if variable.mutable {
                                return Err(ParseError::CaptureOfMutableVar(*span));
                            }
                        }
                    }
                }

                results
            };
            // Cache this block's capture list, then propagate outward any
            // capture not satisfied by the current scope.
            seen_blocks.insert(*block_id, results.clone());
            for (var_id, span) in results.into_iter() {
                if !seen.contains(&var_id) {
                    output.push((var_id, span))
                }
            }
        }
        Expr::Block(block_id) => {
            let block = working_set.get_block(*block_id);
            // FIXME: is this correct?
            let results = {
                let mut seen = vec![];
                let mut results = vec![];
                discover_captures_in_closure(
                    working_set,
                    block,
                    &mut seen,
                    seen_blocks,
                    &mut results,
                )?;
                results
            };

            // Same cache-and-propagate scheme as `Closure`, but without the
            // mutable-capture check.
            seen_blocks.insert(*block_id, results.clone());
            for (var_id, span) in results.into_iter() {
                if !seen.contains(&var_id) {
                    output.push((var_id, span))
                }
            }
        }
        Expr::Binary(_) => {}
        Expr::Bool(_) => {}
        Expr::Call(call) => {
            let decl = working_set.get_decl(call.decl_id);
            // A call into a declared block (e.g. a custom command) inherits
            // that block's captures.
            if let Some(block_id) = decl.get_block_id() {
                match seen_blocks.get(&block_id) {
                    Some(capture_list) => {
                        // Already analyzed: reuse the cached capture list.
                        output.extend(capture_list);
                    }
                    None => {
                        let block = working_set.get_block(block_id);
                        if !block.captures.is_empty() {
                            // Captures were resolved earlier; attribute them
                            // to the call-head span.
                            output.extend(block.captures.iter().map(|var_id| (*var_id, call.head)));
                        } else {
                            let mut seen = vec![];
                            // Pre-seed the cache to break recursion cycles
                            // (e.g. a recursive custom command).
                            seen_blocks.insert(block_id, output.clone());

                            let mut result = vec![];
                            discover_captures_in_closure(
                                working_set,
                                block,
                                &mut seen,
                                seen_blocks,
                                &mut result,
                            )?;
                            output.extend(&result);
                            // Replace the placeholder with the real result.
                            seen_blocks.insert(block_id, result);
                        }
                    }
                }
            }

            // Arguments may themselves capture: named flags first...
            for named in call.named_iter() {
                if let Some(arg) = &named.2 {
                    discover_captures_in_expr(working_set, arg, seen, seen_blocks, output)?;
                }
            }

            // ...then positionals.
            for positional in call.positional_iter() {
                discover_captures_in_expr(working_set, positional, seen, seen_blocks, output)?;
            }
        }
        Expr::CellPath(_) => {}
        Expr::DateTime(_) => {}
        Expr::ExternalCall(head, exprs, _) => {
            // The command head can be an expression (e.g. interpolated), so
            // it is scanned like any argument.
            discover_captures_in_expr(working_set, head, seen, seen_blocks, output)?;

            for expr in exprs {
                discover_captures_in_expr(working_set, expr, seen, seen_blocks, output)?;
            }
        }
        Expr::Filepath(_) => {}
        Expr::Directory(_) => {}
        Expr::Float(_) => {}
        Expr::FullCellPath(cell_path) => {
            // Only the head can capture; path members are plain names/indices.
            discover_captures_in_expr(working_set, &cell_path.head, seen, seen_blocks, output)?;
        }
        Expr::ImportPattern(_) => {}
        Expr::Overlay(_) => {}
        Expr::Garbage => {}
        Expr::Nothing => {}
        Expr::GlobPattern(_) => {}
        Expr::Int(_) => {}
        Expr::Keyword(_, _, expr) => {
            discover_captures_in_expr(working_set, expr, seen, seen_blocks, output)?;
        }
        Expr::List(exprs) => {
            for expr in exprs {
                discover_captures_in_expr(working_set, expr, seen, seen_blocks, output)?;
            }
        }
        Expr::Operator(_) => {}
        Expr::Range(expr1, expr2, expr3, _) => {
            // from / next / to components are each optional.
            if let Some(expr) = expr1 {
                discover_captures_in_expr(working_set, expr, seen, seen_blocks, output)?;
            }
            if let Some(expr) = expr2 {
                discover_captures_in_expr(working_set, expr, seen, seen_blocks, output)?;
            }
            if let Some(expr) = expr3 {
                discover_captures_in_expr(working_set, expr, seen, seen_blocks, output)?;
            }
        }
        Expr::Record(fields) => {
            // Both keys and values are expressions and may capture.
            for (field_name, field_value) in fields {
                discover_captures_in_expr(working_set, field_name, seen, seen_blocks, output)?;
                discover_captures_in_expr(working_set, field_value, seen, seen_blocks, output)?;
            }
        }
        Expr::Signature(sig) => {
            // Something with a declaration, similar to a var decl, will introduce more VarIds into the stack at eval
            for pos in &sig.required_positional {
                if let Some(var_id) = pos.var_id {
                    seen.push(var_id);
                }
            }
            for pos in &sig.optional_positional {
                if let Some(var_id) = pos.var_id {
                    seen.push(var_id);
                }
            }
            if let Some(rest) = &sig.rest_positional {
                if let Some(var_id) = rest.var_id {
                    seen.push(var_id);
                }
            }
            for named in &sig.named {
                if let Some(var_id) = named.var_id {
                    seen.push(var_id);
                }
            }
        }
        Expr::String(_) => {}
        Expr::StringInterpolation(exprs) => {
            for expr in exprs {
                discover_captures_in_expr(working_set, expr, seen, seen_blocks, output)?;
            }
        }
        Expr::MatchPattern(_) => {}
        Expr::MatchBlock(match_block) => {
            for match_ in match_block {
                // A pattern's bindings are in scope for its own guard/body,
                // so record them before scanning the arm's expression.
                discover_captures_in_pattern(&match_.0, seen);
                discover_captures_in_expr(working_set, &match_.1, seen, seen_blocks, output)?;
            }
        }
        Expr::RowCondition(block_id) | Expr::Subexpression(block_id) => {
            let block = working_set.get_block(*block_id);

            let results = {
                let mut results = vec![];
                let mut seen = vec![];
                discover_captures_in_closure(
                    working_set,
                    block,
                    &mut seen,
                    seen_blocks,
                    &mut results,
                )?;
                results
            };

            // Cache-and-propagate, as for `Block`/`Closure` above.
            seen_blocks.insert(*block_id, results.clone());
            for (var_id, span) in results.into_iter() {
                if !seen.contains(&var_id) {
                    output.push((var_id, span))
                }
            }
        }
        Expr::Table(headers, values) => {
            for header in headers {
                discover_captures_in_expr(working_set, header, seen, seen_blocks, output)?;
            }
            for row in values {
                for cell in row {
                    discover_captures_in_expr(working_set, cell, seen, seen_blocks, output)?;
                }
            }
        }
        Expr::ValueWithUnit(expr, _) => {
            discover_captures_in_expr(working_set, expr, seen, seen_blocks, output)?;
        }
        Expr::Var(var_id) => {
            // Built-in variables at or below ENV_VARIABLE_ID (except $in) are
            // always available and never need capturing.
            if (*var_id > ENV_VARIABLE_ID || *var_id == IN_VARIABLE_ID) && !seen.contains(var_id) {
                output.push((*var_id, expr.span));
            }
        }
        Expr::VarDecl(var_id) => {
            // A declaration shadows: later references in this scope are not
            // captures.
            seen.push(*var_id);
        }
    }
    Ok(())
}
|
|
|
|
|
2022-11-18 22:46:48 +01:00
|
|
|
fn wrap_element_with_collect(
|
|
|
|
working_set: &mut StateWorkingSet,
|
|
|
|
element: &PipelineElement,
|
|
|
|
) -> PipelineElement {
|
|
|
|
match element {
|
2022-11-22 19:26:13 +01:00
|
|
|
PipelineElement::Expression(span, expression) => {
|
|
|
|
PipelineElement::Expression(*span, wrap_expr_with_collect(working_set, expression))
|
2022-11-18 22:46:48 +01:00
|
|
|
}
|
2022-11-22 19:26:13 +01:00
|
|
|
PipelineElement::Redirection(span, redirection, expression) => {
|
|
|
|
PipelineElement::Redirection(
|
|
|
|
*span,
|
|
|
|
redirection.clone(),
|
|
|
|
wrap_expr_with_collect(working_set, expression),
|
|
|
|
)
|
2022-11-18 22:46:48 +01:00
|
|
|
}
|
Support redirect `err` and `out` to different streams (#7685)
# Description
Closes: #7364
# User-Facing Changes
Given the following shell script:
```bash
x=$(printf '=%.0s' {1..100})
echo $x
echo $x 1>&2
```
It supports the following command:
```
bash test.sh out> out.txt err> err.txt
```
Then both `out.txt` and `err.txt` will contain `=`(100 times)
## About the change
The core idea is that when doing lite-parsing, introduce a new variant
`LiteElement::SeparateRedirection` if we meet two Redirection
token(which is generated by `lex` function),
During converting from lite block to block,
`LiteElement::SeparateRedirection` will be converted to
`PipelineElement::SeparateRedirection`.
Then in the block eval process, if we get
`PipelineElement::SeparateRedirection`, we invoke `save` command with
`--stderr` arguments to achieve our behavior.
## What happened internally?
Take the following command as example:
```
^ls out> out.txt err> err.txt
```
lex parsing result(`Tokens`) are not changed, but `LiteBlock` and
`Block` is changed after this pr.
### LiteBlock before
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(None, LiteCommand { comments: [], parts: [Span { start: 39041, end: 39044 }] }),
// actually the span of first Redirection is wrong too..
Redirection(Span { start: 39058, end: 39062 }, Stdout, LiteCommand { comments: [], parts: [Span { start: 39050, end: 39057 }] }),
Redirection(Span { start: 39058, end: 39062 }, Stderr, LiteCommand { comments: [], parts: [Span { start: 39063, end: 39070 }] })
]
}]
}
```
### LiteBlock after
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(
None,
LiteCommand { comments: [], parts: [Span { start: 38525, end: 38528 }] }),
// new one! two Redirection merged into one SeparateRedirection.
SeparateRedirection {
out: (Span { start: 38529, end: 38533 }, LiteCommand { comments: [], parts: [Span { start: 38534, end: 38541 }] }),
err: (Span { start: 38542, end: 38546 }, LiteCommand { comments: [], parts: [Span { start: 38547, end: 38554 }] })
}
]
}]
}
```
### Block before
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 39042, end: 39044 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 39041, end: 39044 },
ty: Any, custom_completion: None
}),
Redirection(Span { start: 39058, end: 39062 }, Stdout, Expression { expr: String("out.txt"), span: Span { start: 39050, end: 39057 }, ty: String, custom_completion: None }),
Redirection(Span { start: 39058, end: 39062 }, Stderr, Expression { expr: String("err.txt"), span: Span { start: 39063, end: 39070 }, ty: String, custom_completion: None })] }
```
### Block after
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 38526, end: 38528 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 38525, end: 38528 },
ty: Any,
custom_completion: None
}),
// new one! SeparateRedirection
SeparateRedirection {
out: (Span { start: 38529, end: 38533 }, Expression { expr: String("out.txt"), span: Span { start: 38534, end: 38541 }, ty: String, custom_completion: None }),
err: (Span { start: 38542, end: 38546 }, Expression { expr: String("err.txt"), span: Span { start: 38547, end: 38554 }, ty: String, custom_completion: None })
}
]
}
```
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-01-12 10:22:30 +01:00
|
|
|
PipelineElement::SeparateRedirection {
|
|
|
|
out: (out_span, out_exp),
|
|
|
|
err: (err_span, err_exp),
|
|
|
|
} => PipelineElement::SeparateRedirection {
|
|
|
|
out: (*out_span, wrap_expr_with_collect(working_set, out_exp)),
|
|
|
|
err: (*err_span, wrap_expr_with_collect(working_set, err_exp)),
|
|
|
|
},
|
Avoid blocking when `o+e>` redirects too much stderr message (#8784)
# Description
Fixes: #8565
Here is another pr #7240 tried to address the issue, but it works in a
wrong way.
After this change `o+e>` won't redirect all stdout message then stderr
message and it works more like how bash does.
# User-Facing Changes
For the given python code:
```python
# test.py
import sys
print('aa'*300, flush=True)
print('bb'*999999, file=sys.stderr, flush=True)
print('cc'*300, flush=True)
```
Running `python test.py out+err> a.txt` shoudn't hang nushell, and
`a.txt` keeps output in the same order
## About the change
The core idea is that when doing lite-parsing, introduce a new variant
`LiteElement::SameTargetRedirection` if we meet `out+err>` redirection
token(which is generated by lex function),
During converting from lite block to block,
LiteElement::SameTargetRedirection will be converted to
PipelineElement::SameTargetRedirection.
Then in the block eval process, if we get
PipelineElement::SameTargetRedirection, we'll invoke `run-external` with
`--redirect-combine` flag, then pipe the result into save command
## What happened internally?
Take the following command as example:
`^ls o+e> log.txt`
lex parsing result(`Tokens`) are not changed, but `LiteBlock` and
`Block` is changed after this pr.
### LiteBlock before
```rust
LiteBlock {
block: [
LitePipeline { commands: [
Command(None, LiteCommand { comments: [], parts: [Span { start: 39041, end: 39044 }] }),
// actually the span of first Redirection is wrong too..
Redirection(Span { start: 39058, end: 39062 }, StdoutAndStderr, LiteCommand { comments: [], parts: [Span { start: 39050, end: 39057 }] }),
]
}]
}
```
### LiteBlock after
```rust
LiteBlock {
block: [
LitePipeline {
commands: [
SameTargetRedirection {
cmd: (None, LiteCommand { comments: [], parts: [Span { start: 147945, end: 147948}]}),
redirection: (Span { start: 147949, end: 147957 }, LiteCommand { comments: [], parts: [Span { start: 147958, end: 147965 }]})
}
]
}
]
}
```
### Block before
```rust
Pipeline {
elements: [
Expression(None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 39042, end: 39044 }, ty: String, custom_completion: None }, [], false),
span: Span { start: 39041, end: 39044 },
ty: Any, custom_completion: None
}),
Redirection(Span { start: 39058, end: 39062 }, StdoutAndStderr, Expression { expr: String("out.txt"), span: Span { start: 39050, end: 39057 }, ty: String, custom_completion: None })] }
```
### Block after
```rust
Pipeline {
elements: [
SameTargetRedirection {
cmd: (None, Expression {
expr: ExternalCall(Expression { expr: String("ls"), span: Span { start: 147946, end: 147948 }, ty: String, custom_completion: None}, [], false),
span: Span { start: 147945, end: 147948},
ty: Any, custom_completion: None
}),
redirection: (Span { start: 147949, end: 147957}, Expression {expr: String("log.txt"), span: Span { start: 147958, end: 147965 },ty: String,custom_completion: None}
}
]
}
```
# Tests + Formatting
Don't forget to add tests that cover your changes.
Make sure you've run and fixed any issues with these commands:
- `cargo fmt --all -- --check` to check standard code formatting (`cargo
fmt --all` applies these changes)
- `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used -A
clippy::needless_collect` to check that you're using the standard code
style
- `cargo test --workspace` to check that all tests pass
- `cargo run -- crates/nu-utils/standard_library/tests.nu` to run the
tests for the standard library
> **Note**
> from `nushell` you can also use the `toolkit` as follows
> ```bash
> use toolkit.nu # or use an `env_change` hook to activate it
automatically
> toolkit check pr
> ```
# After Submitting
If your PR had any user-facing changes, update [the
documentation](https://github.com/nushell/nushell.github.io) after the
PR is merged, if necessary. This will help us keep the docs up to date.
2023-05-18 00:47:03 +02:00
|
|
|
PipelineElement::SameTargetRedirection {
|
|
|
|
cmd: (cmd_span, cmd_exp),
|
|
|
|
redirection: (redirect_span, redirect_exp),
|
|
|
|
} => PipelineElement::SameTargetRedirection {
|
|
|
|
cmd: (*cmd_span, wrap_expr_with_collect(working_set, cmd_exp)),
|
|
|
|
redirection: (
|
|
|
|
*redirect_span,
|
|
|
|
wrap_expr_with_collect(working_set, redirect_exp),
|
|
|
|
),
|
|
|
|
},
|
2022-12-13 04:36:13 +01:00
|
|
|
PipelineElement::And(span, expression) => {
|
|
|
|
PipelineElement::And(*span, wrap_expr_with_collect(working_set, expression))
|
|
|
|
}
|
2022-11-22 19:26:13 +01:00
|
|
|
PipelineElement::Or(span, expression) => {
|
|
|
|
PipelineElement::Or(*span, wrap_expr_with_collect(working_set, expression))
|
2022-11-18 22:46:48 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-11-08 07:21:24 +01:00
|
|
|
/// Wraps `expr` in a synthetic call to the `collect` command:
/// `collect --keep-env { |$in| <expr> }`.
///
/// The original expression is moved into a one-pipeline closure whose single
/// required positional is `$in` (bound to `IN_VARIABLE_ID`), so the incoming
/// pipeline data is fully collected before `expr` evaluates. If no `collect`
/// declaration is in scope, a garbage expression spanning `expr` is returned
/// instead.
fn wrap_expr_with_collect(working_set: &mut StateWorkingSet, expr: &Expression) -> Expression {
    let span = expr.span;

    if let Some(decl_id) = working_set.find_decl(b"collect", &Type::Any) {
        let mut output = vec![];

        let var_id = IN_VARIABLE_ID;
        // Synthetic signature for the closure: one required `$in` positional
        // bound to the shared IN_VARIABLE_ID.
        let mut signature = Signature::new("");
        signature.required_positional.push(PositionalArg {
            var_id: Some(var_id),
            name: "$in".into(),
            desc: String::new(),
            shape: SyntaxShape::Any,
            default_value: None,
        });

        // Rewrite any `$in` references inside the expression so they resolve
        // to the closure's own parameter.
        let mut expr = expr.clone();
        expr.replace_in_variable(working_set, var_id);

        let block = Block {
            pipelines: vec![Pipeline::from_vec(vec![expr])],
            signature: Box::new(signature),
            ..Default::default()
        };

        let block_id = working_set.add_block(block);

        // First argument: the closure holding the original expression.
        output.push(Argument::Positional(Expression {
            expr: Expr::Closure(block_id),
            span,
            ty: Type::Any,
            custom_completion: None,
        }));

        // `--keep-env` so environment changes made inside the closure are
        // propagated to the caller. Zero span: this flag has no source text.
        output.push(Argument::Named((
            Spanned {
                item: "keep-env".to_string(),
                span: Span::new(0, 0),
            },
            None,
            None,
        )));

        // The containing, synthetic call to `collect`.
        // We don't want to have a real span as it will confuse flattening
        // The args are where we'll get the real info
        Expression {
            expr: Expr::Call(Box::new(Call {
                head: Span::new(0, 0),
                arguments: output,
                decl_id,
                redirect_stdout: true,
                redirect_stderr: false,
                parser_info: HashMap::new(),
            })),
            span,
            // NOTE(review): result type is hard-coded to String here rather
            // than Any — presumably matching `collect`'s typical output; the
            // visible code doesn't show why. Confirm before changing.
            ty: Type::String,
            custom_completion: None,
        }
    } else {
        // No `collect` in scope (e.g. stripped-down context): degrade to
        // garbage rather than failing the whole parse.
        Expression::garbage(span)
    }
}
|
|
|
|
|
2021-09-06 22:41:30 +02:00
|
|
|
// Parses a vector of u8 to create an AST Block. If a file name is given, then
// the name is stored in the working set. When parsing a source without a file
// name, the source of bytes is stored as "source"
//
// After parsing, closure captures are discovered for the resulting block and
// for any other blocks added to the working set's delta during parsing, and
// each block's `captures` list is filled in. Parse errors are accumulated on
// `working_set` rather than returned.
pub fn parse(
    working_set: &mut StateWorkingSet,
    fname: Option<&str>,
    contents: &[u8],
    scoped: bool,
) -> Block {
    let name = match fname {
        Some(fname) => {
            // use the canonical name for this filename
            nu_path::expand_to_real_path(fname)
                .to_string_lossy()
                .to_string()
        }
        None => "source".to_string(),
    };

    let file_id = working_set.add_file(name, contents);
    let new_span = working_set.get_span_for_file(file_id);

    // Memoization: if this exact file span was already parsed into a block,
    // reuse it and skip lexing/parsing entirely.
    let previously_parsed_block = working_set.find_block_by_span(new_span);

    let mut output = {
        if let Some(block) = previously_parsed_block {
            return block;
        } else {
            let (output, err) = lex(contents, new_span.start, &[], &[], false);
            if let Some(err) = err {
                working_set.error(err)
            }

            parse_block(working_set, &output, new_span, scoped, false)
        }
    };

    // `seen`/`seen_blocks` are shared across all capture-discovery passes
    // below so work isn't repeated for blocks reached more than once.
    let mut seen = vec![];
    let mut seen_blocks = HashMap::new();

    let mut captures = vec![];
    match discover_captures_in_closure(
        working_set,
        &output,
        &mut seen,
        &mut seen_blocks,
        &mut captures,
    ) {
        Ok(_) => output.captures = captures.into_iter().map(|(var_id, _)| var_id).collect(),
        Err(err) => working_set.error(err),
    }

    // Also check other blocks that might have been imported
    let mut errors = vec![];
    for (block_idx, block) in working_set.delta.blocks.iter().enumerate() {
        // Delta block indices are offset by the permanent state's block count
        // to form the global block id.
        let block_id = block_idx + working_set.permanent_state.num_blocks();

        if !seen_blocks.contains_key(&block_id) {
            let mut captures = vec![];

            match discover_captures_in_closure(
                working_set,
                block,
                &mut seen,
                &mut seen_blocks,
                &mut captures,
            ) {
                Ok(_) => {
                    seen_blocks.insert(block_id, captures);
                }
                Err(err) => {
                    // Collect errors here instead of reporting immediately:
                    // `working_set.error` needs a mutable borrow while the
                    // loop still holds a shared borrow of its delta blocks.
                    errors.push(err);
                }
            }
        }
    }
    for err in errors {
        working_set.error(err)
    }

    for (block_id, captures) in seen_blocks.into_iter() {
        // In theory, we should only be updating captures where we have new information
        // the only place where this is possible would be blocks that are newly created
        // by our working set delta. If we ever tried to modify the permanent state, we'd
        // panic (again, in theory, this shouldn't be possible)
        let block = working_set.get_block(block_id);
        let block_captures_empty = block.captures.is_empty();
        if !captures.is_empty() && block_captures_empty {
            let block = working_set.get_block_mut(block_id);
            block.captures = captures.into_iter().map(|(var_id, _)| var_id).collect();
        }
    }

    output
}
|