Add Range and start Signature support

This commit contains two improvements:

- Support for a Range syntax (and a corresponding Range value)
- Work towards a signature syntax

Implementing the Range syntax resulted in cleaning up how operators in
the core syntax work. There are now two kinds of infix operators:

- tight operators (`.` and `..`)
- loose operators

Tight operators may not be interspersed (`$it.left..$it.right` is a
syntax error). Loose operators require whitespace on both sides of the
operator, and can be arbitrarily interspersed. Precedence is left to
right in the core syntax.

Note that delimited syntax (like `( ... )` or `[ ... ]`) is a single
token node in the core syntax. A single token node can be parsed from
beginning to end in a context-free manner.

The rule for `.` is `<token node>.<member>`. The rule for `..` is
`<token node>..<token node>`.

Loose operators all have the same syntactic rule: `<token
node><space><loose op><space><token node>`.

The second aspect of this pull request is the beginning of support for a
signature syntax. Before implementing signatures, a necessary
prerequisite is for the core syntax to support multi-line programs.

That work establishes a few things:

- `;` and newlines are handled in the core grammar, and both count as
  "separators"
- line comments begin with `#` and continue until the end of the line

In this commit, multi-token productions in the core grammar can use
separators interchangeably with spaces. However, I think we will
ultimately want a different rule preventing separators from occurring
before an infix operator, so that the end of a line is always
unambiguous. This would avoid gratuitous differences between modules and
repl usage.

We already effectively have this rule, because otherwise `x<newline> |
y` would be a single pipeline, but of course that wouldn't work.
This commit is contained in:
Yehuda Katz 2019-12-04 13:14:52 -08:00
parent 16272b1b20
commit 57af9b5040
64 changed files with 2522 additions and 738 deletions

View File

@ -42,10 +42,10 @@ steps:
echo "##vso[task.prependpath]$HOME/.cargo/bin" echo "##vso[task.prependpath]$HOME/.cargo/bin"
rustup component add rustfmt --toolchain "stable" rustup component add rustfmt --toolchain "stable"
displayName: Install Rust displayName: Install Rust
- bash: RUSTFLAGS="-D warnings" cargo test --all --all-features - bash: RUSTFLAGS="-D warnings" cargo test --all --features=user-visible
condition: eq(variables['style'], 'unflagged') condition: eq(variables['style'], 'unflagged')
displayName: Run tests displayName: Run tests
- bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all --all-features - bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all --features=user-visible
condition: eq(variables['style'], 'canary') condition: eq(variables['style'], 'canary')
displayName: Run tests displayName: Run tests
- bash: cargo fmt --all -- --check - bash: cargo fmt --all -- --check

View File

@ -1,7 +1,7 @@
image: image:
file: .gitpod.Dockerfile file: .gitpod.Dockerfile
tasks: tasks:
- init: cargo install nu --all-features - init: cargo install nu --features=user-visible
command: nu command: nu
github: github:
prebuilds: prebuilds:

29
Cargo.lock generated
View File

@ -814,6 +814,26 @@ dependencies = [
"cfg-if", "cfg-if",
] ]
[[package]]
name = "enumflags2"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "33121c8782ba948ba332dab29311b026a8716dc65a1599e5b88f392d38496af8"
dependencies = [
"enumflags2_derive",
]
[[package]]
name = "enumflags2_derive"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ecf634c5213044b8d54a46dd282cf5dd1f86bb5cb53e92c409cb4680a7fb9894"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "env_logger" name = "env_logger"
version = "0.6.2" version = "0.6.2"
@ -1896,6 +1916,7 @@ dependencies = [
"nom_locate", "nom_locate",
"nu-build", "nu-build",
"nu-errors", "nu-errors",
"nu-macros",
"nu-parser", "nu-parser",
"nu-protocol", "nu-protocol",
"nu-source", "nu-source",
@ -1983,6 +2004,13 @@ dependencies = [
"toml 0.5.5", "toml 0.5.5",
] ]
[[package]]
name = "nu-macros"
version = "0.1.0"
dependencies = [
"nu-protocol",
]
[[package]] [[package]]
name = "nu-parser" name = "nu-parser"
version = "0.1.0" version = "0.1.0"
@ -1991,6 +2019,7 @@ dependencies = [
"bigdecimal", "bigdecimal",
"cfg-if", "cfg-if",
"derive-new", "derive-new",
"enumflags2",
"getset", "getset",
"indexmap", "indexmap",
"itertools 0.8.2", "itertools 0.8.2",

View File

@ -14,6 +14,7 @@ documentation = "https://book.nushell.sh"
[workspace] [workspace]
members = [ members = [
"crates/nu-macros",
"crates/nu-errors", "crates/nu-errors",
"crates/nu-source", "crates/nu-source",
"crates/nu_plugin_average", "crates/nu_plugin_average",
@ -54,6 +55,8 @@ nu_plugin_sum = {version = "0.1.0", path = "./crates/nu_plugin_sum", optional=tr
nu_plugin_sys = {version = "0.1.0", path = "./crates/nu_plugin_sys", optional=true} nu_plugin_sys = {version = "0.1.0", path = "./crates/nu_plugin_sys", optional=true}
nu_plugin_textview = {version = "0.1.0", path = "./crates/nu_plugin_textview", optional=true} nu_plugin_textview = {version = "0.1.0", path = "./crates/nu_plugin_textview", optional=true}
nu_plugin_tree = {version = "0.1.0", path = "./crates/nu_plugin_tree", optional=true} nu_plugin_tree = {version = "0.1.0", path = "./crates/nu_plugin_tree", optional=true}
nu-macros = { version = "0.1.0", path = "./crates/nu-macros" }
query_interface = "0.3.5" query_interface = "0.3.5"
typetag = "0.1.4" typetag = "0.1.4"
@ -133,19 +136,21 @@ semver = {version = "0.9.0", optional = true}
[features] [features]
default = ["sys", "ps", "textview", "inc", "str"] default = ["sys", "ps", "textview", "inc", "str"]
user-visible = ["sys", "ps", "starship-prompt", "textview", "binaryview", "match", "tree", "average", "sum"]
sys = ["heim", "battery"] sys = ["heim", "battery"]
ps = ["heim", "futures-timer"] ps = ["heim", "futures-timer"]
textview = ["crossterm", "syntect", "onig_sys", "url"] textview = ["crossterm", "syntect", "onig_sys", "url"]
inc = ["semver"]
str = [] str = []
inc = ["semver"]
starship-prompt = ["starship"] starship-prompt = ["starship"]
binaryview = ["nu_plugin_binaryview"] binaryview = ["nu_plugin_binaryview"]
match = ["nu_plugin_match"] match = ["nu_plugin_match"]
tree = ["nu_plugin_tree"] tree = ["nu_plugin_tree"]
average = ["nu_plugin_average"] average = ["nu_plugin_average"]
sum = ["nu_plugin_sum"] sum = ["nu_plugin_sum"]
#trace = ["nu-parser/trace"] trace = ["nu-parser/trace"]
[dependencies.rusqlite] [dependencies.rusqlite]
version = "0.20.0" version = "0.20.0"

View File

@ -55,7 +55,7 @@ cargo install nu
You can also install Nu with all the bells and whistles (be sure to have installed the [dependencies](https://book.nushell.sh/en/installation#dependencies) for your platform): You can also install Nu with all the bells and whistles (be sure to have installed the [dependencies](https://book.nushell.sh/en/installation#dependencies) for your platform):
``` ```
cargo install nu --all-features cargo install nu --features=user-visible
``` ```
## Docker ## Docker

View File

@ -46,3 +46,7 @@ Unify dictionary building, probably around a macro
sys plugin in own crate sys plugin in own crate
textview in own crate textview in own crate
Combine atomic and atomic_parse in parser
at_end_possible_ws needs to be comment and separator sensitive

View File

@ -16,6 +16,9 @@ use std::ops::Range;
pub enum ParseErrorReason { pub enum ParseErrorReason {
/// The parser encountered an EOF rather than what it was expecting /// The parser encountered an EOF rather than what it was expecting
Eof { expected: &'static str, span: Span }, Eof { expected: &'static str, span: Span },
/// The parser expected to see the end of a token stream (possibly the token
/// stream from inside a delimited token node), but found something else.
ExtraTokens { actual: Spanned<String> },
/// The parser encountered something other than what it was expecting /// The parser encountered something other than what it was expecting
Mismatch { Mismatch {
expected: &'static str, expected: &'static str,
@ -43,6 +46,17 @@ impl ParseError {
} }
} }
/// Construct a [ParseErrorReason::ExtraTokens](ParseErrorReason::ExtraTokens)
///
/// Used when a shape expected its token stream (possibly the interior of a
/// delimited token node) to be fully consumed, but `actual` tokens remained.
pub fn extra_tokens(actual: Spanned<impl Into<String>>) -> ParseError {
    // Split the spanned input so the payload can be converted to String and
    // re-tagged with the same span.
    let Spanned { span, item } = actual;
    ParseError {
        reason: ParseErrorReason::ExtraTokens {
            actual: item.into().spanned(span),
        },
    }
}
/// Construct a [ParseErrorReason::Mismatch](ParseErrorReason::Mismatch) /// Construct a [ParseErrorReason::Mismatch](ParseErrorReason::Mismatch)
pub fn mismatch(expected: &'static str, actual: Spanned<impl Into<String>>) -> ParseError { pub fn mismatch(expected: &'static str, actual: Spanned<impl Into<String>>) -> ParseError {
let Spanned { span, item } = actual; let Spanned { span, item } = actual;
@ -71,6 +85,9 @@ impl From<ParseError> for ShellError {
fn from(error: ParseError) -> ShellError { fn from(error: ParseError) -> ShellError {
match error.reason { match error.reason {
ParseErrorReason::Eof { expected, span } => ShellError::unexpected_eof(expected, span), ParseErrorReason::Eof { expected, span } => ShellError::unexpected_eof(expected, span),
ParseErrorReason::ExtraTokens { actual } => {
ShellError::type_error("nothing", actual.clone())
}
ParseErrorReason::Mismatch { actual, expected } => { ParseErrorReason::Mismatch { actual, expected } => {
ShellError::type_error(expected, actual.clone()) ShellError::type_error(expected, actual.clone())
} }

View File

@ -0,0 +1,10 @@
[package]
name = "nu-macros"
version = "0.1.0"
authors = ["Yehuda Katz <wycats@gmail.com>"]
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
nu-protocol = { path = "../nu-protocol", version = "0.1.0" }

View File

@ -0,0 +1,25 @@
/// Build a `Signature` from a declarative `def` block:
///
/// ```ignore
/// signature! {
///     def cd {
///         "Change to a new path."
///         directory(Path) - "the directory to change to"
///     }
/// }
/// ```
///
/// The first token after the command name is the usage string; each
/// following `name ty - desc` line is forwarded to [`positional!`].
#[macro_export]
macro_rules! signature {
    (def $name:tt {
        $usage:tt
        $(
            $positional_name:tt $positional_ty:tt - $positional_desc:tt
        )*
    }) => {{
        // Build the base signature, then rebind `signature` once per
        // positional parameter via the positional! statement macro.
        let signature = Signature::new(stringify!($name)).desc($usage);
        $(
            $crate::positional! { signature, $positional_name $positional_ty - $positional_desc }
        )*
        signature
    }};
}
/// Append one positional parameter to a signature binding.
///
/// Two forms are accepted:
/// - `name (optional Shape) - "desc"` registers an *optional* parameter
/// - `name (Shape) - "desc"` registers a *required* parameter
///
/// Each arm rebinds `$ident` (by shadowing) to the builder returned from the
/// `Signature` method, so successive invocations chain statement-by-statement.
#[macro_export]
macro_rules! positional {
    // `(optional Shape)` — the caller may omit this parameter.
    // Bug fix: this arm previously called `.required(..)` and the arm below
    // called `.optional(..)`, inverting the declared optionality.
    ($ident:tt, $name:tt (optional $shape:tt) - $desc:tt) => {
        let $ident = $ident.optional(stringify!($name), SyntaxShape::$shape, $desc);
    };
    // `(Shape)` — a required parameter.
    ($ident:tt, $name:tt ($shape:tt) - $desc:tt) => {
        let $ident = $ident.required(stringify!($name), SyntaxShape::$shape, $desc);
    };
}

View File

@ -32,6 +32,7 @@ ansi_term = "0.12.1"
ptree = {version = "0.2" } ptree = {version = "0.2" }
language-reporting = "0.4.0" language-reporting = "0.4.0"
unicode-xid = "0.2.0" unicode-xid = "0.2.0"
enumflags2 = "0.6.2"
[dev-dependencies] [dev-dependencies]
pretty_assertions = "0.6.1" pretty_assertions = "0.6.1"
@ -40,4 +41,4 @@ pretty_assertions = "0.6.1"
nu-build = { version = "0.1.0", path = "../nu-build" } nu-build = { version = "0.1.0", path = "../nu-build" }
[features] [features]
#trace = ["nom-tracable/trace"] trace = ["nom-tracable/trace"]

View File

@ -4,11 +4,13 @@ pub(crate) mod expand_external_tokens;
pub(crate) mod external_command; pub(crate) mod external_command;
pub(crate) mod named; pub(crate) mod named;
pub(crate) mod path; pub(crate) mod path;
pub(crate) mod range;
pub(crate) mod signature;
pub mod syntax_shape; pub mod syntax_shape;
pub(crate) mod tokens_iterator; pub(crate) mod tokens_iterator;
use crate::hir::syntax_shape::Member; use crate::hir::syntax_shape::Member;
use crate::parse::operator::Operator; use crate::parse::operator::CompareOperator;
use crate::parse::parser::Number; use crate::parse::parser::Number;
use crate::parse::unit::Unit; use crate::parse::unit::Unit;
use derive_new::new; use derive_new::new;
@ -24,12 +26,40 @@ use crate::parse::tokens::RawNumber;
pub(crate) use self::binary::Binary; pub(crate) use self::binary::Binary;
pub(crate) use self::path::Path; pub(crate) use self::path::Path;
pub(crate) use self::range::Range;
pub(crate) use self::syntax_shape::ExpandContext; pub(crate) use self::syntax_shape::ExpandContext;
pub(crate) use self::tokens_iterator::TokensIterator; pub(crate) use self::tokens_iterator::TokensIterator;
pub use self::external_command::ExternalCommand; pub use self::external_command::ExternalCommand;
pub use self::named::{NamedArguments, NamedValue}; pub use self::named::{NamedArguments, NamedValue};
/// A command signature as it appears in the HIR: the protocol-level
/// [nu_protocol::Signature] plus the span of the source text it was
/// parsed from.
#[derive(Debug, Clone)]
pub struct Signature {
    // The span-free signature (name, usage, parameters).
    unspanned: nu_protocol::Signature,
    // Where the signature appeared in the source.
    span: Span,
}
impl Signature {
    /// Attach a source span to a protocol-level signature.
    pub fn new(unspanned: nu_protocol::Signature, span: impl Into<Span>) -> Signature {
        Signature {
            unspanned,
            span: span.into(),
        }
    }
}
impl HasSpan for Signature {
    fn span(&self) -> Span {
        self.span
    }
}
impl PrettyDebugWithSource for Signature {
    // Debug rendering delegates to the unspanned signature; the span itself
    // is not displayed.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        self.unspanned.pretty_debug(source)
    }
}
#[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)] #[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)]
pub struct Call { pub struct Call {
#[get = "pub(crate)"] #[get = "pub(crate)"]
@ -68,6 +98,7 @@ pub enum RawExpression {
Synthetic(Synthetic), Synthetic(Synthetic),
Variable(Variable), Variable(Variable),
Binary(Box<Binary>), Binary(Box<Binary>),
Range(Box<Range>),
Block(Vec<Expression>), Block(Vec<Expression>),
List(Vec<Expression>), List(Vec<Expression>),
Path(Box<Path>), Path(Box<Path>),
@ -90,6 +121,7 @@ impl ShellTypeName for RawExpression {
RawExpression::Variable(..) => "variable", RawExpression::Variable(..) => "variable",
RawExpression::List(..) => "list", RawExpression::List(..) => "list",
RawExpression::Binary(..) => "binary", RawExpression::Binary(..) => "binary",
RawExpression::Range(..) => "range",
RawExpression::Block(..) => "block", RawExpression::Block(..) => "block",
RawExpression::Path(..) => "variable path", RawExpression::Path(..) => "variable path",
RawExpression::Boolean(..) => "boolean", RawExpression::Boolean(..) => "boolean",
@ -159,6 +191,7 @@ impl PrettyDebugWithSource for Expression {
}, },
RawExpression::Variable(_) => b::keyword(self.span.slice(source)), RawExpression::Variable(_) => b::keyword(self.span.slice(source)),
RawExpression::Binary(binary) => binary.pretty_debug(source), RawExpression::Binary(binary) => binary.pretty_debug(source),
RawExpression::Range(range) => range.pretty_debug(source),
RawExpression::Block(_) => b::opaque("block"), RawExpression::Block(_) => b::opaque("block"),
RawExpression::List(list) => b::delimit( RawExpression::List(list) => b::delimit(
"[", "[",
@ -245,7 +278,7 @@ impl Expression {
pub fn infix( pub fn infix(
left: Expression, left: Expression,
op: Spanned<impl Into<Operator>>, op: Spanned<impl Into<CompareOperator>>,
right: Expression, right: Expression,
) -> Expression { ) -> Expression {
let new_span = left.span.until(right.span); let new_span = left.span.until(right.span);
@ -254,6 +287,12 @@ impl Expression {
.into_expr(new_span) .into_expr(new_span)
} }
/// Build a range expression (`left..right`); `op` is the span of the `..`
/// token itself. The resulting expression's span runs from the start of
/// `left` through the end of `right`.
pub fn range(left: Expression, op: Span, right: Expression) -> Expression {
    let new_span = left.span.until(right.span);
    RawExpression::Range(Box::new(Range::new(left, op, right))).into_expr(new_span)
}
pub fn file_path(path: impl Into<PathBuf>, outer: impl Into<Span>) -> Expression { pub fn file_path(path: impl Into<PathBuf>, outer: impl Into<Span>) -> Expression {
RawExpression::FilePath(path.into()).into_expr(outer) RawExpression::FilePath(path.into()).into_expr(outer)
} }

View File

@ -1,2 +1,2 @@
#[cfg(test)] #[cfg(test)]
mod tests; pub mod tests;

View File

@ -23,7 +23,7 @@ fn test_parse_string() {
fn test_parse_path() { fn test_parse_path() {
parse_tokens( parse_tokens(
VariablePathShape, VariablePathShape,
vec![b::var("it"), b::op("."), b::bare("cpu")], vec![b::var("it"), b::dot(), b::bare("cpu")],
|tokens| { |tokens| {
let (outer_var, inner_var) = tokens[0].expect_var(); let (outer_var, inner_var) = tokens[0].expect_var();
let bare = tokens[2].expect_bare(); let bare = tokens[2].expect_bare();
@ -39,9 +39,9 @@ fn test_parse_path() {
VariablePathShape, VariablePathShape,
vec![ vec![
b::var("cpu"), b::var("cpu"),
b::op("."), b::dot(),
b::bare("amount"), b::bare("amount"),
b::op("."), b::dot(),
b::string("max ghz"), b::string("max ghz"),
], ],
|tokens| { |tokens| {
@ -145,7 +145,7 @@ fn parse_tokens<T: Eq + HasSpan + Clone + Debug + 'static>(
let expr = match expr { let expr = match expr {
Ok(expr) => expr, Ok(expr) => expr,
Err(err) => { Err(err) => {
print_err(err.into(), context.source().clone()); print_err(err.into(), &context.source().clone());
panic!("Parse failed"); panic!("Parse failed");
} }
}; };
@ -165,12 +165,10 @@ pub fn print_err(err: ShellError, source: &Text) {
let mut source = source.to_string(); let mut source = source.to_string();
source.push_str(" "); source.push_str(" ");
let files = Files::new(source); let files = Files::new(source);
let _ = std::panic::catch_unwind(move || {
let _ = language_reporting::emit( let _ = language_reporting::emit(
&mut writer.lock(), &mut writer.lock(),
&files, &files,
&diag, &diag,
&language_reporting::DefaultConfig, &language_reporting::DefaultConfig,
); );
});
} }

View File

@ -1,4 +1,4 @@
use crate::{hir::Expression, Operator}; use crate::{hir::Expression, CompareOperator};
use derive_new::new; use derive_new::new;
use getset::Getters; use getset::Getters;
@ -11,7 +11,7 @@ use serde::{Deserialize, Serialize};
#[get = "pub"] #[get = "pub"]
pub struct Binary { pub struct Binary {
left: Expression, left: Expression,
op: Spanned<Operator>, op: Spanned<CompareOperator>,
right: Expression, right: Expression,
} }

View File

@ -10,6 +10,7 @@ use crate::{
TokensIterator, TokensIterator,
}; };
use nu_errors::ParseError; use nu_errors::ParseError;
use nu_protocol::SpannedTypeName;
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebug, Span, Spanned, SpannedItem}; use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebug, Span, Spanned, SpannedItem};
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@ -195,11 +196,18 @@ impl ExpandExpression for ExternalHeadShape {
UnspannedAtomicToken::Whitespace { .. } => { UnspannedAtomicToken::Whitespace { .. } => {
unreachable!("ExpansionRule doesn't allow Whitespace") unreachable!("ExpansionRule doesn't allow Whitespace")
} }
UnspannedAtomicToken::Separator { .. } => {
unreachable!("ExpansionRule doesn't allow Separator")
}
UnspannedAtomicToken::Comment { .. } => {
unreachable!("ExpansionRule doesn't allow Comment")
}
UnspannedAtomicToken::ShorthandFlag { .. } UnspannedAtomicToken::ShorthandFlag { .. }
| UnspannedAtomicToken::SquareDelimited { .. } => { | UnspannedAtomicToken::SquareDelimited { .. }
| UnspannedAtomicToken::RoundDelimited { .. } => {
return Err(ParseError::mismatch( return Err(ParseError::mismatch(
"external command name", "external command name",
"pipeline".spanned(atom.span), atom.spanned_type_name(),
)) ))
} }
UnspannedAtomicToken::ExternalCommand { command } => { UnspannedAtomicToken::ExternalCommand { command } => {
@ -215,7 +223,10 @@ impl ExpandExpression for ExternalHeadShape {
| UnspannedAtomicToken::GlobPattern { .. } | UnspannedAtomicToken::GlobPattern { .. }
| UnspannedAtomicToken::Word { .. } | UnspannedAtomicToken::Word { .. }
| UnspannedAtomicToken::Dot { .. } | UnspannedAtomicToken::Dot { .. }
| UnspannedAtomicToken::Operator { .. } => Expression::external_command(span, span), | UnspannedAtomicToken::DotDot { .. }
| UnspannedAtomicToken::CompareOperator { .. } => {
Expression::external_command(span, span)
}
}) })
} }
} }
@ -257,6 +268,12 @@ impl ExpandExpression for ExternalContinuationShape {
UnspannedAtomicToken::Whitespace { .. } => { UnspannedAtomicToken::Whitespace { .. } => {
unreachable!("ExpansionRule doesn't allow Whitespace") unreachable!("ExpansionRule doesn't allow Whitespace")
} }
UnspannedAtomicToken::Separator { .. } => {
unreachable!("ExpansionRule doesn't allow Separator")
}
UnspannedAtomicToken::Comment { .. } => {
unreachable!("ExpansionRule doesn't allow Comment")
}
UnspannedAtomicToken::String { body } => Expression::string(*body, span), UnspannedAtomicToken::String { body } => Expression::string(*body, span),
UnspannedAtomicToken::ItVariable { name } => Expression::it_variable(*name, span), UnspannedAtomicToken::ItVariable { name } => Expression::it_variable(*name, span),
UnspannedAtomicToken::Variable { name } => Expression::variable(*name, span), UnspannedAtomicToken::Variable { name } => Expression::variable(*name, span),
@ -265,11 +282,13 @@ impl ExpandExpression for ExternalContinuationShape {
| UnspannedAtomicToken::Word { .. } | UnspannedAtomicToken::Word { .. }
| UnspannedAtomicToken::ShorthandFlag { .. } | UnspannedAtomicToken::ShorthandFlag { .. }
| UnspannedAtomicToken::Dot { .. } | UnspannedAtomicToken::Dot { .. }
| UnspannedAtomicToken::Operator { .. } => Expression::bare(span), | UnspannedAtomicToken::DotDot { .. }
UnspannedAtomicToken::SquareDelimited { .. } => { | UnspannedAtomicToken::CompareOperator { .. } => Expression::bare(span),
UnspannedAtomicToken::SquareDelimited { .. }
| UnspannedAtomicToken::RoundDelimited { .. } => {
return Err(ParseError::mismatch( return Err(ParseError::mismatch(
"external argument", "external argument",
"pipeline".spanned(atom.span), atom.spanned_type_name(),
)) ))
} }
}) })

View File

@ -0,0 +1,33 @@
use crate::hir::Expression;
use derive_new::new;
use getset::Getters;
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Span};
use serde::{Deserialize, Serialize};
/// The HIR node for a range literal (`left..right`), as produced by
/// `Expression::range`.
#[derive(
    Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new,
)]
pub struct Range {
    // The expression before the `..` operator.
    #[get = "pub"]
    left: Expression,
    // The span of the `..` token itself.
    #[get = "pub"]
    dotdot: Span,
    // The expression after the `..` operator.
    #[get = "pub"]
    right: Expression,
}
impl PrettyDebugWithSource for Range {
    /// Render the range as `<left .. right>`, slicing the `..` keyword
    /// directly out of the source text.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        let left = self.left.pretty_debug(source);
        let op = b::keyword(self.dotdot.slice(source));
        let right = self.right.pretty_debug(source);
        let body = left + b::space() + op + b::space() + right;
        b::delimit("<", body, ">").group()
    }
}

View File

@ -0,0 +1,481 @@
use crate::hir;
use crate::hir::syntax_shape::{
expand_atom, expand_syntax, BareShape, ExpandContext, ExpandSyntax, ExpansionRule,
UnspannedAtomicToken, WhitespaceShape,
};
use crate::hir::tokens_iterator::TokensIterator;
use crate::parse::comment::Comment;
use derive_new::new;
use nu_errors::ParseError;
use nu_protocol::{RowType, SpannedTypeName, Type};
use nu_source::{
b, DebugDocBuilder, HasFallibleSpan, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem,
};
use std::fmt::Debug;
// A Signature is a command without implementation.
//
// In Nu, a command is a function combined with macro expansion rules.
//
// def cd
// # Change to a new path.
// optional directory(Path) # the directory to change to
// end
// Convenience wrapper that threads a TokensIterator and an ExpandContext
// through a sequence of shape expansions.
#[derive(new)]
struct Expander<'a, 'b, 'c, 'd> {
    iterator: &'b mut TokensIterator<'a>,
    context: &'d ExpandContext<'c>,
}
impl<'a, 'b, 'c, 'd> Expander<'a, 'b, 'c, 'd> {
    /// Expand `syntax` at the current position, propagating any parse error.
    fn expand<O>(&mut self, syntax: impl ExpandSyntax<Output = O>) -> Result<O, ParseError>
    where
        O: HasFallibleSpan + Clone + std::fmt::Debug + 'static,
    {
        expand_syntax(&syntax, self.iterator, self.context)
    }
    /// Try to expand `syntax`; a failed expansion yields `None` instead of
    /// an error.
    fn optional<O>(&mut self, syntax: impl ExpandSyntax<Output = O>) -> Option<O>
    where
        O: HasFallibleSpan + Clone + std::fmt::Debug + 'static,
    {
        self.expand(syntax).ok()
    }
    /// The span at the iterator's current cursor position.
    fn pos(&mut self) -> Span {
        self.iterator.span_at_cursor()
    }
    /// Copy the source text covered by `span` into an owned String.
    fn slice_string(&mut self, span: impl Into<Span>) -> String {
        let span = span.into();
        span.slice(self.context.source()).to_string()
    }
}
#[derive(Debug, Copy, Clone)]
struct SignatureShape;
impl ExpandSyntax for SignatureShape {
    type Output = hir::Signature;
    fn name(&self) -> &'static str {
        "signature"
    }
    // Parses the head of a signature:
    //
    //   def <name><separator># usage<separator>
    //
    // and wraps the result (name + usage) in an hir::Signature carrying the
    // span of everything consumed. Positional parameters are not parsed here
    // yet (see PositionalParamShape).
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        // NOTE(review): atomic_parse appears to make the steps below
        // all-or-nothing — confirm against tokens_iterator.
        token_nodes.atomic_parse(|token_nodes| {
            let mut expander = Expander::new(token_nodes, context);
            // Record the cursor before and after so the whole signature gets
            // one contiguous span.
            let start = expander.pos();
            expander.expand(keyword("def"))?;
            expander.expand(WhitespaceShape)?;
            let name = expander.expand(BareShape)?;
            expander.expand(SeparatorShape)?;
            // The comment immediately after the name becomes the usage text.
            let usage = expander.expand(CommentShape)?;
            expander.expand(SeparatorShape)?;
            let end = expander.pos();
            Ok(hir::Signature::new(
                nu_protocol::Signature::new(&name.word).desc(expander.slice_string(usage.text)),
                start.until(end),
            ))
        })
    }
}
/// Shorthand constructor for [KeywordShape].
fn keyword(kw: &'static str) -> KeywordShape {
    KeywordShape { keyword: kw }
}
#[derive(Debug, Copy, Clone)]
struct KeywordShape {
    keyword: &'static str,
}
impl ExpandSyntax for KeywordShape {
    type Output = Span;
    fn name(&self) -> &'static str {
        "keyword"
    }
    /// Succeeds when the next atom is a bare word whose source text equals
    /// the expected keyword; yields the word's span.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let atom = expand_atom(token_nodes, "keyword", context, ExpansionRule::new())?;
        if let UnspannedAtomicToken::Word { text } = &atom.unspanned {
            if text.slice(context.source()) == self.keyword {
                return Ok(atom.span);
            }
        }
        Err(ParseError::mismatch(self.keyword, atom.spanned_type_name()))
    }
}
#[derive(Debug, Copy, Clone)]
struct SeparatorShape;
impl ExpandSyntax for SeparatorShape {
    type Output = Span;
    fn name(&self) -> &'static str {
        "separator"
    }
    /// Expand a statement separator atom and yield its span.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let atom = expand_atom(token_nodes, "separator", context, ExpansionRule::new())?;
        if let UnspannedAtomicToken::Separator { text } = &atom.unspanned {
            Ok(*text)
        } else {
            Err(ParseError::mismatch("separator", atom.spanned_type_name()))
        }
    }
}
#[derive(Debug, Copy, Clone)]
struct CommentShape;
impl ExpandSyntax for CommentShape {
    type Output = Comment;
    fn name(&self) -> &'static str {
        "comment"
    }
    /// Expand a single `# ...` line-comment atom into a [Comment].
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let atom = expand_atom(token_nodes, "comment", context, ExpansionRule::new())?;
        match &atom.unspanned {
            UnspannedAtomicToken::Comment { body } => Ok(Comment::line(body, atom.span)),
            // Bug fix: the mismatch previously reported "separator" (a
            // copy-paste from SeparatorShape); this shape expects a comment.
            _ => Err(ParseError::mismatch("comment", atom.spanned_type_name())),
        }
    }
}
/// A shape that expands two sub-shapes in sequence (a grammar "pair").
#[derive(Debug, Copy, Clone, new)]
struct TupleShape<A, B> {
    first: A,
    second: B,
}
/// The result of expanding a [TupleShape]: both sub-results, in order.
#[derive(Debug, Clone, new)]
struct TupleSyntax<A, B> {
    first: A,
    second: B,
}
impl<A, B> PrettyDebugWithSource for TupleSyntax<A, B>
where
    A: PrettyDebugWithSource,
    B: PrettyDebugWithSource,
{
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::typed(
            "pair",
            self.first.pretty_debug(source) + b::space() + self.second.pretty_debug(source),
        )
    }
}
impl<A, B> HasFallibleSpan for TupleSyntax<A, B>
where
    A: HasFallibleSpan + Debug + Clone,
    B: HasFallibleSpan + Debug + Clone,
{
    // The pair's span covers both parts when both have one; otherwise,
    // whichever part has a span; None when neither does.
    fn maybe_span(&self) -> Option<Span> {
        match (self.first.maybe_span(), self.second.maybe_span()) {
            (Some(first), Some(second)) => Some(first.until(second)),
            (Some(first), None) => Some(first),
            (None, Some(second)) => Some(second),
            (None, None) => None,
        }
    }
}
impl<A, B, AOut, BOut> ExpandSyntax for TupleShape<A, B>
where
    A: ExpandSyntax<Output = AOut> + Debug + Copy,
    B: ExpandSyntax<Output = BOut> + Debug + Copy,
    AOut: HasFallibleSpan + Debug + Clone + 'static,
    BOut: HasFallibleSpan + Debug + Clone + 'static,
{
    type Output = TupleSyntax<AOut, BOut>;
    fn name(&self) -> &'static str {
        "pair"
    }
    // Expands `first` then `second` as one unit; if either fails, the whole
    // pair fails inside atomic_parse.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        token_nodes.atomic_parse(|token_nodes| {
            let first = expand_syntax(&self.first, token_nodes, context)?;
            let second = expand_syntax(&self.second, token_nodes, context)?;
            Ok(TupleSyntax { first, second })
        })
    }
}
/// A single positional parameter inside a signature:
/// `[optional] name(type) # description`.
#[derive(Debug, Clone)]
pub struct PositionalParam {
    // Span of the `optional` keyword, when present.
    optional: Option<Span>,
    // The parameter's name.
    name: Identifier,
    // The parameter's declared type.
    ty: Spanned<Type>,
    // The parameter's description text.
    desc: Spanned<String>,
    span: Span,
}
impl HasSpan for PositionalParam {
    fn span(&self) -> Span {
        self.span
    }
}
impl PrettyDebugWithSource for PositionalParam {
    // Renders as `optional <type>` or just `<type>`; the name and
    // description are not part of this debug form.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        (match self.optional {
            Some(_) => b::description("optional") + b::space(),
            None => b::blank(),
        }) + self.ty.pretty_debug(source)
    }
}
#[derive(Debug, Copy, Clone)]
pub struct PositionalParamShape;
impl ExpandSyntax for PositionalParamShape {
    type Output = PositionalParam;
    fn name(&self) -> &'static str {
        "positional param"
    }
    // Parses `[optional <ws>] <identifier> [<ws>] <type>` into a
    // PositionalParam.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        token_nodes.atomic_parse(|token_nodes| {
            let mut expander = Expander::new(token_nodes, context);
            // Keep only the span of the `optional` keyword; the trailing
            // whitespace from the pair is discarded.
            let optional = expander
                .optional(TupleShape::new(keyword("optional"), WhitespaceShape))
                .map(|s| s.first);
            let name = expander.expand(IdentifierShape)?;
            expander.optional(WhitespaceShape);
            // NOTE(review): the type is parsed but discarded, and ty/desc/span
            // below are placeholders — this looks like work in progress.
            let _ty = expander.expand(TypeShape)?;
            Ok(PositionalParam {
                optional,
                name,
                ty: Type::Nothing.spanned(Span::unknown()),
                desc: format!("").spanned(Span::unknown()),
                span: Span::unknown(),
            })
        })
    }
}
/// A parsed identifier: its owned text plus the span it came from.
#[derive(Debug, Clone)]
struct Identifier {
    body: String,
    span: Span,
}
impl HasSpan for Identifier {
    fn span(&self) -> Span {
        self.span
    }
}
impl PrettyDebugWithSource for Identifier {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        let text = b::description(self.span.slice(source));
        b::typed("id", text)
    }
}
#[derive(Debug, Copy, Clone)]
struct IdentifierShape;
impl ExpandSyntax for IdentifierShape {
    type Output = Identifier;
    fn name(&self) -> &'static str {
        "identifier"
    }
    /// Expand a bare word into an [Identifier], provided the whole word
    /// lexes as an identifier (see [is_id]).
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let atom = expand_atom(token_nodes, "identifier", context, ExpansionRule::new())?;
        match &atom.unspanned {
            UnspannedAtomicToken::Word { text } if is_id(text.slice(context.source())) => {
                Ok(Identifier {
                    body: text.slice(context.source()).to_string(),
                    span: *text,
                })
            }
            _ => Err(ParseError::mismatch("identifier", atom.spanned_type_name())),
        }
    }
}
/// A word is a valid identifier when the `ident` parser consumes it in its
/// entirety (no input remains after the parse).
fn is_id(input: &str) -> bool {
    crate::parse::parser::ident(nu_source::nom_input(input))
        .map(|(remaining, _)| remaining.fragment.len() == 0)
        .unwrap_or(false)
}
/// A parsed type name together with the span it occupied in the source.
#[derive(Debug, Clone, new)]
struct TypeSyntax {
    ty: Type,
    span: Span,
}
impl HasSpan for TypeSyntax {
    fn span(&self) -> Span {
        self.span
    }
}
impl PrettyDebugWithSource for TypeSyntax {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        self.ty.pretty_debug(source)
    }
}
#[derive(Debug, Copy, Clone)]
struct TypeShape;
impl ExpandSyntax for TypeShape {
    type Output = TypeSyntax;
    fn name(&self) -> &'static str {
        "type"
    }
    /// Expand a bare word naming one of Nu's types into a [TypeSyntax].
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let atom = expand_atom(token_nodes, "type", context, ExpansionRule::new())?;
        // Only a bare word can name a type.
        let text = match &atom.unspanned {
            UnspannedAtomicToken::Word { text } => *text,
            _ => return Err(ParseError::mismatch("type", atom.spanned_type_name())),
        };
        // Map the word's source text onto the protocol Type.
        let ty = match text.slice(context.source()) {
            "nothing" => Type::Nothing,
            "integer" => Type::Int,
            "decimal" => Type::Decimal,
            "bytesize" => Type::Bytesize,
            "string" => Type::String,
            "column-path" => Type::ColumnPath,
            "pattern" => Type::Pattern,
            "boolean" => Type::Boolean,
            "date" => Type::Date,
            "duration" => Type::Duration,
            "filename" => Type::Path,
            "binary" => Type::Binary,
            "row" => Type::Row(RowType::new()),
            "table" => Type::Table(vec![]),
            "block" => Type::Block,
            _ => return Err(ParseError::mismatch("type", atom.spanned_type_name())),
        };
        Ok(TypeSyntax::new(ty, atom.span))
    }
}
#[derive(Debug, Copy, Clone)]
struct TypeAnnotation;
impl ExpandSyntax for TypeAnnotation {
    type Output = TypeSyntax;
    fn name(&self) -> &'static str {
        "type annotation"
    }
    // Parses a parenthesized type annotation like `(Path)`: the atom must be
    // a round-delimited token node whose interior is exactly one type name.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let atom = expand_atom(
            token_nodes,
            "type annotation",
            context,
            ExpansionRule::new(),
        )?;
        match atom.unspanned {
            UnspannedAtomicToken::RoundDelimited { nodes, .. } => {
                token_nodes.atomic_parse(|token_nodes| {
                    // Descend into the delimited node's interior tokens and
                    // parse them as a stand-alone stream.
                    token_nodes.child(
                        (&nodes[..]).spanned(atom.span),
                        context.source().clone(),
                        |token_nodes| {
                            let ty = expand_syntax(&TypeShape, token_nodes, context)?;
                            // After the type, only whitespace may remain;
                            // anything else is an ExtraTokens error.
                            let next = token_nodes.peek_non_ws();
                            match next.node {
                                None => Ok(ty),
                                Some(node) => {
                                    Err(ParseError::extra_tokens(node.spanned_type_name()))
                                }
                            }
                        },
                    )
                })
            }
            _ => Err(ParseError::mismatch(
                "type annotation",
                atom.spanned_type_name(),
            )),
        }
    }
}

View File

@ -7,8 +7,9 @@ use crate::commands::external_command;
use crate::hir; use crate::hir;
use crate::hir::expand_external_tokens::ExternalTokensShape; use crate::hir::expand_external_tokens::ExternalTokensShape;
use crate::hir::syntax_shape::block::AnyBlockShape; use crate::hir::syntax_shape::block::AnyBlockShape;
use crate::hir::syntax_shape::expression::range::RangeShape;
use crate::hir::tokens_iterator::{Peeked, TokensIterator}; use crate::hir::tokens_iterator::{Peeked, TokensIterator};
use crate::parse::operator::Operator; use crate::parse::operator::EvaluationOperator;
use crate::parse::token_tree::TokenNode; use crate::parse::token_tree::TokenNode;
use crate::parse::tokens::{Token, UnspannedToken}; use crate::parse::tokens::{Token, UnspannedToken};
use crate::parse_command::{parse_command_tail, CommandTailShape}; use crate::parse_command::{parse_command_tail, CommandTailShape};
@ -74,6 +75,7 @@ impl FallibleColorSyntax for SyntaxShape {
context, context,
shapes, shapes,
), ),
SyntaxShape::Range => color_fallible_syntax(&RangeShape, token_nodes, context, shapes),
SyntaxShape::Member => { SyntaxShape::Member => {
color_fallible_syntax(&MemberShape, token_nodes, context, shapes) color_fallible_syntax(&MemberShape, token_nodes, context, shapes)
} }
@ -114,6 +116,7 @@ impl FallibleColorSyntax for SyntaxShape {
match self { match self {
SyntaxShape::Any => color_fallible_syntax(&AnyExpressionShape, token_nodes, context), SyntaxShape::Any => color_fallible_syntax(&AnyExpressionShape, token_nodes, context),
SyntaxShape::Int => color_fallible_syntax(&IntShape, token_nodes, context), SyntaxShape::Int => color_fallible_syntax(&IntShape, token_nodes, context),
SyntaxShape::Range => color_fallible_syntax(&RangeShape, token_nodes, context),
SyntaxShape::String => { SyntaxShape::String => {
color_fallible_syntax_with(&StringShape, &FlatShape::String, token_nodes, context) color_fallible_syntax_with(&StringShape, &FlatShape::String, token_nodes, context)
} }
@ -134,6 +137,7 @@ impl ExpandExpression for SyntaxShape {
match self { match self {
SyntaxShape::Any => "shape[any]", SyntaxShape::Any => "shape[any]",
SyntaxShape::Int => "shape[integer]", SyntaxShape::Int => "shape[integer]",
SyntaxShape::Range => "shape[range]",
SyntaxShape::String => "shape[string]", SyntaxShape::String => "shape[string]",
SyntaxShape::Member => "shape[column name]", SyntaxShape::Member => "shape[column name]",
SyntaxShape::ColumnPath => "shape[column path]", SyntaxShape::ColumnPath => "shape[column path]",
@ -152,6 +156,7 @@ impl ExpandExpression for SyntaxShape {
match self { match self {
SyntaxShape::Any => expand_expr(&AnyExpressionShape, token_nodes, context), SyntaxShape::Any => expand_expr(&AnyExpressionShape, token_nodes, context),
SyntaxShape::Int => expand_expr(&IntShape, token_nodes, context), SyntaxShape::Int => expand_expr(&IntShape, token_nodes, context),
SyntaxShape::Range => expand_expr(&RangeShape, token_nodes, context),
SyntaxShape::String => expand_expr(&StringShape, token_nodes, context), SyntaxShape::String => expand_expr(&StringShape, token_nodes, context),
SyntaxShape::Member => { SyntaxShape::Member => {
let syntax = expand_syntax(&MemberShape, token_nodes, context)?; let syntax = expand_syntax(&MemberShape, token_nodes, context)?;
@ -183,7 +188,6 @@ pub trait SignatureRegistry {
pub struct ExpandContext<'context> { pub struct ExpandContext<'context> {
#[get = "pub(crate)"] #[get = "pub(crate)"]
pub registry: Box<dyn SignatureRegistry>, pub registry: Box<dyn SignatureRegistry>,
#[get = "pub(crate)"]
pub source: &'context Text, pub source: &'context Text,
pub homedir: Option<PathBuf>, pub homedir: Option<PathBuf>,
} }
@ -192,6 +196,10 @@ impl<'context> ExpandContext<'context> {
pub(crate) fn homedir(&self) -> Option<&Path> { pub(crate) fn homedir(&self) -> Option<&Path> {
self.homedir.as_ref().map(|h| h.as_path()) self.homedir.as_ref().map(|h| h.as_path())
} }
pub(crate) fn source(&self) -> &'context Text {
self.source
}
} }
pub trait TestSyntax: std::fmt::Debug + Copy { pub trait TestSyntax: std::fmt::Debug + Copy {
@ -568,7 +576,7 @@ impl ExpandSyntax for BarePathShape {
.. ..
}) })
| TokenNode::Token(Token { | TokenNode::Token(Token {
unspanned: UnspannedToken::Operator(Operator::Dot), unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot),
.. ..
}) => true, }) => true,
@ -604,7 +612,10 @@ impl FallibleColorSyntax for BareShape {
} }
// otherwise, fail // otherwise, fail
other => Err(ParseError::mismatch("word", other.spanned_type_name())), other => Err(ParseError::mismatch(
"word",
other.type_name().spanned(other.span()),
)),
}) })
.map_err(|err| err.into()) .map_err(|err| err.into())
} }
@ -1600,7 +1611,7 @@ impl FallibleColorSyntax for SpaceShape {
other => Err(ShellError::type_error( other => Err(ShellError::type_error(
"whitespace", "whitespace",
other.spanned_type_name(), other.type_name().spanned(other.span()),
)), )),
} }
} }

View File

@ -390,6 +390,7 @@ impl FallibleColorSyntax for ShorthandHeadShape {
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
use crate::parse::token_tree::TokenNode; use crate::parse::token_tree::TokenNode;
use crate::parse::tokens::{Token, UnspannedToken}; use crate::parse::tokens::{Token, UnspannedToken};
use nu_protocol::SpannedTypeName;
use nu_source::SpannedItem; use nu_source::SpannedItem;
// A shorthand path must not be at EOF // A shorthand path must not be at EOF

View File

@ -4,6 +4,7 @@ pub(crate) mod file_path;
pub(crate) mod list; pub(crate) mod list;
pub(crate) mod number; pub(crate) mod number;
pub(crate) mod pattern; pub(crate) mod pattern;
pub(crate) mod range;
pub(crate) mod string; pub(crate) mod string;
pub(crate) mod unit; pub(crate) mod unit;
pub(crate) mod variable_path; pub(crate) mod variable_path;

View File

@ -3,6 +3,7 @@ use crate::hir::syntax_shape::{
expand_syntax, expression::expand_file_path, parse_single_node, BarePathShape, expand_syntax, expression::expand_file_path, parse_single_node, BarePathShape,
BarePatternShape, ExpandContext, UnitShape, UnitSyntax, BarePatternShape, ExpandContext, UnitShape, UnitSyntax,
}; };
use crate::parse::operator::EvaluationOperator;
use crate::parse::token_tree::{DelimitedNode, Delimiter, TokenNode}; use crate::parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
use crate::parse::tokens::UnspannedToken; use crate::parse::tokens::UnspannedToken;
use crate::parse::unit::Unit; use crate::parse::unit::Unit;
@ -12,7 +13,7 @@ use crate::{
parse::flag::{Flag, FlagKind}, parse::flag::{Flag, FlagKind},
}; };
use nu_errors::{ParseError, ShellError}; use nu_errors::{ParseError, ShellError};
use nu_protocol::ShellTypeName; use nu_protocol::{ShellTypeName, SpannedTypeName};
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem}; use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem};
use std::ops::Deref; use std::ops::Deref;
@ -52,23 +53,36 @@ pub enum UnspannedAtomicToken<'tokens> {
Word { Word {
text: Span, text: Span,
}, },
#[allow(unused)]
Dot {
text: Span,
},
SquareDelimited { SquareDelimited {
spans: (Span, Span), spans: (Span, Span),
nodes: &'tokens Vec<TokenNode>, nodes: &'tokens Vec<TokenNode>,
}, },
#[allow(unused)]
RoundDelimited {
spans: (Span, Span),
nodes: &'tokens Vec<TokenNode>,
},
ShorthandFlag { ShorthandFlag {
name: Span, name: Span,
}, },
Operator { CompareOperator {
text: Span,
},
Dot {
text: Span,
},
DotDot {
text: Span, text: Span,
}, },
Whitespace { Whitespace {
text: Span, text: Span,
}, },
Separator {
text: Span,
},
Comment {
body: Span,
},
} }
impl<'tokens> UnspannedAtomicToken<'tokens> { impl<'tokens> UnspannedAtomicToken<'tokens> {
@ -80,15 +94,24 @@ impl<'tokens> UnspannedAtomicToken<'tokens> {
} }
} }
impl<'tokens> ShellTypeName for AtomicToken<'tokens> {
fn type_name(&self) -> &'static str {
self.unspanned.type_name()
}
}
impl<'tokens> ShellTypeName for UnspannedAtomicToken<'tokens> { impl<'tokens> ShellTypeName for UnspannedAtomicToken<'tokens> {
fn type_name(&self) -> &'static str { fn type_name(&self) -> &'static str {
match &self { match &self {
UnspannedAtomicToken::Eof { .. } => "eof", UnspannedAtomicToken::Eof { .. } => "eof",
UnspannedAtomicToken::Error { .. } => "error", UnspannedAtomicToken::Error { .. } => "error",
UnspannedAtomicToken::Operator { .. } => "operator", UnspannedAtomicToken::CompareOperator { .. } => "compare operator",
UnspannedAtomicToken::ShorthandFlag { .. } => "shorthand flag", UnspannedAtomicToken::ShorthandFlag { .. } => "shorthand flag",
UnspannedAtomicToken::Whitespace { .. } => "whitespace", UnspannedAtomicToken::Whitespace { .. } => "whitespace",
UnspannedAtomicToken::Separator { .. } => "separator",
UnspannedAtomicToken::Comment { .. } => "comment",
UnspannedAtomicToken::Dot { .. } => "dot", UnspannedAtomicToken::Dot { .. } => "dot",
UnspannedAtomicToken::DotDot { .. } => "dotdot",
UnspannedAtomicToken::Number { .. } => "number", UnspannedAtomicToken::Number { .. } => "number",
UnspannedAtomicToken::Size { .. } => "size", UnspannedAtomicToken::Size { .. } => "size",
UnspannedAtomicToken::String { .. } => "string", UnspannedAtomicToken::String { .. } => "string",
@ -99,6 +122,7 @@ impl<'tokens> ShellTypeName for UnspannedAtomicToken<'tokens> {
UnspannedAtomicToken::GlobPattern { .. } => "file pattern", UnspannedAtomicToken::GlobPattern { .. } => "file pattern",
UnspannedAtomicToken::Word { .. } => "word", UnspannedAtomicToken::Word { .. } => "word",
UnspannedAtomicToken::SquareDelimited { .. } => "array literal", UnspannedAtomicToken::SquareDelimited { .. } => "array literal",
UnspannedAtomicToken::RoundDelimited { .. } => "paren delimited",
} }
} }
} }
@ -109,6 +133,12 @@ pub struct AtomicToken<'tokens> {
pub span: Span, pub span: Span,
} }
impl<'tokens> HasSpan for AtomicToken<'tokens> {
fn span(&self) -> Span {
self.span
}
}
impl<'tokens> Deref for AtomicToken<'tokens> { impl<'tokens> Deref for AtomicToken<'tokens> {
type Target = UnspannedAtomicToken<'tokens>; type Target = UnspannedAtomicToken<'tokens>;
@ -131,31 +161,18 @@ impl<'tokens> AtomicToken<'tokens> {
)) ))
} }
UnspannedAtomicToken::Error { .. } => { UnspannedAtomicToken::Error { .. } => {
return Err(ParseError::mismatch( return Err(ParseError::mismatch(expected, "error".spanned(self.span)))
expected,
"eof atomic token".spanned(self.span),
))
} }
UnspannedAtomicToken::Operator { .. } => { UnspannedAtomicToken::RoundDelimited { .. }
return Err(ParseError::mismatch( | UnspannedAtomicToken::CompareOperator { .. }
expected, | UnspannedAtomicToken::ShorthandFlag { .. }
"operator".spanned(self.span), | UnspannedAtomicToken::Whitespace { .. }
)) | UnspannedAtomicToken::Separator { .. }
} | UnspannedAtomicToken::Comment { .. }
UnspannedAtomicToken::ShorthandFlag { .. } => { | UnspannedAtomicToken::Dot { .. }
return Err(ParseError::mismatch( | UnspannedAtomicToken::DotDot { .. }
expected, | UnspannedAtomicToken::SquareDelimited { .. } => {
"shorthand flag".spanned(self.span), return Err(ParseError::mismatch(expected, self.spanned_type_name()));
))
}
UnspannedAtomicToken::Whitespace { .. } => {
return Err(ParseError::mismatch(
expected,
"whitespace".spanned(self.span),
))
}
UnspannedAtomicToken::Dot { .. } => {
return Err(ParseError::mismatch(expected, "dot".spanned(self.span)))
} }
UnspannedAtomicToken::Number { number } => { UnspannedAtomicToken::Number { number } => {
Expression::number(number.to_number(context.source), self.span) Expression::number(number.to_number(context.source), self.span)
@ -175,41 +192,17 @@ impl<'tokens> AtomicToken<'tokens> {
self.span, self.span,
), ),
UnspannedAtomicToken::Word { text } => Expression::string(*text, *text), UnspannedAtomicToken::Word { text } => Expression::string(*text, *text),
UnspannedAtomicToken::SquareDelimited { .. } => unimplemented!("into_hir"),
}) })
} }
#[cfg(not(coloring_in_tokens))]
pub fn spanned_type_name(&self) -> Spanned<&'static str> {
match &self.unspanned {
UnspannedAtomicToken::Eof { .. } => "eof",
UnspannedAtomicToken::Error { .. } => "error",
UnspannedAtomicToken::Operator { .. } => "operator",
UnspannedAtomicToken::ShorthandFlag { .. } => "shorthand flag",
UnspannedAtomicToken::Whitespace { .. } => "whitespace",
UnspannedAtomicToken::Dot { .. } => "dot",
UnspannedAtomicToken::Number { .. } => "number",
UnspannedAtomicToken::Size { .. } => "size",
UnspannedAtomicToken::String { .. } => "string",
UnspannedAtomicToken::ItVariable { .. } => "$it",
UnspannedAtomicToken::Variable { .. } => "variable",
UnspannedAtomicToken::ExternalCommand { .. } => "external command",
UnspannedAtomicToken::ExternalWord { .. } => "external word",
UnspannedAtomicToken::GlobPattern { .. } => "file pattern",
UnspannedAtomicToken::Word { .. } => "word",
UnspannedAtomicToken::SquareDelimited { .. } => "array literal",
}
.spanned(self.span)
}
pub(crate) fn color_tokens(&self, shapes: &mut Vec<Spanned<FlatShape>>) { pub(crate) fn color_tokens(&self, shapes: &mut Vec<Spanned<FlatShape>>) {
match &self.unspanned { match &self.unspanned {
UnspannedAtomicToken::Eof { .. } => {} UnspannedAtomicToken::Eof { .. } => {}
UnspannedAtomicToken::Error { .. } => { UnspannedAtomicToken::Error { .. } => {
return shapes.push(FlatShape::Error.spanned(self.span)) return shapes.push(FlatShape::Error.spanned(self.span))
} }
UnspannedAtomicToken::Operator { .. } => { UnspannedAtomicToken::CompareOperator { .. } => {
return shapes.push(FlatShape::Operator.spanned(self.span)); return shapes.push(FlatShape::CompareOperator.spanned(self.span));
} }
UnspannedAtomicToken::ShorthandFlag { .. } => { UnspannedAtomicToken::ShorthandFlag { .. } => {
return shapes.push(FlatShape::ShorthandFlag.spanned(self.span)); return shapes.push(FlatShape::ShorthandFlag.spanned(self.span));
@ -305,17 +298,30 @@ impl PrettyDebugWithSource for AtomicToken<'_> {
b::intersperse_with_source(nodes.iter(), b::space(), source), b::intersperse_with_source(nodes.iter(), b::space(), source),
"]", "]",
), ),
UnspannedAtomicToken::RoundDelimited { nodes, .. } => b::delimit(
"(",
b::intersperse_with_source(nodes.iter(), b::space(), source),
")",
),
UnspannedAtomicToken::ShorthandFlag { name } => { UnspannedAtomicToken::ShorthandFlag { name } => {
atom_kind("shorthand flag", b::key(name.slice(source))) atom_kind("shorthand flag", b::key(name.slice(source)))
} }
UnspannedAtomicToken::Dot { .. } => atom(b::kind("dot")), UnspannedAtomicToken::Dot { .. } => atom(b::kind("dot")),
UnspannedAtomicToken::Operator { text } => { UnspannedAtomicToken::DotDot { .. } => atom(b::kind("dotdot")),
UnspannedAtomicToken::CompareOperator { text } => {
atom_kind("operator", b::keyword(text.slice(source))) atom_kind("operator", b::keyword(text.slice(source)))
} }
UnspannedAtomicToken::Whitespace { text } => atom_kind( UnspannedAtomicToken::Whitespace { text } => atom_kind(
"whitespace", "whitespace",
b::description(format!("{:?}", text.slice(source))), b::description(format!("{:?}", text.slice(source))),
), ),
UnspannedAtomicToken::Separator { text } => atom_kind(
"separator",
b::description(format!("{:?}", text.slice(source))),
),
UnspannedAtomicToken::Comment { body } => {
atom_kind("comment", b::description(body.slice(source)))
}
}) })
} }
} }
@ -331,12 +337,15 @@ pub enum WhitespaceHandling {
pub struct ExpansionRule { pub struct ExpansionRule {
pub(crate) allow_external_command: bool, pub(crate) allow_external_command: bool,
pub(crate) allow_external_word: bool, pub(crate) allow_external_word: bool,
pub(crate) allow_operator: bool, pub(crate) allow_cmp_operator: bool,
pub(crate) allow_eval_operator: bool,
pub(crate) allow_eof: bool, pub(crate) allow_eof: bool,
pub(crate) allow_separator: bool,
pub(crate) treat_size_as_word: bool, pub(crate) treat_size_as_word: bool,
pub(crate) separate_members: bool, pub(crate) separate_members: bool,
pub(crate) commit_errors: bool, pub(crate) commit_errors: bool,
pub(crate) whitespace: WhitespaceHandling, pub(crate) whitespace: WhitespaceHandling,
pub(crate) allow_comments: bool,
} }
impl ExpansionRule { impl ExpansionRule {
@ -344,12 +353,15 @@ impl ExpansionRule {
ExpansionRule { ExpansionRule {
allow_external_command: false, allow_external_command: false,
allow_external_word: false, allow_external_word: false,
allow_operator: false, allow_eval_operator: false,
allow_cmp_operator: false,
allow_eof: false, allow_eof: false,
treat_size_as_word: false, treat_size_as_word: false,
separate_members: false, separate_members: false,
commit_errors: false, commit_errors: false,
allow_separator: false,
whitespace: WhitespaceHandling::RejectWhitespace, whitespace: WhitespaceHandling::RejectWhitespace,
allow_comments: false,
} }
} }
@ -360,11 +372,14 @@ impl ExpansionRule {
ExpansionRule { ExpansionRule {
allow_external_command: true, allow_external_command: true,
allow_external_word: true, allow_external_word: true,
allow_operator: true, allow_cmp_operator: true,
allow_eval_operator: true,
allow_eof: true, allow_eof: true,
separate_members: false, separate_members: false,
treat_size_as_word: false, treat_size_as_word: false,
commit_errors: true, commit_errors: true,
allow_separator: true,
allow_comments: true,
whitespace: WhitespaceHandling::AllowWhitespace, whitespace: WhitespaceHandling::AllowWhitespace,
} }
} }
@ -376,14 +391,26 @@ impl ExpansionRule {
} }
#[allow(unused)] #[allow(unused)]
pub fn allow_operator(mut self) -> ExpansionRule { pub fn allow_cmp_operator(mut self) -> ExpansionRule {
self.allow_operator = true; self.allow_cmp_operator = true;
self
}
#[allow(unused)]
pub fn no_cmp_operator(mut self) -> ExpansionRule {
self.allow_cmp_operator = false;
self
}
#[allow(unused)]
pub fn allow_eval_operator(mut self) -> ExpansionRule {
self.allow_eval_operator = true;
self self
} }
#[allow(unused)] #[allow(unused)]
pub fn no_operator(mut self) -> ExpansionRule { pub fn no_operator(mut self) -> ExpansionRule {
self.allow_operator = false; self.allow_eval_operator = false;
self self
} }
@ -440,6 +467,30 @@ impl ExpansionRule {
self.whitespace = WhitespaceHandling::RejectWhitespace; self.whitespace = WhitespaceHandling::RejectWhitespace;
self self
} }
#[allow(unused)]
pub fn allow_separator(mut self) -> ExpansionRule {
self.allow_separator = true;
self
}
#[allow(unused)]
pub fn reject_separator(mut self) -> ExpansionRule {
self.allow_separator = false;
self
}
#[allow(unused)]
pub fn allow_comments(mut self) -> ExpansionRule {
self.allow_comments = true;
self
}
#[allow(unused)]
pub fn reject_comments(mut self) -> ExpansionRule {
self.allow_comments = false;
self
}
} }
pub fn expand_atom<'me, 'content>( pub fn expand_atom<'me, 'content>(
@ -578,6 +629,17 @@ fn expand_atom_inner<'me, 'content>(
.into_atomic_token(error.span)); .into_atomic_token(error.span));
} }
TokenNode::Separator(span) if rule.allow_separator => {
peeked.commit();
return Ok(UnspannedAtomicToken::Separator { text: *span }.into_atomic_token(span));
}
TokenNode::Comment(comment) if rule.allow_comments => {
peeked.commit();
return Ok(UnspannedAtomicToken::Comment { body: comment.text }
.into_atomic_token(comment.span()));
}
// [ ... ] // [ ... ]
TokenNode::Delimited(Spanned { TokenNode::Delimited(Spanned {
item: item:
@ -649,8 +711,16 @@ fn expand_atom_inner<'me, 'content>(
// First, the error cases. Each error case corresponds to a expansion rule // First, the error cases. Each error case corresponds to a expansion rule
// flag that can be used to allow the case // flag that can be used to allow the case
// rule.allow_operator // rule.allow_cmp_operator
UnspannedToken::Operator(_) if !rule.allow_operator => return Err(err.error()), UnspannedToken::CompareOperator(_) if !rule.allow_cmp_operator => {
return Err(err.error())
}
// rule.allow_eval_operator
UnspannedToken::EvaluationOperator(_) if !rule.allow_eval_operator => {
return Err(err.error())
}
// rule.allow_external_command // rule.allow_external_command
UnspannedToken::ExternalCommand(_) if !rule.allow_external_command => { UnspannedToken::ExternalCommand(_) if !rule.allow_external_command => {
return Err(ParseError::mismatch( return Err(ParseError::mismatch(
@ -669,8 +739,15 @@ fn expand_atom_inner<'me, 'content>(
UnspannedToken::Number(number) => { UnspannedToken::Number(number) => {
UnspannedAtomicToken::Number { number }.into_atomic_token(token_span) UnspannedAtomicToken::Number { number }.into_atomic_token(token_span)
} }
UnspannedToken::Operator(_) => { UnspannedToken::CompareOperator(_) => {
UnspannedAtomicToken::Operator { text: token_span }.into_atomic_token(token_span) UnspannedAtomicToken::CompareOperator { text: token_span }
.into_atomic_token(token_span)
}
UnspannedToken::EvaluationOperator(EvaluationOperator::Dot) => {
UnspannedAtomicToken::Dot { text: token_span }.into_atomic_token(token_span)
}
UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot) => {
UnspannedAtomicToken::DotDot { text: token_span }.into_atomic_token(token_span)
} }
UnspannedToken::String(body) => { UnspannedToken::String(body) => {
UnspannedAtomicToken::String { body }.into_atomic_token(token_span) UnspannedAtomicToken::String { body }.into_atomic_token(token_span)

View File

@ -26,9 +26,9 @@ impl ExpandExpression for NumberShape {
) -> Result<hir::Expression, ParseError> { ) -> Result<hir::Expression, ParseError> {
parse_single_node(token_nodes, "Number", |token, token_span, err| { parse_single_node(token_nodes, "Number", |token, token_span, err| {
Ok(match token { Ok(match token {
UnspannedToken::GlobPattern | UnspannedToken::Operator(..) => { UnspannedToken::GlobPattern
return Err(err.error()) | UnspannedToken::CompareOperator(..)
} | UnspannedToken::EvaluationOperator(..) => return Err(err.error()),
UnspannedToken::Variable(tag) if tag.slice(context.source) == "it" => { UnspannedToken::Variable(tag) if tag.slice(context.source) == "it" => {
hir::Expression::it_variable(tag, token_span) hir::Expression::it_variable(tag, token_span)
} }
@ -131,7 +131,8 @@ impl ExpandExpression for IntShape {
parse_single_node(token_nodes, "Integer", |token, token_span, err| { parse_single_node(token_nodes, "Integer", |token, token_span, err| {
Ok(match token { Ok(match token {
UnspannedToken::GlobPattern UnspannedToken::GlobPattern
| UnspannedToken::Operator(..) | UnspannedToken::CompareOperator(..)
| UnspannedToken::EvaluationOperator(..)
| UnspannedToken::ExternalWord => return Err(err.error()), | UnspannedToken::ExternalWord => return Err(err.error()),
UnspannedToken::Variable(span) if span.slice(context.source) == "it" => { UnspannedToken::Variable(span) if span.slice(context.source) == "it" => {
hir::Expression::it_variable(span, token_span) hir::Expression::it_variable(span, token_span)

View File

@ -2,8 +2,9 @@ use crate::hir::syntax_shape::{
expand_atom, expand_bare, expression::expand_file_path, ExpandContext, ExpandExpression, expand_atom, expand_bare, expression::expand_file_path, ExpandContext, ExpandExpression,
ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, UnspannedAtomicToken, ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, UnspannedAtomicToken,
}; };
use crate::parse::operator::EvaluationOperator;
use crate::parse::tokens::{Token, UnspannedToken}; use crate::parse::tokens::{Token, UnspannedToken};
use crate::{hir, hir::TokensIterator, Operator, TokenNode}; use crate::{hir, hir::TokensIterator, TokenNode};
use nu_errors::{ParseError, ShellError}; use nu_errors::{ParseError, ShellError};
#[cfg(coloring_in_tokens)] #[cfg(coloring_in_tokens)]
use nu_protocol::ShellTypeName; use nu_protocol::ShellTypeName;
@ -26,6 +27,8 @@ impl FallibleColorSyntax for PatternShape {
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
use nu_protocol::SpannedTypeName;
token_nodes.atomic(|token_nodes| { token_nodes.atomic(|token_nodes| {
let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?; let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?;
@ -125,7 +128,7 @@ impl ExpandSyntax for BarePatternShape {
.. ..
}) })
| TokenNode::Token(Token { | TokenNode::Token(Token {
unspanned: UnspannedToken::Operator(Operator::Dot), unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot),
.. ..
}) })
| TokenNode::Token(Token { | TokenNode::Token(Token {

View File

@ -0,0 +1,154 @@
use crate::hir::syntax_shape::expression::UnspannedAtomicToken;
use crate::hir::syntax_shape::{
color_fallible_syntax, expand_atom, expand_expr, AnyExpressionShape, ExpandContext,
ExpandExpression, ExpansionRule, FallibleColorSyntax, FlatShape,
};
use crate::parse::operator::EvaluationOperator;
use crate::parse::token_tree::TokenNode;
use crate::parse::tokens::{Token, UnspannedToken};
use crate::{hir, hir::TokensIterator};
use nu_errors::{ParseError, ShellError};
use nu_protocol::SpannedTypeName;
use nu_source::SpannedItem;
/// Shape for a range expression: `<expr>..<expr>`.
#[derive(Debug, Copy, Clone)]
pub struct RangeShape;
impl ExpandExpression for RangeShape {
    fn name(&self) -> &'static str {
        "range"
    }

    /// Expand `<expr>..<expr>` into a range expression.
    ///
    /// Parsed atomically: if the left operand, the `..` operator, or the right
    /// operand fails to expand, the iterator is left untouched.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ParseError> {
        token_nodes.atomic_parse(|token_nodes| {
            // left operand
            let left = expand_expr(&AnyExpressionShape, token_nodes, context)?;

            // the `..` evaluation operator between the operands
            let dotdot = expand_atom(
                token_nodes,
                "..",
                context,
                ExpansionRule::new().allow_eval_operator(),
            )?;

            let operator_span = match dotdot.unspanned {
                UnspannedAtomicToken::DotDot { text } => text,
                _ => return Err(ParseError::mismatch("..", dotdot.spanned_type_name())),
            };

            // right operand
            let right = expand_expr(&AnyExpressionShape, token_nodes, context)?;

            Ok(hir::Expression::range(left, operator_span, right))
        })
    }
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for RangeShape {
    type Info = ();
    type Input = ();
    fn name(&self) -> &'static str {
        "RangeShape"
    }
    /// Color `<expr>..<expr>` as three pieces: left operand, `..`, right
    /// operand. The whole range is colored atomically — if any piece fails,
    /// no shapes are committed.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        token_nodes.atomic_parse(|token_nodes| {
            color_fallible_syntax(&AnyExpressionShape, token_nodes, context)?;
            color_fallible_syntax(&DotDotShape, token_nodes, context)?;
            color_fallible_syntax(&AnyExpressionShape, token_nodes, context)
        })?;
        Ok(())
    }
}
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for RangeShape {
    type Info = ();
    type Input = ();
    /// Same as the `coloring_in_tokens` variant, except shapes are pushed into
    /// the caller-supplied `shapes` vector instead of onto the iterator.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<nu_source::Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        // Atomic: on failure no partial shapes survive.
        token_nodes.atomic_parse(|token_nodes| {
            color_fallible_syntax(&AnyExpressionShape, token_nodes, context, shapes)?;
            color_fallible_syntax(&DotDotShape, token_nodes, context, shapes)?;
            color_fallible_syntax(&AnyExpressionShape, token_nodes, context, shapes)
        })?;
        Ok(())
    }
}
/// Shape matching the literal `..` evaluation operator token.
#[derive(Debug, Copy, Clone)]
struct DotDotShape;
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for DotDotShape {
    type Info = ();
    type Input = ();
    fn name(&self) -> &'static str {
        ".."
    }
    /// Peek the next token; if it is the `..` evaluation operator, consume it
    /// and color it `FlatShape::DotDot`. Any other token is a type error and
    /// the peek is not committed.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &Self::Input,
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
    ) -> Result<Self::Info, ShellError> {
        let peeked = token_nodes.peek_any().not_eof("..")?;
        match &peeked.node {
            TokenNode::Token(Token {
                unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot),
                span,
            }) => {
                // commit releases the peek so the iterator can record the shape
                peeked.commit();
                token_nodes.color_shape(FlatShape::DotDot.spanned(span));
                Ok(())
            }
            token => Err(ShellError::type_error("..", token.spanned_type_name())),
        }
    }
}
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for DotDotShape {
    type Info = ();
    type Input = ();
    /// Same as the `coloring_in_tokens` variant, except the `DotDot` shape is
    /// pushed into the caller-supplied `shapes` vector.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &Self::Input,
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
        shapes: &mut Vec<nu_source::Spanned<FlatShape>>,
    ) -> Result<Self::Info, ShellError> {
        let peeked = token_nodes.peek_any().not_eof("..")?;
        match &peeked.node {
            TokenNode::Token(Token {
                unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot),
                span,
            }) => {
                // commit consumes the token; only then record the shape
                peeked.commit();
                shapes.push(FlatShape::DotDot.spanned(span));
                Ok(())
            }
            token => Err(ShellError::type_error("..", token.spanned_type_name())),
        }
    }
}

View File

@ -91,7 +91,8 @@ impl ExpandExpression for StringShape {
parse_single_node(token_nodes, "String", |token, token_span, err| { parse_single_node(token_nodes, "String", |token, token_span, err| {
Ok(match token { Ok(match token {
UnspannedToken::GlobPattern UnspannedToken::GlobPattern
| UnspannedToken::Operator(..) | UnspannedToken::CompareOperator(..)
| UnspannedToken::EvaluationOperator(..)
| UnspannedToken::ExternalWord => return Err(err.error()), | UnspannedToken::ExternalWord => return Err(err.error()),
UnspannedToken::Variable(span) => { UnspannedToken::Variable(span) => {
expand_variable(span, token_span, &context.source) expand_variable(span, token_span, &context.source)

View File

@ -5,7 +5,7 @@ use crate::hir::syntax_shape::{
StringShape, TestSyntax, UnspannedAtomicToken, WhitespaceShape, StringShape, TestSyntax, UnspannedAtomicToken, WhitespaceShape,
}; };
use crate::parse::tokens::{RawNumber, UnspannedToken}; use crate::parse::tokens::{RawNumber, UnspannedToken};
use crate::{hir, hir::Expression, hir::TokensIterator, Operator}; use crate::{hir, hir::Expression, hir::TokensIterator, CompareOperator, EvaluationOperator};
use nu_errors::ShellError; use nu_errors::ShellError;
use nu_protocol::{PathMember, ShellTypeName}; use nu_protocol::{PathMember, ShellTypeName};
use nu_source::{ use nu_source::{
@ -271,7 +271,7 @@ impl ExpandSyntax for PathTailShape {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum ExpressionContinuation { pub enum ExpressionContinuation {
DotSuffix(Span, PathMember), DotSuffix(Span, PathMember),
InfixSuffix(Spanned<Operator>, Expression), InfixSuffix(Spanned<CompareOperator>, Expression),
} }
impl PrettyDebugWithSource for ExpressionContinuation { impl PrettyDebugWithSource for ExpressionContinuation {
@ -484,6 +484,8 @@ impl FallibleColorSyntax for VariableShape {
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
use nu_protocol::SpannedTypeName;
let atom = expand_atom( let atom = expand_atom(
token_nodes, token_nodes,
"variable", "variable",
@ -1032,6 +1034,8 @@ impl FallibleColorSyntax for ColorableDotShape {
_context: &ExpandContext, _context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
use nu_protocol::SpannedTypeName;
let peeked = token_nodes.peek_any().not_eof("dot")?; let peeked = token_nodes.peek_any().not_eof("dot")?;
match peeked.node { match peeked.node {
@ -1104,7 +1108,7 @@ impl ExpandSyntax for DotShape {
) -> Result<Self::Output, ParseError> { ) -> Result<Self::Output, ParseError> {
parse_single_node(token_nodes, "dot", |token, token_span, _| { parse_single_node(token_nodes, "dot", |token, token_span, _| {
Ok(match token { Ok(match token {
UnspannedToken::Operator(Operator::Dot) => token_span, UnspannedToken::EvaluationOperator(EvaluationOperator::Dot) => token_span,
_ => { _ => {
return Err(ParseError::mismatch( return Err(ParseError::mismatch(
"dot", "dot",
@ -1143,9 +1147,9 @@ impl FallibleColorSyntax for InfixShape {
"infix operator", "infix operator",
|token, token_span, err| { |token, token_span, err| {
match token { match token {
// If it's an operator (and not `.`), it's a match // If it's a comparison operator, it's a match
UnspannedToken::Operator(operator) if operator != Operator::Dot => { UnspannedToken::CompareOperator(_operator) => {
shapes.push(FlatShape::Operator.spanned(token_span)); shapes.push(FlatShape::CompareOperator.spanned(token_span));
Ok(()) Ok(())
} }
@ -1191,9 +1195,7 @@ impl FallibleColorSyntax for InfixShape {
|token, token_span, _| { |token, token_span, _| {
match token { match token {
// If it's an operator (and not `.`), it's a match // If it's an operator (and not `.`), it's a match
UnspannedToken::Operator(operator) if operator != Operator::Dot => { UnspannedToken::CompareOperator(_operator) => Ok(token_span),
Ok(token_span)
}
// Otherwise, it's not a match // Otherwise, it's not a match
_ => Err(ParseError::mismatch( _ => Err(ParseError::mismatch(
@ -1206,7 +1208,7 @@ impl FallibleColorSyntax for InfixShape {
checkpoint checkpoint
.iterator .iterator
.color_shape(FlatShape::Operator.spanned(operator_span)); .color_shape(FlatShape::CompareOperator.spanned(operator_span));
// An infix operator must be followed by whitespace. If no whitespace was found, fail // An infix operator must be followed by whitespace. If no whitespace was found, fail
color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context)?; color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context)?;
@ -1266,7 +1268,7 @@ impl ExpandSyntax for InfixShape {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct InfixInnerSyntax { pub struct InfixInnerSyntax {
pub operator: Spanned<Operator>, pub operator: Spanned<CompareOperator>,
} }
impl HasSpan for InfixInnerSyntax { impl HasSpan for InfixInnerSyntax {
@ -1298,12 +1300,10 @@ impl ExpandSyntax for InfixInnerShape {
) -> Result<Self::Output, ParseError> { ) -> Result<Self::Output, ParseError> {
parse_single_node(token_nodes, "infix operator", |token, token_span, err| { parse_single_node(token_nodes, "infix operator", |token, token_span, err| {
Ok(match token { Ok(match token {
// If it's an operator (and not `.`), it's a match // If it's a comparison operator, it's a match
UnspannedToken::Operator(operator) if operator != Operator::Dot => { UnspannedToken::CompareOperator(operator) => InfixInnerSyntax {
InfixInnerSyntax {
operator: operator.spanned(token_span), operator: operator.spanned(token_span),
} },
}
// Otherwise, it's not a match // Otherwise, it's not a match
_ => return Err(err.error()), _ => return Err(err.error()),

View File

@ -1,5 +1,5 @@
use crate::parse::flag::{Flag, FlagKind}; use crate::parse::flag::{Flag, FlagKind};
use crate::parse::operator::Operator; use crate::parse::operator::EvaluationOperator;
use crate::parse::token_tree::{Delimiter, TokenNode}; use crate::parse::token_tree::{Delimiter, TokenNode};
use crate::parse::tokens::{RawNumber, UnspannedToken}; use crate::parse::tokens::{RawNumber, UnspannedToken};
use nu_source::{HasSpan, Span, Spanned, SpannedItem, Text}; use nu_source::{HasSpan, Span, Spanned, SpannedItem, Text};
@ -10,8 +10,9 @@ pub enum FlatShape {
CloseDelimiter(Delimiter), CloseDelimiter(Delimiter),
ItVariable, ItVariable,
Variable, Variable,
Operator, CompareOperator,
Dot, Dot,
DotDot,
InternalCommand, InternalCommand,
ExternalCommand, ExternalCommand,
ExternalWord, ExternalWord,
@ -27,7 +28,9 @@ pub enum FlatShape {
Int, Int,
Decimal, Decimal,
Whitespace, Whitespace,
Separator,
Error, Error,
Comment,
Size { number: Span, unit: Span }, Size { number: Span, unit: Span },
} }
@ -41,10 +44,15 @@ impl FlatShape {
UnspannedToken::Number(RawNumber::Decimal(_)) => { UnspannedToken::Number(RawNumber::Decimal(_)) => {
shapes.push(FlatShape::Decimal.spanned(token.span)) shapes.push(FlatShape::Decimal.spanned(token.span))
} }
UnspannedToken::Operator(Operator::Dot) => { UnspannedToken::EvaluationOperator(EvaluationOperator::Dot) => {
shapes.push(FlatShape::Dot.spanned(token.span)) shapes.push(FlatShape::Dot.spanned(token.span))
} }
UnspannedToken::Operator(_) => shapes.push(FlatShape::Operator.spanned(token.span)), UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot) => {
shapes.push(FlatShape::DotDot.spanned(token.span))
}
UnspannedToken::CompareOperator(_) => {
shapes.push(FlatShape::CompareOperator.spanned(token.span))
}
UnspannedToken::String(_) => shapes.push(FlatShape::String.spanned(token.span)), UnspannedToken::String(_) => shapes.push(FlatShape::String.spanned(token.span)),
UnspannedToken::Variable(v) if v.slice(source) == "it" => { UnspannedToken::Variable(v) if v.slice(source) == "it" => {
shapes.push(FlatShape::ItVariable.spanned(token.span)) shapes.push(FlatShape::ItVariable.spanned(token.span))
@ -92,6 +100,8 @@ impl FlatShape {
.. ..
}) => shapes.push(FlatShape::ShorthandFlag.spanned(*span)), }) => shapes.push(FlatShape::ShorthandFlag.spanned(*span)),
TokenNode::Whitespace(_) => shapes.push(FlatShape::Whitespace.spanned(token.span())), TokenNode::Whitespace(_) => shapes.push(FlatShape::Whitespace.spanned(token.span())),
TokenNode::Separator(_) => shapes.push(FlatShape::Separator.spanned(token.span())),
TokenNode::Comment(_) => shapes.push(FlatShape::Comment.spanned(token.span())),
TokenNode::Error(v) => shapes.push(FlatShape::Error.spanned(v.span)), TokenNode::Error(v) => shapes.push(FlatShape::Error.spanned(v.span)),
} }
} }

View File

@ -8,6 +8,7 @@ use crate::TokenNode;
#[allow(unused)] #[allow(unused)]
use getset::{Getters, MutGetters}; use getset::{Getters, MutGetters};
use nu_errors::{ParseError, ShellError}; use nu_errors::{ParseError, ShellError};
use nu_protocol::SpannedTypeName;
use nu_source::{HasFallibleSpan, HasSpan, Span, Spanned, SpannedItem, Tag, Text}; use nu_source::{HasFallibleSpan, HasSpan, Span, Spanned, SpannedItem, Tag, Text};
cfg_if::cfg_if! { cfg_if::cfg_if! {
@ -149,7 +150,7 @@ impl<'content, 'me> PeekedNode<'content, 'me> {
pub fn peek_error(node: &Option<&TokenNode>, eof_span: Span, expected: &'static str) -> ParseError { pub fn peek_error(node: &Option<&TokenNode>, eof_span: Span, expected: &'static str) -> ParseError {
match node { match node {
None => ParseError::unexpected_eof(expected, eof_span), None => ParseError::unexpected_eof(expected, eof_span),
Some(node) => ParseError::mismatch(expected, node.type_name().spanned(node.span())), Some(node) => ParseError::mismatch(expected, node.spanned_type_name()),
} }
} }
@ -498,10 +499,10 @@ impl<'content> TokensIterator<'content> {
/// Use a checkpoint when you need to peek more than one token ahead, but can't be sure /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
/// that you'll succeed. /// that you'll succeed.
pub fn atomic_parse<'me, T>( pub fn atomic_parse<'me, T, E>(
&'me mut self, &'me mut self,
block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, ParseError>, block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, E>,
) -> Result<T, ParseError> { ) -> Result<T, E> {
let state = &mut self.state; let state = &mut self.state;
let index = state.index; let index = state.index;

View File

@ -12,9 +12,9 @@ pub use crate::hir::syntax_shape::{
pub use crate::hir::tokens_iterator::TokensIterator; pub use crate::hir::tokens_iterator::TokensIterator;
pub use crate::parse::files::Files; pub use crate::parse::files::Files;
pub use crate::parse::flag::Flag; pub use crate::parse::flag::Flag;
pub use crate::parse::operator::Operator; pub use crate::parse::operator::{CompareOperator, EvaluationOperator};
pub use crate::parse::parser::pipeline;
pub use crate::parse::parser::Number; pub use crate::parse::parser::Number;
pub use crate::parse::parser::{module, pipeline};
pub use crate::parse::token_tree::{Delimiter, TokenNode}; pub use crate::parse::token_tree::{Delimiter, TokenNode};
pub use crate::parse::token_tree_builder::TokenTreeBuilder; pub use crate::parse::token_tree_builder::TokenTreeBuilder;
@ -29,3 +29,12 @@ pub fn parse(input: &str) -> Result<TokenNode, ShellError> {
Err(err) => Err(ShellError::parse_error(err)), Err(err) => Err(ShellError::parse_error(err)),
} }
} }
/// Parse a multi-line script (a module) into a single `TokenNode`.
///
/// Unlike `parse`, this uses the `module` grammar entry point, which accepts
/// separators (`;` and newlines) and comments between pipelines.
pub fn parse_script(input: &str) -> Result<TokenNode, ShellError> {
    // The logger may already be initialized by a previous call; ignore failure.
    let _ = pretty_env_logger::try_init();

    module(nom_input(input))
        .map(|(_rest, val)| val)
        .map_err(|err| ShellError::parse_error(err))
}

View File

@ -1,4 +1,5 @@
pub(crate) mod call_node; pub(crate) mod call_node;
pub(crate) mod comment;
pub(crate) mod files; pub(crate) mod files;
pub(crate) mod flag; pub(crate) mod flag;
pub(crate) mod operator; pub(crate) mod operator;

View File

@ -0,0 +1,42 @@
use derive_new::new;
use getset::Getters;
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span};
use serde::{Deserialize, Serialize};
/// The kind of a comment. Currently only line comments (`# ...`) exist.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub enum CommentKind {
    Line,
}
/// A comment token in the source text.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Getters, new)]
pub struct Comment {
    // The kind of comment (only `Line` today).
    pub(crate) kind: CommentKind,
    // Span of the comment body, excluding the leading `#`.
    pub(crate) text: Span,
    // Span of the entire comment, including the leading `#`.
    pub(crate) span: Span,
}
impl Comment {
    /// Construct a line comment from the span of its text (without the `#`)
    /// and the outer span of the whole comment (with the `#`).
    pub fn line(text: impl Into<Span>, outer: impl Into<Span>) -> Comment {
        let text = text.into();
        let span = outer.into();

        Comment {
            kind: CommentKind::Line,
            text,
            span,
        }
    }
}
impl PrettyDebugWithSource for Comment {
    // Render the comment as its prefix (`#` for line comments) followed by
    // the comment text sliced out of the original source.
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        let prefix = match self.kind {
            CommentKind::Line => b::description("#"),
        };
        prefix + b::description(self.text.slice(source))
    }
}
impl HasSpan for Comment {
    // The outer span of the comment, including the leading `#`.
    fn span(&self) -> Span {
        self.span
    }
}

View File

@ -34,13 +34,15 @@ impl language_reporting::ReportingFiles for Files {
} }
fn location(&self, _file: Self::FileId, byte_index: usize) -> Option<Location> { fn location(&self, _file: Self::FileId, byte_index: usize) -> Option<Location> {
trace!("finding location for {}", byte_index);
let source = &self.snippet; let source = &self.snippet;
let mut seen_lines = 0; let mut seen_lines = 0;
let mut seen_bytes = 0; let mut seen_bytes = 0;
for (pos, slice) in source.match_indices('\n') { for (pos, slice) in source.match_indices('\n') {
trace!( trace!(
"SEARCH={} SEEN={} POS={} SLICE={:?} LEN={} ALL={:?}", "searching byte_index={} seen_bytes={} pos={} slice={:?} slice.len={} source={:?}",
byte_index, byte_index,
seen_bytes, seen_bytes,
pos, pos,
@ -50,9 +52,19 @@ impl language_reporting::ReportingFiles for Files {
); );
if pos >= byte_index { if pos >= byte_index {
trace!(
"returning {}:{} seen_lines={} byte_index={} pos={} seen_bytes={}",
seen_lines,
byte_index,
pos,
seen_lines,
byte_index,
seen_bytes
);
return Some(language_reporting::Location::new( return Some(language_reporting::Location::new(
seen_lines, seen_lines,
byte_index - seen_bytes, byte_index - pos,
)); ));
} else { } else {
seen_lines += 1; seen_lines += 1;
@ -61,30 +73,70 @@ impl language_reporting::ReportingFiles for Files {
} }
if seen_lines == 0 { if seen_lines == 0 {
Some(language_reporting::Location::new(0, byte_index)) trace!("seen_lines=0 end={}", source.len() - 1);
// if we got here, there were no newlines in the source
Some(language_reporting::Location::new(0, source.len() - 1))
} else { } else {
panic!("byte index {} wasn't valid", byte_index); trace!(
"last line seen_lines={} end={}",
seen_lines,
source.len() - 1 - byte_index
);
// if we got here and we didn't return, it should mean that we're talking about
// the last line
Some(language_reporting::Location::new(
seen_lines,
source.len() - 1 - byte_index,
))
} }
} }
fn line_span(&self, _file: Self::FileId, lineno: usize) -> Option<Self::Span> { fn line_span(&self, _file: Self::FileId, lineno: usize) -> Option<Self::Span> {
trace!("finding line_span for {}", lineno);
let source = &self.snippet; let source = &self.snippet;
let mut seen_lines = 0; let mut seen_lines = 0;
let mut seen_bytes = 0; let mut seen_bytes = 0;
for (pos, _) in source.match_indices('\n') { for (pos, _) in source.match_indices('\n') {
trace!(
"lineno={} seen_lines={} seen_bytes={} pos={}",
lineno,
seen_lines,
seen_bytes,
pos
);
if seen_lines == lineno { if seen_lines == lineno {
return Some(Span::new(seen_bytes, pos + 1)); trace!("returning start={} end={}", seen_bytes, pos);
// If the number of seen lines is the lineno, seen_bytes is the start of the
// line and pos is the end of the line
return Some(Span::new(seen_bytes, pos));
} else { } else {
// If it's not, increment seen_lines, and move seen_bytes to the beginning of
// the next line
seen_lines += 1; seen_lines += 1;
seen_bytes = pos + 1; seen_bytes = pos + 1;
} }
} }
if seen_lines == 0 { if seen_lines == 0 {
trace!("returning start={} end={}", 0, self.snippet.len() - 1);
// if we got here, there were no newlines in the source
Some(Span::new(0, self.snippet.len() - 1)) Some(Span::new(0, self.snippet.len() - 1))
} else { } else {
None trace!(
"returning start={} end={}",
seen_bytes,
self.snippet.len() - 1
);
// if we got here and we didn't return, it should mean that we're talking about
// the last line
Some(Span::new(seen_bytes, self.snippet.len() - 1))
} }
} }

View File

@ -4,63 +4,102 @@ use serde::{Deserialize, Serialize};
use std::str::FromStr; use std::str::FromStr;
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub enum Operator { pub enum CompareOperator {
Equal, Equal,
NotEqual, NotEqual,
LessThan, LessThan,
GreaterThan, GreaterThan,
LessThanOrEqual, LessThanOrEqual,
GreaterThanOrEqual, GreaterThanOrEqual,
Dot,
Contains, Contains,
NotContains, NotContains,
} }
impl PrettyDebug for Operator { impl PrettyDebug for CompareOperator {
fn pretty(&self) -> DebugDocBuilder { fn pretty(&self) -> DebugDocBuilder {
b::operator(self.as_str()) b::operator(self.as_str())
} }
} }
impl Operator { impl CompareOperator {
pub fn print(&self) -> String { pub fn print(&self) -> String {
self.as_str().to_string() self.as_str().to_string()
} }
pub fn as_str(&self) -> &str { pub fn as_str(&self) -> &str {
match *self { match *self {
Operator::Equal => "==", CompareOperator::Equal => "==",
Operator::NotEqual => "!=", CompareOperator::NotEqual => "!=",
Operator::LessThan => "<", CompareOperator::LessThan => "<",
Operator::GreaterThan => ">", CompareOperator::GreaterThan => ">",
Operator::LessThanOrEqual => "<=", CompareOperator::LessThanOrEqual => "<=",
Operator::GreaterThanOrEqual => ">=", CompareOperator::GreaterThanOrEqual => ">=",
Operator::Dot => ".", CompareOperator::Contains => "=~",
Operator::Contains => "=~", CompareOperator::NotContains => "!~",
Operator::NotContains => "!~",
} }
} }
} }
impl From<&str> for Operator { impl From<&str> for CompareOperator {
fn from(input: &str) -> Operator { fn from(input: &str) -> CompareOperator {
Operator::from_str(input).unwrap() CompareOperator::from_str(input).unwrap()
} }
} }
impl FromStr for Operator { impl FromStr for CompareOperator {
type Err = (); type Err = ();
fn from_str(input: &str) -> Result<Self, <Self as std::str::FromStr>::Err> { fn from_str(input: &str) -> Result<Self, <Self as std::str::FromStr>::Err> {
match input { match input {
"==" => Ok(Operator::Equal), "==" => Ok(CompareOperator::Equal),
"!=" => Ok(Operator::NotEqual), "!=" => Ok(CompareOperator::NotEqual),
"<" => Ok(Operator::LessThan), "<" => Ok(CompareOperator::LessThan),
">" => Ok(Operator::GreaterThan), ">" => Ok(CompareOperator::GreaterThan),
"<=" => Ok(Operator::LessThanOrEqual), "<=" => Ok(CompareOperator::LessThanOrEqual),
">=" => Ok(Operator::GreaterThanOrEqual), ">=" => Ok(CompareOperator::GreaterThanOrEqual),
"." => Ok(Operator::Dot), "=~" => Ok(CompareOperator::Contains),
"=~" => Ok(Operator::Contains), "!~" => Ok(CompareOperator::NotContains),
"!~" => Ok(Operator::NotContains), _ => Err(()),
}
}
}
/// Operators handled during evaluation rather than comparison:
/// `.` (member access) and `..` (range construction).
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub enum EvaluationOperator {
    Dot,
    DotDot,
}
impl PrettyDebug for EvaluationOperator {
    // Pretty-print the operator using its source form (`.` or `..`).
    fn pretty(&self) -> DebugDocBuilder {
        b::operator(self.as_str())
    }
}
impl EvaluationOperator {
    /// The literal source text of this operator.
    pub fn as_str(&self) -> &str {
        match self {
            EvaluationOperator::Dot => ".",
            EvaluationOperator::DotDot => "..",
        }
    }

    /// An owned copy of the operator's source text.
    pub fn print(&self) -> String {
        self.as_str().to_string()
    }
}
impl From<&str> for EvaluationOperator {
    // Panics if `input` is not `.` or `..`; use `FromStr` for fallible parsing.
    fn from(input: &str) -> EvaluationOperator {
        EvaluationOperator::from_str(input).unwrap()
    }
}
impl FromStr for EvaluationOperator {
type Err = ();
fn from_str(input: &str) -> Result<Self, <Self as std::str::FromStr>::Err> {
match input {
"." => Ok(EvaluationOperator::Dot),
".." => Ok(EvaluationOperator::DotDot),
_ => Err(()), _ => Err(()),
} }
} }

View File

@ -14,6 +14,7 @@ use nom::sequence::*;
use bigdecimal::BigDecimal; use bigdecimal::BigDecimal;
use derive_new::new; use derive_new::new;
use enumflags2::BitFlags;
use log::trace; use log::trace;
use nom::dbg; use nom::dbg;
use nom::*; use nom::*;
@ -32,7 +33,7 @@ use serde::{Deserialize, Serialize};
use std::fmt::Debug; use std::fmt::Debug;
use std::str::FromStr; use std::str::FromStr;
macro_rules! operator { macro_rules! cmp_operator {
($name:tt : $token:tt ) => { ($name:tt : $token:tt ) => {
#[tracable_parser] #[tracable_parser]
pub fn $name(input: NomSpan) -> IResult<NomSpan, TokenNode> { pub fn $name(input: NomSpan) -> IResult<NomSpan, TokenNode> {
@ -42,21 +43,38 @@ macro_rules! operator {
Ok(( Ok((
input, input,
TokenTreeBuilder::spanned_op(tag.fragment, Span::new(start, end)), TokenTreeBuilder::spanned_cmp_op(tag.fragment, Span::new(start, end)),
)) ))
} }
}; };
} }
operator! { gt: ">" } macro_rules! eval_operator {
operator! { lt: "<" } ($name:tt : $token:tt ) => {
operator! { gte: ">=" } #[tracable_parser]
operator! { lte: "<=" } pub fn $name(input: NomSpan) -> IResult<NomSpan, TokenNode> {
operator! { eq: "==" } let start = input.offset;
operator! { neq: "!=" } let (input, tag) = tag($token)(input)?;
operator! { dot: "." } let end = input.offset;
operator! { cont: "=~" }
operator! { ncont: "!~" } Ok((
input,
TokenTreeBuilder::spanned_eval_op(tag.fragment, Span::new(start, end)),
))
}
};
}
cmp_operator! { gt: ">" }
cmp_operator! { lt: "<" }
cmp_operator! { gte: ">=" }
cmp_operator! { lte: "<=" }
cmp_operator! { eq: "==" }
cmp_operator! { neq: "!=" }
cmp_operator! { cont: "=~" }
cmp_operator! { ncont: "!~" }
eval_operator! { dot: "." }
eval_operator! { dotdot: ".." }
#[derive(Debug, Clone, Eq, PartialEq, Hash, Ord, PartialOrd, Serialize, Deserialize)] #[derive(Debug, Clone, Eq, PartialEq, Hash, Ord, PartialOrd, Serialize, Deserialize)]
pub enum Number { pub enum Number {
@ -213,6 +231,17 @@ pub fn raw_number(input: NomSpan) -> IResult<NomSpan, RawNumber> {
} }
} }
let dotdot_result = dotdot(input);
match dotdot_result {
// If we see a `..` immediately after an integer, it's a range, not a decimal
Ok((dotdot_input, _)) => {
return Ok((input, RawNumber::int(Span::new(start, input.offset))))
}
Err(_) => {}
}
let dot: IResult<NomSpan, NomSpan, (NomSpan, nom::error::ErrorKind)> = tag(".")(input); let dot: IResult<NomSpan, NomSpan, (NomSpan, nom::error::ErrorKind)> = tag(".")(input);
let input = match dot { let input = match dot {
@ -285,7 +314,7 @@ pub fn string(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn external(input: NomSpan) -> IResult<NomSpan, TokenNode> { pub fn external(input: NomSpan) -> IResult<NomSpan, TokenNode> {
let start = input.offset; let start = input.offset;
let (input, _) = tag("^")(input)?; let (input, _) = tag("^")(input)?;
let (input, bare) = take_while(is_bare_char)(input)?; let (input, bare) = take_while(is_file_char)(input)?;
let end = input.offset; let end = input.offset;
Ok(( Ok((
@ -294,52 +323,186 @@ pub fn external(input: NomSpan) -> IResult<NomSpan, TokenNode> {
)) ))
} }
#[tracable_parser] fn word<'a, T, U, V>(
pub fn pattern(input: NomSpan) -> IResult<NomSpan, TokenNode> { start_predicate: impl Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, U>,
next_predicate: impl Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, V> + Copy,
into: impl Fn(Span) -> T,
) -> impl Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, T> {
move |input: NomSpan| {
let start = input.offset; let start = input.offset;
let (input, _) = take_while1(is_start_glob_char)(input)?;
let (input, _) = take_while(is_glob_char)(input)?; let (input, _) = start_predicate(input)?;
let (input, _) = many0(next_predicate)(input)?;
let next_char = &input.fragment.chars().nth(0); let next_char = &input.fragment.chars().nth(0);
if let Some(next_char) = next_char { match next_char {
if is_external_word_char(*next_char) { Some('.') => {}
Some(next_char)
if is_external_word_char(*next_char) || is_glob_specific_char(*next_char) =>
{
return Err(nom::Err::Error(nom::error::make_error( return Err(nom::Err::Error(nom::error::make_error(
input, input,
nom::error::ErrorKind::TakeWhile1, nom::error::ErrorKind::TakeWhile1,
))); )));
} }
_ => {}
} }
let end = input.offset; let end = input.offset;
Ok(( Ok((input, into(Span::new(start, end))))
}
}
pub fn matches(cond: fn(char) -> bool) -> impl Fn(NomSpan) -> IResult<NomSpan, NomSpan> + Copy {
move |input: NomSpan| match input.iter_elements().next() {
Option::Some(c) if cond(c) => Ok((input.slice(1..), input.slice(0..1))),
_ => Err(nom::Err::Error(nom::error::ParseError::from_error_kind(
input, input,
TokenTreeBuilder::spanned_pattern(Span::new(start, end)), nom::error::ErrorKind::Many0,
)) ))),
}
} }
#[tracable_parser] #[tracable_parser]
pub fn bare(input: NomSpan) -> IResult<NomSpan, TokenNode> { pub fn pattern(input: NomSpan) -> IResult<NomSpan, TokenNode> {
let start = input.offset; word(
let (input, _) = take_while1(is_start_bare_char)(input)?; start_pattern,
let (input, last) = take_while(is_bare_char)(input)?; matches(is_glob_char),
TokenTreeBuilder::spanned_pattern,
)(input)
}
let next_char = &input.fragment.chars().nth(0); #[tracable_parser]
let prev_char = last.fragment.chars().nth(0); pub fn start_pattern(input: NomSpan) -> IResult<NomSpan, NomSpan> {
alt((take_while1(is_dot), matches(is_start_glob_char)))(input)
}
if let Some(next_char) = next_char { #[tracable_parser]
if is_external_word_char(*next_char) || is_glob_specific_char(*next_char) { pub fn filename(input: NomSpan) -> IResult<NomSpan, TokenNode> {
return Err(nom::Err::Error(nom::error::make_error( let start_pos = input.offset;
let (mut input, mut saw_special) = match start_file_char(input) {
Err(err) => return Err(err),
Ok((input, special)) => (input, special),
};
loop {
if saw_special.is_empty() {
match continue_file_char(input) {
Err(_) => {
return Ok((
input, input,
nom::error::ErrorKind::TakeWhile1, TokenTreeBuilder::spanned_bare((start_pos, input.offset)),
))); ))
}
Ok((next_input, special)) => {
saw_special |= special;
input = next_input;
} }
} }
} else {
let rest = after_sep_file(input);
let end = input.offset; let (input, span, updated_special) = match rest {
Err(_) => (input, (start_pos, input.offset), saw_special),
Ok((input, new_special)) => {
(input, (start_pos, input.offset), saw_special | new_special)
}
};
Ok((input, TokenTreeBuilder::spanned_bare(Span::new(start, end)))) if updated_special.contains(SawSpecial::Glob) {
return Ok((input, TokenTreeBuilder::spanned_pattern(span)));
} else {
return Ok((input, TokenTreeBuilder::spanned_bare(span)));
}
}
}
}
/// Bitflags recording which special characters were seen while scanning a
/// filename-like word; seeing `Glob` turns the word into a pattern token.
#[derive(BitFlags, Copy, Clone, Eq, PartialEq)]
enum SawSpecial {
    // A path separator was encountered.
    PathSeparator = 0b01,
    // A glob-specific character was encountered.
    Glob = 0b10,
}
#[tracable_parser]
/// Match the first character of a filename-like word, reporting which
/// special flags (path separator / glob) it contributes.
fn start_file_char(input: NomSpan) -> IResult<NomSpan, BitFlags<SawSpecial>> {
    // A special character carries its own flags.
    if let Ok((input, special)) = special_file_char(input) {
        return Ok((input, special));
    }

    // Otherwise accept an ordinary starting character with no flags.
    // (The matched span itself is not needed, only the advanced input —
    // the original bound it to an unused `output` variable.)
    start_filename(input).map(|(input, _)| (input, BitFlags::empty()))
}
#[tracable_parser]
/// Match a non-initial character of a filename-like word, reporting which
/// special flags (path separator / glob) it contributes.
fn continue_file_char(input: NomSpan) -> IResult<NomSpan, BitFlags<SawSpecial>> {
    // Prefer a special character, which carries its own flags.
    if let Ok(matched) = special_file_char(input) {
        return Ok(matched);
    }

    // Otherwise accept any ordinary file character with no flags.
    matches(is_file_char)(input).map(|(rest, _)| (rest, BitFlags::empty()))
}
#[tracable_parser]
/// Match a single "special" character: a path separator or a glob-specific
/// character, tagging the result with the corresponding flag.
fn special_file_char(input: NomSpan) -> IResult<NomSpan, BitFlags<SawSpecial>> {
    if let Ok((rest, _)) = matches(is_path_separator)(input) {
        return Ok((rest, BitFlags::empty() | SawSpecial::PathSeparator));
    }

    let (rest, _) = matches(is_glob_specific_char)(input)?;
    Ok((rest, BitFlags::empty() | SawSpecial::Glob))
}
#[tracable_parser]
/// Consume the portion of a filename-like word after a path separator,
/// reporting whether any glob-specific characters appeared in it.
fn after_sep_file(input: NomSpan) -> IResult<NomSpan, BitFlags<SawSpecial>> {
    // Characters allowed after a separator: external-word characters,
    // ordinary file characters, and `.`.
    fn after_sep_char(c: char) -> bool {
        is_external_word_char(c) || is_file_char(c) || c == '.'
    }

    let start = input.offset;
    let original_input = input;

    // Consume at least one character. (The original also had a redundant
    // `let mut input = input;` that was immediately shadowed, and bound the
    // matched span to an unused `after_glob` variable.)
    let (input, _) = take_while1(after_sep_char)(input)?;

    // Re-slice the consumed region so it can be scanned for glob characters.
    let slice = original_input.slice(0..input.offset - start);

    // Any glob-specific character makes the whole word a pattern.
    let saw_special = if slice.fragment.chars().any(is_glob_specific_char) {
        BitFlags::empty() | SawSpecial::Glob
    } else {
        BitFlags::empty()
    };

    Ok((input, saw_special))
}
// A filename may begin with a run of dots (e.g. `.` or `..`) or any valid
// starting file character.
pub fn start_filename(input: NomSpan) -> IResult<NomSpan, NomSpan> {
    alt((take_while1(is_dot), matches(is_start_file_char)))(input)
}
#[tracable_parser]
// A member name (as used after `.` in `<token node>.<member>`): a bare word
// built from member characters.
pub fn member(input: NomSpan) -> IResult<NomSpan, TokenNode> {
    word(
        matches(is_start_member_char),
        matches(is_member_char),
        TokenTreeBuilder::spanned_bare,
    )(input)
}
#[tracable_parser]
pub fn ident(input: NomSpan) -> IResult<NomSpan, Tag> {
word(matches(is_id_start), matches(is_id_continue), Tag::from)(input)
} }
#[tracable_parser] #[tracable_parser]
@ -348,10 +511,7 @@ pub fn external_word(input: NomSpan) -> IResult<NomSpan, TokenNode> {
let (input, _) = take_while1(is_external_word_char)(input)?; let (input, _) = take_while1(is_external_word_char)(input)?;
let end = input.offset; let end = input.offset;
Ok(( Ok((input, TokenTreeBuilder::spanned_external_word((start, end))))
input,
TokenTreeBuilder::spanned_external_word(Span::new(start, end)),
))
} }
#[tracable_parser] #[tracable_parser]
@ -367,21 +527,40 @@ pub fn var(input: NomSpan) -> IResult<NomSpan, TokenNode> {
)) ))
} }
#[tracable_parser] fn tight<'a>(
pub fn ident(input: NomSpan) -> IResult<NomSpan, Tag> { parser: impl Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, Vec<TokenNode>>,
let start = input.offset; ) -> impl Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, Vec<TokenNode>> {
let (input, _) = take_while1(is_start_bare_char)(input)?; move |input: NomSpan| {
let (input, _) = take_while(is_bare_char)(input)?; let mut result = vec![];
let end = input.offset; let (input, head) = parser(input)?;
result.extend(head);
Ok((input, Tag::from((start, end, None)))) let (input, tail) = opt(alt((many1(range_continuation), many1(dot_member))))(input)?;
let next_char = &input.fragment.chars().nth(0);
if is_boundary(*next_char) {
if let Some(tail) = tail {
for tokens in tail {
result.extend(tokens);
}
}
Ok((input, result))
} else {
Err(nom::Err::Error(nom::error::make_error(
input,
nom::error::ErrorKind::Many0,
)))
}
}
} }
#[tracable_parser] #[tracable_parser]
pub fn flag(input: NomSpan) -> IResult<NomSpan, TokenNode> { pub fn flag(input: NomSpan) -> IResult<NomSpan, TokenNode> {
let start = input.offset; let start = input.offset;
let (input, _) = tag("--")(input)?; let (input, _) = tag("--")(input)?;
let (input, bare) = bare(input)?; let (input, bare) = filename(input)?;
let end = input.offset; let end = input.offset;
Ok(( Ok((
@ -394,7 +573,7 @@ pub fn flag(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn shorthand(input: NomSpan) -> IResult<NomSpan, TokenNode> { pub fn shorthand(input: NomSpan) -> IResult<NomSpan, TokenNode> {
let start = input.offset; let start = input.offset;
let (input, _) = tag("-")(input)?; let (input, _) = tag("-")(input)?;
let (input, bare) = bare(input)?; let (input, bare) = filename(input)?;
let end = input.offset; let end = input.offset;
Ok(( Ok((
@ -413,47 +592,97 @@ pub fn leaf(input: NomSpan) -> IResult<NomSpan, TokenNode> {
#[tracable_parser] #[tracable_parser]
pub fn token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<TokenNode>>> { pub fn token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<TokenNode>>> {
let start = input.offset; let start = input.offset;
let (input, first) = node(input)?; let mut node_list = vec![];
let (input, mut list) = many0(pair(alt((whitespace, dot)), node))(input)?; let mut next_input = input;
let mut before_space_input: Option<NomSpan> = None;
let mut final_space_tokens = 0;
let end = input.offset; loop {
let node_result = tight_node(next_input);
Ok(( let (after_node_input, next_nodes) = match node_result {
input, Err(_) => {
make_token_list(first, list, None).spanned(Span::new(start, end)), if let Some(before_space_input) = before_space_input {
)) next_input = before_space_input;
for _ in 0..final_space_tokens {
node_list.pop();
}
}
break;
}
Ok((after_node_input, next_node)) => (after_node_input, next_node),
};
node_list.extend(next_nodes);
// Special case that allows a parenthesized expression to immediate follow another
// token without a space, which could represent a type annotation.
let maybe_type = delimited_paren(after_node_input);
let after_maybe_type_input = match maybe_type {
Err(_) => after_node_input,
Ok((after_maybe_type_input, parens)) => {
node_list.push(parens);
after_maybe_type_input
}
};
let maybe_space = any_space(after_maybe_type_input);
let after_space_input = match maybe_space {
Err(_) => {
next_input = after_maybe_type_input;
break;
}
Ok((after_space_input, space)) => {
final_space_tokens = space.len();
node_list.extend(space);
before_space_input = Some(after_maybe_type_input);
after_space_input
}
};
next_input = after_space_input;
}
let end = next_input.offset;
Ok((next_input, node_list.spanned(Span::new(start, end))))
} }
#[tracable_parser] #[tracable_parser]
pub fn spaced_token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<TokenNode>>> { pub fn spaced_token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<TokenNode>>> {
let start = input.offset; let start = input.offset;
let (input, pre_ws) = opt(whitespace)(input)?; let (input, pre_ws) = opt(any_space)(input)?;
let (input, items) = token_list(input)?; let (input, items) = token_list(input)?;
let (input, post_ws) = opt(whitespace)(input)?; let (input, post_ws) = opt(any_space)(input)?;
let end = input.offset; let end = input.offset;
let mut out = vec![]; let mut out = vec![];
out.extend(pre_ws); pre_ws.map(|pre_ws| out.extend(pre_ws));
out.extend(items.item); out.extend(items.item);
out.extend(post_ws); post_ws.map(|post_ws| out.extend(post_ws));
Ok((input, out.spanned(Span::new(start, end)))) Ok((input, out.spanned(Span::new(start, end))))
} }
fn make_token_list( fn make_token_list(
first: Vec<TokenNode>, first: Vec<TokenNode>,
list: Vec<(TokenNode, Vec<TokenNode>)>, list: Vec<(Vec<TokenNode>, Vec<TokenNode>)>,
sp_right: Option<TokenNode>, sp_right: Option<TokenNode>,
) -> Vec<TokenNode> { ) -> Vec<TokenNode> {
let mut nodes = vec![]; let mut nodes = vec![];
nodes.extend(first); nodes.extend(first);
for (left, right) in list { for (sep, list) in list {
nodes.push(left); nodes.extend(sep);
nodes.extend(right); nodes.extend(list);
} }
if let Some(sp_right) = sp_right { if let Some(sp_right) = sp_right {
@ -463,6 +692,15 @@ fn make_token_list(
nodes nodes
} }
#[tracable_parser]
/// Parse a statement separator: either `;` or a newline.
pub fn separator(input: NomSpan) -> IResult<NomSpan, TokenNode> {
    let left = input.offset;
    // The matched text is not needed, only its span — the original bound it
    // to an unused `ws1` variable.
    let (input, _) = alt((tag(";"), tag("\n")))(input)?;
    let right = input.offset;

    Ok((input, TokenTreeBuilder::spanned_sep(Span::new(left, right))))
}
#[tracable_parser] #[tracable_parser]
pub fn whitespace(input: NomSpan) -> IResult<NomSpan, TokenNode> { pub fn whitespace(input: NomSpan) -> IResult<NomSpan, TokenNode> {
let left = input.offset; let left = input.offset;
@ -472,6 +710,30 @@ pub fn whitespace(input: NomSpan) -> IResult<NomSpan, TokenNode> {
Ok((input, TokenTreeBuilder::spanned_ws(Span::new(left, right)))) Ok((input, TokenTreeBuilder::spanned_ws(Span::new(left, right))))
} }
#[tracable_parser]
/// Parse one or more "space-like" tokens: whitespace, separators
/// (`;`/newline), and comments may appear interchangeably between tokens.
pub fn any_space(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
    // The original computed `left`/`right` offsets here but never used them;
    // each returned token already carries its own span.
    let (input, tokens) = many1(alt((whitespace, separator, comment)))(input)?;

    Ok((input, tokens))
}
#[tracable_parser]
/// Parse a line comment: `#` followed by everything up to (but not
/// including) the end of the line.
pub fn comment(input: NomSpan) -> IResult<NomSpan, TokenNode> {
    let left = input.offset;
    let (input, start) = tag("#")(input)?;
    // The comment body itself is not needed as text, only its span — the
    // original bound it to an unused `rest` variable.
    let (input, _) = not_line_ending(input)?;
    let right = input.offset;

    // The text span excludes the `#`; the outer span includes it.
    let span = (start.offset + 1, right);

    Ok((
        input,
        TokenTreeBuilder::spanned_comment(span, Span::new(left, right)),
    ))
}
pub fn delimited( pub fn delimited(
input: NomSpan, input: NomSpan,
delimiter: Delimiter, delimiter: Delimiter,
@ -541,62 +803,43 @@ pub fn raw_call(input: NomSpan) -> IResult<NomSpan, Spanned<CallNode>> {
} }
#[tracable_parser] #[tracable_parser]
pub fn bare_path(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> { pub fn range_continuation(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
let (input, head) = alt((bare, dot))(input)?; let original = input;
let (input, tail) = many0(alt((bare, dot, string)))(input)?; let mut result = vec![];
let next_char = &input.fragment.chars().nth(0); let (input, dotdot_result) = dotdot(input)?;
result.push(dotdot_result);
if is_boundary(*next_char) { let (input, node_result) = tight_node(input)?;
let mut result = vec![head]; result.extend(node_result);
result.extend(tail);
Ok((input, result)) Ok((input, result))
} else {
Err(nom::Err::Error(nom::error::make_error(
input,
nom::error::ErrorKind::Many0,
)))
}
} }
#[tracable_parser] #[tracable_parser]
pub fn pattern_path(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> { pub fn dot_member(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
let (input, head) = alt((pattern, dot))(input)?; let (input, dot_result) = dot(input)?;
let (input, member_result) = any_member(input)?;
let (input, tail) = many0(alt((pattern, dot, string)))(input)?; Ok((input, vec![dot_result, member_result]))
let next_char = &input.fragment.chars().nth(0);
if is_boundary(*next_char) {
let mut result = vec![head];
result.extend(tail);
Ok((input, result))
} else {
Err(nom::Err::Error(nom::error::make_error(
input,
nom::error::ErrorKind::Many0,
)))
}
} }
#[tracable_parser] #[tracable_parser]
pub fn node1(input: NomSpan) -> IResult<NomSpan, TokenNode> { pub fn any_member(input: NomSpan) -> IResult<NomSpan, TokenNode> {
alt((leaf, bare, pattern, external_word, delimited_paren))(input) alt((number, string, member))(input)
} }
#[tracable_parser] #[tracable_parser]
pub fn node(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> { pub fn tight_node(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
alt(( alt((
to_list(leaf), tight(to_list(leaf)),
bare_path, tight(to_list(filename)),
pattern_path, tight(to_list(pattern)),
to_list(comment),
to_list(external_word), to_list(external_word),
to_list(delimited_paren), tight(to_list(delimited_paren)),
to_list(delimited_brace), tight(to_list(delimited_brace)),
to_list(delimited_square), tight(to_list(delimited_square)),
))(input) ))(input)
} }
@ -649,6 +892,23 @@ pub fn pipeline(input: NomSpan) -> IResult<NomSpan, TokenNode> {
)) ))
} }
#[tracable_parser]
pub fn module(input: NomSpan) -> IResult<NomSpan, TokenNode> {
let (input, tokens) = spaced_token_list(input)?;
if input.input_len() != 0 {
return Err(Err::Error(error_position!(
input,
nom::error::ErrorKind::Eof
)));
}
Ok((
input,
TokenTreeBuilder::spanned_token_list(tokens.item, tokens.span),
))
}
fn parse_int<T>(frag: &str, neg: Option<T>) -> i64 { fn parse_int<T>(frag: &str, neg: Option<T>) -> i64 {
let int = FromStr::from_str(frag).unwrap(); let int = FromStr::from_str(frag).unwrap();
@ -661,7 +921,7 @@ fn parse_int<T>(frag: &str, neg: Option<T>) -> i64 {
fn is_boundary(c: Option<char>) -> bool { fn is_boundary(c: Option<char>) -> bool {
match c { match c {
None => true, None => true,
Some(')') | Some(']') | Some('}') => true, Some(')') | Some(']') | Some('}') | Some('(') => true,
Some(c) if c.is_whitespace() => true, Some(c) if c.is_whitespace() => true,
_ => false, _ => false,
} }
@ -682,14 +942,25 @@ fn is_glob_specific_char(c: char) -> bool {
} }
fn is_start_glob_char(c: char) -> bool { fn is_start_glob_char(c: char) -> bool {
is_start_bare_char(c) || is_glob_specific_char(c) || c == '.' is_start_file_char(c) || is_glob_specific_char(c) || c == '.'
} }
fn is_glob_char(c: char) -> bool { fn is_glob_char(c: char) -> bool {
is_bare_char(c) || is_glob_specific_char(c) is_file_char(c) || is_glob_specific_char(c)
} }
/// True exactly when `c` is the `.` character.
fn is_dot(c: char) -> bool {
    matches!(c, '.')
}
/// True when `c` is a filesystem path separator character
/// (`\`, `/`, or the drive-letter `:`).
fn is_path_separator(c: char) -> bool {
    matches!(c, '\\' | '/' | ':')
}
fn is_start_file_char(c: char) -> bool {
match c { match c {
'+' => false, '+' => false,
_ if c.is_alphanumeric() => true, _ if c.is_alphanumeric() => true,
@ -698,11 +969,12 @@ fn is_start_bare_char(c: char) -> bool {
'_' => true, '_' => true,
'-' => true, '-' => true,
'~' => true, '~' => true,
'.' => true,
_ => false, _ => false,
} }
} }
fn is_bare_char(c: char) -> bool { fn is_file_char(c: char) -> bool {
match c { match c {
'+' => true, '+' => true,
_ if c.is_alphanumeric() => true, _ if c.is_alphanumeric() => true,
@ -718,6 +990,24 @@ fn is_bare_char(c: char) -> bool {
} }
} }
/// True when `c` may begin a bare member name: alphanumeric, `_`, or `-`.
fn is_start_member_char(c: char) -> bool {
    c.is_alphanumeric() || c == '_' || c == '-'
}
/// True when `c` may continue a bare member name: alphanumeric, `_`, or `-`.
/// (Currently the same character set as `is_start_member_char`.)
fn is_member_char(c: char) -> bool {
    c.is_alphanumeric() || c == '_' || c == '-'
}
fn is_id_start(c: char) -> bool { fn is_id_start(c: char) -> bool {
unicode_xid::UnicodeXID::is_xid_start(c) unicode_xid::UnicodeXID::is_xid_start(c)
} }
@ -775,6 +1065,7 @@ mod tests {
(<$parser:tt> $source:tt -> $tokens:expr) => { (<$parser:tt> $source:tt -> $tokens:expr) => {
let result = apply($parser, stringify!($parser), $source); let result = apply($parser, stringify!($parser), $source);
let (expected_tree, expected_source) = TokenTreeBuilder::build($tokens); let (expected_tree, expected_source) = TokenTreeBuilder::build($tokens);
if result != expected_tree { if result != expected_tree {
@ -884,22 +1175,36 @@ mod tests {
fn test_simple_path() { fn test_simple_path() {
equal_tokens! { equal_tokens! {
<nodes> <nodes>
"chrome.exe" -> b::token_list(vec![b::bare("chrome"), b::op(Operator::Dot), b::bare("exe")]) "chrome.exe" -> b::token_list(vec![b::bare("chrome"), b::dot(), b::bare("exe")])
} }
equal_tokens! { equal_tokens! {
<nodes> <nodes>
".azure" -> b::token_list(vec![b::op(Operator::Dot), b::bare("azure")]) ".azure" -> b::token_list(vec![b::bare(".azure")])
} }
equal_tokens! { equal_tokens! {
<nodes> <nodes>
r"C:\windows\system.dll" -> b::token_list(vec![b::bare(r"C:\windows\system"), b::op(Operator::Dot), b::bare("dll")]) r"C:\windows\system.dll" -> b::token_list(vec![b::bare(r"C:\windows\system.dll")])
} }
equal_tokens! { equal_tokens! {
<nodes> <nodes>
r"C:\Code\-testing\my_tests.js" -> b::token_list(vec![b::bare(r"C:\Code\-testing\my_tests"), b::op(Operator::Dot), b::bare("js")]) r"C:\Code\-testing\my_tests.js" -> b::token_list(vec![b::bare(r"C:\Code\-testing\my_tests.js")])
}
equal_tokens! {
<nodes>
r"C:\Users\example\AppData\Local\Temp\.tmpZ4TVQ2\cd_test_8" -> b::token_list(vec![b::bare(r"C:\Users\example\AppData\Local\Temp\.tmpZ4TVQ2\cd_test_8")])
}
equal_tokens! {
<pipeline>
r"cd C:\Users\wycat\AppData\Local\Temp\.tmpaj5JKi\cd_test_11" -> b::pipeline(vec![vec![
b::bare("cd"),
b::sp(),
b::bare(r"C:\Users\wycat\AppData\Local\Temp\.tmpaj5JKi\cd_test_11")
]])
} }
} }
@ -949,7 +1254,7 @@ mod tests {
fn test_dot_prefixed_name() { fn test_dot_prefixed_name() {
equal_tokens! { equal_tokens! {
<nodes> <nodes>
".azure" -> b::token_list(vec![b::op("."), b::bare("azure")]) ".azure" -> b::token_list(vec![b::bare(".azure")])
} }
} }
@ -1003,33 +1308,43 @@ mod tests {
} }
} }
#[test]
fn test_range() {
    let _ = pretty_env_logger::try_init();

    // `..` between two token nodes lexes as three tokens: int, dotdot, int.
    equal_tokens! {
        <nodes>
        "0..2" -> b::token_list(vec![b::int(0), b::dotdot(), b::int(2)])
    }
}
#[test] #[test]
fn test_path() { fn test_path() {
let _ = pretty_env_logger::try_init(); let _ = pretty_env_logger::try_init();
equal_tokens! { equal_tokens! {
<nodes> <nodes>
"$it.print" -> b::token_list(vec![b::var("it"), b::op("."), b::bare("print")]) "$it.print" -> b::token_list(vec![b::var("it"), b::dot(), b::bare("print")])
} }
equal_tokens! { equal_tokens! {
<nodes> <nodes>
"$it.0" -> b::token_list(vec![b::var("it"), b::op("."), b::int(0)]) "$it.0" -> b::token_list(vec![b::var("it"), b::dot(), b::int(0)])
} }
equal_tokens! { equal_tokens! {
<nodes> <nodes>
"$head.part1.part2" -> b::token_list(vec![b::var("head"), b::op("."), b::bare("part1"), b::op("."), b::bare("part2")]) "$head.part1.part2" -> b::token_list(vec![b::var("head"), b::dot(), b::bare("part1"), b::dot(), b::bare("part2")])
} }
equal_tokens! { equal_tokens! {
<nodes> <nodes>
"( hello ).world" -> b::token_list(vec![b::parens(vec![b::sp(), b::bare("hello"), b::sp()]), b::op("."), b::bare("world")]) "( hello ).world" -> b::token_list(vec![b::parens(vec![b::sp(), b::bare("hello"), b::sp()]), b::dot(), b::bare("world")])
} }
equal_tokens! { equal_tokens! {
<nodes> <nodes>
r#"( hello )."world""# -> b::token_list(vec![b::parens(vec![b::sp(), b::bare("hello"), b::sp()]), b::op("."), b::string("world")]) r#"( hello )."world""# -> b::token_list(vec![b::parens(vec![b::sp(), b::bare("hello"), b::sp()]), b::dot(), b::string("world")])
} }
} }
@ -1042,11 +1357,11 @@ mod tests {
b::parens(vec![ b::parens(vec![
b::sp(), b::sp(),
b::var("it"), b::var("it"),
b::op("."), b::dot(),
b::bare("is"), b::bare("is"),
b::op("."), b::dot(),
b::string("great news"), b::string("great news"),
b::op("."), b::dot(),
b::bare("right"), b::bare("right"),
b::sp(), b::sp(),
b::bare("yep"), b::bare("yep"),
@ -1054,7 +1369,7 @@ mod tests {
b::var("yep"), b::var("yep"),
b::sp() b::sp()
]), ]),
b::op("."), b::string("world")] b::dot(), b::string("world")]
) )
} }
@ -1063,9 +1378,9 @@ mod tests {
r#"$it."are PAS".0"# -> b::token_list( r#"$it."are PAS".0"# -> b::token_list(
vec![ vec![
b::var("it"), b::var("it"),
b::op("."), b::dot(),
b::string("are PAS"), b::string("are PAS"),
b::op("."), b::dot(),
b::int(0), b::int(0),
] ]
) )
@ -1076,17 +1391,17 @@ mod tests {
fn test_smoke_single_command() { fn test_smoke_single_command() {
equal_tokens! { equal_tokens! {
<nodes> <nodes>
"git add ." -> b::token_list(vec![b::bare("git"), b::sp(), b::bare("add"), b::sp(), b::op(".")]) "git add ." -> b::token_list(vec![b::bare("git"), b::sp(), b::bare("add"), b::sp(), b::bare(".")])
} }
equal_tokens! { equal_tokens! {
<nodes> <nodes>
"open Cargo.toml" -> b::token_list(vec![b::bare("open"), b::sp(), b::bare("Cargo"), b::op("."), b::bare("toml")]) "open Cargo.toml" -> b::token_list(vec![b::bare("open"), b::sp(), b::bare("Cargo"), b::dot(), b::bare("toml")])
} }
equal_tokens! { equal_tokens! {
<nodes> <nodes>
"select package.version" -> b::token_list(vec![b::bare("select"), b::sp(), b::bare("package"), b::op("."), b::bare("version")]) "select package.version" -> b::token_list(vec![b::bare("select"), b::sp(), b::bare("package"), b::dot(), b::bare("version")])
} }
equal_tokens! { equal_tokens! {
@ -1096,12 +1411,12 @@ mod tests {
equal_tokens! { equal_tokens! {
<nodes> <nodes>
"open Cargo.toml --raw" -> b::token_list(vec![b::bare("open"), b::sp(), b::bare("Cargo"), b::op("."), b::bare("toml"), b::sp(), b::flag("raw")]) "open Cargo.toml --raw" -> b::token_list(vec![b::bare("open"), b::sp(), b::bare("Cargo"), b::dot(), b::bare("toml"), b::sp(), b::flag("raw")])
} }
equal_tokens! { equal_tokens! {
<nodes> <nodes>
"open Cargo.toml -r" -> b::token_list(vec![b::bare("open"), b::sp(), b::bare("Cargo"), b::op("."), b::bare("toml"), b::sp(), b::shorthand("r")]) "open Cargo.toml -r" -> b::token_list(vec![b::bare("open"), b::sp(), b::bare("Cargo"), b::dot(), b::bare("toml"), b::sp(), b::shorthand("r")])
} }
equal_tokens! { equal_tokens! {
@ -1117,7 +1432,7 @@ mod tests {
b::sp(), b::sp(),
b::flag("patch"), b::flag("patch"),
b::sp(), b::sp(),
b::bare("package"), b::op("."), b::bare("version") b::bare("package"), b::dot(), b::bare("version")
] ]
) )
} }
@ -1197,7 +1512,7 @@ mod tests {
b::bare("where"), b::bare("where"),
b::sp(), b::sp(),
b::bare("cpu"), b::bare("cpu"),
b::op("."), b::dot(),
b::string("max ghz"), b::string("max ghz"),
b::sp(), b::sp(),
b::op(">"), b::op(">"),
@ -1207,6 +1522,34 @@ mod tests {
); );
} }
#[test]
fn test_signature() {
    let _ = pretty_env_logger::try_init();

    // Multi-line program at the <module> level: newlines become Separator
    // tokens, indentation becomes Whitespace tokens, and `#` line comments
    // become Comment tokens — all preserved in the token list.
    equal_tokens!(
        <module>
        "def cd\n # Change to a new path.\n optional directory(Path) # the directory to change to\nend" ->
        b::token_list(vec![
            b::bare("def"),
            b::sp(),
            b::bare("cd"),
            b::sep("\n"),
            b::ws(" "),
            b::comment(" Change to a new path."),
            b::sep("\n"),
            b::ws(" "),
            b::bare("optional"),
            b::sp(),
            b::bare("directory"),
            b::parens(vec![b::bare("Path")]),
            b::sp(),
            b::comment(" the directory to change to"),
            b::sep("\n"),
            b::bare("end")
        ])
    );
}
// #[test] // #[test]
// fn test_smoke_pipeline() { // fn test_smoke_pipeline() {
// let _ = pretty_env_logger::try_init(); // let _ = pretty_env_logger::try_init();
@ -1279,7 +1622,18 @@ mod tests {
desc: &str, desc: &str,
string: &str, string: &str,
) -> TokenNode { ) -> TokenNode {
f(nom_input(string)).unwrap().1 let result = f(nom_input(string));
match result {
Ok(value) => value.1,
Err(err) => {
let err = nu_errors::ShellError::parse_error(err);
println!("{:?}", string);
crate::hir::baseline_parse::tests::print_err(err, &nu_source::Text::from(string));
panic!("test failed")
}
}
} }
fn span((left, right): (usize, usize)) -> Span { fn span((left, right): (usize, usize)) -> Span {

View File

@ -1,4 +1,4 @@
use crate::parse::{call_node::*, flag::*, operator::*, pipeline::*, tokens::*}; use crate::parse::{call_node::*, comment::*, flag::*, operator::*, pipeline::*, tokens::*};
use derive_new::new; use derive_new::new;
use getset::Getters; use getset::Getters;
use nu_errors::{ParseError, ShellError}; use nu_errors::{ParseError, ShellError};
@ -18,7 +18,9 @@ pub enum TokenNode {
Delimited(Spanned<DelimitedNode>), Delimited(Spanned<DelimitedNode>),
Pipeline(Pipeline), Pipeline(Pipeline),
Flag(Flag), Flag(Flag),
Comment(Comment),
Whitespace(Span), Whitespace(Span),
Separator(Span),
Error(Spanned<ShellError>), Error(Spanned<ShellError>),
} }
@ -39,14 +41,32 @@ impl PrettyDebugWithSource for TokenNode {
"whitespace", "whitespace",
b::description(format!("{:?}", space.slice(source))), b::description(format!("{:?}", space.slice(source))),
), ),
TokenNode::Separator(span) => b::typed(
"separator",
b::description(format!("{:?}", span.slice(source))),
),
TokenNode::Comment(comment) => {
b::typed("comment", b::description(comment.text.slice(source)))
}
TokenNode::Error(_) => b::error("error"), TokenNode::Error(_) => b::error("error"),
} }
} }
} }
impl HasSpan for TokenNode { impl ShellTypeName for TokenNode {
fn span(&self) -> Span { fn type_name(&self) -> &'static str {
self.get_span() match self {
TokenNode::Token(t) => t.type_name(),
TokenNode::Nodes(_) => "nodes",
TokenNode::Call(_) => "command",
TokenNode::Delimited(d) => d.type_name(),
TokenNode::Pipeline(_) => "pipeline",
TokenNode::Flag(_) => "flag",
TokenNode::Whitespace(_) => "whitespace",
TokenNode::Separator(_) => "separator",
TokenNode::Comment(_) => "comment",
TokenNode::Error(_) => "error",
}
} }
} }
@ -107,12 +127,12 @@ impl fmt::Debug for DebugTokenNode<'_> {
impl From<&TokenNode> for Span { impl From<&TokenNode> for Span {
fn from(token: &TokenNode) -> Span { fn from(token: &TokenNode) -> Span {
token.get_span() token.span()
} }
} }
impl TokenNode { impl HasSpan for TokenNode {
pub fn get_span(&self) -> Span { fn span(&self) -> Span {
match self { match self {
TokenNode::Token(t) => t.span, TokenNode::Token(t) => t.span,
TokenNode::Nodes(t) => t.span, TokenNode::Nodes(t) => t.span,
@ -121,27 +141,14 @@ impl TokenNode {
TokenNode::Pipeline(s) => s.span, TokenNode::Pipeline(s) => s.span,
TokenNode::Flag(s) => s.span, TokenNode::Flag(s) => s.span,
TokenNode::Whitespace(s) => *s, TokenNode::Whitespace(s) => *s,
TokenNode::Separator(s) => *s,
TokenNode::Comment(c) => c.span(),
TokenNode::Error(s) => s.span, TokenNode::Error(s) => s.span,
} }
} }
}
pub fn type_name(&self) -> &'static str { impl TokenNode {
match self {
TokenNode::Token(t) => t.type_name(),
TokenNode::Nodes(_) => "nodes",
TokenNode::Call(_) => "command",
TokenNode::Delimited(d) => d.type_name(),
TokenNode::Pipeline(_) => "pipeline",
TokenNode::Flag(_) => "flag",
TokenNode::Whitespace(_) => "whitespace",
TokenNode::Error(_) => "error",
}
}
pub fn spanned_type_name(&self) -> Spanned<&'static str> {
self.type_name().spanned(self.span())
}
pub fn tagged_type_name(&self) -> Tagged<&'static str> { pub fn tagged_type_name(&self) -> Tagged<&'static str> {
self.type_name().tagged(self.span()) self.type_name().tagged(self.span())
} }
@ -244,7 +251,7 @@ impl TokenNode {
pub fn is_dot(&self) -> bool { pub fn is_dot(&self) -> bool {
match self { match self {
TokenNode::Token(Token { TokenNode::Token(Token {
unspanned: UnspannedToken::Operator(Operator::Dot), unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot),
.. ..
}) => true, }) => true,
_ => false, _ => false,
@ -421,7 +428,7 @@ impl TokenNode {
pub fn expect_dot(&self) -> Span { pub fn expect_dot(&self) -> Span {
match self { match self {
TokenNode::Token(Token { TokenNode::Token(Token {
unspanned: UnspannedToken::Operator(Operator::Dot), unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot),
span, span,
}) => *span, }) => *span,
other => panic!("Expected dot, found {:?}", other), other => panic!("Expected dot, found {:?}", other),

View File

@ -1,6 +1,7 @@
use crate::parse::call_node::CallNode; use crate::parse::call_node::CallNode;
use crate::parse::comment::Comment;
use crate::parse::flag::{Flag, FlagKind}; use crate::parse::flag::{Flag, FlagKind};
use crate::parse::operator::Operator; use crate::parse::operator::{CompareOperator, EvaluationOperator};
use crate::parse::pipeline::{Pipeline, PipelineElement}; use crate::parse::pipeline::{Pipeline, PipelineElement};
use crate::parse::token_tree::{DelimitedNode, Delimiter, TokenNode}; use crate::parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
use crate::parse::tokens::{RawNumber, UnspannedToken}; use crate::parse::tokens::{RawNumber, UnspannedToken};
@ -96,7 +97,7 @@ impl TokenTreeBuilder {
TokenNode::Nodes(input.spanned(span.into())) TokenNode::Nodes(input.spanned(span.into()))
} }
pub fn op(input: impl Into<Operator>) -> CurriedToken { pub fn op(input: impl Into<CompareOperator>) -> CurriedToken {
let input = input.into(); let input = input.into();
Box::new(move |b| { Box::new(move |b| {
@ -104,12 +105,39 @@ impl TokenTreeBuilder {
b.pos = end; b.pos = end;
TokenTreeBuilder::spanned_op(input, Span::new(start, end)) TokenTreeBuilder::spanned_cmp_op(input, Span::new(start, end))
}) })
} }
pub fn spanned_op(input: impl Into<Operator>, span: impl Into<Span>) -> TokenNode { pub fn spanned_cmp_op(input: impl Into<CompareOperator>, span: impl Into<Span>) -> TokenNode {
TokenNode::Token(UnspannedToken::Operator(input.into()).into_token(span)) TokenNode::Token(UnspannedToken::CompareOperator(input.into()).into_token(span))
}
/// Builds a `.` evaluation-operator token, consuming `.` from the builder's
/// source and recording its span.
pub fn dot() -> CurriedToken {
    Box::new(move |b| {
        let (start, end) = b.consume(".");
        // NOTE(review): consume() already advances b.pos; this assignment
        // looks redundant — confirm before removing (op() does the same).
        b.pos = end;

        TokenTreeBuilder::spanned_eval_op(".", Span::new(start, end))
    })
}
/// Builds a `..` evaluation-operator token (the range operator), consuming
/// `..` from the builder's source and recording its span.
pub fn dotdot() -> CurriedToken {
    Box::new(move |b| {
        let (start, end) = b.consume("..");
        // NOTE(review): consume() already advances b.pos; this assignment
        // looks redundant — confirm before removing (op() does the same).
        b.pos = end;

        TokenTreeBuilder::spanned_eval_op("..", Span::new(start, end))
    })
}
/// Wraps an evaluation operator (`.` / `..`) in a spanned token node.
pub fn spanned_eval_op(
    input: impl Into<EvaluationOperator>,
    span: impl Into<Span>,
) -> TokenNode {
    TokenNode::Token(UnspannedToken::EvaluationOperator(input.into()).into_token(span))
}
pub fn string(input: impl Into<String>) -> CurriedToken { pub fn string(input: impl Into<String>) -> CurriedToken {
@ -398,6 +426,36 @@ impl TokenTreeBuilder {
TokenNode::Whitespace(span.into()) TokenNode::Whitespace(span.into())
} }
/// Builds a separator token (e.g. `";"` or `"\n"`), consuming the given text
/// from the builder's source and recording only its span.
pub fn sep(input: impl Into<String>) -> CurriedToken {
    let input = input.into();

    Box::new(move |b| {
        let (start, end) = b.consume(&input);
        TokenTreeBuilder::spanned_sep(Span::new(start, end))
    })
}
/// Wraps a span as a `TokenNode::Separator`.
pub fn spanned_sep(span: impl Into<Span>) -> TokenNode {
    TokenNode::Separator(span.into())
}
/// Builds a line-comment token. `input` is the comment text *after* the `#`;
/// the builder consumes `#` followed by the text.
pub fn comment(input: impl Into<String>) -> CurriedToken {
    let input = input.into();

    Box::new(move |b| {
        // Outer span covers the `#` marker plus the text; inner span
        // (start, end) covers just the text.
        let outer_start = b.pos;
        b.consume("#");
        let (start, end) = b.consume(&input);
        let outer_end = b.pos;

        TokenTreeBuilder::spanned_comment((start, end), (outer_start, outer_end))
    })
}
/// Wraps a line comment as a `TokenNode::Comment`; `input` is the span of the
/// comment text, `span` the span of the whole comment including `#`.
pub fn spanned_comment(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
    TokenNode::Comment(Comment::line(input, span))
}
fn consume(&mut self, input: &str) -> (usize, usize) { fn consume(&mut self, input: &str) -> (usize, usize) {
let start = self.pos; let start = self.pos;
self.pos += input.len(); self.pos += input.len();

View File

@ -1,5 +1,5 @@
use crate::parse::parser::Number; use crate::parse::parser::Number;
use crate::Operator; use crate::{CompareOperator, EvaluationOperator};
use bigdecimal::BigDecimal; use bigdecimal::BigDecimal;
use nu_protocol::ShellTypeName; use nu_protocol::ShellTypeName;
use nu_source::{ use nu_source::{
@ -13,7 +13,8 @@ use std::str::FromStr;
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] #[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum UnspannedToken { pub enum UnspannedToken {
Number(RawNumber), Number(RawNumber),
Operator(Operator), CompareOperator(CompareOperator),
EvaluationOperator(EvaluationOperator),
String(Span), String(Span),
Variable(Span), Variable(Span),
ExternalCommand(Span), ExternalCommand(Span),
@ -35,7 +36,9 @@ impl ShellTypeName for UnspannedToken {
fn type_name(&self) -> &'static str { fn type_name(&self) -> &'static str {
match self { match self {
UnspannedToken::Number(_) => "number", UnspannedToken::Number(_) => "number",
UnspannedToken::Operator(..) => "operator", UnspannedToken::CompareOperator(..) => "comparison operator",
UnspannedToken::EvaluationOperator(EvaluationOperator::Dot) => "dot",
UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot) => "dotdot",
UnspannedToken::String(_) => "string", UnspannedToken::String(_) => "string",
UnspannedToken::Variable(_) => "variable", UnspannedToken::Variable(_) => "variable",
UnspannedToken::ExternalCommand(_) => "syntax error", UnspannedToken::ExternalCommand(_) => "syntax error",
@ -111,7 +114,8 @@ impl PrettyDebugWithSource for Token {
fn pretty_debug(&self, source: &str) -> DebugDocBuilder { fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
match self.unspanned { match self.unspanned {
UnspannedToken::Number(number) => number.pretty_debug(source), UnspannedToken::Number(number) => number.pretty_debug(source),
UnspannedToken::Operator(operator) => operator.pretty(), UnspannedToken::CompareOperator(operator) => operator.pretty(),
UnspannedToken::EvaluationOperator(operator) => operator.pretty(),
UnspannedToken::String(_) => b::primitive(self.span.slice(source)), UnspannedToken::String(_) => b::primitive(self.span.slice(source)),
UnspannedToken::Variable(_) => b::var(self.span.slice(source)), UnspannedToken::Variable(_) => b::var(self.span.slice(source)),
UnspannedToken::ExternalCommand(_) => b::primitive(self.span.slice(source)), UnspannedToken::ExternalCommand(_) => b::primitive(self.span.slice(source)),
@ -155,9 +159,9 @@ impl Token {
} }
} }
pub fn extract_operator(&self) -> Option<Spanned<Operator>> { pub fn extract_operator(&self) -> Option<Spanned<CompareOperator>> {
match self.unspanned { match self.unspanned {
UnspannedToken::Operator(operator) => Some(operator.spanned(self.span)), UnspannedToken::CompareOperator(operator) => Some(operator.spanned(self.span)),
_ => None, _ => None,
} }
} }

View File

@ -8,6 +8,7 @@ mod return_value;
mod signature; mod signature;
mod syntax_shape; mod syntax_shape;
mod type_name; mod type_name;
mod type_shape;
mod value; mod value;
pub use crate::call_info::{CallInfo, EvaluatedArgs}; pub use crate::call_info::{CallInfo, EvaluatedArgs};
@ -17,9 +18,11 @@ pub use crate::return_value::{CommandAction, ReturnSuccess, ReturnValue};
pub use crate::signature::{NamedType, PositionalType, Signature}; pub use crate::signature::{NamedType, PositionalType, Signature};
pub use crate::syntax_shape::SyntaxShape; pub use crate::syntax_shape::SyntaxShape;
pub use crate::type_name::{PrettyType, ShellTypeName, SpannedTypeName}; pub use crate::type_name::{PrettyType, ShellTypeName, SpannedTypeName};
pub use crate::type_shape::{Row as RowType, Type};
pub use crate::value::column_path::{did_you_mean, ColumnPath, PathMember, UnspannedPathMember}; pub use crate::value::column_path::{did_you_mean, ColumnPath, PathMember, UnspannedPathMember};
pub use crate::value::dict::{Dictionary, TaggedDictBuilder}; pub use crate::value::dict::{Dictionary, TaggedDictBuilder};
pub use crate::value::evaluate::{Evaluate, EvaluateTrait, Scope}; pub use crate::value::evaluate::{Evaluate, EvaluateTrait, Scope};
pub use crate::value::primitive::format_primitive; pub use crate::value::primitive::format_primitive;
pub use crate::value::primitive::Primitive; pub use crate::value::primitive::Primitive;
pub use crate::value::range::{Range, RangeInclusion};
pub use crate::value::{UntaggedValue, Value}; pub use crate::value::{UntaggedValue, Value};

View File

@ -1,4 +1,5 @@
use crate::syntax_shape::SyntaxShape; use crate::syntax_shape::SyntaxShape;
use crate::type_shape::Type;
use indexmap::IndexMap; use indexmap::IndexMap;
use nu_source::{b, DebugDocBuilder, PrettyDebug, PrettyDebugWithSource}; use nu_source::{b, DebugDocBuilder, PrettyDebug, PrettyDebugWithSource};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -76,6 +77,8 @@ pub struct Signature {
pub positional: Vec<(PositionalType, Description)>, pub positional: Vec<(PositionalType, Description)>,
pub rest_positional: Option<(SyntaxShape, Description)>, pub rest_positional: Option<(SyntaxShape, Description)>,
pub named: IndexMap<String, (NamedType, Description)>, pub named: IndexMap<String, (NamedType, Description)>,
pub yields: Option<Type>,
pub input: Option<Type>,
pub is_filter: bool, pub is_filter: bool,
} }
@ -98,14 +101,16 @@ impl PrettyDebugWithSource for Signature {
} }
impl Signature { impl Signature {
pub fn new(name: String) -> Signature { pub fn new(name: impl Into<String>) -> Signature {
Signature { Signature {
name, name: name.into(),
usage: String::new(), usage: String::new(),
positional: vec![], positional: vec![],
rest_positional: None, rest_positional: None,
named: IndexMap::new(), named: IndexMap::new(),
is_filter: false, is_filter: false,
yields: None,
input: None,
} }
} }
@ -186,4 +191,14 @@ impl Signature {
self.rest_positional = Some((ty, desc.into())); self.rest_positional = Some((ty, desc.into()));
self self
} }
/// Builder method: declares the structural type this command yields.
pub fn yields(mut self, ty: Type) -> Signature {
    self.yields = Some(ty);
    self
}
/// Builder method: declares the structural type this command accepts as input.
pub fn input(mut self, ty: Type) -> Signature {
    self.input = Some(ty);
    self
}
} }

View File

@ -8,6 +8,7 @@ pub enum SyntaxShape {
Member, Member,
ColumnPath, ColumnPath,
Number, Number,
Range,
Int, Int,
Path, Path,
Pattern, Pattern,
@ -22,6 +23,7 @@ impl PrettyDebug for SyntaxShape {
SyntaxShape::Member => "member shape", SyntaxShape::Member => "member shape",
SyntaxShape::ColumnPath => "column path shape", SyntaxShape::ColumnPath => "column path shape",
SyntaxShape::Number => "number shape", SyntaxShape::Number => "number shape",
SyntaxShape::Range => "range shape",
SyntaxShape::Int => "integer shape", SyntaxShape::Int => "integer shape",
SyntaxShape::Path => "file path shape", SyntaxShape::Path => "file path shape",
SyntaxShape::Pattern => "pattern shape", SyntaxShape::Pattern => "pattern shape",

View File

@ -0,0 +1,382 @@
use crate::value::dict::Dictionary;
use crate::value::primitive::Primitive;
use crate::value::range::RangeInclusion;
use crate::value::{UntaggedValue, Value};
use derive_new::new;
use nu_source::{b, DebugDoc, DebugDocBuilder, PrettyDebug};
use serde::{Deserialize, Deserializer, Serialize};
use std::collections::BTreeMap;
use std::fmt::Debug;
use std::hash::Hash;
/**
This file describes the structural types of the nushell system.
Its primary purpose today is to identify "equivalent" values for the purpose
of merging rows into a single table or identify rows in a table that have the
same shape for reflection.
*/
/// The structural type of a range value: the element type and inclusion
/// (inclusive/exclusive) of each bound.
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize, new)]
pub struct RangeType {
    from: (Type, RangeInclusion),
    to: (Type, RangeInclusion),
}
/// The structural type of a nushell value. Used to identify "equivalent"
/// values when merging rows into a table or reflecting on row shapes
/// (see the module-level comment).
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum Type {
    Nothing,
    Int,
    // Boxed because RangeType recursively contains two `Type`s.
    Range(Box<RangeType>),
    Decimal,
    Bytesize,
    String,
    Line,
    ColumnPath,
    Pattern,
    Boolean,
    Date,
    Duration,
    Path,
    Binary,
    Row(Row),
    Table(Vec<Type>),

    // TODO: Block arguments
    Block,
    // TODO: Error type
    Error,

    // Stream markers (used as bookend markers rather than actual values)
    BeginningOfStream,
    EndOfStream,
}
/// The structural type of a row: a column → type map. A `BTreeMap` keeps
/// columns sorted, so two rows with the same columns compare equal regardless
/// of insertion order.
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, new)]
pub struct Row {
    #[new(default)]
    map: BTreeMap<Column, Type>,
}
// Serialize the row as a plain map of its column/type entries.
impl Serialize for Row {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.collect_map(self.map.iter())
    }
}
// Deserialize a row from a serialized map, rebuilding the sorted
// column → type `BTreeMap`.
impl<'de> Deserialize<'de> for Row {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        struct RowVisitor;

        impl<'de> serde::de::Visitor<'de> for RowVisitor {
            type Value = Row;

            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                write!(formatter, "a row")
            }

            fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
            where
                A: serde::de::MapAccess<'de>,
            {
                let mut new_map = BTreeMap::new();

                // `while let` over next_entry() replaces the original manual
                // loop { match ... } — same behavior, idiomatic form.
                while let Some((key, value)) = map.next_entry()? {
                    new_map.insert(key, value);
                }

                Ok(Row { map: new_map })
            }
        }

        deserializer.deserialize_map(RowVisitor)
    }
}
impl Type {
    /// Computes the structural type of a primitive value. Ranges recurse into
    /// both bounds, carrying each bound's inclusion along.
    pub fn from_primitive(primitive: &Primitive) -> Type {
        match primitive {
            Primitive::Nothing => Type::Nothing,
            Primitive::Int(_) => Type::Int,
            Primitive::Range(range) => {
                let (left_value, left_inclusion) = &range.from;
                let (right_value, right_inclusion) = &range.to;
                let left_type = (Type::from_primitive(left_value), *left_inclusion);
                let right_type = (Type::from_primitive(right_value), *right_inclusion);

                let range = RangeType::new(left_type, right_type);
                Type::Range(Box::new(range))
            }
            Primitive::Decimal(_) => Type::Decimal,
            Primitive::Bytes(_) => Type::Bytesize,
            Primitive::String(_) => Type::String,
            Primitive::Line(_) => Type::Line,
            Primitive::ColumnPath(_) => Type::ColumnPath,
            Primitive::Pattern(_) => Type::Pattern,
            Primitive::Boolean(_) => Type::Boolean,
            Primitive::Date(_) => Type::Date,
            Primitive::Duration(_) => Type::Duration,
            Primitive::Path(_) => Type::Path,
            Primitive::Binary(_) => Type::Binary,
            Primitive::BeginningOfStream => Type::BeginningOfStream,
            Primitive::EndOfStream => Type::EndOfStream,
        }
    }

    /// Computes the row type of a dictionary: one (column, type) entry per
    /// dictionary entry, keyed by the string column name.
    pub fn from_dictionary(dictionary: &Dictionary) -> Type {
        let mut map = BTreeMap::new();

        for (key, value) in dictionary.entries.iter() {
            let column = Column::String(key.clone());
            map.insert(column, Type::from_value(value));
        }

        Type::Row(Row { map })
    }

    /// Computes the table type of a sequence of values: the type of each row,
    /// in order.
    pub fn from_table<'a>(table: impl IntoIterator<Item = &'a Value>) -> Type {
        let mut vec = vec![];

        for item in table.into_iter() {
            vec.push(Type::from_value(item))
        }

        Type::Table(vec)
    }

    /// Computes the structural type of any value, dispatching on its
    /// untagged representation.
    pub fn from_value<'a>(value: impl Into<&'a UntaggedValue>) -> Type {
        match value.into() {
            UntaggedValue::Primitive(p) => Type::from_primitive(p),
            UntaggedValue::Row(row) => Type::from_dictionary(row),
            UntaggedValue::Table(table) => Type::from_table(table.iter()),
            UntaggedValue::Error(_) => Type::Error,
            UntaggedValue::Block(_) => Type::Block,
        }
    }
}
impl PrettyDebug for Type {
    /// Renders the type as a debug document: simple types become a kind
    /// keyword; ranges show their brackets ("[" / "(" by inclusion); rows list
    /// their columns; tables group identical row types and show which row
    /// indices share each shape.
    fn pretty(&self) -> DebugDocBuilder {
        match self {
            Type::Nothing => ty("nothing"),
            Type::Int => ty("integer"),
            Type::Range(range) => {
                let (left, left_inclusion) = &range.from;
                let (right, right_inclusion) = &range.to;

                // Interval-notation brackets: inclusive bounds use [ ],
                // exclusive bounds use ( ).
                let left_bracket = b::delimiter(match left_inclusion {
                    RangeInclusion::Exclusive => "(",
                    RangeInclusion::Inclusive => "[",
                });

                let right_bracket = b::delimiter(match right_inclusion {
                    RangeInclusion::Exclusive => ")",
                    RangeInclusion::Inclusive => "]",
                });

                b::typed(
                    "range",
                    (left_bracket
                        + left.pretty()
                        + b::operator(",")
                        + b::space()
                        + right.pretty()
                        + right_bracket)
                        .group(),
                )
            }
            Type::Decimal => ty("decimal"),
            Type::Bytesize => ty("bytesize"),
            Type::String => ty("string"),
            Type::Line => ty("line"),
            Type::ColumnPath => ty("column-path"),
            Type::Pattern => ty("pattern"),
            Type::Boolean => ty("boolean"),
            Type::Date => ty("date"),
            Type::Duration => ty("duration"),
            Type::Path => ty("path"),
            Type::Binary => ty("binary"),
            Type::Error => b::error("error"),
            Type::BeginningOfStream => b::keyword("beginning-of-stream"),
            Type::EndOfStream => b::keyword("end-of-stream"),
            // A row renders as `row key1(type) key2(type) ...`.
            Type::Row(row) => (b::kind("row")
                + b::space()
                + b::intersperse(
                    row.map.iter().map(|(key, ty)| {
                        (b::key(match key {
                            Column::String(string) => string.clone(),
                            Column::Value => "<value>".to_string(),
                        }) + b::delimit("(", ty.pretty(), ")").into_kind())
                        .nest()
                    }),
                    b::space(),
                )
                .nest())
            .nest(),
            Type::Table(table) => {
                // Group consecutive rows that share the same rendered type;
                // each group remembers the row-index ranges it covers.
                let mut group: Group<DebugDoc, Vec<(usize, usize)>> = Group::new();

                for (i, item) in table.iter().enumerate() {
                    group.add(item.to_doc(), i);
                }

                (b::kind("table") + b::space() + b::keyword("of")).group()
                    + b::space()
                    + (if group.len() == 1 {
                        // Homogeneous table: show the single row type alone.
                        let (doc, _) = group.into_iter().nth(0).unwrap();
                        DebugDocBuilder::from_doc(doc)
                    } else {
                        // Heterogeneous table: show each shape prefixed by the
                        // row ranges that have it, e.g. `0 to 2, 5: <shape>`.
                        b::intersperse(
                            group.into_iter().map(|(doc, rows)| {
                                (b::intersperse(
                                    rows.iter().map(|(from, to)| {
                                        if from == to {
                                            b::description(from)
                                        } else {
                                            (b::description(from)
                                                + b::space()
                                                + b::keyword("to")
                                                + b::space()
                                                + b::description(to))
                                            .group()
                                        }
                                    }),
                                    b::description(", "),
                                ) + b::description(":")
                                    + b::space()
                                    + DebugDocBuilder::from_doc(doc))
                                .nest()
                            }),
                            b::space(),
                        )
                    })
            }
            Type::Block => ty("block"),
        }
    }
}
/// Pairs a row column with its recorded type, purely for pretty-printing
/// a single `column(type)` entry.
#[derive(Debug, new)]
struct DebugEntry<'a> {
    /// The column being described.
    key: &'a Column,
    /// The structural type recorded for that column.
    value: &'a Type,
}
/// Renders a single `column(type)` entry, e.g. `name(string)`.
impl<'a> PrettyDebug for DebugEntry<'a> {
    fn pretty(&self) -> DebugDocBuilder {
        (b::key(match self.key {
            Column::String(string) => string.clone(),
            // `to_string()` instead of `format!` on a plain literal
            // (clippy::useless_format); matches how `<value>` is produced
            // elsewhere in this file.
            Column::Value => "<value>".to_string(),
        }) + b::delimit("(", self.value.pretty(), ")").into_kind())
    }
}
/// Shorthand for building a "kind" document from any displayable type name.
fn ty(name: impl std::fmt::Display) -> DebugDocBuilder {
    // `to_string()` invokes the `Display` impl directly; wrapping it in
    // `format!("{}", ..)` is redundant (clippy::useless_format).
    b::kind(name.to_string())
}
/// An accumulator that can absorb merged items; used by `Group` to bucket
/// table rows that share the same shape.
pub trait GroupedValue: Debug + Clone {
    /// The item type folded into this accumulator.
    type Item;
    /// Creates an empty accumulator.
    fn new() -> Self;
    /// Folds one item into the accumulator.
    fn merge(&mut self, value: Self::Item);
}
/// Groups consecutive indices into inclusive `(start, end)` runs: merging
/// 0, 1, 2, 5 produces `[(0, 2), (5, 5)]`.
impl GroupedValue for Vec<(usize, usize)> {
    type Item = usize;

    fn new() -> Vec<(usize, usize)> {
        vec![]
    }

    /// Extends the last run when `new_value` is its immediate successor;
    /// otherwise starts a fresh single-element run.
    fn merge(&mut self, new_value: usize) {
        match self.last_mut() {
            // Compare as `value.1 + 1 == new_value` rather than the
            // original `value.1 == new_value - 1`: the subtraction
            // underflows (panicking in debug builds) when `new_value` is 0
            // and a run already exists.
            Some(value) if value.1 + 1 == new_value => {
                value.1 = new_value;
            }
            _ => self.push((new_value, new_value)),
        }
    }
}
/// An insertion-ordered map from keys to grouped accumulators.
#[derive(Debug)]
pub struct Group<K: Debug + Eq + Hash, V: GroupedValue> {
    /// Backing map; `IndexMap` preserves first-insertion order of keys.
    values: indexmap::IndexMap<K, V>,
}
impl<K, G> Group<K, G>
where
    K: Debug + Eq + Hash,
    G: GroupedValue,
{
    /// Creates an empty group.
    pub fn new() -> Group<K, G> {
        Group {
            values: indexmap::IndexMap::default(),
        }
    }

    /// Number of distinct keys seen so far.
    pub fn len(&self) -> usize {
        self.values.len()
    }

    /// True when no values have been added yet; companion to `len`
    /// (clippy::len_without_is_empty).
    pub fn is_empty(&self) -> bool {
        self.values.is_empty()
    }

    /// Consumes the group, yielding `(key, accumulator)` pairs in
    /// insertion order.
    pub fn into_iter(self) -> impl Iterator<Item = (K, G)> {
        self.values.into_iter()
    }

    /// Merges `value` into the accumulator for `key`, creating a fresh
    /// accumulator on first sight of the key.
    pub fn add(&mut self, key: impl Into<K>, value: impl Into<G::Item>) {
        let key = key.into();
        // Convert once up front. The original converted here and then
        // called `.into()` again on merge — a redundant identity
        // conversion — and performed a separate lookup-then-insert; the
        // entry API does a single lookup.
        let value = value.into();
        self.values.entry(key).or_insert_with(G::new).merge(value);
    }
}
/// Identifies a table column: either a named column or the special
/// anonymous column used for bare (non-row) values.
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub enum Column {
    /// A column addressed by name.
    String(String),
    /// The anonymous `<value>` column holding a bare value.
    Value,
}
/// Builds a named column from an owned `String`.
///
/// Implemented as `From` rather than `Into`: the standard blanket impl
/// derives `Into<Column> for String` from this automatically, and callers
/// additionally gain `Column::from(..)`.
impl From<String> for Column {
    fn from(s: String) -> Column {
        Column::String(s)
    }
}
/// Builds a named column from a borrowed `String` (clones the contents).
///
/// Implemented as `From` rather than `Into`: the standard blanket impl
/// supplies the matching `Into<Column>` conversion for free.
impl From<&String> for Column {
    fn from(s: &String) -> Column {
        Column::String(s.clone())
    }
}
/// Builds a named column from a string slice (allocates an owned copy).
///
/// Implemented as `From` rather than `Into`: the standard blanket impl
/// supplies the matching `Into<Column>` conversion for free.
impl From<&str> for Column {
    fn from(s: &str) -> Column {
        Column::String(s.to_string())
    }
}

View File

@ -4,6 +4,7 @@ mod debug;
pub mod dict; pub mod dict;
pub mod evaluate; pub mod evaluate;
pub mod primitive; pub mod primitive;
pub mod range;
mod serde_bigdecimal; mod serde_bigdecimal;
mod serde_bigint; mod serde_bigint;
@ -11,11 +12,12 @@ use crate::type_name::{ShellTypeName, SpannedTypeName};
use crate::value::dict::Dictionary; use crate::value::dict::Dictionary;
use crate::value::evaluate::Evaluate; use crate::value::evaluate::Evaluate;
use crate::value::primitive::Primitive; use crate::value::primitive::Primitive;
use crate::value::range::{Range, RangeInclusion};
use crate::{ColumnPath, PathMember}; use crate::{ColumnPath, PathMember};
use bigdecimal::BigDecimal; use bigdecimal::BigDecimal;
use indexmap::IndexMap; use indexmap::IndexMap;
use nu_errors::ShellError; use nu_errors::ShellError;
use nu_source::{AnchorLocation, HasSpan, Span, Tag}; use nu_source::{AnchorLocation, HasSpan, Span, Spanned, Tag};
use num_bigint::BigInt; use num_bigint::BigInt;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::path::PathBuf; use std::path::PathBuf;
@ -156,6 +158,13 @@ impl UntaggedValue {
UntaggedValue::Primitive(Primitive::Binary(binary)) UntaggedValue::Primitive(Primitive::Binary(binary))
} }
pub fn range(
left: (Spanned<Primitive>, RangeInclusion),
right: (Spanned<Primitive>, RangeInclusion),
) -> UntaggedValue {
UntaggedValue::Primitive(Primitive::Range(Box::new(Range::new(left, right))))
}
pub fn boolean(s: impl Into<bool>) -> UntaggedValue { pub fn boolean(s: impl Into<bool>) -> UntaggedValue {
UntaggedValue::Primitive(Primitive::Boolean(s.into())) UntaggedValue::Primitive(Primitive::Boolean(s.into()))
} }
@ -224,6 +233,23 @@ impl Value {
_ => Err(ShellError::type_error("Path", self.spanned_type_name())), _ => Err(ShellError::type_error("Path", self.spanned_type_name())),
} }
} }
pub fn as_primitive(&self) -> Result<Primitive, ShellError> {
match &self.value {
UntaggedValue::Primitive(primitive) => Ok(primitive.clone()),
_ => Err(ShellError::type_error(
"Primitive",
self.spanned_type_name(),
)),
}
}
pub fn as_u64(&self) -> Result<u64, ShellError> {
match &self.value {
UntaggedValue::Primitive(primitive) => primitive.as_u64(self.tag.span),
_ => Err(ShellError::type_error("integer", self.spanned_type_name())),
}
}
} }
impl Into<UntaggedValue> for &str { impl Into<UntaggedValue> for &str {

View File

@ -28,6 +28,7 @@ impl PrettyType for Primitive {
match self { match self {
Primitive::Nothing => ty("nothing"), Primitive::Nothing => ty("nothing"),
Primitive::Int(_) => ty("integer"), Primitive::Int(_) => ty("integer"),
Primitive::Range(_) => ty("range"),
Primitive::Decimal(_) => ty("decimal"), Primitive::Decimal(_) => ty("decimal"),
Primitive::Bytes(_) => ty("bytesize"), Primitive::Bytes(_) => ty("bytesize"),
Primitive::String(_) => ty("string"), Primitive::String(_) => ty("string"),
@ -51,6 +52,21 @@ impl PrettyDebug for Primitive {
Primitive::Nothing => b::primitive("nothing"), Primitive::Nothing => b::primitive("nothing"),
Primitive::Int(int) => prim(format_args!("{}", int)), Primitive::Int(int) => prim(format_args!("{}", int)),
Primitive::Decimal(decimal) => prim(format_args!("{}", decimal)), Primitive::Decimal(decimal) => prim(format_args!("{}", decimal)),
Primitive::Range(range) => {
let (left, left_inclusion) = &range.from;
let (right, right_inclusion) = &range.to;
b::typed(
"range",
(left_inclusion.debug_left_bracket()
+ left.pretty()
+ b::operator(",")
+ b::space()
+ right.pretty()
+ right_inclusion.debug_right_bracket())
.group(),
)
}
Primitive::Bytes(bytes) => primitive_doc(bytes, "bytesize"), Primitive::Bytes(bytes) => primitive_doc(bytes, "bytesize"),
Primitive::String(string) => prim(string), Primitive::String(string) => prim(string),
Primitive::Line(string) => prim(string), Primitive::Line(string) => prim(string),

View File

@ -1,12 +1,14 @@
use crate::type_name::ShellTypeName; use crate::type_name::ShellTypeName;
use crate::value::column_path::ColumnPath; use crate::value::column_path::ColumnPath;
use crate::value::range::Range;
use crate::value::{serde_bigdecimal, serde_bigint}; use crate::value::{serde_bigdecimal, serde_bigint};
use bigdecimal::BigDecimal; use bigdecimal::BigDecimal;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use chrono_humanize::Humanize; use chrono_humanize::Humanize;
use nu_source::PrettyDebug; use nu_errors::{ExpectedRange, ShellError};
use nu_source::{PrettyDebug, Span, SpannedItem};
use num_bigint::BigInt; use num_bigint::BigInt;
use num_traits::cast::FromPrimitive; use num_traits::cast::{FromPrimitive, ToPrimitive};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::path::PathBuf; use std::path::PathBuf;
@ -25,6 +27,7 @@ pub enum Primitive {
Boolean(bool), Boolean(bool),
Date(DateTime<Utc>), Date(DateTime<Utc>),
Duration(u64), // Duration in seconds Duration(u64), // Duration in seconds
Range(Box<Range>),
Path(PathBuf), Path(PathBuf),
#[serde(with = "serde_bytes")] #[serde(with = "serde_bytes")]
Binary(Vec<u8>), Binary(Vec<u8>),
@ -34,6 +37,25 @@ pub enum Primitive {
EndOfStream, EndOfStream,
} }
impl Primitive {
pub fn as_u64(&self, span: Span) -> Result<u64, ShellError> {
match self {
Primitive::Int(int) => match int.to_u64() {
None => Err(ShellError::range_error(
ExpectedRange::U64,
&format!("{}", int).spanned(span),
"converting an integer into a 64-bit integer",
)),
Some(num) => Ok(num),
},
other => Err(ShellError::type_error(
"integer",
other.type_name().spanned(span),
)),
}
}
}
impl From<BigDecimal> for Primitive { impl From<BigDecimal> for Primitive {
fn from(decimal: BigDecimal) -> Primitive { fn from(decimal: BigDecimal) -> Primitive {
Primitive::Decimal(decimal) Primitive::Decimal(decimal)
@ -51,6 +73,7 @@ impl ShellTypeName for Primitive {
match self { match self {
Primitive::Nothing => "nothing", Primitive::Nothing => "nothing",
Primitive::Int(_) => "integer", Primitive::Int(_) => "integer",
Primitive::Range(_) => "range",
Primitive::Decimal(_) => "decimal", Primitive::Decimal(_) => "decimal",
Primitive::Bytes(_) => "bytes", Primitive::Bytes(_) => "bytes",
Primitive::String(_) => "string", Primitive::String(_) => "string",
@ -91,6 +114,11 @@ pub fn format_primitive(primitive: &Primitive, field_name: Option<&String>) -> S
Primitive::Duration(sec) => format_duration(*sec), Primitive::Duration(sec) => format_duration(*sec),
Primitive::Int(i) => i.to_string(), Primitive::Int(i) => i.to_string(),
Primitive::Decimal(decimal) => decimal.to_string(), Primitive::Decimal(decimal) => decimal.to_string(),
Primitive::Range(range) => format!(
"{}..{}",
format_primitive(&range.from.0.item, None),
format_primitive(&range.to.0.item, None)
),
Primitive::Pattern(s) => s.to_string(), Primitive::Pattern(s) => s.to_string(),
Primitive::String(s) => s.to_owned(), Primitive::String(s) => s.to_owned(),
Primitive::Line(s) => s.to_owned(), Primitive::Line(s) => s.to_owned(),
@ -125,7 +153,8 @@ pub fn format_primitive(primitive: &Primitive, field_name: Option<&String>) -> S
Primitive::Date(d) => d.humanize().to_string(), Primitive::Date(d) => d.humanize().to_string(),
} }
} }
fn format_duration(sec: u64) -> String {
pub fn format_duration(sec: u64) -> String {
let (minutes, seconds) = (sec / 60, sec % 60); let (minutes, seconds) = (sec / 60, sec % 60);
let (hours, minutes) = (minutes / 60, minutes % 60); let (hours, minutes) = (minutes / 60, minutes % 60);
let (days, hours) = (hours / 24, hours % 24); let (days, hours) = (hours / 24, hours % 24);

View File

@ -0,0 +1,32 @@
use crate::value::Primitive;
use derive_new::new;
use nu_source::{b, DebugDocBuilder, Spanned};
use serde::{Deserialize, Serialize};
#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Eq, Ord, Serialize, Deserialize, Hash)]
pub enum RangeInclusion {
Inclusive,
Exclusive,
}
impl RangeInclusion {
pub fn debug_left_bracket(&self) -> DebugDocBuilder {
b::delimiter(match self {
RangeInclusion::Exclusive => "(",
RangeInclusion::Inclusive => "[",
})
}
pub fn debug_right_bracket(&self) -> DebugDocBuilder {
b::delimiter(match self {
RangeInclusion::Exclusive => ")",
RangeInclusion::Inclusive => "]",
})
}
}
#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Serialize, Deserialize, new)]
pub struct Range {
pub from: (Spanned<Primitive>, RangeInclusion),
pub to: (Spanned<Primitive>, RangeInclusion),
}

View File

@ -220,10 +220,7 @@ impl<T>
nom_locate::LocatedSpanEx<T, u64>, nom_locate::LocatedSpanEx<T, u64>,
), ),
) -> Span { ) -> Span {
Span { Span::new(input.0.offset, input.1.offset)
start: input.0.offset,
end: input.1.offset,
}
} }
} }
@ -235,10 +232,7 @@ impl From<(usize, usize)> for Span {
impl From<&std::ops::Range<usize>> for Span { impl From<&std::ops::Range<usize>> for Span {
fn from(input: &std::ops::Range<usize>) -> Span { fn from(input: &std::ops::Range<usize>) -> Span {
Span { Span::new(input.start, input.end)
start: input.start,
end: input.end,
}
} }
} }
@ -321,10 +315,7 @@ impl Tag {
pub fn for_char(pos: usize, anchor: AnchorLocation) -> Tag { pub fn for_char(pos: usize, anchor: AnchorLocation) -> Tag {
Tag { Tag {
anchor: Some(anchor), anchor: Some(anchor),
span: Span { span: Span::new(pos, pos + 1),
start: pos,
end: pos + 1,
},
} }
} }
@ -528,12 +519,20 @@ impl Span {
impl language_reporting::ReportingSpan for Span { impl language_reporting::ReportingSpan for Span {
fn with_start(&self, start: usize) -> Self { fn with_start(&self, start: usize) -> Self {
if self.end < start {
Span::new(start, start)
} else {
Span::new(start, self.end) Span::new(start, self.end)
} }
}
fn with_end(&self, end: usize) -> Self { fn with_end(&self, end: usize) -> Self {
if end < self.start {
Span::new(end, end)
} else {
Span::new(self.start, end) Span::new(self.start, end)
} }
}
fn start(&self) -> usize { fn start(&self) -> usize {
self.start self.start

View File

@ -447,7 +447,9 @@ pub fn value_to_json_value(v: &Value) -> Result<serde_json::Value, ShellError> {
UntaggedValue::Table(l) => serde_json::Value::Array(json_list(l)?), UntaggedValue::Table(l) => serde_json::Value::Array(json_list(l)?),
UntaggedValue::Error(e) => return Err(e.clone()), UntaggedValue::Error(e) => return Err(e.clone()),
UntaggedValue::Block(_) => serde_json::Value::Null, UntaggedValue::Block(_) | UntaggedValue::Primitive(Primitive::Range(_)) => {
serde_json::Value::Null
}
UntaggedValue::Primitive(Primitive::Binary(b)) => serde_json::Value::Array( UntaggedValue::Primitive(Primitive::Binary(b)) => serde_json::Value::Array(
b.iter() b.iter()
.map(|x| { .map(|x| {

View File

@ -1,6 +1,7 @@
use crate::commands::WholeStreamCommand; use crate::commands::WholeStreamCommand;
use crate::prelude::*; use crate::prelude::*;
use nu_errors::ShellError; use nu_errors::ShellError;
use nu_macros::signature;
use nu_protocol::{Signature, SyntaxShape}; use nu_protocol::{Signature, SyntaxShape};
pub struct CD; pub struct CD;
@ -11,11 +12,17 @@ impl WholeStreamCommand for CD {
} }
fn signature(&self) -> Signature { fn signature(&self) -> Signature {
Signature::build("cd").optional( signature! {
"directory", def cd {
SyntaxShape::Path, "the directory to change to"
"the directory to change to", directory(optional Path) - "the directory to change to"
) }
}
// Signature::build("cd").optional(
// "directory",
// SyntaxShape::Path,
// "the directory to change to",
// )
} }
fn usage(&self) -> &str { fn usage(&self) -> &str {

View File

@ -378,14 +378,7 @@ pub trait WholeStreamCommand: Send + Sync {
fn name(&self) -> &str; fn name(&self) -> &str;
fn signature(&self) -> Signature { fn signature(&self) -> Signature {
Signature { Signature::new(self.name()).desc(self.usage()).filter()
name: self.name().to_string(),
usage: self.usage().to_string(),
positional: vec![],
rest_positional: None,
named: indexmap::IndexMap::new(),
is_filter: true,
}
} }
fn usage(&self) -> &str; fn usage(&self) -> &str;
@ -405,14 +398,7 @@ pub trait PerItemCommand: Send + Sync {
fn name(&self) -> &str; fn name(&self) -> &str;
fn signature(&self) -> Signature { fn signature(&self) -> Signature {
Signature { Signature::new(self.name()).desc(self.usage()).filter()
name: self.name().to_string(),
usage: self.usage().to_string(),
positional: vec![],
rest_positional: None,
named: indexmap::IndexMap::new(),
is_filter: true,
}
} }
fn usage(&self) -> &str; fn usage(&self) -> &str;

View File

@ -1,5 +1,6 @@
use crate::commands::WholeStreamCommand; use crate::commands::WholeStreamCommand;
use crate::context::CommandRegistry; use crate::context::CommandRegistry;
use crate::deserializer::NumericRange;
use crate::prelude::*; use crate::prelude::*;
use nu_errors::ShellError; use nu_errors::ShellError;
use nu_protocol::{Signature, SyntaxShape}; use nu_protocol::{Signature, SyntaxShape};
@ -7,7 +8,7 @@ use nu_source::Tagged;
#[derive(Deserialize)] #[derive(Deserialize)]
struct RangeArgs { struct RangeArgs {
area: Tagged<String>, area: Tagged<NumericRange>,
} }
pub struct Range; pub struct Range;
@ -20,7 +21,7 @@ impl WholeStreamCommand for Range {
fn signature(&self) -> Signature { fn signature(&self) -> Signature {
Signature::build("range").required( Signature::build("range").required(
"rows ", "rows ",
SyntaxShape::Any, SyntaxShape::Range,
"range of rows to return: Eg) 4..7 (=> from 4 to 7)", "range of rows to return: Eg) 4..7 (=> from 4 to 7)",
) )
} }
@ -39,48 +40,14 @@ impl WholeStreamCommand for Range {
} }
fn range( fn range(
RangeArgs { area: rows }: RangeArgs, RangeArgs { area }: RangeArgs,
RunnableContext { input, name, .. }: RunnableContext, RunnableContext { input, name: _, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
match rows.item.find('.') { let range = area.item;
Some(value) => { let (from, _) = range.from;
let (first, last) = rows.item.split_at(value); let (to, _) = range.to;
let first = match first.parse::<u64>() {
Ok(postion) => postion, return Ok(OutputStream::from_input(
Err(_) => { input.values.skip(*from).take(*to - *from + 1),
if first == "" {
0
} else {
return Err(ShellError::labeled_error(
"no correct start of range",
"'from' needs to be an Integer or empty",
name,
)); ));
}
}
};
let last = match last.trim_start_matches('.').parse::<u64>() {
Ok(postion) => postion,
Err(_) => {
if last == ".." {
std::u64::MAX - 1
} else {
return Err(ShellError::labeled_error(
"no correct end of range",
"'to' needs to be an Integer or empty",
name,
));
}
}
};
Ok(OutputStream::from_input(
input.values.skip(first).take(last - first + 1),
))
}
None => Err(ShellError::labeled_error(
"No correct formatted range found",
"format: <from>..<to>",
name,
)),
}
} }

View File

@ -73,7 +73,7 @@ pub fn value_to_bson_value(v: &Value) -> Result<Bson, ShellError> {
.map(|x| value_to_bson_value(x)) .map(|x| value_to_bson_value(x))
.collect::<Result<_, _>>()?, .collect::<Result<_, _>>()?,
), ),
UntaggedValue::Block(_) => Bson::Null, UntaggedValue::Block(_) | UntaggedValue::Primitive(Primitive::Range(_)) => Bson::Null,
UntaggedValue::Error(e) => return Err(e.clone()), UntaggedValue::Error(e) => return Err(e.clone()),
UntaggedValue::Primitive(Primitive::Binary(b)) => { UntaggedValue::Primitive(Primitive::Binary(b)) => {
Bson::Binary(BinarySubtype::Generic, b.clone()) Bson::Binary(BinarySubtype::Generic, b.clone())

View File

@ -76,7 +76,9 @@ pub fn value_to_json_value(v: &Value) -> Result<serde_json::Value, ShellError> {
UntaggedValue::Table(l) => serde_json::Value::Array(json_list(l)?), UntaggedValue::Table(l) => serde_json::Value::Array(json_list(l)?),
UntaggedValue::Error(e) => return Err(e.clone()), UntaggedValue::Error(e) => return Err(e.clone()),
UntaggedValue::Block(_) => serde_json::Value::Null, UntaggedValue::Block(_) | UntaggedValue::Primitive(Primitive::Range(_)) => {
serde_json::Value::Null
}
UntaggedValue::Primitive(Primitive::Binary(b)) => serde_json::Value::Array( UntaggedValue::Primitive(Primitive::Binary(b)) => serde_json::Value::Array(
b.iter() b.iter()
.map(|x| { .map(|x| {

View File

@ -100,9 +100,10 @@ fn nu_value_to_sqlite_string(v: Value) -> String {
Primitive::Date(d) => format!("'{}'", d), Primitive::Date(d) => format!("'{}'", d),
Primitive::Path(p) => format!("'{}'", p.display().to_string().replace("'", "''")), Primitive::Path(p) => format!("'{}'", p.display().to_string().replace("'", "''")),
Primitive::Binary(u) => format!("x'{}'", encode(u)), Primitive::Binary(u) => format!("x'{}'", encode(u)),
Primitive::BeginningOfStream | Primitive::EndOfStream | Primitive::ColumnPath(_) => { Primitive::BeginningOfStream
"NULL".into() | Primitive::EndOfStream
} | Primitive::ColumnPath(_)
| Primitive::Range(_) => "NULL".into(),
}, },
_ => "NULL".into(), _ => "NULL".into(),
} }

View File

@ -69,6 +69,7 @@ pub fn value_to_toml_value(v: &Value) -> Result<toml::Value, ShellError> {
UntaggedValue::Table(l) => toml::Value::Array(collect_values(l)?), UntaggedValue::Table(l) => toml::Value::Array(collect_values(l)?),
UntaggedValue::Error(e) => return Err(e.clone()), UntaggedValue::Error(e) => return Err(e.clone()),
UntaggedValue::Block(_) => toml::Value::String("<Block>".to_string()), UntaggedValue::Block(_) => toml::Value::String("<Block>".to_string()),
UntaggedValue::Primitive(Primitive::Range(_)) => toml::Value::String("<Range>".to_string()),
UntaggedValue::Primitive(Primitive::Binary(b)) => { UntaggedValue::Primitive(Primitive::Binary(b)) => {
toml::Value::Array(b.iter().map(|x| toml::Value::Integer(*x as i64)).collect()) toml::Value::Array(b.iter().map(|x| toml::Value::Integer(*x as i64)).collect())
} }

View File

@ -85,7 +85,9 @@ pub fn value_to_yaml_value(v: &Value) -> Result<serde_yaml::Value, ShellError> {
serde_yaml::Value::Sequence(out) serde_yaml::Value::Sequence(out)
} }
UntaggedValue::Error(e) => return Err(e.clone()), UntaggedValue::Error(e) => return Err(e.clone()),
UntaggedValue::Block(_) => serde_yaml::Value::Null, UntaggedValue::Block(_) | UntaggedValue::Primitive(Primitive::Range(_)) => {
serde_yaml::Value::Null
}
UntaggedValue::Primitive(Primitive::Binary(b)) => serde_yaml::Value::Sequence( UntaggedValue::Primitive(Primitive::Binary(b)) => serde_yaml::Value::Sequence(
b.iter() b.iter()
.map(|x| serde_yaml::Value::Number(serde_yaml::Number::from(*x))) .map(|x| serde_yaml::Value::Number(serde_yaml::Number::from(*x)))

View File

@ -7,7 +7,7 @@ use chrono::{DateTime, Utc};
use derive_new::new; use derive_new::new;
use log::trace; use log::trace;
use nu_errors::ShellError; use nu_errors::ShellError;
use nu_parser::{hir, Operator}; use nu_parser::{hir, CompareOperator};
use nu_protocol::{ use nu_protocol::{
Evaluate, EvaluateTrait, Primitive, Scope, ShellTypeName, SpannedTypeName, TaggedDictBuilder, Evaluate, EvaluateTrait, Primitive, Scope, ShellTypeName, SpannedTypeName, TaggedDictBuilder,
UntaggedValue, Value, UntaggedValue, Value,
@ -23,7 +23,7 @@ use std::time::SystemTime;
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Clone, new, Serialize)] #[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Clone, new, Serialize)]
pub struct Operation { pub struct Operation {
pub(crate) left: Value, pub(crate) left: Value,
pub(crate) operator: Operator, pub(crate) operator: CompareOperator,
pub(crate) right: Value, pub(crate) right: Value,
} }

View File

@ -1,207 +1,24 @@
use crate::prelude::*; use crate::prelude::*;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use chrono_humanize::Humanize; use chrono_humanize::Humanize;
use derive_new::new;
use indexmap::IndexMap; use indexmap::IndexMap;
use nu_errors::ShellError; use nu_errors::ShellError;
use nu_protocol::RangeInclusion;
use nu_protocol::{ use nu_protocol::{
format_primitive, ColumnPath, Dictionary, Evaluate, Primitive, ShellTypeName, format_primitive, ColumnPath, Dictionary, Evaluate, Primitive, ShellTypeName,
TaggedDictBuilder, UntaggedValue, Value, TaggedDictBuilder, UntaggedValue, Value,
}; };
use nu_source::{b, DebugDoc, PrettyDebug}; use nu_source::{b, PrettyDebug};
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::fmt::Debug; use std::fmt::Debug;
use std::hash::Hash; use std::hash::Hash;
use std::io::Write; use std::io::Write;
use std::path::PathBuf; use std::path::PathBuf;
/**
This file describes the structural types of the nushell system.
Its primary purpose today is to identify "equivalent" values for the purpose
of merging rows into a single table or identify rows in a table that have the
same shape for reflection.
It also serves as the primary vehicle for pretty-printing.
*/
#[allow(unused)]
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum TypeShape { pub struct InlineRange {
Nothing, from: (InlineShape, RangeInclusion),
Int, to: (InlineShape, RangeInclusion),
Decimal,
Bytesize,
String,
Line,
ColumnPath,
Pattern,
Boolean,
Date,
Duration,
Path,
Binary,
Row(BTreeMap<Column, TypeShape>),
Table(Vec<TypeShape>),
// TODO: Block arguments
Block,
// TODO: Error type
Error,
// Stream markers (used as bookend markers rather than actual values)
BeginningOfStream,
EndOfStream,
}
impl TypeShape {
pub fn from_primitive(primitive: &Primitive) -> TypeShape {
match primitive {
Primitive::Nothing => TypeShape::Nothing,
Primitive::Int(_) => TypeShape::Int,
Primitive::Decimal(_) => TypeShape::Decimal,
Primitive::Bytes(_) => TypeShape::Bytesize,
Primitive::String(_) => TypeShape::String,
Primitive::Line(_) => TypeShape::Line,
Primitive::ColumnPath(_) => TypeShape::ColumnPath,
Primitive::Pattern(_) => TypeShape::Pattern,
Primitive::Boolean(_) => TypeShape::Boolean,
Primitive::Date(_) => TypeShape::Date,
Primitive::Duration(_) => TypeShape::Duration,
Primitive::Path(_) => TypeShape::Path,
Primitive::Binary(_) => TypeShape::Binary,
Primitive::BeginningOfStream => TypeShape::BeginningOfStream,
Primitive::EndOfStream => TypeShape::EndOfStream,
}
}
pub fn from_dictionary(dictionary: &Dictionary) -> TypeShape {
let mut map = BTreeMap::new();
for (key, value) in dictionary.entries.iter() {
let column = Column::String(key.clone());
map.insert(column, TypeShape::from_value(value));
}
TypeShape::Row(map)
}
pub fn from_table<'a>(table: impl IntoIterator<Item = &'a Value>) -> TypeShape {
let mut vec = vec![];
for item in table.into_iter() {
vec.push(TypeShape::from_value(item))
}
TypeShape::Table(vec)
}
pub fn from_value<'a>(value: impl Into<&'a UntaggedValue>) -> TypeShape {
match value.into() {
UntaggedValue::Primitive(p) => TypeShape::from_primitive(p),
UntaggedValue::Row(row) => TypeShape::from_dictionary(row),
UntaggedValue::Table(table) => TypeShape::from_table(table.iter()),
UntaggedValue::Error(_) => TypeShape::Error,
UntaggedValue::Block(_) => TypeShape::Block,
}
}
}
impl PrettyDebug for TypeShape {
fn pretty(&self) -> DebugDocBuilder {
match self {
TypeShape::Nothing => ty("nothing"),
TypeShape::Int => ty("integer"),
TypeShape::Decimal => ty("decimal"),
TypeShape::Bytesize => ty("bytesize"),
TypeShape::String => ty("string"),
TypeShape::Line => ty("line"),
TypeShape::ColumnPath => ty("column-path"),
TypeShape::Pattern => ty("pattern"),
TypeShape::Boolean => ty("boolean"),
TypeShape::Date => ty("date"),
TypeShape::Duration => ty("duration"),
TypeShape::Path => ty("path"),
TypeShape::Binary => ty("binary"),
TypeShape::Error => b::error("error"),
TypeShape::BeginningOfStream => b::keyword("beginning-of-stream"),
TypeShape::EndOfStream => b::keyword("end-of-stream"),
TypeShape::Row(row) => (b::kind("row")
+ b::space()
+ b::intersperse(
row.iter().map(|(key, ty)| {
(b::key(match key {
Column::String(string) => string.clone(),
Column::Value => "<value>".to_string(),
}) + b::delimit("(", ty.pretty(), ")").into_kind())
.nest()
}),
b::space(),
)
.nest())
.nest(),
TypeShape::Table(table) => {
let mut group: Group<DebugDoc, Vec<(usize, usize)>> = Group::new();
for (i, item) in table.iter().enumerate() {
group.add(item.to_doc(), i);
}
(b::kind("table") + b::space() + b::keyword("of")).group()
+ b::space()
+ (if group.len() == 1 {
let (doc, _) = group.into_iter().nth(0).unwrap();
DebugDocBuilder::from_doc(doc)
} else {
b::intersperse(
group.into_iter().map(|(doc, rows)| {
(b::intersperse(
rows.iter().map(|(from, to)| {
if from == to {
b::description(from)
} else {
(b::description(from)
+ b::space()
+ b::keyword("to")
+ b::space()
+ b::description(to))
.group()
}
}),
b::description(", "),
) + b::description(":")
+ b::space()
+ DebugDocBuilder::from_doc(doc))
.nest()
}),
b::space(),
)
})
}
TypeShape::Block => ty("block"),
}
}
}
#[derive(Debug, new)]
struct DebugEntry<'a> {
key: &'a Column,
value: &'a TypeShape,
}
impl<'a> PrettyDebug for DebugEntry<'a> {
fn pretty(&self) -> DebugDocBuilder {
(b::key(match self.key {
Column::String(string) => string.clone(),
Column::Value => "<value>".to_owned(),
}) + b::delimit("(", self.value.pretty(), ")").into_kind())
}
}
fn ty(name: impl std::fmt::Display) -> DebugDocBuilder {
b::kind(format!("{}", name))
} }
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
@ -209,6 +26,7 @@ pub enum InlineShape {
Nothing, Nothing,
Int(BigInt), Int(BigInt),
Decimal(BigDecimal), Decimal(BigDecimal),
Range(Box<InlineRange>),
Bytesize(u64), Bytesize(u64),
String(String), String(String),
Line(String), Line(String),
@ -243,6 +61,15 @@ impl InlineShape {
match primitive { match primitive {
Primitive::Nothing => InlineShape::Nothing, Primitive::Nothing => InlineShape::Nothing,
Primitive::Int(int) => InlineShape::Int(int.clone()), Primitive::Int(int) => InlineShape::Int(int.clone()),
Primitive::Range(range) => {
let (left, left_inclusion) = &range.from;
let (right, right_inclusion) = &range.to;
InlineShape::Range(Box::new(InlineRange {
from: (InlineShape::from_primitive(left), *left_inclusion),
to: (InlineShape::from_primitive(right), *right_inclusion),
}))
}
Primitive::Decimal(decimal) => InlineShape::Decimal(decimal.clone()), Primitive::Decimal(decimal) => InlineShape::Decimal(decimal.clone()),
Primitive::Bytes(bytesize) => InlineShape::Bytesize(*bytesize), Primitive::Bytes(bytesize) => InlineShape::Bytesize(*bytesize),
Primitive::String(string) => InlineShape::String(string.clone()), Primitive::String(string) => InlineShape::String(string.clone()),
@ -314,6 +141,17 @@ impl PrettyDebug for FormatInlineShape {
InlineShape::Nothing => b::blank(), InlineShape::Nothing => b::blank(),
InlineShape::Int(int) => b::primitive(format!("{}", int)), InlineShape::Int(int) => b::primitive(format!("{}", int)),
InlineShape::Decimal(decimal) => b::primitive(format!("{}", decimal)), InlineShape::Decimal(decimal) => b::primitive(format!("{}", decimal)),
InlineShape::Range(range) => {
let (left, left_inclusion) = &range.from;
let (right, right_inclusion) = &range.to;
let op = match (left_inclusion, right_inclusion) {
(RangeInclusion::Inclusive, RangeInclusion::Exclusive) => "..",
_ => unimplemented!("No syntax for ranges that aren't inclusive on the left and exclusive on the right")
};
left.clone().format().pretty() + b::operator(op) + right.clone().format().pretty()
}
InlineShape::Bytesize(bytesize) => { InlineShape::Bytesize(bytesize) => {
let byte = byte_unit::Byte::from_bytes(*bytesize as u128); let byte = byte_unit::Byte::from_bytes(*bytesize as u128);
@ -411,51 +249,6 @@ impl GroupedValue for Vec<(usize, usize)> {
} }
} }
#[derive(Debug)]
pub struct Group<K: Debug + Eq + Hash, V: GroupedValue> {
values: indexmap::IndexMap<K, V>,
}
impl<K, G> Group<K, G>
where
K: Debug + Eq + Hash,
G: GroupedValue,
{
pub fn new() -> Group<K, G> {
Group {
values: indexmap::IndexMap::default(),
}
}
pub fn len(&self) -> usize {
self.values.len()
}
pub fn into_iter(self) -> impl Iterator<Item = (K, G)> {
self.values.into_iter()
}
pub fn add(&mut self, key: impl Into<K>, value: impl Into<G::Item>) {
let key = key.into();
let value = value.into();
let group = self.values.get_mut(&key);
match group {
None => {
self.values.insert(key, {
let mut group = G::new();
group.merge(value);
group
});
}
Some(group) => {
group.merge(value);
}
}
}
}
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)] #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum Column { pub enum Column {
String(String), String(String),

View File

@ -1,10 +1,10 @@
use crate::data::base::coerce_compare; use crate::data::base::coerce_compare;
use crate::data::base::shape::{Column, InlineShape, TypeShape}; use crate::data::base::shape::{Column, InlineShape};
use crate::data::primitive::style_primitive; use crate::data::primitive::style_primitive;
use chrono::DateTime; use chrono::DateTime;
use nu_errors::ShellError; use nu_errors::ShellError;
use nu_parser::Operator; use nu_parser::CompareOperator;
use nu_protocol::{Primitive, UntaggedValue}; use nu_protocol::{Primitive, Type, UntaggedValue};
use nu_source::{DebugDocBuilder, PrettyDebug, Tagged}; use nu_source::{DebugDocBuilder, PrettyDebug, Tagged};
pub fn date_from_str(s: Tagged<&str>) -> Result<UntaggedValue, ShellError> { pub fn date_from_str(s: Tagged<&str>) -> Result<UntaggedValue, ShellError> {
@ -22,7 +22,7 @@ pub fn date_from_str(s: Tagged<&str>) -> Result<UntaggedValue, ShellError> {
} }
pub fn compare_values( pub fn compare_values(
operator: Operator, operator: &CompareOperator,
left: &UntaggedValue, left: &UntaggedValue,
right: &UntaggedValue, right: &UntaggedValue,
) -> Result<bool, (&'static str, &'static str)> { ) -> Result<bool, (&'static str, &'static str)> {
@ -34,16 +34,15 @@ pub fn compare_values(
use std::cmp::Ordering; use std::cmp::Ordering;
let result = match (operator, ordering) { let result = match (operator, ordering) {
(Operator::Equal, Ordering::Equal) => true, (CompareOperator::Equal, Ordering::Equal) => true,
(Operator::NotEqual, Ordering::Less) | (Operator::NotEqual, Ordering::Greater) => { (CompareOperator::NotEqual, Ordering::Less)
true | (CompareOperator::NotEqual, Ordering::Greater) => true,
} (CompareOperator::LessThan, Ordering::Less) => true,
(Operator::LessThan, Ordering::Less) => true, (CompareOperator::GreaterThan, Ordering::Greater) => true,
(Operator::GreaterThan, Ordering::Greater) => true, (CompareOperator::GreaterThanOrEqual, Ordering::Greater)
(Operator::GreaterThanOrEqual, Ordering::Greater) | (CompareOperator::GreaterThanOrEqual, Ordering::Equal) => true,
| (Operator::GreaterThanOrEqual, Ordering::Equal) => true, (CompareOperator::LessThanOrEqual, Ordering::Less)
(Operator::LessThanOrEqual, Ordering::Less) | (CompareOperator::LessThanOrEqual, Ordering::Equal) => true,
| (Operator::LessThanOrEqual, Ordering::Equal) => true,
_ => false, _ => false,
}; };
@ -53,7 +52,7 @@ pub fn compare_values(
} }
pub fn format_type<'a>(value: impl Into<&'a UntaggedValue>, width: usize) -> String { pub fn format_type<'a>(value: impl Into<&'a UntaggedValue>, width: usize) -> String {
TypeShape::from_value(value.into()).colored_string(width) Type::from_value(value.into()).colored_string(width)
} }
pub fn format_leaf<'a>(value: impl Into<&'a UntaggedValue>) -> DebugDocBuilder { pub fn format_leaf<'a>(value: impl Into<&'a UntaggedValue>) -> DebugDocBuilder {

View File

@ -1,11 +1,20 @@
use log::trace; use log::trace;
use nu_errors::{CoerceInto, ShellError}; use nu_errors::{CoerceInto, ShellError};
use nu_protocol::{CallInfo, ColumnPath, Evaluate, Primitive, ShellTypeName, UntaggedValue, Value}; use nu_protocol::{
use nu_source::{HasSpan, SpannedItem, Tagged, TaggedItem}; CallInfo, ColumnPath, Evaluate, Primitive, RangeInclusion, ShellTypeName, UntaggedValue, Value,
};
use nu_source::{HasSpan, Spanned, SpannedItem, Tagged, TaggedItem};
use nu_value_ext::ValueExt; use nu_value_ext::ValueExt;
use serde::de; use serde::de;
use serde::{Deserialize, Serialize};
use std::path::PathBuf; use std::path::PathBuf;
#[derive(Copy, Clone, Deserialize, Serialize)]
pub struct NumericRange {
pub from: (Spanned<u64>, RangeInclusion),
pub to: (Spanned<u64>, RangeInclusion),
}
#[derive(Debug)] #[derive(Debug)]
pub struct DeserializerItem<'de> { pub struct DeserializerItem<'de> {
key_struct_field: Option<(String, &'de str)>, key_struct_field: Option<(String, &'de str)>,
@ -406,6 +415,25 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> {
value: UntaggedValue::Primitive(Primitive::String(string)), value: UntaggedValue::Primitive(Primitive::String(string)),
.. ..
} => visit::<Tagged<String>, _>(string.tagged(tag), name, fields, visitor), } => visit::<Tagged<String>, _>(string.tagged(tag), name, fields, visitor),
Value {
value: UntaggedValue::Primitive(Primitive::Range(range)),
..
} => {
let (left, left_inclusion) = range.from;
let (right, right_inclusion) = range.to;
let left_span = left.span;
let right_span = right.span;
let left = left.as_u64(left_span)?;
let right = right.as_u64(right_span)?;
let numeric_range = NumericRange {
from: (left.spanned(left_span), left_inclusion),
to: (right.spanned(right_span), right_inclusion),
};
visit::<Tagged<NumericRange>, _>(numeric_range.tagged(tag), name, fields, visitor)
}
other => Err(ShellError::type_error( other => Err(ShellError::type_error(
name, name,

View File

@ -6,8 +6,8 @@ use log::trace;
use nu_errors::{ArgumentError, ShellError}; use nu_errors::{ArgumentError, ShellError};
use nu_parser::hir::{self, Expression, RawExpression}; use nu_parser::hir::{self, Expression, RawExpression};
use nu_protocol::{ use nu_protocol::{
ColumnPath, Evaluate, Primitive, Scope, TaggedDictBuilder, UnspannedPathMember, UntaggedValue, ColumnPath, Evaluate, Primitive, RangeInclusion, Scope, TaggedDictBuilder, UnspannedPathMember,
Value, UntaggedValue, Value,
}; };
use nu_source::Text; use nu_source::Text;
@ -40,7 +40,7 @@ pub(crate) fn evaluate_baseline_expr(
trace!("left={:?} right={:?}", left.value, right.value); trace!("left={:?} right={:?}", left.value, right.value);
match apply_operator(**binary.op(), &left, &right) { match apply_operator(&**binary.op(), &left, &right) {
Ok(result) => Ok(result.into_value(tag)), Ok(result) => Ok(result.into_value(tag)),
Err((left_type, right_type)) => Err(ShellError::coerce_error( Err((left_type, right_type)) => Err(ShellError::coerce_error(
left_type.spanned(binary.left().span), left_type.spanned(binary.left().span),
@ -48,6 +48,26 @@ pub(crate) fn evaluate_baseline_expr(
)), )),
} }
} }
RawExpression::Range(range) => {
let left = range.left();
let right = range.right();
let left = evaluate_baseline_expr(left, registry, scope, source)?;
let right = evaluate_baseline_expr(right, registry, scope, source)?;
let left_span = left.tag.span;
let right_span = right.tag.span;
let left = (
left.as_primitive()?.spanned(left_span),
RangeInclusion::Inclusive,
);
let right = (
right.as_primitive()?.spanned(right_span),
RangeInclusion::Exclusive,
);
Ok(UntaggedValue::range(left, right).into_value(tag))
}
RawExpression::List(list) => { RawExpression::List(list) => {
let mut exprs = vec![]; let mut exprs = vec![];

View File

@ -1,25 +1,24 @@
use crate::data::value; use crate::data::value;
use nu_parser::Operator; use nu_parser::CompareOperator;
use nu_protocol::{Primitive, ShellTypeName, UntaggedValue, Value}; use nu_protocol::{Primitive, ShellTypeName, UntaggedValue, Value};
use std::ops::Not; use std::ops::Not;
pub fn apply_operator( pub fn apply_operator(
op: Operator, op: &CompareOperator,
left: &Value, left: &Value,
right: &Value, right: &Value,
) -> Result<UntaggedValue, (&'static str, &'static str)> { ) -> Result<UntaggedValue, (&'static str, &'static str)> {
match op { match *op {
Operator::Equal CompareOperator::Equal
| Operator::NotEqual | CompareOperator::NotEqual
| Operator::LessThan | CompareOperator::LessThan
| Operator::GreaterThan | CompareOperator::GreaterThan
| Operator::LessThanOrEqual | CompareOperator::LessThanOrEqual
| Operator::GreaterThanOrEqual => { | CompareOperator::GreaterThanOrEqual => {
value::compare_values(op, left, right).map(UntaggedValue::boolean) value::compare_values(op, left, right).map(UntaggedValue::boolean)
} }
Operator::Dot => Ok(UntaggedValue::boolean(false)), CompareOperator::Contains => contains(left, right).map(UntaggedValue::boolean),
Operator::Contains => contains(left, right).map(UntaggedValue::boolean), CompareOperator::NotContains => contains(left, right)
Operator::NotContains => contains(left, right)
.map(Not::not) .map(Not::not)
.map(UntaggedValue::boolean), .map(UntaggedValue::boolean),
} }

View File

@ -144,7 +144,8 @@ fn paint_flat_shape(flat_shape: &Spanned<FlatShape>, line: &str) -> String {
FlatShape::CloseDelimiter(_) => Color::White.normal(), FlatShape::CloseDelimiter(_) => Color::White.normal(),
FlatShape::ItVariable => Color::Purple.bold(), FlatShape::ItVariable => Color::Purple.bold(),
FlatShape::Variable => Color::Purple.normal(), FlatShape::Variable => Color::Purple.normal(),
FlatShape::Operator => Color::Yellow.normal(), FlatShape::CompareOperator => Color::Yellow.normal(),
FlatShape::DotDot => Color::Yellow.bold(),
FlatShape::Dot => Color::White.normal(), FlatShape::Dot => Color::White.normal(),
FlatShape::InternalCommand => Color::Cyan.bold(), FlatShape::InternalCommand => Color::Cyan.bold(),
FlatShape::ExternalCommand => Color::Cyan.normal(), FlatShape::ExternalCommand => Color::Cyan.normal(),
@ -160,7 +161,8 @@ fn paint_flat_shape(flat_shape: &Spanned<FlatShape>, line: &str) -> String {
FlatShape::ShorthandFlag => Color::Black.bold(), FlatShape::ShorthandFlag => Color::Black.bold(),
FlatShape::Int => Color::Purple.bold(), FlatShape::Int => Color::Purple.bold(),
FlatShape::Decimal => Color::Purple.bold(), FlatShape::Decimal => Color::Purple.bold(),
FlatShape::Whitespace => Color::White.normal(), FlatShape::Whitespace | FlatShape::Separator => Color::White.normal(),
FlatShape::Comment => Color::Black.bold(),
FlatShape::Error => Color::Red.bold(), FlatShape::Error => Color::Red.bold(),
FlatShape::Size { number, unit } => { FlatShape::Size { number, unit } => {
let number = number.slice(line); let number = number.slice(line);

View File

@ -130,8 +130,8 @@ fn filesystem_not_a_directory() {
"cd ferris_did_it.txt" "cd ferris_did_it.txt"
); );
assert!(actual.contains("ferris_did_it.txt")); assert!(actual.contains("ferris_did_it.txt"), "actual={:?}", actual);
assert!(actual.contains("is not a directory")); assert!(actual.contains("is not a directory"), "actual={:?}", actual);
}) })
} }
@ -142,8 +142,16 @@ fn filesystem_directory_not_found() {
"cd dir_that_does_not_exist" "cd dir_that_does_not_exist"
); );
assert!(actual.contains("dir_that_does_not_exist")); assert!(
assert!(actual.contains("directory not found")); actual.contains("dir_that_does_not_exist"),
"actual={:?}",
actual
);
assert!(
actual.contains("directory not found"),
"actual={:?}",
actual
);
} }
#[test] #[test]

View File

@ -248,30 +248,6 @@ fn range_selects_some_rows() {
}); });
} }
#[test]
fn range_selects_all_rows() {
Playground::setup("range_test_3", |dirs, sandbox| {
sandbox.with_files(vec![
EmptyFile("notes.txt"),
EmptyFile("tests.txt"),
EmptyFile("persons.txt"),
]);
let actual = nu!(
cwd: dirs.test(), h::pipeline(
r#"
ls
| get name
| range ..
| count
| echo $it
"#
));
assert_eq!(actual, "3");
});
}
#[test] #[test]
fn split_by() { fn split_by() {
Playground::setup("split_by_test_1", |dirs, sandbox| { Playground::setup("split_by_test_1", |dirs, sandbox| {