forked from extern/nushell
Restructure and streamline token expansion (#1123)

The purpose of this commit is to streamline the token expansion code, by removing aspects of the code that are no longer relevant, removing pointless duplication, and eliminating the need to pass the same arguments to `expand_syntax`.

The first big-picture change in this commit is that instead of a handful of `expand_` functions, which take a TokensIterator and ExpandContext, a smaller number of methods on the `TokensIterator` do the same job.

The second big-picture change in this commit is fully eliminating the coloring traits, making coloring a responsibility of the base expansion implementations. This also means that the coloring tracer is merged into the expansion tracer, so you can follow a single expansion and see how the expansion process produced colored tokens.

One side effect of this change is that the expander itself is marginally more error-correcting. The error correction works by switching from structured expansion to `BackoffColoringMode` when an unexpected token is found, which guarantees that all spans of the source are colored, but may not be the most optimal error recovery strategy. That said, because `BackoffColoringMode` only extends as far as a closing delimiter (`)`, `]`, `}`) or pipe (`|`), it does result in a fairly granular correction strategy.

The current code still produces an `Err` (plus a complete list of colored shapes) from the parsing process if any errors are encountered, but this could easily be addressed now that the underlying expansion is error-correcting.

This commit also colors any spans that are syntax errors in red, and causes the parser to include some additional information about what tokens were expected at any given point where an error was encountered, so that completions and hinting could be more robust in the future.

Co-authored-by: Jonathan Turner <jonathandturner@users.noreply.github.com>
Co-authored-by: Andrés N. Robalino <andres@androbtech.com>
commit 7efb31a4e4 (parent c8dd7838a8)
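The core API move described above, replacing free `expand_` functions that take a `TokensIterator` plus an `ExpandContext` with methods on the iterator itself, is the pattern that recurs throughout the diff below (for example `expand_syntax(&shape, &mut iterator, &context)` becoming `iterator.expand_syntax(shape)`). The following is a minimal, self-contained sketch of that refactoring pattern only; the types and the `WordShape` example are toy stand-ins, not nushell's real API:

    // Toy illustration: the iterator owns the context, so callers no longer
    // have to thread an ExpandContext argument through every expansion call.
    struct ExpandContext {
        source: String,
    }

    struct TokensIterator {
        context: ExpandContext,
        cursor: usize,
    }

    trait ExpandSyntax {
        type Output;
        fn expand(&self, tokens: &mut TokensIterator) -> Self::Output;
    }

    impl TokensIterator {
        // Expansion becomes a method on the iterator instead of a free function.
        fn expand_syntax<S: ExpandSyntax>(&mut self, shape: S) -> S::Output {
            shape.expand(self)
        }

        fn source(&self) -> &str {
            &self.context.source
        }
    }

    struct WordShape;

    impl ExpandSyntax for WordShape {
        type Output = String;
        fn expand(&self, tokens: &mut TokensIterator) -> String {
            tokens.cursor += 1;
            tokens.source().to_string()
        }
    }

    fn main() {
        let mut tokens = TokensIterator {
            context: ExpandContext { source: "ls *.txt".into() },
            cursor: 0,
        };
        // Old style: expand_syntax(&WordShape, &mut tokens, &context)
        // New style:
        let word = tokens.expand_syntax(WordShape);
        println!("{word}");
    }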
@@ -59,3 +59,4 @@ steps:
   - bash: cargo fmt --all -- --check
     condition: eq(variables['style'], 'fmt')
     displayName: Lint
Cargo.lock (generated, 10 changed lines)

@@ -1870,9 +1870,9 @@ dependencies = [

 [[package]]
 name = "libsqlite3-sys"
-version = "0.17.1"
+version = "0.16.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "266eb8c361198e8d1f682bc974e5d9e2ae90049fb1943890904d11dad7d4a77d"
+checksum = "5e5b95e89c330291768dc840238db7f9e204fd208511ab6319b56193a7f2ae25"
 dependencies = [
 "cc",
 "pkg-config",

@@ -2289,6 +2289,7 @@ dependencies = [
 "roxmltree",
 "rusqlite",
 "rustyline",
+"semver",
 "serde 1.0.104",
 "serde-hjson 0.9.1",
 "serde_bytes",

@@ -2331,6 +2332,7 @@ dependencies = [
 "ansi_term 0.12.1",
 "bigdecimal",
 "derive-new",
+"getset",
 "language-reporting",
 "nom 5.1.0",
 "nom_locate",

@@ -3373,9 +3375,9 @@ dependencies = [

 [[package]]
 name = "rusqlite"
-version = "0.21.0"
+version = "0.20.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "64a656821bb6317a84b257737b7934f79c0dbb7eb694710475908280ebad3e64"
+checksum = "2a194373ef527035645a1bc21b10dc2125f73497e6e155771233eb187aedd051"
 dependencies = [
 "bitflags",
 "fallible-iterator",
Cargo.toml (15 changed lines)

@@ -39,12 +39,12 @@ members = [
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-nu-source = { version = "0.8.0", path = "./crates/nu-source" }
-nu-plugin = { version = "0.8.0", path = "./crates/nu-plugin" }
-nu-protocol = { version = "0.8.0", path = "./crates/nu-protocol" }
-nu-errors = { version = "0.8.0", path = "./crates/nu-errors" }
-nu-parser = { version = "0.8.0", path = "./crates/nu-parser" }
-nu-value-ext = { version = "0.8.0", path = "./crates/nu-value-ext" }
+nu-source = {version = "0.8.0", path = "./crates/nu-source"}
+nu-plugin = {version = "0.8.0", path = "./crates/nu-plugin"}
+nu-protocol = {version = "0.8.0", path = "./crates/nu-protocol"}
+nu-errors = {version = "0.8.0", path = "./crates/nu-errors"}
+nu-parser = {version = "0.8.0", path = "./crates/nu-parser"}
+nu-value-ext = {version = "0.8.0", path = "./crates/nu-value-ext"}
 nu_plugin_average = {version = "0.8.0", path = "./crates/nu_plugin_average", optional=true}
 nu_plugin_binaryview = {version = "0.8.0", path = "./crates/nu_plugin_binaryview", optional=true}
 nu_plugin_fetch = {version = "0.8.0", path = "./crates/nu_plugin_fetch", optional=true}

@@ -133,6 +133,7 @@ onig_sys = {version = "=69.1.0", optional = true }
 crossterm = {version = "0.10.2", optional = true}
 futures-timer = {version = "1.0.2", optional = true}
 url = {version = "2.1.1", optional = true}
+semver = {version = "0.9.0", optional = true}

 [target.'cfg(unix)'.dependencies]
 users = "0.9"

@@ -163,7 +164,7 @@ trace = ["nu-parser/trace"]
 tree = ["nu_plugin_tree"]

 [dependencies.rusqlite]
-version = "0.21.0"
+version = "0.20.0"
 features = ["bundled", "blob"]

 [dev-dependencies]
TODO.md (8 changed lines)

@@ -50,3 +50,11 @@ textview in own crate
 Combine atomic and atomic_parse in parser

 at_end_possible_ws needs to be comment and separator sensitive
+
+Eliminate unnecessary `nodes` parser
+
+#[derive(HasSpan)]
+
+Figure out a solution for the duplication in stuff like NumberShape vs. NumberExpressionShape
+
+use `struct Expander` from signature.rs
@@ -21,6 +21,7 @@ num-traits = "0.2.10"
 serde = { version = "1.0.103", features = ["derive"] }
 nom = "5.0.1"
 nom_locate = "1.0.0"
+getset = "0.0.9"

 # implement conversions
 subprocess = "0.1.18"
@@ -1,8 +1,11 @@
 use ansi_term::Color;
 use bigdecimal::BigDecimal;
 use derive_new::new;
+use getset::Getters;
 use language_reporting::{Diagnostic, Label, Severity};
-use nu_source::{b, DebugDocBuilder, PrettyDebug, Span, Spanned, SpannedItem, TracableContext};
+use nu_source::{
+    b, DebugDocBuilder, HasFallibleSpan, PrettyDebug, Span, Spanned, SpannedItem, TracableContext,
+};
 use num_bigint::BigInt;
 use num_traits::ToPrimitive;
 use serde::{Deserialize, Serialize};

@@ -12,16 +15,16 @@ use std::ops::Range;
 /// A structured reason for a ParseError. Note that parsing in nu is more like macro expansion in
 /// other languages, so the kinds of errors that can occur during parsing are more contextual than
 /// you might expect.
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Eq, PartialEq)]
 pub enum ParseErrorReason {
     /// The parser encountered an EOF rather than what it was expecting
-    Eof { expected: &'static str, span: Span },
+    Eof { expected: String, span: Span },
     /// The parser expected to see the end of a token stream (possibly the token
     /// stream from inside a delimited token node), but found something else.
     ExtraTokens { actual: Spanned<String> },
     /// The parser encountered something other than what it was expecting
     Mismatch {
-        expected: &'static str,
+        expected: String,
         actual: Spanned<String>,
     },

@@ -37,16 +40,20 @@ pub enum ParseErrorReason {
 }

 /// A newtype for `ParseErrorReason`
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Eq, PartialEq, Getters)]
 pub struct ParseError {
+    #[get = "pub"]
     reason: ParseErrorReason,
 }

 impl ParseError {
     /// Construct a [ParseErrorReason::Eof](ParseErrorReason::Eof)
-    pub fn unexpected_eof(expected: &'static str, span: Span) -> ParseError {
+    pub fn unexpected_eof(expected: impl Into<String>, span: Span) -> ParseError {
         ParseError {
-            reason: ParseErrorReason::Eof { expected, span },
+            reason: ParseErrorReason::Eof {
+                expected: expected.into(),
+                span,
+            },
         }
     }

@@ -62,12 +69,12 @@ impl ParseError {
     }

     /// Construct a [ParseErrorReason::Mismatch](ParseErrorReason::Mismatch)
-    pub fn mismatch(expected: &'static str, actual: Spanned<impl Into<String>>) -> ParseError {
+    pub fn mismatch(expected: impl Into<String>, actual: Spanned<impl Into<String>>) -> ParseError {
         let Spanned { span, item } = actual;

         ParseError {
             reason: ParseErrorReason::Mismatch {
-                expected,
+                expected: expected.into(),
                 actual: item.into().spanned(span),
             },
         }

@@ -728,6 +735,30 @@ impl ProximateShellError {
     }
 }

+impl HasFallibleSpan for ShellError {
+    fn maybe_span(&self) -> Option<Span> {
+        self.error.maybe_span()
+    }
+}
+
+impl HasFallibleSpan for ProximateShellError {
+    fn maybe_span(&self) -> Option<Span> {
+        Some(match self {
+            ProximateShellError::SyntaxError { problem } => problem.span,
+            ProximateShellError::UnexpectedEof { span, .. } => *span,
+            ProximateShellError::TypeError { actual, .. } => actual.span,
+            ProximateShellError::MissingProperty { subpath, .. } => subpath.span,
+            ProximateShellError::InvalidIntegerIndex { subpath, .. } => subpath.span,
+            ProximateShellError::MissingValue { span, .. } => return *span,
+            ProximateShellError::ArgumentError { command, .. } => command.span,
+            ProximateShellError::RangeError { actual_kind, .. } => actual_kind.span,
+            ProximateShellError::Diagnostic(_) => return None,
+            ProximateShellError::CoerceError { left, right } => left.span.until(right.span),
+            ProximateShellError::UntaggedRuntimeError { .. } => return None,
+        })
+    }
+}
+
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct ShellDiagnostic {
     pub(crate) diagnostic: Diagnostic<Span>,
@ -3,7 +3,6 @@ pub mod classified;
|
|||||||
use crate::commands::classified::external::{ExternalArg, ExternalArgs, ExternalCommand};
|
use crate::commands::classified::external::{ExternalArg, ExternalArgs, ExternalCommand};
|
||||||
use crate::commands::classified::ClassifiedCommand;
|
use crate::commands::classified::ClassifiedCommand;
|
||||||
use crate::hir::expand_external_tokens::ExternalTokensShape;
|
use crate::hir::expand_external_tokens::ExternalTokensShape;
|
||||||
use crate::hir::syntax_shape::{expand_syntax, ExpandContext};
|
|
||||||
use crate::hir::tokens_iterator::TokensIterator;
|
use crate::hir::tokens_iterator::TokensIterator;
|
||||||
use nu_errors::ParseError;
|
use nu_errors::ParseError;
|
||||||
use nu_source::{Spanned, Tagged};
|
use nu_source::{Spanned, Tagged};
|
||||||
@ -13,10 +12,10 @@ use nu_source::{Spanned, Tagged};
|
|||||||
// strings.
|
// strings.
|
||||||
pub(crate) fn external_command(
|
pub(crate) fn external_command(
|
||||||
tokens: &mut TokensIterator,
|
tokens: &mut TokensIterator,
|
||||||
context: &ExpandContext,
|
|
||||||
name: Tagged<&str>,
|
name: Tagged<&str>,
|
||||||
) -> Result<ClassifiedCommand, ParseError> {
|
) -> Result<ClassifiedCommand, ParseError> {
|
||||||
let Spanned { item, span } = expand_syntax(&ExternalTokensShape, tokens, context)?.tokens;
|
let Spanned { item, span } = tokens.expand_infallible(ExternalTokensShape).tokens;
|
||||||
|
let full_span = name.span().until(span);
|
||||||
|
|
||||||
Ok(ClassifiedCommand::External(ExternalCommand {
|
Ok(ClassifiedCommand::External(ExternalCommand {
|
||||||
name: name.to_string(),
|
name: name.to_string(),
|
||||||
@ -29,7 +28,7 @@ pub(crate) fn external_command(
|
|||||||
arg: x.item.clone(),
|
arg: x.item.clone(),
|
||||||
})
|
})
|
||||||
.collect(),
|
.collect(),
|
||||||
span,
|
span: full_span,
|
||||||
},
|
},
|
||||||
}))
|
}))
|
||||||
}
|
}
|
||||||
|
@@ -4,18 +4,20 @@ pub mod internal;
 use crate::commands::classified::external::ExternalCommand;
 use crate::commands::classified::internal::InternalCommand;
 use crate::hir;
-use crate::parse::token_tree::TokenNode;
+use crate::parse::token_tree::SpannedToken;
 use derive_new::new;
+use nu_errors::ParseError;
 use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span};

 #[derive(Debug, Clone, Eq, PartialEq)]
 pub enum ClassifiedCommand {
     #[allow(unused)]
-    Expr(TokenNode),
+    Expr(SpannedToken),
     #[allow(unused)]
     Dynamic(hir::Call),
     Internal(InternalCommand),
     External(ExternalCommand),
+    Error(ParseError),
 }

 impl PrettyDebugWithSource for ClassifiedCommand {

@@ -23,6 +25,7 @@ impl PrettyDebugWithSource for ClassifiedCommand {
         match self {
             ClassifiedCommand::Expr(token) => b::typed("command", token.pretty_debug(source)),
             ClassifiedCommand::Dynamic(call) => b::typed("command", call.pretty_debug(source)),
+            ClassifiedCommand::Error(_) => b::error("no command"),
             ClassifiedCommand::Internal(internal) => internal.pretty_debug(source),
             ClassifiedCommand::External(external) => external.pretty_debug(source),
         }

@@ -35,6 +38,7 @@ impl HasSpan for ClassifiedCommand {
             ClassifiedCommand::Expr(node) => node.span(),
             ClassifiedCommand::Internal(command) => command.span(),
             ClassifiedCommand::Dynamic(call) => call.span,
+            ClassifiedCommand::Error(_) => Span::unknown(),
             ClassifiedCommand::External(command) => command.span(),
         }
     }

@@ -62,6 +66,9 @@ impl std::ops::Deref for Commands {
 #[derive(Debug, Clone)]
 pub struct ClassifiedPipeline {
     pub commands: Commands,
+    // this is not a Result to make it crystal clear that these shapes
+    // aren't intended to be used directly with `?`
+    pub failed: Option<nu_errors::ParseError>,
 }

 impl ClassifiedPipeline {

@@ -71,6 +78,7 @@ impl ClassifiedPipeline {
                 list,
                 span: span.into(),
             },
+            failed: None,
         }
     }
 }
@@ -5,7 +5,6 @@ pub(crate) mod external_command;
 pub(crate) mod named;
 pub(crate) mod path;
 pub(crate) mod range;
-pub(crate) mod signature;
 pub mod syntax_shape;
 pub(crate) mod tokens_iterator;

@@ -17,17 +16,17 @@ use derive_new::new;
 use getset::Getters;
 use nu_protocol::{PathMember, ShellTypeName};
 use nu_source::{
-    b, DebugDocBuilder, HasSpan, PrettyDebug, PrettyDebugWithSource, Span, Spanned, SpannedItem,
+    b, DebugDocBuilder, HasSpan, IntoSpanned, PrettyDebug, PrettyDebugRefineKind,
+    PrettyDebugWithSource, Span, Spanned,
 };
 use serde::{Deserialize, Serialize};
 use std::path::PathBuf;

-use crate::parse::tokens::RawNumber;
+use crate::parse::number::RawNumber;

 pub(crate) use self::binary::Binary;
 pub(crate) use self::path::Path;
 pub(crate) use self::range::Range;
-pub(crate) use self::syntax_shape::ExpandContext;
 pub(crate) use self::tokens_iterator::TokensIterator;

 pub use self::external_command::ExternalCommand;

@@ -63,9 +62,9 @@ impl PrettyDebugWithSource for Signature {
 #[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)]
 pub struct Call {
     #[get = "pub(crate)"]
-    pub head: Box<Expression>,
+    pub head: Box<SpannedExpression>,
     #[get = "pub(crate)"]
-    pub positional: Option<Vec<Expression>>,
+    pub positional: Option<Vec<SpannedExpression>>,
     #[get = "pub(crate)"]
     pub named: Option<NamedArguments>,
     pub span: Span,

@@ -85,35 +84,54 @@ impl Call {
 }

 impl PrettyDebugWithSource for Call {
+    fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder {
+        match refine {
+            PrettyDebugRefineKind::ContextFree => self.pretty_debug(source),
+            PrettyDebugRefineKind::WithContext => {
+                self.head
+                    .refined_pretty_debug(PrettyDebugRefineKind::WithContext, source)
+                    + b::preceded_option(
+                        Some(b::space()),
+                        self.positional.as_ref().map(|pos| {
+                            b::intersperse(
+                                pos.iter().map(|expr| {
+                                    expr.refined_pretty_debug(
+                                        PrettyDebugRefineKind::WithContext,
+                                        source,
+                                    )
+                                }),
+                                b::space(),
+                            )
+                        }),
+                    )
+                    + b::preceded_option(
+                        Some(b::space()),
+                        self.named.as_ref().map(|named| {
+                            named.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source)
+                        }),
+                    )
+            }
+        }
+    }
+
     fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
-        b::delimit(
-            "(",
-            self.head.pretty_debug(source)
-                + b::preceded_option(
-                    Some(b::space()),
-                    self.positional.as_ref().map(|pos| {
-                        b::intersperse(pos.iter().map(|expr| expr.pretty_debug(source)), b::space())
-                    }),
-                )
-                + b::preceded_option(
-                    Some(b::space()),
-                    self.named.as_ref().map(|named| named.pretty_debug(source)),
-                ),
-            ")",
+        b::typed(
+            "call",
+            self.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source),
         )
     }
 }

 #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
-pub enum RawExpression {
+pub enum Expression {
     Literal(Literal),
     ExternalWord,
     Synthetic(Synthetic),
     Variable(Variable),
     Binary(Box<Binary>),
     Range(Box<Range>),
-    Block(Vec<Expression>),
-    List(Vec<Expression>),
+    Block(Vec<SpannedExpression>),
+    List(Vec<SpannedExpression>),
     Path(Box<Path>),

     FilePath(PathBuf),

@@ -123,22 +141,22 @@ pub enum RawExpression {
     Boolean(bool),
 }

-impl ShellTypeName for RawExpression {
+impl ShellTypeName for Expression {
     fn type_name(&self) -> &'static str {
         match self {
-            RawExpression::Literal(literal) => literal.type_name(),
-            RawExpression::Synthetic(synthetic) => synthetic.type_name(),
-            RawExpression::Command(..) => "command",
-            RawExpression::ExternalWord => "external word",
-            RawExpression::FilePath(..) => "file path",
-            RawExpression::Variable(..) => "variable",
-            RawExpression::List(..) => "list",
-            RawExpression::Binary(..) => "binary",
-            RawExpression::Range(..) => "range",
-            RawExpression::Block(..) => "block",
-            RawExpression::Path(..) => "variable path",
-            RawExpression::Boolean(..) => "boolean",
-            RawExpression::ExternalCommand(..) => "external",
+            Expression::Literal(literal) => literal.type_name(),
+            Expression::Synthetic(synthetic) => synthetic.type_name(),
+            Expression::Command(..) => "command",
+            Expression::ExternalWord => "external word",
+            Expression::FilePath(..) => "file path",
+            Expression::Variable(..) => "variable",
+            Expression::List(..) => "list",
+            Expression::Binary(..) => "binary",
+            Expression::Range(..) => "range",
+            Expression::Block(..) => "block",
+            Expression::Path(..) => "variable path",
+            Expression::Boolean(..) => "boolean",
+            Expression::ExternalCommand(..) => "external",
         }
     }
 }

@@ -156,16 +174,24 @@ impl ShellTypeName for Synthetic {
     }
 }

-impl RawExpression {
-    pub fn into_expr(self, span: impl Into<Span>) -> Expression {
-        Expression {
+impl IntoSpanned for Expression {
+    type Output = SpannedExpression;
+
+    fn into_spanned(self, span: impl Into<Span>) -> Self::Output {
+        SpannedExpression {
             expr: self,
             span: span.into(),
         }
     }
+}

-    pub fn into_unspanned_expr(self) -> Expression {
-        Expression {
+impl Expression {
+    pub fn into_expr(self, span: impl Into<Span>) -> SpannedExpression {
+        self.into_spanned(span)
+    }
+
+    pub fn into_unspanned_expr(self) -> SpannedExpression {
+        SpannedExpression {
             expr: self,
             span: Span::unknown(),
         }
@ -173,40 +199,93 @@ impl RawExpression {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
||||||
pub struct Expression {
|
pub struct SpannedExpression {
|
||||||
pub expr: RawExpression,
|
pub expr: Expression,
|
||||||
pub span: Span,
|
pub span: Span,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl std::ops::Deref for Expression {
|
impl std::ops::Deref for SpannedExpression {
|
||||||
type Target = RawExpression;
|
type Target = Expression;
|
||||||
|
|
||||||
fn deref(&self) -> &RawExpression {
|
fn deref(&self) -> &Expression {
|
||||||
&self.expr
|
&self.expr
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl HasSpan for Expression {
|
impl HasSpan for SpannedExpression {
|
||||||
fn span(&self) -> Span {
|
fn span(&self) -> Span {
|
||||||
self.span
|
self.span
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PrettyDebugWithSource for Expression {
|
impl ShellTypeName for SpannedExpression {
|
||||||
|
fn type_name(&self) -> &'static str {
|
||||||
|
self.expr.type_name()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebugWithSource for SpannedExpression {
|
||||||
|
fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder {
|
||||||
|
match refine {
|
||||||
|
PrettyDebugRefineKind::ContextFree => self.refined_pretty_debug(refine, source),
|
||||||
|
PrettyDebugRefineKind::WithContext => match &self.expr {
|
||||||
|
Expression::Literal(literal) => literal
|
||||||
|
.clone()
|
||||||
|
.into_spanned(self.span)
|
||||||
|
.refined_pretty_debug(refine, source),
|
||||||
|
Expression::ExternalWord => {
|
||||||
|
b::delimit("e\"", b::primitive(self.span.slice(source)), "\"").group()
|
||||||
|
}
|
||||||
|
Expression::Synthetic(s) => match s {
|
||||||
|
Synthetic::String(_) => {
|
||||||
|
b::delimit("s\"", b::primitive(self.span.slice(source)), "\"").group()
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Expression::Variable(Variable::Other(_)) => b::keyword(self.span.slice(source)),
|
||||||
|
Expression::Variable(Variable::It(_)) => b::keyword("$it"),
|
||||||
|
Expression::Binary(binary) => binary.pretty_debug(source),
|
||||||
|
Expression::Range(range) => range.pretty_debug(source),
|
||||||
|
Expression::Block(_) => b::opaque("block"),
|
||||||
|
Expression::List(list) => b::delimit(
|
||||||
|
"[",
|
||||||
|
b::intersperse(
|
||||||
|
list.iter()
|
||||||
|
.map(|item| item.refined_pretty_debug(refine, source)),
|
||||||
|
b::space(),
|
||||||
|
),
|
||||||
|
"]",
|
||||||
|
),
|
||||||
|
Expression::Path(path) => path.pretty_debug(source),
|
||||||
|
Expression::FilePath(path) => b::typed("path", b::primitive(path.display())),
|
||||||
|
Expression::ExternalCommand(external) => {
|
||||||
|
b::keyword("^") + b::keyword(external.name.slice(source))
|
||||||
|
}
|
||||||
|
Expression::Command(command) => b::keyword(command.slice(source)),
|
||||||
|
Expression::Boolean(boolean) => match boolean {
|
||||||
|
true => b::primitive("$yes"),
|
||||||
|
false => b::primitive("$no"),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||||
match &self.expr {
|
match &self.expr {
|
||||||
RawExpression::Literal(literal) => literal.spanned(self.span).pretty_debug(source),
|
Expression::Literal(literal) => {
|
||||||
RawExpression::ExternalWord => {
|
literal.clone().into_spanned(self.span).pretty_debug(source)
|
||||||
|
}
|
||||||
|
Expression::ExternalWord => {
|
||||||
b::typed("external word", b::primitive(self.span.slice(source)))
|
b::typed("external word", b::primitive(self.span.slice(source)))
|
||||||
}
|
}
|
||||||
RawExpression::Synthetic(s) => match s {
|
Expression::Synthetic(s) => match s {
|
||||||
Synthetic::String(s) => b::typed("synthetic", b::primitive(format!("{:?}", s))),
|
Synthetic::String(s) => b::typed("synthetic", b::primitive(format!("{:?}", s))),
|
||||||
},
|
},
|
||||||
RawExpression::Variable(_) => b::keyword(self.span.slice(source)),
|
Expression::Variable(Variable::Other(_)) => b::keyword(self.span.slice(source)),
|
||||||
RawExpression::Binary(binary) => binary.pretty_debug(source),
|
Expression::Variable(Variable::It(_)) => b::keyword("$it"),
|
||||||
RawExpression::Range(range) => range.pretty_debug(source),
|
Expression::Binary(binary) => binary.pretty_debug(source),
|
||||||
RawExpression::Block(_) => b::opaque("block"),
|
Expression::Range(range) => range.pretty_debug(source),
|
||||||
RawExpression::List(list) => b::delimit(
|
Expression::Block(_) => b::opaque("block"),
|
||||||
|
Expression::List(list) => b::delimit(
|
||||||
"[",
|
"[",
|
||||||
b::intersperse(
|
b::intersperse(
|
||||||
list.iter().map(|item| item.pretty_debug(source)),
|
list.iter().map(|item| item.pretty_debug(source)),
|
||||||
@ -214,16 +293,16 @@ impl PrettyDebugWithSource for Expression {
|
|||||||
),
|
),
|
||||||
"]",
|
"]",
|
||||||
),
|
),
|
||||||
RawExpression::Path(path) => path.pretty_debug(source),
|
Expression::Path(path) => path.pretty_debug(source),
|
||||||
RawExpression::FilePath(path) => b::typed("path", b::primitive(path.display())),
|
Expression::FilePath(path) => b::typed("path", b::primitive(path.display())),
|
||||||
RawExpression::ExternalCommand(external) => b::typed(
|
Expression::ExternalCommand(external) => b::typed(
|
||||||
"external command",
|
"command",
|
||||||
b::primitive(external.name.slice(source)),
|
b::keyword("^") + b::primitive(external.name.slice(source)),
|
||||||
),
|
),
|
||||||
RawExpression::Command(command) => {
|
Expression::Command(command) => {
|
||||||
b::typed("command", b::primitive(command.slice(source)))
|
b::typed("command", b::primitive(command.slice(source)))
|
||||||
}
|
}
|
||||||
RawExpression::Boolean(boolean) => match boolean {
|
Expression::Boolean(boolean) => match boolean {
|
||||||
true => b::primitive("$yes"),
|
true => b::primitive("$yes"),
|
||||||
false => b::primitive("$no"),
|
false => b::primitive("$no"),
|
||||||
},
|
},
|
||||||
@ -232,117 +311,91 @@ impl PrettyDebugWithSource for Expression {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl Expression {
|
impl Expression {
|
||||||
pub fn number(i: impl Into<Number>, span: impl Into<Span>) -> Expression {
|
pub fn number(i: impl Into<Number>) -> Expression {
|
||||||
let span = span.into();
|
Expression::Literal(Literal::Number(i.into()))
|
||||||
|
|
||||||
RawExpression::Literal(RawLiteral::Number(i.into()).into_literal(span)).into_expr(span)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn size(i: impl Into<Number>, unit: impl Into<Unit>, span: impl Into<Span>) -> Expression {
|
pub fn size(i: impl Into<Number>, unit: impl Into<Unit>) -> Expression {
|
||||||
let span = span.into();
|
Expression::Literal(Literal::Size(i.into(), unit.into()))
|
||||||
|
|
||||||
RawExpression::Literal(RawLiteral::Size(i.into(), unit.into()).into_literal(span))
|
|
||||||
.into_expr(span)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn synthetic_string(s: impl Into<String>) -> Expression {
|
pub fn string(inner: impl Into<Span>) -> Expression {
|
||||||
RawExpression::Synthetic(Synthetic::String(s.into())).into_unspanned_expr()
|
Expression::Literal(Literal::String(inner.into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn string(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
|
pub fn synthetic_string(string: impl Into<String>) -> Expression {
|
||||||
let outer = outer.into();
|
Expression::Synthetic(Synthetic::String(string.into()))
|
||||||
|
|
||||||
RawExpression::Literal(RawLiteral::String(inner.into()).into_literal(outer))
|
|
||||||
.into_expr(outer)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn column_path(members: Vec<Member>, span: impl Into<Span>) -> Expression {
|
pub fn column_path(members: Vec<Member>) -> Expression {
|
||||||
let span = span.into();
|
Expression::Literal(Literal::ColumnPath(members))
|
||||||
|
|
||||||
RawExpression::Literal(RawLiteral::ColumnPath(members).into_literal(span)).into_expr(span)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn path(
|
pub fn path(head: SpannedExpression, tail: Vec<impl Into<PathMember>>) -> Expression {
|
||||||
head: Expression,
|
|
||||||
tail: Vec<impl Into<PathMember>>,
|
|
||||||
span: impl Into<Span>,
|
|
||||||
) -> Expression {
|
|
||||||
let tail = tail.into_iter().map(|t| t.into()).collect();
|
let tail = tail.into_iter().map(|t| t.into()).collect();
|
||||||
RawExpression::Path(Box::new(Path::new(head, tail))).into_expr(span.into())
|
Expression::Path(Box::new(Path::new(head, tail)))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn dot_member(head: Expression, next: impl Into<PathMember>) -> Expression {
|
pub fn dot_member(head: SpannedExpression, next: impl Into<PathMember>) -> Expression {
|
||||||
let Expression { expr: item, span } = head;
|
let SpannedExpression { expr: item, span } = head;
|
||||||
let next = next.into();
|
let next = next.into();
|
||||||
|
|
||||||
let new_span = head.span.until(next.span);
|
|
||||||
|
|
||||||
match item {
|
match item {
|
||||||
RawExpression::Path(path) => {
|
Expression::Path(path) => {
|
||||||
let (head, mut tail) = path.parts();
|
let (head, mut tail) = path.parts();
|
||||||
|
|
||||||
tail.push(next);
|
tail.push(next);
|
||||||
Expression::path(head, tail, new_span)
|
Expression::path(head, tail)
|
||||||
}
|
}
|
||||||
|
|
||||||
other => Expression::path(other.into_expr(span), vec![next], new_span),
|
other => Expression::path(other.into_expr(span), vec![next]),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn infix(
|
pub fn infix(
|
||||||
left: Expression,
|
left: SpannedExpression,
|
||||||
op: Spanned<impl Into<CompareOperator>>,
|
op: Spanned<impl Into<CompareOperator>>,
|
||||||
right: Expression,
|
right: SpannedExpression,
|
||||||
) -> Expression {
|
) -> Expression {
|
||||||
let new_span = left.span.until(right.span);
|
Expression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right)))
|
||||||
|
|
||||||
RawExpression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right)))
|
|
||||||
.into_expr(new_span)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn range(left: Expression, op: Span, right: Expression) -> Expression {
|
pub fn range(left: SpannedExpression, op: Span, right: SpannedExpression) -> Expression {
|
||||||
let new_span = left.span.until(right.span);
|
Expression::Range(Box::new(Range::new(left, op, right)))
|
||||||
|
|
||||||
RawExpression::Range(Box::new(Range::new(left, op, right))).into_expr(new_span)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn file_path(path: impl Into<PathBuf>, outer: impl Into<Span>) -> Expression {
|
pub fn file_path(path: impl Into<PathBuf>) -> Expression {
|
||||||
RawExpression::FilePath(path.into()).into_expr(outer)
|
Expression::FilePath(path.into())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn list(list: Vec<Expression>, span: impl Into<Span>) -> Expression {
|
pub fn list(list: Vec<SpannedExpression>) -> Expression {
|
||||||
RawExpression::List(list).into_expr(span)
|
Expression::List(list)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn bare(span: impl Into<Span>) -> Expression {
|
pub fn bare() -> Expression {
|
||||||
let span = span.into();
|
Expression::Literal(Literal::Bare)
|
||||||
|
|
||||||
RawExpression::Literal(RawLiteral::Bare.into_literal(span)).into_expr(span)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn pattern(inner: impl Into<String>, outer: impl Into<Span>) -> Expression {
|
pub fn pattern(inner: impl Into<String>) -> Expression {
|
||||||
let outer = outer.into();
|
Expression::Literal(Literal::GlobPattern(inner.into()))
|
||||||
|
|
||||||
RawExpression::Literal(RawLiteral::GlobPattern(inner.into()).into_literal(outer))
|
|
||||||
.into_expr(outer)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
|
pub fn variable(inner: impl Into<Span>) -> Expression {
|
||||||
RawExpression::Variable(Variable::Other(inner.into())).into_expr(outer)
|
Expression::Variable(Variable::Other(inner.into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn external_command(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
|
pub fn external_command(inner: impl Into<Span>) -> Expression {
|
||||||
RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).into_expr(outer)
|
Expression::ExternalCommand(ExternalCommand::new(inner.into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn it_variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
|
pub fn it_variable(inner: impl Into<Span>) -> Expression {
|
||||||
RawExpression::Variable(Variable::It(inner.into())).into_expr(outer)
|
Expression::Variable(Variable::It(inner.into()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<Spanned<Path>> for Expression {
|
impl From<Spanned<Path>> for SpannedExpression {
|
||||||
fn from(path: Spanned<Path>) -> Expression {
|
fn from(path: Spanned<Path>) -> SpannedExpression {
|
||||||
RawExpression::Path(Box::new(path.item)).into_expr(path.span)
|
Expression::Path(Box::new(path.item)).into_expr(path.span)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -352,7 +405,7 @@ impl From<Spanned<Path>> for Expression {
|
|||||||
/// 2. Can be evaluated without additional context
|
/// 2. Can be evaluated without additional context
|
||||||
/// 3. Evaluation cannot produce an error
|
/// 3. Evaluation cannot produce an error
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
||||||
pub enum RawLiteral {
|
pub enum Literal {
|
||||||
Number(Number),
|
Number(Number),
|
||||||
Size(Number, Unit),
|
Size(Number, Unit),
|
||||||
String(Span),
|
String(Span),
|
||||||
@ -361,9 +414,9 @@ pub enum RawLiteral {
|
|||||||
Bare,
|
Bare,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl RawLiteral {
|
impl Literal {
|
||||||
pub fn into_literal(self, span: impl Into<Span>) -> Literal {
|
pub fn into_spanned(self, span: impl Into<Span>) -> SpannedLiteral {
|
||||||
Literal {
|
SpannedLiteral {
|
||||||
literal: self,
|
literal: self,
|
||||||
span: span.into(),
|
span: span.into(),
|
||||||
}
|
}
|
||||||
@ -371,36 +424,57 @@ impl RawLiteral {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
||||||
pub struct Literal {
|
pub struct SpannedLiteral {
|
||||||
pub literal: RawLiteral,
|
pub literal: Literal,
|
||||||
pub span: Span,
|
pub span: Span,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ShellTypeName for Literal {
|
impl ShellTypeName for Literal {
|
||||||
fn type_name(&self) -> &'static str {
|
fn type_name(&self) -> &'static str {
|
||||||
match &self.literal {
|
match &self {
|
||||||
RawLiteral::Number(..) => "number",
|
Literal::Number(..) => "number",
|
||||||
RawLiteral::Size(..) => "size",
|
Literal::Size(..) => "size",
|
||||||
RawLiteral::String(..) => "string",
|
Literal::String(..) => "string",
|
||||||
RawLiteral::ColumnPath(..) => "column path",
|
Literal::ColumnPath(..) => "column path",
|
||||||
RawLiteral::Bare => "string",
|
Literal::Bare => "string",
|
||||||
RawLiteral::GlobPattern(_) => "pattern",
|
Literal::GlobPattern(_) => "pattern",
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PrettyDebugWithSource for Literal {
|
impl PrettyDebugWithSource for SpannedLiteral {
|
||||||
|
fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder {
|
||||||
|
match refine {
|
||||||
|
PrettyDebugRefineKind::ContextFree => self.pretty_debug(source),
|
||||||
|
PrettyDebugRefineKind::WithContext => match &self.literal {
|
||||||
|
Literal::Number(number) => number.pretty(),
|
||||||
|
Literal::Size(number, unit) => (number.pretty() + unit.pretty()).group(),
|
||||||
|
Literal::String(string) => b::primitive(format!("{:?}", string.slice(source))),
|
||||||
|
Literal::GlobPattern(pattern) => b::primitive(pattern),
|
||||||
|
Literal::ColumnPath(path) => {
|
||||||
|
b::intersperse_with_source(path.iter(), b::space(), source)
|
||||||
|
}
|
||||||
|
Literal::Bare => b::delimit("b\"", b::primitive(self.span.slice(source)), "\""),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||||
match &self.literal {
|
match &self.literal {
|
||||||
RawLiteral::Number(number) => number.pretty(),
|
Literal::Number(number) => number.pretty(),
|
||||||
RawLiteral::Size(number, unit) => (number.pretty() + unit.pretty()).group(),
|
Literal::Size(number, unit) => {
|
||||||
RawLiteral::String(string) => b::primitive(format!("{:?}", string.slice(source))),
|
b::typed("size", (number.pretty() + unit.pretty()).group())
|
||||||
RawLiteral::GlobPattern(pattern) => b::typed("pattern", b::primitive(pattern)),
|
}
|
||||||
RawLiteral::ColumnPath(path) => b::typed(
|
Literal::String(string) => b::typed(
|
||||||
|
"string",
|
||||||
|
b::primitive(format!("{:?}", string.slice(source))),
|
||||||
|
),
|
||||||
|
Literal::GlobPattern(pattern) => b::typed("pattern", b::primitive(pattern)),
|
||||||
|
Literal::ColumnPath(path) => b::typed(
|
||||||
"column path",
|
"column path",
|
||||||
b::intersperse_with_source(path.iter(), b::space(), source),
|
b::intersperse_with_source(path.iter(), b::space(), source),
|
||||||
),
|
),
|
||||||
RawLiteral::Bare => b::primitive(self.span.slice(source)),
|
Literal::Bare => b::typed("bare", b::primitive(self.span.slice(source))),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,42 +1,99 @@
|
|||||||
use crate::commands::classified::{internal::InternalCommand, ClassifiedCommand};
|
use crate::commands::classified::{internal::InternalCommand, ClassifiedCommand};
|
||||||
use crate::hir::TokensIterator;
|
use crate::hir::expand_external_tokens::{ExternalTokensShape, ExternalTokensSyntax};
|
||||||
use crate::hir::{self, named::NamedValue, syntax_shape::*, NamedArguments};
|
use crate::hir::{
|
||||||
|
self, named::NamedValue, syntax_shape::*, Expression, NamedArguments, SpannedExpression,
|
||||||
|
TokensIterator,
|
||||||
|
};
|
||||||
use crate::parse::files::Files;
|
use crate::parse::files::Files;
|
||||||
use crate::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b};
|
use crate::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b};
|
||||||
use crate::TokenNode;
|
use crate::SpannedToken;
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use indexmap::IndexMap;
|
use indexmap::IndexMap;
|
||||||
use nu_errors::ShellError;
|
use nu_errors::{ParseError, ShellError};
|
||||||
use nu_protocol::{PathMember, Signature, SyntaxShape};
|
use nu_protocol::{outln, PathMember, Signature, SyntaxShape};
|
||||||
use nu_source::{HasSpan, Span, Tag, Text};
|
use nu_source::{HasSpan, PrettyDebugWithSource, Span, SpannedItem, Tag, Text};
|
||||||
use pretty_assertions::assert_eq;
|
use pretty_assertions::assert_eq;
|
||||||
use std::fmt::Debug;
|
use std::fmt::Debug;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_parse_string() {
|
fn test_parse_external() {
|
||||||
parse_tokens(StringShape, vec![b::string("hello")], |tokens| {
|
|
||||||
hir::Expression::string(inner_string_span(tokens[0].span()), tokens[0].span())
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_parse_path() {
|
|
||||||
parse_tokens(
|
parse_tokens(
|
||||||
VariablePathShape,
|
fallible(ExternalTokensShape),
|
||||||
vec![b::var("it"), b::dot(), b::bare("cpu")],
|
"5kb",
|
||||||
|
vec![b::bare("5kb")],
|
||||||
|tokens| {
|
|tokens| {
|
||||||
let (outer_var, inner_var) = tokens[0].expect_var();
|
ExternalTokensSyntax::new(
|
||||||
let bare = tokens[2].expect_bare();
|
vec![format!("5kb").spanned(tokens[0].span())].spanned(tokens[0].span()),
|
||||||
hir::Expression::path(
|
|
||||||
hir::Expression::it_variable(inner_var, outer_var),
|
|
||||||
vec![PathMember::string("cpu", bare)],
|
|
||||||
outer_var.until(bare),
|
|
||||||
)
|
)
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
|
|
||||||
parse_tokens(
|
parse_tokens(
|
||||||
|
fallible(ExternalTokensShape),
|
||||||
|
"cargo +nightly run -- --features all",
|
||||||
|
vec![
|
||||||
|
b::bare("cargo"),
|
||||||
|
b::sp(),
|
||||||
|
b::external_word("+nightly"),
|
||||||
|
b::sp(),
|
||||||
|
b::bare("run"),
|
||||||
|
b::sp(),
|
||||||
|
b::external_word("--"),
|
||||||
|
b::sp(),
|
||||||
|
b::flag("features"),
|
||||||
|
b::sp(),
|
||||||
|
b::bare("all"),
|
||||||
|
],
|
||||||
|
|tokens| {
|
||||||
|
let cargo = format!("cargo").spanned(tokens[0].span());
|
||||||
|
let nightly = format!("+nightly").spanned(tokens[2].span());
|
||||||
|
let run = format!("run").spanned(tokens[4].span());
|
||||||
|
let dashdash = format!("--").spanned(tokens[6].span());
|
||||||
|
let features = format!("--features").spanned(tokens[8].span());
|
||||||
|
let all = format!("all").spanned(tokens[10].span());
|
||||||
|
let span = tokens[0].span().until(tokens[10].span());
|
||||||
|
|
||||||
|
ExternalTokensSyntax::new(
|
||||||
|
vec![cargo, nightly, run, dashdash, features, all].spanned(span),
|
||||||
|
)
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_string() {
|
||||||
|
parse_tokens(
|
||||||
|
CoerceStringShape,
|
||||||
|
r#""hello""#,
|
||||||
|
vec![b::string("hello")],
|
||||||
|
|tokens| {
|
||||||
|
Expression::string(inner_string_span(tokens[0].span())).into_expr(tokens[0].span())
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_path() {
|
||||||
|
let _ = pretty_env_logger::try_init();
|
||||||
|
|
||||||
|
parse_expr(
|
||||||
|
AnyExpressionShape,
|
||||||
|
"$it.cpu",
|
||||||
|
vec![b::it_var(), b::dot(), b::bare("cpu")],
|
||||||
|
|tokens| {
|
||||||
|
let (outer_var, inner_var) = tokens[0].expect_var();
|
||||||
|
let bare = tokens[2].expect_bare();
|
||||||
|
Expression::path(
|
||||||
|
Expression::it_variable(inner_var).into_expr(outer_var),
|
||||||
|
vec![PathMember::string("cpu", bare)],
|
||||||
|
)
|
||||||
|
.into_expr(outer_var.until(bare))
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
parse_expr(
|
||||||
VariablePathShape,
|
VariablePathShape,
|
||||||
|
r#"$cpu.amount."max ghz""#,
|
||||||
vec![
|
vec![
|
||||||
b::var("cpu"),
|
b::var("cpu"),
|
||||||
b::dot(),
|
b::dot(),
|
||||||
@ -49,14 +106,14 @@ fn test_parse_path() {
|
|||||||
let amount = tokens[2].expect_bare();
|
let amount = tokens[2].expect_bare();
|
||||||
let (outer_max_ghz, _) = tokens[4].expect_string();
|
let (outer_max_ghz, _) = tokens[4].expect_string();
|
||||||
|
|
||||||
hir::Expression::path(
|
Expression::path(
|
||||||
hir::Expression::variable(inner_var, outer_var),
|
Expression::variable(inner_var).into_expr(outer_var),
|
||||||
vec![
|
vec![
|
||||||
PathMember::string("amount", amount),
|
PathMember::string("amount", amount),
|
||||||
PathMember::string("max ghz", outer_max_ghz),
|
PathMember::string("max ghz", outer_max_ghz),
|
||||||
],
|
],
|
||||||
outer_var.until(outer_max_ghz),
|
|
||||||
)
|
)
|
||||||
|
.into_expr(outer_var.until(outer_max_ghz))
|
||||||
},
|
},
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@ -64,7 +121,8 @@ fn test_parse_path() {
|
|||||||
#[test]
|
#[test]
|
||||||
fn test_parse_command() {
|
fn test_parse_command() {
|
||||||
parse_tokens(
|
parse_tokens(
|
||||||
ClassifiedCommandShape,
|
fallible(ClassifiedCommandShape),
|
||||||
|
"ls *.txt",
|
||||||
vec![b::bare("ls"), b::sp(), b::pattern("*.txt")],
|
vec![b::bare("ls"), b::sp(), b::pattern("*.txt")],
|
||||||
|tokens| {
|
|tokens| {
|
||||||
let bare = tokens[0].expect_bare();
|
let bare = tokens[0].expect_bare();
|
||||||
@ -81,8 +139,8 @@ fn test_parse_command() {
|
|||||||
anchor: None,
|
anchor: None,
|
||||||
},
|
},
|
||||||
hir::Call {
|
hir::Call {
|
||||||
head: Box::new(hir::RawExpression::Command(bare).into_expr(bare)),
|
head: Box::new(Expression::Command(bare).into_expr(bare)),
|
||||||
positional: Some(vec![hir::Expression::pattern("*.txt", pat)]),
|
positional: Some(vec![Expression::pattern("*.txt").into_expr(pat)]),
|
||||||
named: Some(NamedArguments { named: map }),
|
named: Some(NamedArguments { named: map }),
|
||||||
span: bare.until(pat),
|
span: bare.until(pat),
|
||||||
},
|
},
|
||||||
@ -91,7 +149,7 @@ fn test_parse_command() {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(new)]
|
#[derive(Debug, Clone, new)]
|
||||||
struct TestRegistry {
|
struct TestRegistry {
|
||||||
#[new(default)]
|
#[new(default)]
|
||||||
signatures: indexmap::IndexMap<String, Signature>,
|
signatures: indexmap::IndexMap<String, Signature>,
|
||||||
@ -104,11 +162,14 @@ impl TestRegistry {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl SignatureRegistry for TestRegistry {
|
impl SignatureRegistry for TestRegistry {
|
||||||
fn has(&self, name: &str) -> Result<bool, ShellError> {
|
fn has(&self, name: &str) -> bool {
|
||||||
Ok(self.signatures.contains_key(name))
|
self.signatures.contains_key(name)
|
||||||
}
|
}
|
||||||
fn get(&self, name: &str) -> Result<Option<Signature>, ShellError> {
|
fn get(&self, name: &str) -> Option<Signature> {
|
||||||
Ok(self.signatures.get(name).cloned())
|
self.signatures.get(name).cloned()
|
||||||
|
}
|
||||||
|
fn clone_box(&self) -> Box<dyn SignatureRegistry> {
|
||||||
|
Box::new(self.clone())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -128,30 +189,91 @@ fn with_empty_context(source: &Text, callback: impl FnOnce(ExpandContext)) {
|
|||||||
callback(ExpandContext::new(Box::new(registry), source, None))
|
callback(ExpandContext::new(Box::new(registry), source, None))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn parse_tokens<T: Eq + HasSpan + Clone + Debug + 'static>(
|
trait Expand {}
|
||||||
shape: impl ExpandSyntax<Output = T>,
|
|
||||||
|
fn parse_tokens<T: Eq + HasSpan + PrettyDebugWithSource + Clone + Debug + 'static>(
|
||||||
|
shape: impl ExpandSyntax<Output = Result<T, ParseError>>,
|
||||||
|
syntax: &str,
|
||||||
tokens: Vec<CurriedToken>,
|
tokens: Vec<CurriedToken>,
|
||||||
expected: impl FnOnce(&[TokenNode]) -> T,
|
expected: impl FnOnce(&[SpannedToken]) -> T,
|
||||||
) {
|
) {
|
||||||
|
// let parsed_tokens = parse(syntax);
|
||||||
let tokens = b::token_list(tokens);
|
let tokens = b::token_list(tokens);
|
||||||
let (tokens, source) = b::build(tokens);
|
let (tokens, source) = b::build(tokens);
|
||||||
let text = Text::from(source);
|
let text = Text::from(&source);
|
||||||
|
|
||||||
|
assert_eq!(syntax, source);
|
||||||
|
|
||||||
with_empty_context(&text, |context| {
|
with_empty_context(&text, |context| {
|
||||||
let tokens = tokens.expect_list();
|
let tokens = tokens.expect_list();
|
||||||
let mut iterator = TokensIterator::all(tokens.item, text.clone(), tokens.span);
|
let mut iterator = TokensIterator::new(&tokens.item, context, tokens.span);
|
||||||
|
|
||||||
let expr = expand_syntax(&shape, &mut iterator, &context);
|
let expr = iterator.expand_syntax(shape);
|
||||||
|
|
||||||
let expr = match expr {
|
let expr = match expr {
|
||||||
Ok(expr) => expr,
|
Ok(expr) => expr,
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
print_err(err.into(), &context.source().clone());
|
outln!("");
|
||||||
|
ptree::print_tree(&iterator.expand_tracer().print(text.clone())).unwrap();
|
||||||
|
outln!("");
|
||||||
|
|
||||||
|
print_err(err.into(), &iterator.context().source().clone());
|
||||||
panic!("Parse failed");
|
panic!("Parse failed");
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
assert_eq!(expr, expected(tokens.item));
|
let expected = expected(&tokens.item);
|
||||||
|
|
||||||
|
if expr != expected {
|
||||||
|
outln!("");
|
||||||
|
ptree::print_tree(&iterator.expand_tracer().print(text.clone())).unwrap();
|
||||||
|
outln!("");
|
||||||
|
|
||||||
|
assert_eq!(expr, expected);
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_expr(
|
||||||
|
shape: impl ExpandSyntax<Output = Result<SpannedExpression, ParseError>>,
|
||||||
|
syntax: &str,
|
||||||
|
tokens: Vec<CurriedToken>,
|
||||||
|
expected: impl FnOnce(&[SpannedToken]) -> SpannedExpression,
|
||||||
|
) {
|
||||||
|
// let parsed_tokens = parse(syntax);
|
||||||
|
let tokens = b::token_list(tokens);
|
||||||
|
let (tokens, source) = b::build(tokens);
|
||||||
|
let text = Text::from(&source);
|
||||||
|
|
||||||
|
assert_eq!(syntax, source);
|
||||||
|
|
||||||
|
with_empty_context(&text, |context| {
|
||||||
|
let tokens = tokens.expect_list();
|
||||||
|
let mut iterator = TokensIterator::new(&tokens.item, context, tokens.span);
|
||||||
|
|
||||||
|
let expr = iterator.expand_syntax(shape);
|
||||||
|
|
||||||
|
let expr = match expr {
|
||||||
|
Ok(expr) => expr,
|
||||||
|
Err(err) => {
|
||||||
|
outln!("");
|
||||||
|
ptree::print_tree(&iterator.expand_tracer().print(text.clone())).unwrap();
|
||||||
|
outln!("");
|
||||||
|
|
||||||
|
print_err(err.into(), &iterator.source());
|
||||||
|
panic!("Parse failed");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let expected = expected(&tokens.item);
|
||||||
|
|
||||||
|
if expr != expected {
|
||||||
|
outln!("");
|
||||||
|
ptree::print_tree(&iterator.expand_tracer().print(text.clone())).unwrap();
|
||||||
|
outln!("");
|
||||||
|
|
||||||
|
assert_eq!(expr, expected);
|
||||||
|
}
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -1,4 +1,4 @@
-use crate::{hir::Expression, CompareOperator};
+use crate::{hir::SpannedExpression, CompareOperator};

use derive_new::new;
use getset::Getters;
@@ -10,9 +10,9 @@ use serde::{Deserialize, Serialize};
)]
#[get = "pub"]
pub struct Binary {
-    left: Expression,
+    left: SpannedExpression,
    op: Spanned<CompareOperator>,
-    right: Expression,
+    right: SpannedExpression,
}

impl PrettyDebugWithSource for Binary {
@@ -1,17 +1,14 @@
+use crate::parse::token_tree::Token;
use crate::{
-    hir::syntax_shape::{
-        color_syntax, expand_atom, expand_expr, expand_syntax, AtomicToken, ColorSyntax,
-        ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, MaybeSpaceShape,
-        UnspannedAtomicToken,
-    },
-    hir::Expression,
+    hir::syntax_shape::{ExpandSyntax, FlatShape, MaybeSpaceShape},
    TokensIterator,
};
+use derive_new::new;
use nu_errors::ParseError;
use nu_protocol::SpannedTypeName;
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebug, Span, Spanned, SpannedItem};

-#[derive(Debug, Clone)]
+#[derive(Debug, Eq, PartialEq, Clone, new)]
pub struct ExternalTokensSyntax {
    pub tokens: Spanned<Vec<Spanned<String>>>,
}
@@ -40,57 +37,25 @@ impl ExpandSyntax for ExternalTokensShape {
    type Output = ExternalTokensSyntax;

    fn name(&self) -> &'static str {
-        "external command"
+        "external tokens"
    }

-    fn expand_syntax<'a, 'b>(
-        &self,
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<Self::Output, ParseError> {
+    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> ExternalTokensSyntax {
        let mut out: Vec<Spanned<String>> = vec![];

        let start = token_nodes.span_at_cursor();

        loop {
-            match expand_syntax(&ExternalExpressionShape, token_nodes, context) {
-                Err(_) | Ok(None) => break,
-                Ok(Some(span)) => out.push(span.spanned_string(context.source())),
+            match token_nodes.expand_syntax(ExternalExpressionShape) {
+                Err(_) => break,
+                Ok(span) => out.push(span.spanned_string(&token_nodes.source())),
            }
        }

        let end = token_nodes.span_at_cursor();

-        Ok(ExternalTokensSyntax {
+        ExternalTokensSyntax {
            tokens: out.spanned(start.until(end)),
-        })
-    }
-}
-
-impl ColorSyntax for ExternalTokensShape {
-    type Info = ();
-    type Input = ();
-
-    fn name(&self) -> &'static str {
-        "ExternalTokensShape"
-    }
-
-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &(),
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Self::Info {
-        loop {
-            // Allow a space
-            color_syntax(&MaybeSpaceShape, token_nodes, context);
-
-            // Process an external expression. External expressions are mostly words, with a
-            // few exceptions (like $variables and path expansion rules)
-            match color_syntax(&ExternalExpressionShape, token_nodes, context).1 {
-                ExternalExpressionResult::Eof => break,
-                ExternalExpressionResult::Processed => continue,
-            }
        }
    }
}
@@ -99,208 +64,112 @@ impl ColorSyntax for ExternalTokensShape {
pub struct ExternalExpressionShape;

impl ExpandSyntax for ExternalExpressionShape {
-    type Output = Option<Span>;
+    type Output = Result<Span, ParseError>;

    fn name(&self) -> &'static str {
        "external expression"
    }

-    fn expand_syntax<'a, 'b>(
-        &self,
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<Self::Output, ParseError> {
-        expand_syntax(&MaybeSpaceShape, token_nodes, context)?;
-
-        let first = expand_atom(
-            token_nodes,
-            "external command",
-            context,
-            ExpansionRule::new().allow_external_word(),
-        )?
-        .span;
+    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
+        token_nodes.expand_infallible(MaybeSpaceShape);

+        let first = token_nodes.expand_syntax(ExternalStartToken)?;
        let mut last = first;

        loop {
-            let continuation = expand_expr(&ExternalContinuationShape, token_nodes, context);
+            let continuation = token_nodes.expand_syntax(ExternalStartToken);

            if let Ok(continuation) = continuation {
-                last = continuation.span;
+                last = continuation;
            } else {
                break;
            }
        }

-        Ok(Some(first.until(last)))
+        Ok(first.until(last))
    }
}

#[derive(Debug, Copy, Clone)]
-struct ExternalHeadShape;
+struct ExternalStartToken;

-impl ExpandExpression for ExternalHeadShape {
+impl ExpandSyntax for ExternalStartToken {
+    type Output = Result<Span, ParseError>;
+
    fn name(&self) -> &'static str {
-        "external argument"
+        "external start token"
    }

-    fn expand_expr<'a, 'b>(
-        &self,
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<Expression, ParseError> {
-        let atom = expand_atom(
-            token_nodes,
-            "external argument",
-            context,
-            ExpansionRule::new()
-                .allow_external_word()
-                .treat_size_as_word(),
-        )?;
-
-        let span = atom.span;
-
-        Ok(match &atom.unspanned {
-            UnspannedAtomicToken::Eof { .. } => unreachable!("ExpansionRule doesn't allow EOF"),
-            UnspannedAtomicToken::Error { .. } => unreachable!("ExpansionRule doesn't allow Error"),
-            UnspannedAtomicToken::Size { .. } => unreachable!("ExpansionRule treats size as word"),
-            UnspannedAtomicToken::Whitespace { .. } => {
-                unreachable!("ExpansionRule doesn't allow Whitespace")
-            }
-            UnspannedAtomicToken::Separator { .. } => {
-                unreachable!("ExpansionRule doesn't allow Separator")
-            }
-            UnspannedAtomicToken::Comment { .. } => {
-                unreachable!("ExpansionRule doesn't allow Comment")
-            }
-            UnspannedAtomicToken::ShorthandFlag { .. }
-            | UnspannedAtomicToken::SquareDelimited { .. }
-            | UnspannedAtomicToken::RoundDelimited { .. } => {
-                return Err(ParseError::mismatch(
-                    "external command name",
-                    atom.spanned_type_name(),
-                ))
-            }
-            UnspannedAtomicToken::ExternalCommand { command } => {
-                Expression::external_command(*command, span)
-            }
-            UnspannedAtomicToken::Number { number } => {
-                Expression::number(number.to_number(context.source()), span)
-            }
-            UnspannedAtomicToken::String { body } => Expression::string(*body, span),
-            UnspannedAtomicToken::ItVariable { name } => Expression::it_variable(*name, span),
-            UnspannedAtomicToken::Variable { name } => Expression::variable(*name, span),
-            UnspannedAtomicToken::ExternalWord { .. }
-            | UnspannedAtomicToken::GlobPattern { .. }
-            | UnspannedAtomicToken::Word { .. }
-            | UnspannedAtomicToken::Dot { .. }
-            | UnspannedAtomicToken::DotDot { .. }
-            | UnspannedAtomicToken::CompareOperator { .. } => {
-                Expression::external_command(span, span)
+    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
+        token_nodes.atomic_parse(|token_nodes| {
+            let mut span: Option<Span> = None;
+
+            loop {
+                let boundary = token_nodes.expand_infallible(PeekExternalBoundary);
+
+                if boundary {
+                    break;
+                }
+
+                let peeked = token_nodes.peek().not_eof("external start token")?;
+                let node = peeked.node;
+
+                let new_span = match node.unspanned() {
+                    Token::Comment(_)
+                    | Token::Separator
+                    | Token::Whitespace
+                    | Token::Pipeline(_) => {
+                        return Err(ParseError::mismatch(
+                            "external start token",
+                            node.spanned_type_name(),
+                        ))
+                    }
+
+                    _ => {
+                        let node = peeked.commit();
+                        node.span()
+                    }
+                };
+
+                span = match span {
+                    None => Some(new_span),
+                    Some(before) => Some(before.until(new_span)),
+                };
+            }
+
+            match span {
+                None => Err(token_nodes.err_next_token("external start token")),
+                Some(span) => {
+                    token_nodes.color_shape(FlatShape::ExternalWord.spanned(span));
+                    Ok(span)
+                }
            }
        })
    }
}
#[derive(Debug, Copy, Clone)]
-struct ExternalContinuationShape;
+struct PeekExternalBoundary;

-impl ExpandExpression for ExternalContinuationShape {
-    fn name(&self) -> &'static str {
-        "external argument"
-    }
-
-    fn expand_expr<'a, 'b>(
-        &self,
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<Expression, ParseError> {
-        let atom = expand_atom(
-            token_nodes,
-            "external argument",
-            context,
-            ExpansionRule::new()
-                .allow_external_word()
-                .treat_size_as_word(),
-        )?;
-
-        let span = atom.span;
-
-        Ok(match &atom.unspanned {
-            UnspannedAtomicToken::Eof { .. } => unreachable!("ExpansionRule doesn't allow EOF"),
-            UnspannedAtomicToken::Error { .. } => unreachable!("ExpansionRule doesn't allow Error"),
-            UnspannedAtomicToken::Number { number } => {
-                Expression::number(number.to_number(context.source()), span)
-            }
-            UnspannedAtomicToken::Size { .. } => unreachable!("ExpansionRule treats size as word"),
-            UnspannedAtomicToken::ExternalCommand { .. } => {
-                unreachable!("ExpansionRule doesn't allow ExternalCommand")
-            }
-            UnspannedAtomicToken::Whitespace { .. } => {
-                unreachable!("ExpansionRule doesn't allow Whitespace")
-            }
-            UnspannedAtomicToken::Separator { .. } => {
-                unreachable!("ExpansionRule doesn't allow Separator")
-            }
-            UnspannedAtomicToken::Comment { .. } => {
-                unreachable!("ExpansionRule doesn't allow Comment")
-            }
-            UnspannedAtomicToken::String { body } => Expression::string(*body, span),
-            UnspannedAtomicToken::ItVariable { name } => Expression::it_variable(*name, span),
-            UnspannedAtomicToken::Variable { name } => Expression::variable(*name, span),
-            UnspannedAtomicToken::ExternalWord { .. }
-            | UnspannedAtomicToken::GlobPattern { .. }
-            | UnspannedAtomicToken::Word { .. }
-            | UnspannedAtomicToken::ShorthandFlag { .. }
-            | UnspannedAtomicToken::Dot { .. }
-            | UnspannedAtomicToken::DotDot { .. }
-            | UnspannedAtomicToken::CompareOperator { .. } => Expression::bare(span),
-            UnspannedAtomicToken::SquareDelimited { .. }
-            | UnspannedAtomicToken::RoundDelimited { .. } => {
-                return Err(ParseError::mismatch(
-                    "external argument",
-                    atom.spanned_type_name(),
-                ))
-            }
-        })
-    }
-}
-
-impl ColorSyntax for ExternalExpressionShape {
-    type Info = ExternalExpressionResult;
-    type Input = ();
+impl ExpandSyntax for PeekExternalBoundary {
+    type Output = bool;

    fn name(&self) -> &'static str {
-        "ExternalExpressionShape"
+        "external boundary"
    }

-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &(),
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> ExternalExpressionResult {
-        let atom = match expand_atom(
-            token_nodes,
-            "external word",
-            context,
-            ExpansionRule::permissive(),
-        ) {
-            Err(_) => unreachable!("TODO: separate infallible expand_atom"),
-            Ok(AtomicToken {
-                unspanned: UnspannedAtomicToken::Eof { .. },
-                ..
-            }) => return ExternalExpressionResult::Eof,
-            Ok(atom) => atom,
-        };
-
-        token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes));
-        ExternalExpressionResult::Processed
+    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Self::Output {
+        let next = token_nodes.peek();
+
+        match next.node {
+            None => true,
+            Some(node) => match node.unspanned() {
+                Token::Delimited(_) => true,
+                Token::Whitespace => true,
+                Token::Comment(_) => true,
+                Token::Separator => true,
+                Token::Call(_) => true,
+                _ => false,
+            },
+        }
    }
}
-
-#[must_use]
-pub enum ExternalExpressionResult {
-    Eof,
-    Processed,
-}
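For orientation, most of the changes in this diff follow one pattern: a shape that used to implement `ExpandExpression`, `ColorSyntax`, or `FallibleColorSyntax` now implements the single `ExpandSyntax` trait, and coloring happens inside `expand`. Below is a minimal sketch of that pattern; it only reuses trait items and `TokensIterator` methods that appear in this commit, and the shape name `ExternalFallbackShape` (and the cross-module use of `ExternalWordShape`/`ExternalStartToken`) is hypothetical, for illustration rather than a compiling unit of the real crate.

```rust
// Hypothetical shape, sketched to show the post-refactor ExpandSyntax pattern.
#[derive(Debug, Copy, Clone)]
struct ExternalFallbackShape;

impl ExpandSyntax for ExternalFallbackShape {
    // Fallible shapes return a Result; infallible ones (run via
    // `expand_infallible`) return a plain value such as `bool` or `()`.
    type Output = Result<Span, ParseError>;

    fn name(&self) -> &'static str {
        "external fallback"
    }

    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
        // Ordered fallback: try one shape, then another, the same way
        // CoerceBlockShape and FilePathShape do elsewhere in this commit.
        token_nodes
            .expand_syntax(ExternalWordShape)
            .or_else(|_| token_nodes.expand_syntax(ExternalStartToken))
    }
}
```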
@@ -1,8 +1,8 @@
-use crate::hir::Expression;
+use crate::hir::SpannedExpression;
use crate::Flag;
use indexmap::IndexMap;
use log::trace;
-use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Tag};
+use nu_source::{b, DebugDocBuilder, PrettyDebugRefineKind, PrettyDebugWithSource, Tag};
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
@@ -10,7 +10,7 @@ pub enum NamedValue {
    AbsentSwitch,
    PresentSwitch(Tag),
    AbsentValue,
-    Value(Expression),
+    Value(SpannedExpression),
}

impl PrettyDebugWithSource for NamedValue {
@@ -22,6 +22,18 @@ impl PrettyDebugWithSource for NamedValue {
            NamedValue::Value(value) => value.pretty_debug(source),
        }
    }
+
+    fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder {
+        match refine {
+            PrettyDebugRefineKind::ContextFree => self.pretty_debug(source),
+            PrettyDebugRefineKind::WithContext => match self {
+                NamedValue::AbsentSwitch => b::value("absent"),
+                NamedValue::PresentSwitch(_) => b::value("present"),
+                NamedValue::AbsentValue => b::value("absent"),
+                NamedValue::Value(value) => value.refined_pretty_debug(refine, source),
+            },
+        }
+    }
}

#[derive(Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize)]
@@ -60,28 +72,37 @@ impl NamedArguments {
        };
    }

-    pub fn insert_optional(&mut self, name: impl Into<String>, expr: Option<Expression>) {
+    pub fn insert_optional(&mut self, name: impl Into<String>, expr: Option<SpannedExpression>) {
        match expr {
            None => self.named.insert(name.into(), NamedValue::AbsentValue),
            Some(expr) => self.named.insert(name.into(), NamedValue::Value(expr)),
        };
    }

-    pub fn insert_mandatory(&mut self, name: impl Into<String>, expr: Expression) {
+    pub fn insert_mandatory(&mut self, name: impl Into<String>, expr: SpannedExpression) {
        self.named.insert(name.into(), NamedValue::Value(expr));
    }
}

impl PrettyDebugWithSource for NamedArguments {
+    fn refined_pretty_debug(&self, refine: PrettyDebugRefineKind, source: &str) -> DebugDocBuilder {
+        match refine {
+            PrettyDebugRefineKind::ContextFree => self.pretty_debug(source),
+            PrettyDebugRefineKind::WithContext => b::intersperse(
+                self.named.iter().map(|(key, value)| {
+                    b::key(key)
+                        + b::equals()
+                        + value.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source)
+                }),
+                b::space(),
+            ),
+        }
+    }
+
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        b::delimit(
            "(",
-            b::intersperse(
-                self.named
-                    .iter()
-                    .map(|(key, value)| b::key(key) + b::equals() + value.pretty_debug(source)),
-                b::space(),
-            ),
+            self.refined_pretty_debug(PrettyDebugRefineKind::WithContext, source),
            ")",
        )
    }
@@ -1,4 +1,4 @@
-use crate::hir::Expression;
+use crate::hir::SpannedExpression;
use derive_new::new;
use getset::{Getters, MutGetters};
use nu_protocol::PathMember;
@@ -21,7 +21,7 @@ use serde::{Deserialize, Serialize};
)]
#[get = "pub"]
pub struct Path {
-    head: Expression,
+    head: SpannedExpression,
    #[get_mut = "pub(crate)"]
    tail: Vec<PathMember>,
}
@@ -35,7 +35,7 @@ impl PrettyDebugWithSource for Path {
}

impl Path {
-    pub(crate) fn parts(self) -> (Expression, Vec<PathMember>) {
+    pub(crate) fn parts(self) -> (SpannedExpression, Vec<PathMember>) {
        (self.head, self.tail)
    }
}
@@ -1,4 +1,4 @@
-use crate::hir::Expression;
+use crate::hir::SpannedExpression;

use derive_new::new;
use getset::Getters;
@@ -10,11 +10,11 @@ use serde::{Deserialize, Serialize};
)]
pub struct Range {
    #[get = "pub"]
-    left: Expression,
+    left: SpannedExpression,
    #[get = "pub"]
    dotdot: Span,
    #[get = "pub"]
-    right: Expression,
+    right: SpannedExpression,
}

impl PrettyDebugWithSource for Range {
(File diff suppressed because it is too large)
@@ -1,141 +1,82 @@
+use crate::hir::Expression;
use crate::{
    hir,
    hir::syntax_shape::{
-        color_fallible_syntax, color_syntax_with, continue_expression, expand_expr, expand_syntax,
-        DelimitedShape, ExpandContext, ExpandExpression, ExpressionContinuationShape,
-        ExpressionListShape, FallibleColorSyntax, MemberShape, PathTailShape, PathTailSyntax,
+        ExpandSyntax, ExpressionContinuationShape, MemberShape, PathTailShape, PathTailSyntax,
        VariablePathShape,
    },
    hir::tokens_iterator::TokensIterator,
-    parse::token_tree::Delimiter,
};
-use nu_errors::{ParseError, ShellError};
+use hir::SpannedExpression;
+use nu_errors::ParseError;
use nu_source::Span;

#[derive(Debug, Copy, Clone)]
-pub struct AnyBlockShape;
+pub struct CoerceBlockShape;

-impl FallibleColorSyntax for AnyBlockShape {
-    type Info = ();
-    type Input = ();
-
-    fn name(&self) -> &'static str {
-        "AnyBlockShape"
-    }
-
-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &(),
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<(), ShellError> {
-        let block = token_nodes.peek_non_ws().not_eof("block");
-
-        let block = match block {
-            Err(_) => return Ok(()),
-            Ok(block) => block,
-        };
-
-        // is it just a block?
-        let block = block.node.as_block();
-
-        if let Some((children, spans)) = block {
-            token_nodes.child(children, context.source.clone(), |token_nodes| {
-                color_syntax_with(
-                    &DelimitedShape,
-                    &(Delimiter::Brace, spans.0, spans.1),
-                    token_nodes,
-                    context,
-                );
-            });
-
-            return Ok(());
-        }
-
-        // Otherwise, look for a shorthand block. If none found, fail
-        color_fallible_syntax(&ShorthandBlock, token_nodes, context)
-    }
-}
-
-impl ExpandExpression for AnyBlockShape {
+impl ExpandSyntax for CoerceBlockShape {
+    type Output = Result<SpannedExpression, ParseError>;
+
    fn name(&self) -> &'static str {
        "any block"
    }

-    fn expand_expr<'a, 'b>(
+    fn expand<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
-        context: &ExpandContext,
-    ) -> Result<hir::Expression, ParseError> {
-        let block = token_nodes.peek_non_ws().not_eof("block")?;
-
+    ) -> Result<SpannedExpression, ParseError> {
        // is it just a block?
-        let block = block.node.as_block();
-
-        if let Some((block, _tags)) = block {
-            let mut iterator =
-                TokensIterator::new(&block.item, block.span, context.source.clone(), false);
-
-            let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?.exprs;
-
-            return Ok(hir::RawExpression::Block(exprs.item).into_expr(block.span));
-        }
-
-        expand_syntax(&ShorthandBlock, token_nodes, context)
+        token_nodes
+            .expand_syntax(BlockShape)
+            .or_else(|_| token_nodes.expand_syntax(ShorthandBlockShape))
    }
}

#[derive(Debug, Copy, Clone)]
-pub struct ShorthandBlock;
+pub struct BlockShape;

-impl FallibleColorSyntax for ShorthandBlock {
-    type Info = ();
-    type Input = ();
+impl ExpandSyntax for BlockShape {
+    type Output = Result<SpannedExpression, ParseError>;

    fn name(&self) -> &'static str {
-        "ShorthandBlock"
+        "block"
    }

-    fn color_syntax<'a, 'b>(
+    fn expand<'a, 'b>(
        &self,
-        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<(), ShellError> {
-        // Try to find a shorthand head. If none found, fail
-        color_fallible_syntax(&ShorthandPath, token_nodes, context)?;
-
-        loop {
-            // Check to see whether there's any continuation after the head expression
-            let result = color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context);
-
-            match result {
-                // if no continuation was found, we're done
-                Err(_) => break,
-                // if a continuation was found, look for another one
-                Ok(_) => continue,
-            }
-        }
-
-        Ok(())
+    ) -> Result<SpannedExpression, ParseError> {
+        let exprs = token_nodes.block()?;
+
+        Ok(hir::Expression::Block(exprs.item).into_expr(exprs.span))
    }
}

-impl ExpandExpression for ShorthandBlock {
+#[derive(Debug, Copy, Clone)]
+pub struct ShorthandBlockShape;
+
+impl ExpandSyntax for ShorthandBlockShape {
+    type Output = Result<SpannedExpression, ParseError>;
+
    fn name(&self) -> &'static str {
        "shorthand block"
    }

-    fn expand_expr<'a, 'b>(
+    fn expand<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<hir::Expression, ParseError> {
-        let path = expand_expr(&ShorthandPath, token_nodes, context)?;
-        let start = path.span;
-        let expr = continue_expression(path, token_nodes, context);
-        let end = expr.span;
-        let block = hir::RawExpression::Block(vec![expr]).into_expr(start.until(end));
+    ) -> Result<SpannedExpression, ParseError> {
+        let mut current = token_nodes.expand_syntax(ShorthandPath)?;
+
+        loop {
+            match token_nodes.expand_syntax(ExpressionContinuationShape) {
+                Result::Err(_) => break,
+                Result::Ok(continuation) => current = continuation.append_to(current),
+            }
+        }
+
+        let span = current.span;
+
+        let block = hir::Expression::Block(vec![current]).into_expr(span);

        Ok(block)
    }
@@ -145,74 +86,40 @@ impl ExpandExpression for ShorthandBlock {
#[derive(Debug, Copy, Clone)]
pub struct ShorthandPath;

-impl FallibleColorSyntax for ShorthandPath {
-    type Info = ();
-    type Input = ();
-
-    fn name(&self) -> &'static str {
-        "ShorthandPath"
-    }
-
-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &(),
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<(), ShellError> {
-        token_nodes.atomic(|token_nodes| {
-            let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context);
-
-            if variable.is_ok() {
-                // if it's a variable path, that's the head part
-                return Ok(());
-            }
-
-            // otherwise, we'll try to find a member path
-
-            // look for a member (`<member>` -> `$it.<member>`)
-            color_fallible_syntax(&MemberShape, token_nodes, context)?;
-
-            // Now that we've synthesized the head, of the path, proceed to expand the tail of the path
-            // like any other path.
-            // It's ok if there's no path tail; a single member is sufficient
-            let _ = color_fallible_syntax(&PathTailShape, token_nodes, context);
-
-            Ok(())
-        })
-    }
-}
-
-impl ExpandExpression for ShorthandPath {
+impl ExpandSyntax for ShorthandPath {
+    type Output = Result<SpannedExpression, ParseError>;
+
    fn name(&self) -> &'static str {
        "shorthand path"
    }

-    fn expand_expr<'a, 'b>(
+    fn expand<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<hir::Expression, ParseError> {
+    ) -> Result<SpannedExpression, ParseError> {
        // if it's a variable path, that's the head part
-        let path = expand_expr(&VariablePathShape, token_nodes, context);
+        let path = token_nodes.expand_syntax(VariablePathShape);

        if let Ok(path) = path {
            return Ok(path);
        }

        // Synthesize the head of the shorthand path (`<member>` -> `$it.<member>`)
-        let mut head = expand_expr(&ShorthandHeadShape, token_nodes, context)?;
+        let mut head = token_nodes.expand_syntax(ShorthandHeadShape)?;

        // Now that we've synthesized the head, of the path, proceed to expand the tail of the path
        // like any other path.
-        let tail = expand_syntax(&PathTailShape, token_nodes, context);
+        let tail = token_nodes.expand_syntax(PathTailShape);

        match tail {
            Err(_) => Ok(head),
-            Ok(PathTailSyntax { tail, .. }) => {
+            Ok(PathTailSyntax { tail, span }) => {
+                let span = head.span.until(span);
+
                // For each member that `PathTailShape` expanded, join it onto the existing expression
                // to form a new path
                for member in tail {
-                    head = hir::Expression::dot_member(head, member);
+                    head = Expression::dot_member(head, member).into_expr(span);
                }

                Ok(head)
@@ -225,27 +132,28 @@ impl ExpandExpression for ShorthandPath {
#[derive(Debug, Copy, Clone)]
pub struct ShorthandHeadShape;

-impl ExpandExpression for ShorthandHeadShape {
+impl ExpandSyntax for ShorthandHeadShape {
+    type Output = Result<SpannedExpression, ParseError>;
+
    fn name(&self) -> &'static str {
        "shorthand head"
    }

-    fn expand_expr<'a, 'b>(
+    fn expand<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<hir::Expression, ParseError> {
-        let head = expand_syntax(&MemberShape, token_nodes, context)?;
-        let head = head.to_path_member(context.source);
+    ) -> Result<SpannedExpression, ParseError> {
+        let head = token_nodes.expand_syntax(MemberShape)?;
+        let head = head.to_path_member(&token_nodes.source());

        // Synthesize an `$it` expression
        let it = synthetic_it();
        let span = head.span;

-        Ok(hir::Expression::path(it, vec![head], span))
+        Ok(Expression::path(it, vec![head]).into_expr(span))
    }
}

-fn synthetic_it() -> hir::Expression {
-    hir::Expression::it_variable(Span::unknown(), Span::unknown())
+fn synthetic_it() -> hir::SpannedExpression {
+    Expression::it_variable(Span::unknown()).into_expr(Span::unknown())
}
crates/nu-parser/src/hir/syntax_shape/design.md (new file, 72 lines)
@@ -0,0 +1,72 @@
+# Meaningful Primitive Tokens
+
+- `int`
+- `decimal`
+- `op::name`
+- `dot`
+- `dotdot`
+- `string`
+- `var::it`
+- `var::other`
+- `external-command`
+- `pattern::glob`
+- `word`
+- `comment`
+- `whitespace`
+- `separator`
+- `longhand-flag`
+- `shorthand-flag`
+
+# Grouped Tokens
+
+- `(call head ...tail)`
+- `(list ...nodes)`
+- `(paren ...nodes)`
+- `(square ...nodes)`
+- `(curly ...nodes)`
+- `(pipeline ...elements) where elements: pipeline-element`
+- `(pipeline-element pipe? token)`
+
+# Atomic Tokens
+
+- `(unit number unit) where number: number, unit: unit`
+
+# Expression
+
+```
+start(ExpressionStart) continuation(ExpressionContinuation)* ->
+```
+
+## ExpressionStart
+
+```
+word -> String
+unit -> Unit
+number -> Number
+string -> String
+var::it -> Var::It
+var::other -> Var::Other
+pattern::glob -> Pattern::Glob
+square -> Array
+```
+
+## TightExpressionContinuation
+
+```
+dot AnyExpression -> Member
+dodot AnyExpression -> RangeContinuation
+```
+
+## InfixExpressionContinuation
+
+```
+whitespace op whitespace AnyExpression -> InfixContinuation
+```
+
+## Member
+
+```
+int -> Member::Int
+word -> Member::Word
+string -> Member::String
+```
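Read alongside the grammar above, the `start(ExpressionStart) continuation(ExpressionContinuation)*` rule is what the expansion code in this commit implements as a fold over continuations. The sketch below shows that correspondence; the free function is used only for illustration (in the commit this logic lives inside `AnyExpressionShape::expand`, wrapped in `atomic_parse`), and the shapes and methods it calls are the ones defined elsewhere in this diff.

```rust
// Illustration only: how the expression rule maps onto the iterator-driven API.
fn expand_expression(token_nodes: &mut TokensIterator<'_>) -> Result<SpannedExpression, ParseError> {
    // One ExpressionStart...
    let mut current = token_nodes.expand_syntax(AnyExpressionStartShape)?;

    // ...followed by zero or more ExpressionContinuations, each appended to the head.
    loop {
        match token_nodes.expand_syntax(ExpressionContinuationShape) {
            Err(_) => return Ok(current),
            Ok(continuation) => current = continuation.append_to(current),
        }
    }
}
```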
@@ -1,4 +1,3 @@
-pub(crate) mod atom;
pub(crate) mod delimited;
pub(crate) mod file_path;
pub(crate) mod list;
@@ -10,311 +9,64 @@ pub(crate) mod unit;
pub(crate) mod variable_path;

use crate::hir::syntax_shape::{
-    color_delimited_square, color_fallible_syntax, color_fallible_syntax_with, expand_atom,
-    expand_delimited_square, expand_expr, expand_syntax, BareShape, ColorableDotShape, DotShape,
-    ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, ExpressionContinuation,
-    ExpressionContinuationShape, FallibleColorSyntax, FlatShape, UnspannedAtomicToken,
+    BareExpressionShape, DelimitedSquareShape, ExpandContext, ExpandSyntax,
+    ExpressionContinuationShape, NumberExpressionShape, PatternExpressionShape,
+    StringExpressionShape, UnitExpressionShape, VariableShape,
};
-use crate::{
-    hir,
-    hir::{Expression, TokensIterator},
-};
-use nu_errors::{ParseError, ShellError};
-use nu_source::{HasSpan, Span, Spanned, SpannedItem, Tag};
+use crate::hir::{SpannedExpression, TokensIterator};
+use nu_errors::ParseError;
use std::path::PathBuf;

#[derive(Debug, Copy, Clone)]
pub struct AnyExpressionShape;

-impl ExpandExpression for AnyExpressionShape {
+impl ExpandSyntax for AnyExpressionShape {
+    type Output = Result<SpannedExpression, ParseError>;
+
    fn name(&self) -> &'static str {
        "any expression"
    }

-    fn expand_expr<'a, 'b>(
+    fn expand<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
-        context: &ExpandContext,
-    ) -> Result<hir::Expression, ParseError> {
-        // Look for an expression at the cursor
-        let head = expand_expr(&AnyExpressionStartShape, token_nodes, context)?;
-
-        Ok(continue_expression(head, token_nodes, context))
-    }
-}
-
-impl FallibleColorSyntax for AnyExpressionShape {
-    type Info = ();
-    type Input = ();
-
-    fn name(&self) -> &'static str {
-        "AnyExpressionShape"
-    }
-
-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &(),
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<(), ShellError> {
-        // Look for an expression at the cursor
-        color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context)?;
-
-        match continue_coloring_expression(token_nodes, context) {
-            Err(_) => {
-                // it's fine for there to be no continuation
-            }
-            Ok(()) => {}
-        }
-
-        Ok(())
-    }
-}
-
-pub(crate) fn continue_expression(
-    mut head: hir::Expression,
-    token_nodes: &mut TokensIterator<'_>,
-    context: &ExpandContext,
-) -> hir::Expression {
-    loop {
-        // Check to see whether there's any continuation after the head expression
-        let continuation = expand_syntax(&ExpressionContinuationShape, token_nodes, context);
-
-        match continuation {
-            // If there's no continuation, return the head
-            Err(_) => return head,
-            // Otherwise, form a new expression by combining the head with the continuation
-            Ok(continuation) => match continuation {
-                // If the continuation is a `.member`, form a path with the new member
-                ExpressionContinuation::DotSuffix(_dot, member) => {
-                    head = Expression::dot_member(head, member);
-                }
-
-                // Otherwise, if the continuation is an infix suffix, form an infix expression
-                ExpressionContinuation::InfixSuffix(op, expr) => {
-                    head = Expression::infix(head, op, expr);
-                }
-            },
-        }
-    }
-}
-
-pub(crate) fn continue_coloring_expression(
-    token_nodes: &mut TokensIterator<'_>,
-    context: &ExpandContext,
-) -> Result<(), ShellError> {
-    // if there's not even one expression continuation, fail
-    color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context)?;
-
-    loop {
-        // Check to see whether there's any continuation after the head expression
-        let result = color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context);
-
-        if result.is_err() {
-            // We already saw one continuation, so just return
-            return Ok(());
+    ) -> Result<SpannedExpression, ParseError> {
+        token_nodes.atomic_parse(|token_nodes| {
+            // Look for an atomic expression at the cursor
+            let mut current = token_nodes.expand_syntax(AnyExpressionStartShape)?;
+
+            loop {
+                match token_nodes.expand_syntax(ExpressionContinuationShape) {
+                    Err(_) => return Ok(current),
+                    Ok(continuation) => current = continuation.append_to(current),
+                }
+            }
+        })
    }
}

#[derive(Debug, Copy, Clone)]
pub struct AnyExpressionStartShape;

-impl ExpandExpression for AnyExpressionStartShape {
+impl ExpandSyntax for AnyExpressionStartShape {
+    type Output = Result<SpannedExpression, ParseError>;
+
    fn name(&self) -> &'static str {
        "any expression start"
    }

-    fn expand_expr<'a, 'b>(
+    fn expand<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
-        context: &ExpandContext,
-    ) -> Result<hir::Expression, ParseError> {
-        let atom = expand_atom(token_nodes, "expression", context, ExpansionRule::new())?;
-
-        match atom.unspanned {
-            UnspannedAtomicToken::Size { number, unit } => Ok(hir::Expression::size(
-                number.to_number(context.source),
-                unit.item,
-                Tag {
-                    span: atom.span,
-                    anchor: None,
-                },
-            )),
-
-            UnspannedAtomicToken::SquareDelimited { nodes, .. } => {
-                expand_delimited_square(&nodes, atom.span, context)
-            }
-
-            UnspannedAtomicToken::Word { .. } => {
-                let end = expand_syntax(&BareTailShape, token_nodes, context)?;
-                Ok(hir::Expression::bare(atom.span.until_option(end)))
-            }
-
-            other => other
-                .into_atomic_token(atom.span)
-                .to_hir(context, "expression"),
-        }
-    }
-}
-
-impl FallibleColorSyntax for AnyExpressionStartShape {
-    type Info = ();
-    type Input = ();
-
-    fn name(&self) -> &'static str {
-        "AnyExpressionStartShape"
-    }
-
-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &(),
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<(), ShellError> {
-        let atom = token_nodes.spanned(|token_nodes| {
-            expand_atom(
-                token_nodes,
-                "expression",
-                context,
-                ExpansionRule::permissive(),
-            )
-        });
-
-        let atom = match atom {
-            Spanned {
-                item: Err(_err),
-                span,
-            } => {
-                token_nodes.color_shape(FlatShape::Error.spanned(span));
-                return Ok(());
-            }
-
-            Spanned {
-                item: Ok(value), ..
-            } => value,
-        };
-
-        match atom.unspanned {
-            UnspannedAtomicToken::Size { number, unit } => token_nodes.color_shape(
-                FlatShape::Size {
-                    number: number.span(),
-                    unit: unit.span,
-                }
-                .spanned(atom.span),
-            ),
-
-            UnspannedAtomicToken::SquareDelimited { nodes, spans } => {
-                token_nodes.child(
-                    (&nodes[..]).spanned(atom.span),
-                    context.source.clone(),
-                    |tokens| {
-                        color_delimited_square(spans, tokens, atom.span, context);
-                    },
-                );
-            }
-
-            UnspannedAtomicToken::Word { .. } | UnspannedAtomicToken::Dot { .. } => {
-                token_nodes.color_shape(FlatShape::Word.spanned(atom.span));
-            }
-
-            _ => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)),
-        }
-
-        Ok(())
-    }
-}
-
-#[derive(Debug, Copy, Clone)]
-pub struct BareTailShape;
-
-impl FallibleColorSyntax for BareTailShape {
-    type Info = ();
-    type Input = ();
-
-    fn name(&self) -> &'static str {
-        "BareTailShape"
-    }
-
-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &(),
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<(), ShellError> {
-        let len = token_nodes.state().shapes().len();
-
-        loop {
-            let word =
-                color_fallible_syntax_with(&BareShape, &FlatShape::Word, token_nodes, context);
-
-            if word.is_ok() {
-                // if a word was found, continue
-                continue;
-            }
-
-            // if a word wasn't found, try to find a dot
-
-            // try to find a dot
-            let dot = color_fallible_syntax_with(
-                &ColorableDotShape,
-                &FlatShape::Word,
-                token_nodes,
-                context,
-            );
-
-            match dot {
-                // if a dot was found, try to find another word
-                Ok(_) => continue,
-                // otherwise, we're done
-                Err(_) => break,
-            }
-        }
-
-        if token_nodes.state().shapes().len() > len {
-            Ok(())
-        } else {
-            Err(ShellError::syntax_error(
-                "No tokens matched BareTailShape".spanned_unknown(),
-            ))
-        }
-    }
-}
-
-impl ExpandSyntax for BareTailShape {
-    fn name(&self) -> &'static str {
-        "word continuation"
-    }
-
-    type Output = Option<Span>;
-
-    fn expand_syntax<'a, 'b>(
-        &self,
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<Option<Span>, ParseError> {
-        let mut end: Option<Span> = None;
-
-        loop {
-            match expand_syntax(&BareShape, token_nodes, context) {
-                Ok(bare) => {
-                    end = Some(bare.span);
-                    continue;
-                }
-
-                Err(_) => match expand_syntax(&DotShape, token_nodes, context) {
-                    Ok(dot) => {
-                        end = Some(dot);
-                        continue;
-                    }
-
-                    Err(_) => break,
-                },
-            }
-        }
-
-        Ok(end)
+    ) -> Result<SpannedExpression, ParseError> {
+        token_nodes
+            .expand_syntax(VariableShape)
+            .or_else(|_| token_nodes.expand_syntax(UnitExpressionShape))
+            .or_else(|_| token_nodes.expand_syntax(BareExpressionShape))
+            .or_else(|_| token_nodes.expand_syntax(PatternExpressionShape))
+            .or_else(|_| token_nodes.expand_syntax(NumberExpressionShape))
+            .or_else(|_| token_nodes.expand_syntax(StringExpressionShape))
+            .or_else(|_| token_nodes.expand_syntax(DelimitedSquareShape))
    }
}
@@ -1,55 +1,24 @@
-use crate::hir::syntax_shape::{
-    color_syntax, expand_syntax, ColorSyntax, ExpandContext, ExpressionListShape, TokenNode,
-};
-use crate::{hir, hir::TokensIterator, Delimiter, FlatShape};
+use crate::hir::syntax_shape::ExpandSyntax;
+use crate::hir::SpannedExpression;
+use crate::{hir, hir::TokensIterator};
use nu_errors::ParseError;
-use nu_source::{Span, SpannedItem, Tag};
-
-pub fn expand_delimited_square(
-    children: &[TokenNode],
-    span: Span,
-    context: &ExpandContext,
-) -> Result<hir::Expression, ParseError> {
-    let mut tokens = TokensIterator::new(&children, span, context.source.clone(), false);
-
-    let list = expand_syntax(&ExpressionListShape, &mut tokens, context);
-
-    Ok(hir::Expression::list(
-        list?.exprs.item,
-        Tag { span, anchor: None },
-    ))
-}
-
-pub fn color_delimited_square(
-    (open, close): (Span, Span),
-    token_nodes: &mut TokensIterator,
-    _span: Span,
-    context: &ExpandContext,
-) {
-    token_nodes.color_shape(FlatShape::OpenDelimiter(Delimiter::Square).spanned(open));
-    let _list = color_syntax(&ExpressionListShape, token_nodes, context);
-    token_nodes.color_shape(FlatShape::CloseDelimiter(Delimiter::Square).spanned(close));
-}

#[derive(Debug, Copy, Clone)]
-pub struct DelimitedShape;
+pub struct DelimitedSquareShape;

-impl ColorSyntax for DelimitedShape {
-    type Info = ();
-    type Input = (Delimiter, Span, Span);
+impl ExpandSyntax for DelimitedSquareShape {
+    type Output = Result<SpannedExpression, ParseError>;

    fn name(&self) -> &'static str {
-        "DelimitedShape"
+        "delimited square"
    }

-    fn color_syntax<'a, 'b>(
+    fn expand<'a, 'b>(
        &self,
-        (delimiter, open, close): &(Delimiter, Span, Span),
        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Self::Info {
-        token_nodes.color_shape(FlatShape::OpenDelimiter(*delimiter).spanned(*open));
-        color_syntax(&ExpressionListShape, token_nodes, context);
-        token_nodes.color_shape(FlatShape::CloseDelimiter(*delimiter).spanned(*close));
+    ) -> Result<SpannedExpression, ParseError> {
+        let exprs = token_nodes.square()?;
+
+        Ok(hir::Expression::list(exprs.item).into_expr(exprs.span))
    }
}
@@ -1,88 +1,62 @@
-use crate::hir::syntax_shape::expression::atom::{
-    expand_atom, ExpansionRule, UnspannedAtomicToken,
-};
use crate::hir::syntax_shape::{
-    expression::expand_file_path, ExpandContext, ExpandExpression, FallibleColorSyntax, FlatShape,
+    expression::expand_file_path, BarePathShape, DecimalShape, ExpandContext, ExpandSyntax,
+    FlatShape, IntShape, StringShape,
};
-use crate::{hir, hir::TokensIterator};
-use nu_errors::{ParseError, ShellError};
-use nu_source::SpannedItem;
+use crate::hir::{Expression, SpannedExpression, TokensIterator};
+use crate::parse::token_tree::ExternalWordType;
+use nu_errors::ParseError;
+use nu_source::{HasSpan, Span};

#[derive(Debug, Copy, Clone)]
pub struct FilePathShape;

-impl FallibleColorSyntax for FilePathShape {
-    type Info = ();
-    type Input = ();
-
-    fn name(&self) -> &'static str {
-        "FilePathShape"
-    }
-
-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &(),
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<(), ShellError> {
-        let atom = expand_atom(
-            token_nodes,
-            "file path",
-            context,
-            ExpansionRule::permissive(),
-        );
-
-        let atom = match atom {
-            Err(_) => return Ok(()),
-            Ok(atom) => atom,
-        };
-
-        match atom.unspanned {
-            UnspannedAtomicToken::Word { .. }
-            | UnspannedAtomicToken::String { .. }
-            | UnspannedAtomicToken::Number { .. }
-            | UnspannedAtomicToken::Size { .. } => {
-                token_nodes.color_shape(FlatShape::Path.spanned(atom.span));
-            }
-
-            _ => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)),
-        }
-
-        Ok(())
-    }
-}
-
-impl ExpandExpression for FilePathShape {
+impl ExpandSyntax for FilePathShape {
+    type Output = Result<SpannedExpression, ParseError>;
+
    fn name(&self) -> &'static str {
        "file path"
    }

-    fn expand_expr<'a, 'b>(
+    fn expand<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
-        context: &ExpandContext,
-    ) -> Result<hir::Expression, ParseError> {
-        let atom = expand_atom(
-            token_nodes,
-            "file path",
-            context,
-            ExpansionRule::new().allow_external_word(),
-        )?;
-
-        match atom.unspanned {
-            UnspannedAtomicToken::Word { text: body }
-            | UnspannedAtomicToken::ExternalWord { text: body }
-            | UnspannedAtomicToken::String { body } => {
-                let path = expand_file_path(body.slice(context.source), context);
-                Ok(hir::Expression::file_path(path, atom.span))
-            }
-
-            UnspannedAtomicToken::Number { .. } | UnspannedAtomicToken::Size { .. } => {
-                let path = atom.span.slice(context.source);
-                Ok(hir::Expression::file_path(path, atom.span))
-            }
-
-            _ => atom.to_hir(context, "file path"),
-        }
+    ) -> Result<SpannedExpression, ParseError> {
+        token_nodes
+            .expand_syntax(BarePathShape)
+            .or_else(|_| token_nodes.expand_syntax(ExternalWordShape))
+            .map(|span| file_path(span, token_nodes.context()).into_expr(span))
+            .or_else(|_| {
+                token_nodes.expand_syntax(StringShape).map(|syntax| {
+                    file_path(syntax.inner, token_nodes.context()).into_expr(syntax.span)
+                })
+            })
+            .or_else(|_| {
+                token_nodes
+                    .expand_syntax(IntShape)
+                    .or_else(|_| token_nodes.expand_syntax(DecimalShape))
+                    .map(|number| {
+                        file_path(number.span(), token_nodes.context()).into_expr(number.span())
+                    })
+            })
+            .map_err(|_| token_nodes.err_next_token("file path"))
+    }
+}
+
+fn file_path(text: Span, context: &ExpandContext) -> Expression {
+    Expression::FilePath(expand_file_path(text.slice(context.source), context))
+}
+
+#[derive(Debug, Copy, Clone)]
+pub struct ExternalWordShape;
+
+impl ExpandSyntax for ExternalWordShape {
+    type Output = Result<Span, ParseError>;
+
+    fn name(&self) -> &'static str {
+        "external word"
+    }
+
+    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
+        token_nodes.expand_token(ExternalWordType, |span| Ok((FlatShape::ExternalWord, span)))
    }
}
@ -1,18 +1,15 @@
|
|||||||
|
use crate::hir::syntax_shape::flat_shape::FlatShape;
|
||||||
use crate::{
|
use crate::{
|
||||||
hir,
|
hir,
|
||||||
hir::syntax_shape::{
|
hir::syntax_shape::{AnyExpressionShape, ExpandSyntax, MaybeSpaceShape},
|
||||||
color_fallible_syntax, color_syntax, expand_atom, expand_expr, maybe_spaced, spaced,
|
|
||||||
AnyExpressionShape, ColorSyntax, ExpandContext, ExpandSyntax, ExpansionRule,
|
|
||||||
MaybeSpaceShape, SpaceShape,
|
|
||||||
},
|
|
||||||
hir::TokensIterator,
|
hir::TokensIterator,
|
||||||
};
|
};
|
||||||
use nu_errors::ParseError;
|
use derive_new::new;
|
||||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem};
|
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem};
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct ExpressionListSyntax {
|
pub struct ExpressionListSyntax {
|
||||||
pub exprs: Spanned<Vec<hir::Expression>>,
|
pub exprs: Spanned<Vec<hir::SpannedExpression>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl HasSpan for ExpressionListSyntax {
|
impl HasSpan for ExpressionListSyntax {
|
||||||
@ -40,99 +37,60 @@ impl ExpandSyntax for ExpressionListShape {
         "expression list"
     }
 
-    fn expand_syntax<'a, 'b>(
-        &self,
-        token_nodes: &mut TokensIterator<'_>,
-        context: &ExpandContext,
-    ) -> Result<ExpressionListSyntax, ParseError> {
-        let mut exprs = vec![];
-
-        let start = token_nodes.span_at_cursor();
-
-        if token_nodes.at_end_possible_ws() {
-            return Ok(ExpressionListSyntax {
-                exprs: exprs.spanned(start),
-            });
-        }
-
-        let expr = expand_expr(&maybe_spaced(AnyExpressionShape), token_nodes, context)?;
-
-        exprs.push(expr);
-
-        loop {
-            if token_nodes.at_end_possible_ws() {
-                let end = token_nodes.span_at_cursor();
-                return Ok(ExpressionListSyntax {
-                    exprs: exprs.spanned(start.until(end)),
-                });
-            }
-
-            let expr = expand_expr(&spaced(AnyExpressionShape), token_nodes, context)?;
-
-            exprs.push(expr);
-        }
-    }
-}
-
-impl ColorSyntax for ExpressionListShape {
-    type Info = ();
-    type Input = ();
-
-    fn name(&self) -> &'static str {
-        "ExpressionListShape"
-    }
-
-    /// The intent of this method is to fully color an expression list shape infallibly.
-    /// This means that if we can't expand a token into an expression, we fall back to
-    /// a simpler coloring strategy.
-    ///
-    /// This would apply to something like `where x >`, which includes an incomplete
-    /// binary operator. Since we will fail to process it as a binary operator, we'll
-    /// fall back to a simpler coloring and move on.
-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &(),
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) {
+    fn expand<'a, 'b>(&self, token_nodes: &mut TokensIterator<'_>) -> ExpressionListSyntax {
         // We encountered a parsing error and will continue with simpler coloring ("backoff
         // coloring mode")
         let mut backoff = false;
 
-        // Consume any leading whitespace
-        color_syntax(&MaybeSpaceShape, token_nodes, context);
+        let mut exprs = vec![];
+
+        let start = token_nodes.span_at_cursor();
+
+        token_nodes.expand_infallible(MaybeSpaceShape);
+
+        if token_nodes.at_end() {
+            return ExpressionListSyntax {
+                exprs: exprs.spanned(start),
+            };
+        }
+
+        let expr = token_nodes.expand_syntax(AnyExpressionShape);
+
+        match expr {
+            Ok(expr) => exprs.push(expr),
+            Err(_) => backoff = true,
+        }
 
         loop {
-            // If we reached the very end of the token stream, we're done
             if token_nodes.at_end() {
-                return;
+                let end = token_nodes.span_at_cursor();
+                return ExpressionListSyntax {
+                    exprs: exprs.spanned(start.until(end)),
+                };
             }
 
             if backoff {
                 let len = token_nodes.state().shapes().len();
 
                 // If we previously encountered a parsing error, use backoff coloring mode
-                color_syntax(&SimplestExpression, token_nodes, context);
+                token_nodes
+                    .expand_infallible(SimplestExpression::new(vec!["expression".to_string()]));
 
                 if len == token_nodes.state().shapes().len() && !token_nodes.at_end() {
                     // This should never happen, but if it does, a panic is better than an infinite loop
                     panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression")
                 }
             } else {
-                // Try to color the head of the stream as an expression
-                if color_fallible_syntax(&AnyExpressionShape, token_nodes, context).is_err() {
-                    // If no expression was found, switch to backoff coloring mode
-                    backoff = true;
-                    continue;
-                }
-
-                // If an expression was found, consume a space
-                if color_fallible_syntax(&SpaceShape, token_nodes, context).is_err() {
-                    // If no space was found, we're either at the end or there's an error.
-                    // Either way, switch to backoff coloring mode. If we're at the end
-                    // it won't have any consequences.
-                    backoff = true;
-                }
+                let expr = token_nodes.atomic_parse(|token_nodes| {
+                    token_nodes.expand_infallible(MaybeSpaceShape);
+                    token_nodes.expand_syntax(AnyExpressionShape)
+                });
+
+                match expr {
+                    Ok(expr) => exprs.push(expr),
+                    Err(_) => {
+                        backoff = true;
+                    }
+                }
                 // Otherwise, move on to the next expression
             }
         }
@ -141,69 +99,72 @@ impl ColorSyntax for ExpressionListShape {
 }
 
 /// BackoffColoringMode consumes all of the remaining tokens in an infallible way
-#[derive(Debug, Copy, Clone)]
-pub struct BackoffColoringMode;
+#[derive(Debug, Clone, new)]
+pub struct BackoffColoringMode {
+    allowed: Vec<String>,
+}
 
-impl ColorSyntax for BackoffColoringMode {
-    type Info = ();
-    type Input = ();
+impl ExpandSyntax for BackoffColoringMode {
+    type Output = Option<Span>;
 
     fn name(&self) -> &'static str {
         "BackoffColoringMode"
     }
 
-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &Self::Input,
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Self::Info {
+    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Option<Span> {
         loop {
             if token_nodes.at_end() {
                 break;
             }
 
             let len = token_nodes.state().shapes().len();
-            color_syntax(&SimplestExpression, token_nodes, context);
+            token_nodes.expand_infallible(SimplestExpression::new(self.allowed.clone()));
 
             if len == token_nodes.state().shapes().len() && !token_nodes.at_end() {
                 // This shouldn't happen, but if it does, a panic is better than an infinite loop
                 panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, token_nodes.state().shapes());
             }
         }
+
+        None
     }
 }
 
 /// The point of `SimplestExpression` is to serve as an infallible base case for coloring.
 /// As a last ditch effort, if we can't find any way to parse the head of the stream as an
 /// expression, fall back to simple coloring.
-#[derive(Debug, Copy, Clone)]
-pub struct SimplestExpression;
+#[derive(Debug, Clone, new)]
+pub struct SimplestExpression {
+    valid_shapes: Vec<String>,
+}
 
-impl ColorSyntax for SimplestExpression {
-    type Info = ();
-    type Input = ();
+impl ExpandSyntax for SimplestExpression {
+    type Output = Span;
 
     fn name(&self) -> &'static str {
         "SimplestExpression"
     }
 
-    fn color_syntax<'a, 'b>(
-        &self,
-        _input: &(),
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) {
-        let atom = expand_atom(
-            token_nodes,
-            "any token",
-            context,
-            ExpansionRule::permissive(),
-        );
-
-        match atom {
-            Err(_) => {}
-            Ok(atom) => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)),
+    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Span {
+        if token_nodes.at_end() {
+            return Span::unknown();
+        }
+
+        let source = token_nodes.source();
+
+        let peeked = token_nodes.peek();
+
+        match peeked.not_eof("simplest expression") {
+            Err(_) => token_nodes.span_at_cursor(),
+            Ok(peeked) => {
+                let token = peeked.commit();
+
+                for shape in FlatShape::shapes(token, &source) {
+                    token_nodes.color_err(shape, self.valid_shapes.clone())
+                }
+
+                token.span()
+            }
         }
     }
 }
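The two hunks above are the heart of the error correction described in the commit message: `ExpressionListShape::expand` first tries `AnyExpressionShape`, and once that fails it flips into backoff mode, where `SimplestExpression` consumes one token at a time and records it as an error shape, so every span of the source still ends up colored. The sketch below restates that try-then-back-off loop in miniature. It is illustrative only: `Tok`, `Shape`, and `parse_expr` are made-up stand-ins rather than nushell's real types, and this version simply backs off to the end of the input instead of stopping at a closing delimiter or pipe the way `BackoffColoringMode` does.

// Minimal, self-contained sketch of the "expand, then back off" strategy used by
// ExpressionListShape above. Every type here is an illustrative stand-in.
#[derive(Debug, Clone)]
enum Tok {
    Int(i64),
    Word(String),
    Garbage(String),
}

#[derive(Debug)]
enum Shape {
    Int(i64),
    Word(String),
    Error(String), // would be colored red, like the commit's syntax-error spans
}

// Stand-in for expanding AnyExpressionShape: only some tokens parse successfully.
fn parse_expr(tok: &Tok) -> Result<Shape, ()> {
    match tok {
        Tok::Int(i) => Ok(Shape::Int(*i)),
        Tok::Word(w) => Ok(Shape::Word(w.clone())),
        Tok::Garbage(_) => Err(()),
    }
}

// Expand a whole list: try the structured parse first; after the first failure,
// switch to backoff mode and keep consuming tokens as error shapes so that the
// whole source is still covered by shapes.
fn expand_list(tokens: &[Tok]) -> Vec<Shape> {
    let mut shapes = Vec::new();
    let mut backoff = false;

    for tok in tokens {
        if backoff {
            shapes.push(Shape::Error(format!("{:?}", tok)));
            continue;
        }
        match parse_expr(tok) {
            Ok(shape) => shapes.push(shape),
            Err(_) => {
                backoff = true;
                shapes.push(Shape::Error(format!("{:?}", tok)));
            }
        }
    }

    shapes
}

fn main() {
    // `where x >` style input: the incomplete operator trips the structured parse.
    let tokens = vec![
        Tok::Word("where".to_string()),
        Tok::Word("x".to_string()),
        Tok::Garbage(">".to_string()),
    ];
    println!("{:?}", expand_list(&tokens));
}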
@ -1,169 +1,109 @@
|
|||||||
use crate::hir::syntax_shape::{
|
use crate::hir::syntax_shape::{ExpandSyntax, FlatShape};
|
||||||
expand_atom, parse_single_node, ExpandContext, ExpandExpression, ExpansionRule,
|
use crate::hir::{Expression, SpannedExpression};
|
||||||
FallibleColorSyntax, FlatShape, TestSyntax,
|
use crate::hir::{RawNumber, TokensIterator};
|
||||||
};
|
use crate::parse::token_tree::{DecimalType, IntType};
|
||||||
use crate::hir::tokens_iterator::Peeked;
|
use nu_errors::ParseError;
|
||||||
use crate::parse::tokens::UnspannedToken;
|
use nu_source::HasSpan;
|
||||||
use crate::{
|
|
||||||
hir,
|
|
||||||
hir::{RawNumber, TokensIterator},
|
|
||||||
};
|
|
||||||
use nu_errors::{ParseError, ShellError};
|
|
||||||
use nu_source::{Spanned, SpannedItem};
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct NumberShape;
|
pub struct NumberExpressionShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for NumberExpressionShape {
|
||||||
|
type Output = Result<SpannedExpression, ParseError>;
|
||||||
|
|
||||||
impl ExpandExpression for NumberShape {
|
|
||||||
fn name(&self) -> &'static str {
|
fn name(&self) -> &'static str {
|
||||||
"number"
|
"number"
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_expr<'a, 'b>(
|
fn expand<'a, 'b>(
|
||||||
&self,
|
&self,
|
||||||
token_nodes: &mut TokensIterator<'_>,
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
context: &ExpandContext,
|
) -> Result<SpannedExpression, ParseError> {
|
||||||
) -> Result<hir::Expression, ParseError> {
|
let source = token_nodes.source();
|
||||||
parse_single_node(token_nodes, "Number", |token, token_span, err| {
|
|
||||||
Ok(match token {
|
token_nodes
|
||||||
UnspannedToken::GlobPattern
|
.expand_syntax(NumberShape)
|
||||||
| UnspannedToken::CompareOperator(..)
|
.map(|number| Expression::number(number.to_number(&source)).into_expr(number.span()))
|
||||||
| UnspannedToken::EvaluationOperator(..) => return Err(err.error()),
|
|
||||||
UnspannedToken::Variable(tag) if tag.slice(context.source) == "it" => {
|
|
||||||
hir::Expression::it_variable(tag, token_span)
|
|
||||||
}
|
|
||||||
UnspannedToken::ExternalCommand(tag) => {
|
|
||||||
hir::Expression::external_command(tag, token_span)
|
|
||||||
}
|
|
||||||
UnspannedToken::ExternalWord => {
|
|
||||||
return Err(ParseError::mismatch(
|
|
||||||
"number",
|
|
||||||
"syntax error".spanned(token_span),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
UnspannedToken::Variable(tag) => hir::Expression::variable(tag, token_span),
|
|
||||||
UnspannedToken::Number(number) => {
|
|
||||||
hir::Expression::number(number.to_number(context.source), token_span)
|
|
||||||
}
|
|
||||||
UnspannedToken::Bare => hir::Expression::bare(token_span),
|
|
||||||
UnspannedToken::String(tag) => hir::Expression::string(tag, token_span),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FallibleColorSyntax for NumberShape {
|
#[derive(Debug, Copy, Clone)]
|
||||||
type Info = ();
|
pub struct IntExpressionShape;
|
||||||
type Input = ();
|
|
||||||
|
impl ExpandSyntax for IntExpressionShape {
|
||||||
|
type Output = Result<SpannedExpression, ParseError>;
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
fn name(&self) -> &'static str {
|
||||||
"NumberShape"
|
"integer"
|
||||||
}
|
}
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
fn expand<'a, 'b>(
|
||||||
&self,
|
&self,
|
||||||
_input: &(),
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
) -> Result<SpannedExpression, ParseError> {
|
||||||
context: &ExpandContext,
|
let source = token_nodes.source();
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
let atom = token_nodes.spanned(|token_nodes| {
|
|
||||||
expand_atom(token_nodes, "number", context, ExpansionRule::permissive())
|
|
||||||
});
|
|
||||||
|
|
||||||
let atom = match atom {
|
token_nodes.expand_token(IntType, |number| {
|
||||||
Spanned { item: Err(_), span } => {
|
Ok((
|
||||||
token_nodes.color_shape(FlatShape::Error.spanned(span));
|
FlatShape::Int,
|
||||||
return Ok(());
|
Expression::number(number.to_number(&source)),
|
||||||
}
|
))
|
||||||
Spanned { item: Ok(atom), .. } => atom,
|
})
|
||||||
};
|
|
||||||
|
|
||||||
token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes));
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct IntShape;
|
pub struct IntShape;
|
||||||
|
|
||||||
impl ExpandExpression for IntShape {
|
impl ExpandSyntax for IntShape {
|
||||||
|
type Output = Result<RawNumber, ParseError>;
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
fn name(&self) -> &'static str {
|
||||||
"integer"
|
"integer"
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_expr<'a, 'b>(
|
fn expand<'a, 'b>(
|
||||||
&self,
|
&self,
|
||||||
token_nodes: &mut TokensIterator<'_>,
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
context: &ExpandContext,
|
) -> Result<RawNumber, ParseError> {
|
||||||
) -> Result<hir::Expression, ParseError> {
|
token_nodes.expand_token(IntType, |number| Ok((FlatShape::Int, number)))
|
||||||
parse_single_node(token_nodes, "Integer", |token, token_span, err| {
|
|
||||||
Ok(match token {
|
|
||||||
UnspannedToken::GlobPattern
|
|
||||||
| UnspannedToken::CompareOperator(..)
|
|
||||||
| UnspannedToken::EvaluationOperator(..)
|
|
||||||
| UnspannedToken::ExternalWord => return Err(err.error()),
|
|
||||||
UnspannedToken::Variable(span) if span.slice(context.source) == "it" => {
|
|
||||||
hir::Expression::it_variable(span, token_span)
|
|
||||||
}
|
|
||||||
UnspannedToken::ExternalCommand(span) => {
|
|
||||||
hir::Expression::external_command(span, token_span)
|
|
||||||
}
|
|
||||||
UnspannedToken::Variable(span) => hir::Expression::variable(span, token_span),
|
|
||||||
UnspannedToken::Number(number @ RawNumber::Int(_)) => {
|
|
||||||
hir::Expression::number(number.to_number(context.source), token_span)
|
|
||||||
}
|
|
||||||
UnspannedToken::Number(_) => return Err(err.error()),
|
|
||||||
UnspannedToken::Bare => hir::Expression::bare(token_span),
|
|
||||||
UnspannedToken::String(span) => hir::Expression::string(span, token_span),
|
|
||||||
})
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FallibleColorSyntax for IntShape {
|
#[derive(Debug, Copy, Clone)]
|
||||||
type Info = ();
|
pub struct DecimalShape;
|
||||||
type Input = ();
|
|
||||||
|
impl ExpandSyntax for DecimalShape {
|
||||||
|
type Output = Result<RawNumber, ParseError>;
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
fn name(&self) -> &'static str {
|
||||||
"IntShape"
|
"decimal"
|
||||||
}
|
}
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
fn expand<'a, 'b>(
|
||||||
&self,
|
&self,
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
context: &ExpandContext,
|
) -> Result<RawNumber, ParseError> {
|
||||||
) -> Result<(), ShellError> {
|
token_nodes.expand_token(DecimalType, |number| Ok((FlatShape::Decimal, number)))
|
||||||
let atom = token_nodes.spanned(|token_nodes| {
|
|
||||||
expand_atom(token_nodes, "integer", context, ExpansionRule::permissive())
|
|
||||||
});
|
|
||||||
|
|
||||||
let atom = match atom {
|
|
||||||
Spanned { item: Err(_), span } => {
|
|
||||||
token_nodes.color_shape(FlatShape::Error.spanned(span));
|
|
||||||
return Ok(());
|
|
||||||
}
|
|
||||||
Spanned { item: Ok(atom), .. } => atom,
|
|
||||||
};
|
|
||||||
|
|
||||||
token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes));
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TestSyntax for NumberShape {
|
#[derive(Debug, Copy, Clone)]
|
||||||
fn test<'a, 'b>(
|
pub struct NumberShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for NumberShape {
|
||||||
|
type Output = Result<RawNumber, ParseError>;
|
||||||
|
|
||||||
|
fn name(&self) -> &'static str {
|
||||||
|
"decimal"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn expand<'a, 'b>(
|
||||||
&self,
|
&self,
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
_context: &ExpandContext,
|
) -> Result<RawNumber, ParseError> {
|
||||||
) -> Option<Peeked<'a, 'b>> {
|
token_nodes
|
||||||
let peeked = token_nodes.peek_any();
|
.expand_syntax(IntShape)
|
||||||
|
.or_else(|_| token_nodes.expand_syntax(DecimalShape))
|
||||||
match peeked.node {
|
|
||||||
Some(token) if token.is_number() => Some(peeked),
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,77 +1,66 @@
|
|||||||
use crate::hir::syntax_shape::{
|
use crate::hir::syntax_shape::{
|
||||||
expand_atom, expand_bare, expression::expand_file_path, ExpandContext, ExpandExpression,
|
expand_bare, expression::expand_file_path, BarePathShape, ExpandContext, ExpandSyntax,
|
||||||
ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, UnspannedAtomicToken,
|
ExternalWordShape, StringShape,
|
||||||
};
|
};
|
||||||
|
use crate::hir::{Expression, SpannedExpression};
|
||||||
use crate::parse::operator::EvaluationOperator;
|
use crate::parse::operator::EvaluationOperator;
|
||||||
use crate::parse::tokens::{Token, UnspannedToken};
|
use crate::{hir, hir::TokensIterator, Token};
|
||||||
use crate::{hir, hir::TokensIterator, TokenNode};
|
use nu_errors::ParseError;
|
||||||
use nu_errors::{ParseError, ShellError};
|
use nu_source::Span;
|
||||||
|
|
||||||
use nu_protocol::ShellTypeName;
|
|
||||||
use nu_source::{Span, SpannedItem};
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct PatternShape;
|
pub struct PatternShape;
|
||||||
|
|
||||||
impl FallibleColorSyntax for PatternShape {
|
impl ExpandSyntax for PatternShape {
|
||||||
type Info = ();
|
type Output = Result<SpannedExpression, ParseError>;
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"PatternShape"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
token_nodes.atomic(|token_nodes| {
|
|
||||||
let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?;
|
|
||||||
|
|
||||||
match &atom.unspanned {
|
|
||||||
UnspannedAtomicToken::GlobPattern { .. } | UnspannedAtomicToken::Word { .. } => {
|
|
||||||
token_nodes.color_shape(FlatShape::GlobPattern.spanned(atom.span));
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
other => Err(ShellError::type_error(
|
|
||||||
"pattern",
|
|
||||||
other.type_name().spanned(atom.span),
|
|
||||||
)),
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl ExpandExpression for PatternShape {
|
|
||||||
fn name(&self) -> &'static str {
|
fn name(&self) -> &'static str {
|
||||||
"glob pattern"
|
"glob pattern"
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_expr<'a, 'b>(
|
fn expand<'a, 'b>(
|
||||||
&self,
|
&self,
|
||||||
token_nodes: &mut TokensIterator<'_>,
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
context: &ExpandContext,
|
) -> Result<hir::SpannedExpression, ParseError> {
|
||||||
) -> Result<hir::Expression, ParseError> {
|
let (inner, outer) = token_nodes
|
||||||
let atom = expand_atom(
|
.expand_syntax(BarePatternShape)
|
||||||
token_nodes,
|
.or_else(|_| token_nodes.expand_syntax(BarePathShape))
|
||||||
"pattern",
|
.or_else(|_| token_nodes.expand_syntax(ExternalWordShape))
|
||||||
context,
|
.map(|span| (span, span))
|
||||||
ExpansionRule::new().allow_external_word(),
|
.or_else(|_| {
|
||||||
)?;
|
token_nodes
|
||||||
|
.expand_syntax(StringShape)
|
||||||
|
.map(|syntax| (syntax.inner, syntax.span))
|
||||||
|
})
|
||||||
|
.map_err(|_| token_nodes.err_next_token("glob pattern"))?;
|
||||||
|
|
||||||
match atom.unspanned {
|
Ok(file_pattern(inner, outer, token_nodes.context()))
|
||||||
UnspannedAtomicToken::Word { text: body }
|
}
|
||||||
| UnspannedAtomicToken::String { body }
|
}
|
||||||
| UnspannedAtomicToken::ExternalWord { text: body }
|
|
||||||
| UnspannedAtomicToken::GlobPattern { pattern: body } => {
|
fn file_pattern(body: Span, outer: Span, context: &ExpandContext) -> SpannedExpression {
|
||||||
let path = expand_file_path(body.slice(context.source), context);
|
let path = expand_file_path(body.slice(context.source), context);
|
||||||
Ok(hir::Expression::pattern(path.to_string_lossy(), atom.span))
|
Expression::pattern(path.to_string_lossy()).into_expr(outer)
|
||||||
}
|
}
|
||||||
_ => atom.to_hir(context, "pattern"),
|
|
||||||
}
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct PatternExpressionShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for PatternExpressionShape {
|
||||||
|
type Output = Result<SpannedExpression, ParseError>;
|
||||||
|
|
||||||
|
fn name(&self) -> &'static str {
|
||||||
|
"pattern"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn expand<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
) -> Result<SpannedExpression, ParseError> {
|
||||||
|
token_nodes.expand_syntax(BarePatternShape).map(|span| {
|
||||||
|
let path = expand_file_path(span.slice(&token_nodes.source()), token_nodes.context());
|
||||||
|
Expression::pattern(path.to_string_lossy()).into_expr(span)
|
||||||
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -79,30 +68,17 @@ impl ExpandExpression for PatternShape {
 pub struct BarePatternShape;
 
 impl ExpandSyntax for BarePatternShape {
-    type Output = Span;
+    type Output = Result<Span, ParseError>;
 
     fn name(&self) -> &'static str {
         "bare pattern"
     }
 
-    fn expand_syntax<'a, 'b>(
-        &self,
-        token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
-    ) -> Result<Span, ParseError> {
-        expand_bare(token_nodes, context, |token| match token {
-            TokenNode::Token(Token {
-                unspanned: UnspannedToken::Bare,
-                ..
-            })
-            | TokenNode::Token(Token {
-                unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot),
-                ..
-            })
-            | TokenNode::Token(Token {
-                unspanned: UnspannedToken::GlobPattern,
-                ..
-            }) => true,
-
+    fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
+        expand_bare(token_nodes, |token| match token.unspanned() {
+            Token::Bare
+            | Token::EvaluationOperator(EvaluationOperator::Dot)
+            | Token::GlobPattern => true,
             _ => false,
         })
@ -1,103 +1,47 @@
|
|||||||
use crate::hir::syntax_shape::expression::UnspannedAtomicToken;
|
use crate::hir::syntax_shape::{AnyExpressionStartShape, ExpandSyntax, FlatShape};
|
||||||
use crate::hir::syntax_shape::{
|
use crate::hir::TokensIterator;
|
||||||
color_fallible_syntax, expand_atom, expand_expr, AnyExpressionShape, ExpandContext,
|
use crate::hir::{Expression, SpannedExpression};
|
||||||
ExpandExpression, ExpansionRule, FallibleColorSyntax, FlatShape,
|
use crate::parse::token_tree::DotDotType;
|
||||||
};
|
use nu_errors::ParseError;
|
||||||
use crate::parse::operator::EvaluationOperator;
|
use nu_source::{HasSpan, Span};
|
||||||
use crate::parse::token_tree::TokenNode;
|
|
||||||
use crate::parse::tokens::{Token, UnspannedToken};
|
|
||||||
use crate::{hir, hir::TokensIterator};
|
|
||||||
use nu_errors::{ParseError, ShellError};
|
|
||||||
use nu_protocol::SpannedTypeName;
|
|
||||||
use nu_source::SpannedItem;
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct RangeShape;
|
pub struct RangeShape;
|
||||||
|
|
||||||
impl ExpandExpression for RangeShape {
|
impl ExpandSyntax for RangeShape {
|
||||||
|
type Output = Result<SpannedExpression, ParseError>;
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
fn name(&self) -> &'static str {
|
||||||
"range"
|
"range"
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_expr<'a, 'b>(
|
fn expand<'a, 'b>(
|
||||||
&self,
|
&self,
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
context: &ExpandContext,
|
) -> Result<SpannedExpression, ParseError> {
|
||||||
) -> Result<hir::Expression, ParseError> {
|
|
||||||
token_nodes.atomic_parse(|token_nodes| {
|
token_nodes.atomic_parse(|token_nodes| {
|
||||||
let left = expand_expr(&AnyExpressionShape, token_nodes, context)?;
|
let left = token_nodes.expand_syntax(AnyExpressionStartShape)?;
|
||||||
|
let dotdot = token_nodes.expand_syntax(DotDotShape)?;
|
||||||
|
let right = token_nodes.expand_syntax(AnyExpressionStartShape)?;
|
||||||
|
|
||||||
let atom = expand_atom(
|
let span = left.span.until(right.span);
|
||||||
token_nodes,
|
|
||||||
"..",
|
|
||||||
context,
|
|
||||||
ExpansionRule::new().allow_eval_operator(),
|
|
||||||
)?;
|
|
||||||
|
|
||||||
let span = match atom.unspanned {
|
Ok(Expression::range(left, dotdot, right).into_expr(span))
|
||||||
UnspannedAtomicToken::DotDot { text } => text,
|
|
||||||
_ => return Err(ParseError::mismatch("..", atom.spanned_type_name())),
|
|
||||||
};
|
|
||||||
|
|
||||||
let right = expand_expr(&AnyExpressionShape, token_nodes, context)?;
|
|
||||||
|
|
||||||
Ok(hir::Expression::range(left, span, right))
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FallibleColorSyntax for RangeShape {
|
|
||||||
type Info = ();
|
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
|
||||||
"RangeShape"
|
|
||||||
}
|
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
|
||||||
&self,
|
|
||||||
_input: &(),
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
context: &ExpandContext,
|
|
||||||
) -> Result<(), ShellError> {
|
|
||||||
token_nodes.atomic_parse(|token_nodes| {
|
|
||||||
color_fallible_syntax(&AnyExpressionShape, token_nodes, context)?;
|
|
||||||
color_fallible_syntax(&DotDotShape, token_nodes, context)?;
|
|
||||||
color_fallible_syntax(&AnyExpressionShape, token_nodes, context)
|
|
||||||
})?;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
struct DotDotShape;
|
struct DotDotShape;
|
||||||
|
|
||||||
impl FallibleColorSyntax for DotDotShape {
|
impl ExpandSyntax for DotDotShape {
|
||||||
type Info = ();
|
type Output = Result<Span, ParseError>;
|
||||||
type Input = ();
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
fn name(&self) -> &'static str {
|
||||||
".."
|
"dotdot"
|
||||||
}
|
}
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
fn expand<'a, 'b>(&self, token_nodes: &'b mut TokensIterator<'a>) -> Result<Span, ParseError> {
|
||||||
&self,
|
token_nodes.expand_token(DotDotType, |token| Ok((FlatShape::DotDot, token.span())))
|
||||||
_input: &Self::Input,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
|
||||||
_context: &ExpandContext,
|
|
||||||
) -> Result<Self::Info, ShellError> {
|
|
||||||
let peeked = token_nodes.peek_any().not_eof("..")?;
|
|
||||||
match &peeked.node {
|
|
||||||
TokenNode::Token(Token {
|
|
||||||
unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot),
|
|
||||||
span,
|
|
||||||
}) => {
|
|
||||||
peeked.commit();
|
|
||||||
token_nodes.color_shape(FlatShape::DotDot.spanned(span));
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
token => Err(ShellError::type_error("..", token.spanned_type_name())),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,90 +1,103 @@
|
|||||||
use crate::hir::syntax_shape::{
|
use crate::hir::syntax_shape::{ExpandSyntax, FlatShape, NumberShape, VariableShape};
|
||||||
expand_atom, expand_variable, parse_single_node, AtomicToken, ExpandContext, ExpandExpression,
|
use crate::hir::TokensIterator;
|
||||||
ExpansionRule, FallibleColorSyntax, FlatShape, TestSyntax, UnspannedAtomicToken,
|
use crate::hir::{Expression, SpannedExpression};
|
||||||
};
|
use crate::parse::token_tree::{BareType, StringType};
|
||||||
use crate::hir::tokens_iterator::Peeked;
|
use nu_errors::ParseError;
|
||||||
use crate::parse::tokens::UnspannedToken;
|
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span};
|
||||||
use crate::{hir, hir::TokensIterator};
|
|
||||||
use nu_errors::{ParseError, ShellError};
|
|
||||||
use nu_source::SpannedItem;
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone)]
|
#[derive(Debug, Copy, Clone)]
|
||||||
pub struct StringShape;
|
pub struct CoerceStringShape;
|
||||||
|
|
||||||
impl FallibleColorSyntax for StringShape {
|
impl ExpandSyntax for CoerceStringShape {
|
||||||
type Info = ();
|
type Output = Result<SpannedExpression, ParseError>;
|
||||||
type Input = FlatShape;
|
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
fn name(&self) -> &'static str {
|
||||||
"StringShape"
|
"StringShape"
|
||||||
}
|
}
|
||||||
|
|
||||||
fn color_syntax<'a, 'b>(
|
fn expand<'a, 'b>(
|
||||||
&self,
|
&self,
|
||||||
input: &FlatShape,
|
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
context: &ExpandContext,
|
) -> Result<SpannedExpression, ParseError> {
|
||||||
) -> Result<(), ShellError> {
|
token_nodes
|
||||||
let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive());
|
.expand_token(StringType, |(inner, outer)| {
|
||||||
|
Ok((
|
||||||
let atom = match atom {
|
FlatShape::String,
|
||||||
Err(_) => return Ok(()),
|
Expression::string(inner).into_expr(outer),
|
||||||
Ok(atom) => atom,
|
))
|
||||||
};
|
})
|
||||||
|
.or_else(|_| {
|
||||||
match atom {
|
token_nodes.expand_token(BareType, |span| {
|
||||||
AtomicToken {
|
Ok((FlatShape::String, Expression::string(span).into_expr(span)))
|
||||||
unspanned: UnspannedAtomicToken::String { .. },
|
})
|
||||||
span,
|
})
|
||||||
} => token_nodes.color_shape((*input).spanned(span)),
|
.or_else(|_| {
|
||||||
atom => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)),
|
token_nodes
|
||||||
}
|
.expand_syntax(NumberShape)
|
||||||
|
.map(|number| Expression::string(number.span()).into_expr(number.span()))
|
||||||
Ok(())
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ExpandExpression for StringShape {
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct StringExpressionShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for StringExpressionShape {
|
||||||
|
type Output = Result<SpannedExpression, ParseError>;
|
||||||
|
|
||||||
fn name(&self) -> &'static str {
|
fn name(&self) -> &'static str {
|
||||||
"string"
|
"string"
|
||||||
}
|
}
|
||||||
|
|
||||||
fn expand_expr<'a, 'b>(
|
fn expand<'a, 'b>(
|
||||||
&self,
|
&self,
|
||||||
token_nodes: &mut TokensIterator<'_>,
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
context: &ExpandContext,
|
) -> Result<SpannedExpression, ParseError> {
|
||||||
) -> Result<hir::Expression, ParseError> {
|
token_nodes.expand_syntax(VariableShape).or_else(|_| {
|
||||||
parse_single_node(token_nodes, "String", |token, token_span, err| {
|
token_nodes.expand_token(StringType, |(inner, outer)| {
|
||||||
Ok(match token {
|
Ok((
|
||||||
UnspannedToken::GlobPattern
|
FlatShape::String,
|
||||||
| UnspannedToken::CompareOperator(..)
|
Expression::string(inner).into_expr(outer),
|
||||||
| UnspannedToken::EvaluationOperator(..)
|
))
|
||||||
| UnspannedToken::ExternalWord => return Err(err.error()),
|
|
||||||
UnspannedToken::Variable(span) => {
|
|
||||||
expand_variable(span, token_span, &context.source)
|
|
||||||
}
|
|
||||||
UnspannedToken::ExternalCommand(span) => {
|
|
||||||
hir::Expression::external_command(span, token_span)
|
|
||||||
}
|
|
||||||
UnspannedToken::Number(_) => hir::Expression::bare(token_span),
|
|
||||||
UnspannedToken::Bare => hir::Expression::bare(token_span),
|
|
||||||
UnspannedToken::String(span) => hir::Expression::string(span, token_span),
|
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TestSyntax for StringShape {
|
#[derive(Debug, Copy, Clone)]
|
||||||
fn test<'a, 'b>(
|
pub struct StringSyntax {
|
||||||
&self,
|
pub inner: Span,
|
||||||
token_nodes: &'b mut TokensIterator<'a>,
|
pub span: Span,
|
||||||
_context: &ExpandContext,
|
}
|
||||||
) -> Option<Peeked<'a, 'b>> {
|
|
||||||
let peeked = token_nodes.peek_any();
|
|
||||||
|
|
||||||
match peeked.node {
|
impl HasSpan for StringSyntax {
|
||||||
Some(token) if token.is_string() => Some(peeked),
|
fn span(&self) -> Span {
|
||||||
_ => None,
|
self.span
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebugWithSource for StringSyntax {
|
||||||
|
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||||
|
b::primitive(self.span.slice(source))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct StringShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for StringShape {
|
||||||
|
type Output = Result<StringSyntax, ParseError>;
|
||||||
|
|
||||||
|
fn name(&self) -> &'static str {
|
||||||
|
"string"
|
||||||
|
}
|
||||||
|
|
||||||
|
fn expand<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
) -> Result<StringSyntax, ParseError> {
|
||||||
|
token_nodes.expand_token(StringType, |(inner, outer)| {
|
||||||
|
Ok((FlatShape::String, StringSyntax { inner, span: outer }))
|
||||||
|
})
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,16 +1,19 @@
-use crate::hir::syntax_shape::{ExpandContext, ExpandSyntax};
-use crate::parse::tokens::RawNumber;
-use crate::parse::tokens::Token;
-use crate::parse::tokens::UnspannedToken;
+use crate::hir::syntax_shape::flat_shape::FlatShape;
+use crate::hir::syntax_shape::ExpandSyntax;
+use crate::hir::TokensIterator;
+use crate::hir::{Expression, SpannedExpression};
+use crate::parse::number::RawNumber;
+use crate::parse::token_tree::BareType;
 use crate::parse::unit::Unit;
-use crate::{hir::TokensIterator, TokenNode};
 use nom::branch::alt;
 use nom::bytes::complete::tag;
 use nom::character::complete::digit1;
 use nom::combinator::{all_consuming, opt, value};
 use nom::IResult;
 use nu_errors::ParseError;
-use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem};
+use nu_source::{
+    b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem, Text,
+};
 
 #[derive(Debug, Clone)]
 pub struct UnitSyntax {
@ -18,6 +21,17 @@ pub struct UnitSyntax {
     pub span: Span,
 }
 
+impl UnitSyntax {
+    pub fn into_expr(self, source: &Text) -> SpannedExpression {
+        let UnitSyntax {
+            unit: (number, unit),
+            span,
+        } = self;
+
+        Expression::size(number.to_number(source), *unit).into_expr(span)
+    }
+}
+
 impl PrettyDebugWithSource for UnitSyntax {
     fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
         b::typed(
@ -33,42 +47,60 @@ impl HasSpan for UnitSyntax {
     }
 }
 
+#[derive(Debug, Copy, Clone)]
+pub struct UnitExpressionShape;
+
+impl ExpandSyntax for UnitExpressionShape {
+    type Output = Result<SpannedExpression, ParseError>;
+
+    fn name(&self) -> &'static str {
+        "unit expression"
+    }
+
+    fn expand<'a, 'b>(
+        &self,
+        token_nodes: &'b mut TokensIterator<'a>,
+    ) -> Result<SpannedExpression, ParseError> {
+        token_nodes
+            .expand_syntax(UnitShape)
+            .map(|unit| unit.into_expr(&token_nodes.source()))
+    }
+}
+
 #[derive(Debug, Copy, Clone)]
 pub struct UnitShape;
 
 impl ExpandSyntax for UnitShape {
-    type Output = UnitSyntax;
+    type Output = Result<UnitSyntax, ParseError>;
 
     fn name(&self) -> &'static str {
         "unit"
     }
 
-    fn expand_syntax<'a, 'b>(
+    fn expand<'a, 'b>(
         &self,
         token_nodes: &'b mut TokensIterator<'a>,
-        context: &ExpandContext,
     ) -> Result<UnitSyntax, ParseError> {
-        let peeked = token_nodes.peek_any().not_eof("unit")?;
-
-        let span = match peeked.node {
-            TokenNode::Token(Token {
-                unspanned: UnspannedToken::Bare,
-                span,
-            }) => *span,
-            _ => return Err(peeked.type_error("unit")),
-        };
-
-        let unit = unit_size(span.slice(context.source), span);
-
-        let (_, (number, unit)) = match unit {
-            Err(_) => return Err(ParseError::mismatch("unit", "word".spanned(span))),
-            Ok((number, unit)) => (number, unit),
-        };
-
-        peeked.commit();
-        Ok(UnitSyntax {
-            unit: (number, unit),
-            span,
+        let source = token_nodes.source();
+
+        token_nodes.expand_token(BareType, |span| {
+            let unit = unit_size(span.slice(&source), span);
+
+            let (_, (number, unit)) = match unit {
+                Err(_) => return Err(ParseError::mismatch("unit", "word".spanned(span))),
+                Ok((number, unit)) => (number, unit),
+            };
+
+            Ok((
+                FlatShape::Size {
+                    number: number.span(),
+                    unit: unit.span,
+                },
+                UnitSyntax {
+                    unit: (number, unit),
+                    span,
+                },
+            ))
         })
     }
 }
File diff suppressed because it is too large
@ -1,13 +1,16 @@
 use crate::parse::flag::{Flag, FlagKind};
+use crate::parse::number::RawNumber;
 use crate::parse::operator::EvaluationOperator;
-use crate::parse::token_tree::{Delimiter, TokenNode};
-use crate::parse::tokens::{RawNumber, UnspannedToken};
-use nu_source::{HasSpan, Span, Spanned, SpannedItem, Text};
+use crate::parse::token_tree::{Delimiter, SpannedToken, Token};
+use nu_protocol::ShellTypeName;
+use nu_source::{DebugDocBuilder, HasSpan, PrettyDebug, Span, Spanned, SpannedItem, Text};
 
 #[derive(Debug, Copy, Clone)]
 pub enum FlatShape {
     OpenDelimiter(Delimiter),
     CloseDelimiter(Delimiter),
+    Type,
+    Identifier,
     ItVariable,
     Variable,
     CompareOperator,
@ -21,88 +24,170 @@ pub enum FlatShape {
|
|||||||
String,
|
String,
|
||||||
Path,
|
Path,
|
||||||
Word,
|
Word,
|
||||||
|
Keyword,
|
||||||
Pipe,
|
Pipe,
|
||||||
GlobPattern,
|
GlobPattern,
|
||||||
Flag,
|
Flag,
|
||||||
ShorthandFlag,
|
ShorthandFlag,
|
||||||
Int,
|
Int,
|
||||||
Decimal,
|
Decimal,
|
||||||
|
Garbage,
|
||||||
Whitespace,
|
Whitespace,
|
||||||
Separator,
|
Separator,
|
||||||
Error,
|
|
||||||
Comment,
|
Comment,
|
||||||
Size { number: Span, unit: Span },
|
Size { number: Span, unit: Span },
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub enum ShapeResult {
|
||||||
|
Success(Spanned<FlatShape>),
|
||||||
|
Fallback {
|
||||||
|
shape: Spanned<FlatShape>,
|
||||||
|
allowed: Vec<String>,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HasSpan for ShapeResult {
|
||||||
|
fn span(&self) -> Span {
|
||||||
|
match self {
|
||||||
|
ShapeResult::Success(shape) => shape.span,
|
||||||
|
ShapeResult::Fallback { shape, .. } => shape.span,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebug for FlatShape {
|
||||||
|
fn pretty(&self) -> DebugDocBuilder {
|
||||||
|
unimplemented!()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct TraceShape {
|
||||||
|
shape: FlatShape,
|
||||||
|
span: Span,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ShellTypeName for TraceShape {
|
||||||
|
fn type_name(&self) -> &'static str {
|
||||||
|
self.shape.type_name()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebug for TraceShape {
|
||||||
|
fn pretty(&self) -> DebugDocBuilder {
|
||||||
|
self.shape.pretty()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HasSpan for TraceShape {
|
||||||
|
fn span(&self) -> Span {
|
||||||
|
self.span
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ShellTypeName for FlatShape {
|
||||||
|
fn type_name(&self) -> &'static str {
|
||||||
|
match self {
|
||||||
|
FlatShape::OpenDelimiter(Delimiter::Brace) => "open brace",
|
||||||
|
FlatShape::OpenDelimiter(Delimiter::Paren) => "open paren",
|
||||||
|
FlatShape::OpenDelimiter(Delimiter::Square) => "open square",
|
||||||
|
FlatShape::CloseDelimiter(Delimiter::Brace) => "close brace",
|
||||||
|
FlatShape::CloseDelimiter(Delimiter::Paren) => "close paren",
|
||||||
|
FlatShape::CloseDelimiter(Delimiter::Square) => "close square",
|
||||||
|
FlatShape::Type => "type",
|
||||||
|
FlatShape::Identifier => "identifier",
|
||||||
|
FlatShape::ItVariable => "$it",
|
||||||
|
FlatShape::Variable => "variable",
|
||||||
|
FlatShape::CompareOperator => "comparison",
|
||||||
|
FlatShape::Dot => "dot",
|
||||||
|
FlatShape::DotDot => "dotdot",
|
||||||
|
FlatShape::InternalCommand => "internal command",
|
||||||
|
FlatShape::ExternalCommand => "external command",
|
||||||
|
FlatShape::ExternalWord => "external word",
|
||||||
|
FlatShape::BareMember => "bare member",
|
||||||
|
FlatShape::StringMember => "string member",
|
||||||
|
FlatShape::String => "string",
|
||||||
|
FlatShape::Path => "path",
|
||||||
|
FlatShape::Word => "word",
|
||||||
|
FlatShape::Keyword => "keyword",
|
||||||
|
FlatShape::Pipe => "pipe",
|
||||||
|
FlatShape::GlobPattern => "glob",
|
||||||
|
FlatShape::Flag => "flag",
|
||||||
|
FlatShape::ShorthandFlag => "shorthand flag",
|
||||||
|
FlatShape::Int => "int",
|
||||||
|
FlatShape::Decimal => "decimal",
|
||||||
|
FlatShape::Garbage => "garbage",
|
||||||
|
FlatShape::Whitespace => "whitespace",
|
||||||
|
FlatShape::Separator => "separator",
|
||||||
|
FlatShape::Comment => "comment",
|
||||||
|
FlatShape::Size { .. } => "size",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl FlatShape {
|
impl FlatShape {
|
||||||
pub fn from(token: &TokenNode, source: &Text, shapes: &mut Vec<Spanned<FlatShape>>) {
|
pub fn into_trace_shape(self, span: Span) -> TraceShape {
|
||||||
match token {
|
TraceShape { shape: self, span }
|
||||||
TokenNode::Token(token) => match token.unspanned {
|
}
|
||||||
UnspannedToken::Number(RawNumber::Int(_)) => {
|
|
||||||
shapes.push(FlatShape::Int.spanned(token.span))
|
pub fn shapes(token: &SpannedToken, source: &Text) -> Vec<Spanned<FlatShape>> {
|
||||||
}
|
let mut shapes = vec![];
|
||||||
UnspannedToken::Number(RawNumber::Decimal(_)) => {
|
|
||||||
shapes.push(FlatShape::Decimal.spanned(token.span))
|
FlatShape::from(token, source, &mut shapes);
|
||||||
}
|
shapes
|
||||||
UnspannedToken::EvaluationOperator(EvaluationOperator::Dot) => {
|
}
|
||||||
shapes.push(FlatShape::Dot.spanned(token.span))
|
|
||||||
}
|
fn from(token: &SpannedToken, source: &Text, shapes: &mut Vec<Spanned<FlatShape>>) {
|
||||||
UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot) => {
|
let span = token.span();
|
||||||
shapes.push(FlatShape::DotDot.spanned(token.span))
|
|
||||||
}
|
match token.unspanned() {
|
||||||
UnspannedToken::CompareOperator(_) => {
|
Token::Number(RawNumber::Int(_)) => shapes.push(FlatShape::Int.spanned(span)),
|
||||||
shapes.push(FlatShape::CompareOperator.spanned(token.span))
|
Token::Number(RawNumber::Decimal(_)) => shapes.push(FlatShape::Decimal.spanned(span)),
|
||||||
}
|
Token::EvaluationOperator(EvaluationOperator::Dot) => {
|
||||||
UnspannedToken::String(_) => shapes.push(FlatShape::String.spanned(token.span)),
|
shapes.push(FlatShape::Dot.spanned(span))
|
||||||
UnspannedToken::Variable(v) if v.slice(source) == "it" => {
|
|
||||||
shapes.push(FlatShape::ItVariable.spanned(token.span))
|
|
||||||
}
|
|
||||||
UnspannedToken::Variable(_) => shapes.push(FlatShape::Variable.spanned(token.span)),
|
|
||||||
UnspannedToken::ExternalCommand(_) => {
|
|
||||||
shapes.push(FlatShape::ExternalCommand.spanned(token.span))
|
|
||||||
}
|
|
||||||
UnspannedToken::ExternalWord => {
|
|
||||||
shapes.push(FlatShape::ExternalWord.spanned(token.span))
|
|
||||||
}
|
|
||||||
UnspannedToken::GlobPattern => {
|
|
||||||
shapes.push(FlatShape::GlobPattern.spanned(token.span))
|
|
||||||
}
|
|
||||||
UnspannedToken::Bare => shapes.push(FlatShape::Word.spanned(token.span)),
|
|
||||||
},
|
|
||||||
TokenNode::Call(_) => unimplemented!(),
|
|
||||||
TokenNode::Nodes(nodes) => {
|
|
||||||
for node in &nodes.item {
|
|
||||||
FlatShape::from(node, source, shapes);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
TokenNode::Delimited(v) => {
|
Token::EvaluationOperator(EvaluationOperator::DotDot) => {
|
||||||
shapes.push(FlatShape::OpenDelimiter(v.item.delimiter).spanned(v.item.spans.0));
|
shapes.push(FlatShape::DotDot.spanned(span))
|
||||||
for token in &v.item.children {
|
}
|
||||||
|
Token::CompareOperator(_) => shapes.push(FlatShape::CompareOperator.spanned(span)),
|
||||||
|
Token::String(_) => shapes.push(FlatShape::String.spanned(span)),
|
||||||
|
Token::Variable(v) if v.slice(source) == "it" => {
|
||||||
|
shapes.push(FlatShape::ItVariable.spanned(span))
|
||||||
|
}
|
||||||
|
Token::Variable(_) => shapes.push(FlatShape::Variable.spanned(span)),
|
||||||
|
Token::ItVariable(_) => shapes.push(FlatShape::ItVariable.spanned(span)),
|
||||||
|
Token::ExternalCommand(_) => shapes.push(FlatShape::ExternalCommand.spanned(span)),
|
||||||
|
Token::ExternalWord => shapes.push(FlatShape::ExternalWord.spanned(span)),
|
||||||
|
Token::GlobPattern => shapes.push(FlatShape::GlobPattern.spanned(span)),
|
||||||
|
Token::Bare => shapes.push(FlatShape::Word.spanned(span)),
|
||||||
|
Token::Call(_) => unimplemented!(),
|
||||||
|
Token::Delimited(v) => {
|
||||||
|
shapes.push(FlatShape::OpenDelimiter(v.delimiter).spanned(v.spans.0));
|
||||||
|
for token in &v.children {
|
||||||
FlatShape::from(token, source, shapes);
|
FlatShape::from(token, source, shapes);
|
||||||
}
|
}
|
||||||
shapes.push(FlatShape::CloseDelimiter(v.item.delimiter).spanned(v.item.spans.1));
|
shapes.push(FlatShape::CloseDelimiter(v.delimiter).spanned(v.spans.1));
|
||||||
}
|
}
|
||||||
TokenNode::Pipeline(pipeline) => {
|
Token::Pipeline(pipeline) => {
|
||||||
for part in &pipeline.parts {
|
for part in &pipeline.parts {
|
||||||
if part.pipe.is_some() {
|
if part.pipe.is_some() {
|
||||||
shapes.push(FlatShape::Pipe.spanned(part.span()));
|
shapes.push(FlatShape::Pipe.spanned(part.span()));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
TokenNode::Flag(Flag {
|
Token::Flag(Flag {
|
||||||
kind: FlagKind::Longhand,
|
kind: FlagKind::Longhand,
|
||||||
span,
|
|
||||||
..
|
..
|
||||||
}) => shapes.push(FlatShape::Flag.spanned(*span)),
|
}) => shapes.push(FlatShape::Flag.spanned(span)),
|
||||||
TokenNode::Flag(Flag {
|
Token::Flag(Flag {
|
||||||
kind: FlagKind::Shorthand,
|
kind: FlagKind::Shorthand,
|
||||||
span,
|
|
||||||
..
|
..
|
||||||
}) => shapes.push(FlatShape::ShorthandFlag.spanned(*span)),
|
}) => shapes.push(FlatShape::ShorthandFlag.spanned(span)),
|
||||||
TokenNode::Whitespace(_) => shapes.push(FlatShape::Whitespace.spanned(token.span())),
|
Token::Garbage => shapes.push(FlatShape::Garbage.spanned(span)),
|
||||||
TokenNode::Separator(_) => shapes.push(FlatShape::Separator.spanned(token.span())),
|
Token::Whitespace => shapes.push(FlatShape::Whitespace.spanned(span)),
|
||||||
TokenNode::Comment(_) => shapes.push(FlatShape::Comment.spanned(token.span())),
|
Token::Separator => shapes.push(FlatShape::Separator.spanned(span)),
|
||||||
TokenNode::Error(v) => shapes.push(FlatShape::Error.spanned(v.span)),
|
Token::Comment(_) => shapes.push(FlatShape::Comment.spanned(span)),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
File diff suppressed because it is too large
@ -1,7 +1,7 @@
-use crate::hir::syntax_shape::FlatShape;
+use crate::hir::syntax_shape::flat_shape::{FlatShape, ShapeResult};
 use ansi_term::Color;
 use log::trace;
-use nu_errors::ShellError;
+use nu_errors::{ParseError, ShellError};
 use nu_source::{Spanned, Text};
 use ptree::*;
 use std::borrow::Cow;
@ -10,14 +10,24 @@ use std::io;
 #[derive(Debug, Clone)]
 pub enum FrameChild {
     #[allow(unused)]
-    Shape(Spanned<FlatShape>),
+    Shape(ShapeResult),
     Frame(ColorFrame),
 }
 
 impl FrameChild {
     fn colored_leaf_description(&self, text: &Text, f: &mut impl io::Write) -> io::Result<()> {
         match self {
-            FrameChild::Shape(shape) => write!(
+            FrameChild::Shape(ShapeResult::Success(shape)) => write!(
+                f,
+                "{} {:?}",
+                Color::White
+                    .bold()
+                    .on(Color::Green)
+                    .paint(format!("{:?}", shape.item)),
+                shape.span.slice(text)
+            ),
+
+            FrameChild::Shape(ShapeResult::Fallback { shape, .. }) => write!(
                 f,
                 "{} {:?}",
                 Color::White
@ -43,7 +53,7 @@ impl FrameChild {
 pub struct ColorFrame {
     description: &'static str,
     children: Vec<FrameChild>,
-    error: Option<ShellError>,
+    error: Option<ParseError>,
 }
 
 impl ColorFrame {
@ -98,8 +108,7 @@ impl ColorFrame {
         .collect()
     }
 
-    #[allow(unused)]
-    fn add_shape(&mut self, shape: Spanned<FlatShape>) {
+    fn add_shape(&mut self, shape: ShapeResult) {
         self.children.push(FrameChild::Shape(shape))
     }
 
@ -107,10 +116,10 @@ impl ColorFrame {
         self.any_child_shape(|_| true)
     }
 
-    fn any_child_shape(&self, predicate: impl Fn(Spanned<FlatShape>) -> bool) -> bool {
+    fn any_child_shape(&self, predicate: impl Fn(&ShapeResult) -> bool) -> bool {
         for item in &self.children {
             if let FrameChild::Shape(shape) = item {
-                if predicate(*shape) {
+                if predicate(shape) {
                     return true;
                 }
             }
@ -172,14 +181,24 @@ impl ColorFrame {
 
 #[derive(Debug, Clone)]
 pub enum TreeChild {
-    Shape(Spanned<FlatShape>, Text),
+    Shape(ShapeResult, Text),
     Frame(ColorFrame, Text),
 }
 
 impl TreeChild {
     fn colored_leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
         match self {
-            TreeChild::Shape(shape, text) => write!(
+            TreeChild::Shape(ShapeResult::Success(shape), text) => write!(
+                f,
+                "{} {:?}",
+                Color::White
+                    .bold()
+                    .on(Color::Green)
+                    .paint(format!("{:?}", shape.item)),
+                shape.span.slice(text)
+            ),
+
+            TreeChild::Shape(ShapeResult::Fallback { shape, .. }, text) => write!(
                 f,
                 "{} {:?}",
                 Color::White
@ -290,8 +309,7 @@ impl ColorTracer {
         }
     }
 
-    #[allow(unused)]
-    pub fn add_shape(&mut self, shape: Spanned<FlatShape>) {
+    pub fn add_shape(&mut self, shape: ShapeResult) {
         self.current_frame().add_shape(shape);
     }
 
@ -302,7 +320,7 @@ impl ColorTracer {
             .push(FrameChild::Frame(current));
     }
 
-    pub fn failed(&mut self, error: &ShellError) {
+    pub fn failed(&mut self, error: &ParseError) {
         let mut current = self.pop_frame();
         current.error = Some(error.clone());
         self.current_frame()
|
@ -1,26 +1,44 @@
|
|||||||
use crate::hir::Expression;
|
use crate::hir::syntax_shape::flat_shape::TraceShape;
|
||||||
|
use crate::hir::SpannedExpression;
|
||||||
|
use crate::parse::token_tree::SpannedToken;
|
||||||
use ansi_term::Color;
|
use ansi_term::Color;
|
||||||
use log::trace;
|
use log::trace;
|
||||||
use nu_errors::ParseError;
|
use nu_errors::{ParseError, ParseErrorReason};
|
||||||
use nu_protocol::ShellTypeName;
|
use nu_protocol::{ShellTypeName, SpannedTypeName};
|
||||||
use nu_source::{DebugDoc, PrettyDebug, PrettyDebugWithSource, Text};
|
use nu_source::{DebugDoc, PrettyDebug, PrettyDebugWithSource, Span, Spanned, Text};
|
||||||
use ptree::*;
|
use ptree::*;
|
||||||
use std::borrow::Cow;
|
use std::borrow::Cow;
|
||||||
|
use std::fmt::Debug;
|
||||||
use std::io;
|
use std::io;
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug, Clone)]
|
||||||
pub enum FrameChild {
|
pub enum FrameChild<T: SpannedTypeName> {
|
||||||
Expr(Expression),
|
Expr(T),
|
||||||
Frame(ExprFrame),
|
Shape(Result<TraceShape, TraceShape>),
|
||||||
|
Frame(Box<ExprFrame<T>>),
|
||||||
Result(DebugDoc),
|
Result(DebugDoc),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl FrameChild {
|
fn err_desc(error: &ParseError) -> &'static str {
|
||||||
fn get_error_leaf(&self) -> Option<&'static str> {
|
match error.reason() {
|
||||||
|
ParseErrorReason::ExtraTokens { .. } => "extra tokens",
|
||||||
|
ParseErrorReason::Mismatch { .. } => "mismatch",
|
||||||
|
ParseErrorReason::ArgumentError { .. } => "argument error",
|
||||||
|
ParseErrorReason::Eof { .. } => "eof",
|
||||||
|
ParseErrorReason::InternalError { .. } => "internal error",
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: SpannedTypeName> FrameChild<T> {
|
||||||
|
fn get_error_leaf(&self) -> Option<(&'static str, &'static str)> {
|
||||||
match self {
|
match self {
|
||||||
FrameChild::Frame(frame) if frame.error.is_some() => {
|
FrameChild::Frame(frame) => {
|
||||||
if frame.children.is_empty() {
|
if let Some(error) = &frame.error {
|
||||||
Some(frame.description)
|
if frame.children.is_empty() {
|
||||||
|
Some((frame.description, err_desc(error)))
|
||||||
|
} else {
|
||||||
|
None
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
@ -31,15 +49,34 @@ impl FrameChild {
|
|||||||
|
|
||||||
fn to_tree_child(&self, text: &Text) -> TreeChild {
|
fn to_tree_child(&self, text: &Text) -> TreeChild {
|
||||||
match self {
|
match self {
|
||||||
FrameChild::Expr(expr) => TreeChild::OkExpr(expr.clone(), text.clone()),
|
FrameChild::Expr(expr) => TreeChild::OkExpr {
|
||||||
|
source: expr.spanned_type_name().span,
|
||||||
|
desc: expr.spanned_type_name().item,
|
||||||
|
text: text.clone(),
|
||||||
|
},
|
||||||
|
FrameChild::Shape(Ok(shape)) => TreeChild::OkShape {
|
||||||
|
source: shape.spanned_type_name().span,
|
||||||
|
desc: shape.spanned_type_name().item,
|
||||||
|
text: text.clone(),
|
||||||
|
fallback: false,
|
||||||
|
},
|
||||||
|
FrameChild::Shape(Err(shape)) => TreeChild::OkShape {
|
||||||
|
source: shape.spanned_type_name().span,
|
||||||
|
desc: shape.spanned_type_name().item,
|
||||||
|
text: text.clone(),
|
||||||
|
fallback: true,
|
||||||
|
},
|
||||||
FrameChild::Result(result) => {
|
FrameChild::Result(result) => {
|
||||||
let result = result.display();
|
let result = result.display();
|
||||||
TreeChild::OkNonExpr(result)
|
TreeChild::OkNonExpr(result)
|
||||||
}
|
}
|
||||||
FrameChild::Frame(frame) => {
|
FrameChild::Frame(frame) => {
|
||||||
if frame.error.is_some() {
|
if let Some(err) = &frame.error {
|
||||||
if frame.children.is_empty() {
|
if frame.children.is_empty() {
|
||||||
TreeChild::ErrorLeaf(vec![frame.description])
|
TreeChild::ErrorLeaf(
|
||||||
|
vec![(frame.description, err_desc(err))],
|
||||||
|
frame.token_desc(),
|
||||||
|
)
|
||||||
} else {
|
} else {
|
||||||
TreeChild::ErrorFrame(frame.to_tree_frame(text), text.clone())
|
TreeChild::ErrorFrame(frame.to_tree_frame(text), text.clone())
|
||||||
}
|
}
|
||||||
@ -51,14 +88,22 @@ impl FrameChild {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct ExprFrame {
|
pub struct ExprFrame<T: SpannedTypeName> {
|
||||||
description: &'static str,
|
description: &'static str,
|
||||||
children: Vec<FrameChild>,
|
token: Option<SpannedToken>,
|
||||||
|
children: Vec<FrameChild<T>>,
|
||||||
error: Option<ParseError>,
|
error: Option<ParseError>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ExprFrame {
|
impl<T: SpannedTypeName> ExprFrame<T> {
|
||||||
|
fn token_desc(&self) -> &'static str {
|
||||||
|
match &self.token {
|
||||||
|
None => "EOF",
|
||||||
|
Some(token) => token.type_name(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn to_tree_frame(&self, text: &Text) -> TreeFrame {
|
fn to_tree_frame(&self, text: &Text) -> TreeFrame {
|
||||||
let mut children = vec![];
|
let mut children = vec![];
|
||||||
let mut errors = vec![];
|
let mut errors = vec![];
|
||||||
@ -68,7 +113,7 @@ impl ExprFrame {
|
|||||||
errors.push(error_leaf);
|
errors.push(error_leaf);
|
||||||
continue;
|
continue;
|
||||||
} else if !errors.is_empty() {
|
} else if !errors.is_empty() {
|
||||||
children.push(TreeChild::ErrorLeaf(errors));
|
children.push(TreeChild::ErrorLeaf(errors, self.token_desc()));
|
||||||
errors = vec![];
|
errors = vec![];
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -76,18 +121,27 @@ impl ExprFrame {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if !errors.is_empty() {
|
if !errors.is_empty() {
|
||||||
children.push(TreeChild::ErrorLeaf(errors));
|
children.push(TreeChild::ErrorLeaf(errors, self.token_desc()));
|
||||||
}
|
}
|
||||||
|
|
||||||
TreeFrame {
|
TreeFrame {
|
||||||
description: self.description,
|
description: self.description,
|
||||||
|
token_desc: self.token_desc(),
|
||||||
children,
|
children,
|
||||||
error: self.error.clone(),
|
error: self.error.clone(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn add_expr(&mut self, expr: Expression) {
|
fn add_return(&mut self, value: T) {
|
||||||
self.children.push(FrameChild::Expr(expr))
|
self.children.push(FrameChild::Expr(value))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn add_shape(&mut self, shape: TraceShape) {
|
||||||
|
self.children.push(FrameChild::Shape(Ok(shape)))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn add_err_shape(&mut self, shape: TraceShape) {
|
||||||
|
self.children.push(FrameChild::Shape(Err(shape)))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn add_result(&mut self, result: impl PrettyDebug) {
|
fn add_result(&mut self, result: impl PrettyDebug) {
|
||||||
@ -98,6 +152,7 @@ impl ExprFrame {
|
|||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct TreeFrame {
|
pub struct TreeFrame {
|
||||||
description: &'static str,
|
description: &'static str,
|
||||||
|
token_desc: &'static str,
|
||||||
children: Vec<TreeChild>,
|
children: Vec<TreeChild>,
|
||||||
error: Option<ParseError>,
|
error: Option<ParseError>,
|
||||||
}
|
}
|
||||||
@ -113,29 +168,43 @@ impl TreeFrame {
|
|||||||
write!(f, "{}", Color::Yellow.bold().paint(self.description))?;
|
write!(f, "{}", Color::Yellow.bold().paint(self.description))?;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
"{}",
|
||||||
|
Color::Black.bold().paint(&format!("({})", self.token_desc))
|
||||||
|
)?;
|
||||||
|
|
||||||
write!(f, " -> ")?;
|
write!(f, " -> ")?;
|
||||||
self.children[0].leaf_description(f)
|
self.children[0].leaf_description(f)
|
||||||
} else if self.error.is_some() {
|
|
||||||
if self.children.is_empty() {
|
|
||||||
write!(
|
|
||||||
f,
|
|
||||||
"{}",
|
|
||||||
Color::White.bold().on(Color::Red).paint(self.description)
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
write!(f, "{}", Color::Red.normal().paint(self.description))
|
|
||||||
}
|
|
||||||
} else if self.has_descendent_green() {
|
|
||||||
write!(f, "{}", Color::Green.normal().paint(self.description))
|
|
||||||
} else {
|
} else {
|
||||||
write!(f, "{}", Color::Yellow.bold().paint(self.description))
|
if self.error.is_some() {
|
||||||
|
if self.children.is_empty() {
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
"{}",
|
||||||
|
Color::White.bold().on(Color::Red).paint(self.description)
|
||||||
|
)?
|
||||||
|
} else {
|
||||||
|
write!(f, "{}", Color::Red.normal().paint(self.description))?
|
||||||
|
}
|
||||||
|
} else if self.has_descendent_green() {
|
||||||
|
write!(f, "{}", Color::Green.normal().paint(self.description))?
|
||||||
|
} else {
|
||||||
|
write!(f, "{}", Color::Yellow.bold().paint(self.description))?
|
||||||
|
}
|
||||||
|
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
"{}",
|
||||||
|
Color::Black.bold().paint(&format!("({})", self.token_desc))
|
||||||
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn has_child_green(&self) -> bool {
|
fn has_child_green(&self) -> bool {
|
||||||
self.children.iter().any(|item| match item {
|
self.children.iter().any(|item| match item {
|
||||||
TreeChild::OkFrame(..) | TreeChild::ErrorFrame(..) | TreeChild::ErrorLeaf(..) => false,
|
TreeChild::OkFrame(..) | TreeChild::ErrorFrame(..) | TreeChild::ErrorLeaf(..) => false,
|
||||||
TreeChild::OkExpr(..) | TreeChild::OkNonExpr(..) => true,
|
TreeChild::OkExpr { .. } | TreeChild::OkShape { .. } | TreeChild::OkNonExpr(..) => true,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -163,9 +232,10 @@ impl TreeFrame {
|
|||||||
if self.children.len() == 1 {
|
if self.children.len() == 1 {
|
||||||
let child: &TreeChild = &self.children[0];
|
let child: &TreeChild = &self.children[0];
|
||||||
match child {
|
match child {
|
||||||
TreeChild::OkExpr(..) | TreeChild::OkNonExpr(..) | TreeChild::ErrorLeaf(..) => {
|
TreeChild::OkExpr { .. }
|
||||||
vec![]
|
| TreeChild::OkShape { .. }
|
||||||
}
|
| TreeChild::OkNonExpr(..)
|
||||||
|
| TreeChild::ErrorLeaf(..) => vec![],
|
||||||
TreeChild::OkFrame(frame, _) | TreeChild::ErrorFrame(frame, _) => {
|
TreeChild::OkFrame(frame, _) | TreeChild::ErrorFrame(frame, _) => {
|
||||||
frame.children_for_formatting(text)
|
frame.children_for_formatting(text)
|
||||||
}
|
}
|
||||||
@ -179,21 +249,44 @@ impl TreeFrame {
|
|||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub enum TreeChild {
|
pub enum TreeChild {
|
||||||
OkNonExpr(String),
|
OkNonExpr(String),
|
||||||
OkExpr(Expression, Text),
|
OkExpr {
|
||||||
|
source: Span,
|
||||||
|
desc: &'static str,
|
||||||
|
text: Text,
|
||||||
|
},
|
||||||
|
OkShape {
|
||||||
|
source: Span,
|
||||||
|
desc: &'static str,
|
||||||
|
text: Text,
|
||||||
|
fallback: bool,
|
||||||
|
},
|
||||||
OkFrame(TreeFrame, Text),
|
OkFrame(TreeFrame, Text),
|
||||||
ErrorFrame(TreeFrame, Text),
|
ErrorFrame(TreeFrame, Text),
|
||||||
ErrorLeaf(Vec<&'static str>),
|
ErrorLeaf(Vec<(&'static str, &'static str)>, &'static str),
|
||||||
}
|
}
|
||||||
|
|
||||||
impl TreeChild {
|
impl TreeChild {
|
||||||
fn leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
|
fn leaf_description(&self, f: &mut impl io::Write) -> io::Result<()> {
|
||||||
match self {
|
match self {
|
||||||
TreeChild::OkExpr(expr, text) => write!(
|
TreeChild::OkExpr { source, desc, text } => write!(
|
||||||
f,
|
f,
|
||||||
"{} {} {}",
|
"{} {} {}",
|
||||||
Color::Cyan.normal().paint("returns"),
|
Color::Cyan.normal().paint("returns"),
|
||||||
Color::White.bold().on(Color::Green).paint(expr.type_name()),
|
Color::White.bold().on(Color::Green).paint(*desc),
|
||||||
expr.span.slice(text)
|
source.slice(text)
|
||||||
|
),
|
||||||
|
|
||||||
|
TreeChild::OkShape {
|
||||||
|
source,
|
||||||
|
desc,
|
||||||
|
text,
|
||||||
|
fallback,
|
||||||
|
} => write!(
|
||||||
|
f,
|
||||||
|
"{} {} {}",
|
||||||
|
Color::Purple.normal().paint("paints"),
|
||||||
|
Color::White.bold().on(Color::Green).paint(*desc),
|
||||||
|
source.slice(text)
|
||||||
),
|
),
|
||||||
|
|
||||||
TreeChild::OkNonExpr(result) => write!(
|
TreeChild::OkNonExpr(result) => write!(
|
||||||
@ -206,17 +299,21 @@ impl TreeChild {
|
|||||||
.paint(result.to_string())
|
.paint(result.to_string())
|
||||||
),
|
),
|
||||||
|
|
||||||
TreeChild::ErrorLeaf(desc) => {
|
TreeChild::ErrorLeaf(desc, token_desc) => {
|
||||||
let last = desc.len() - 1;
|
let last = desc.len() - 1;
|
||||||
|
|
||||||
for (i, item) in desc.iter().enumerate() {
|
for (i, (desc, err_desc)) in desc.iter().enumerate() {
|
||||||
write!(f, "{}", Color::White.bold().on(Color::Red).paint(*item))?;
|
write!(f, "{}", Color::White.bold().on(Color::Red).paint(*desc))?;
|
||||||
|
|
||||||
|
write!(f, " {}", Color::Black.bold().paint(*err_desc))?;
|
||||||
|
|
||||||
if i != last {
|
if i != last {
|
||||||
write!(f, "{}", Color::White.normal().paint(", "))?;
|
write!(f, "{}", Color::White.normal().paint(", "))?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// write!(f, " {}", Color::Black.bold().paint(*token_desc))?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -236,9 +333,10 @@ impl TreeItem for TreeChild {
|
|||||||
|
|
||||||
fn children(&self) -> Cow<[Self::Child]> {
|
fn children(&self) -> Cow<[Self::Child]> {
|
||||||
match self {
|
match self {
|
||||||
TreeChild::OkExpr(..) | TreeChild::OkNonExpr(..) | TreeChild::ErrorLeaf(..) => {
|
TreeChild::OkExpr { .. }
|
||||||
Cow::Borrowed(&[])
|
| TreeChild::OkShape { .. }
|
||||||
}
|
| TreeChild::OkNonExpr(..)
|
||||||
|
| TreeChild::ErrorLeaf(..) => Cow::Borrowed(&[]),
|
||||||
TreeChild::OkFrame(frame, text) | TreeChild::ErrorFrame(frame, text) => {
|
TreeChild::OkFrame(frame, text) | TreeChild::ErrorFrame(frame, text) => {
|
||||||
Cow::Owned(frame.children_for_formatting(text))
|
Cow::Owned(frame.children_for_formatting(text))
|
||||||
}
|
}
|
||||||
@ -246,39 +344,46 @@ impl TreeItem for TreeChild {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct ExpandTracer {
|
pub struct ExpandTracer<T: SpannedTypeName> {
|
||||||
frame_stack: Vec<ExprFrame>,
|
desc: &'static str,
|
||||||
|
frame_stack: Vec<ExprFrame<T>>,
|
||||||
source: Text,
|
source: Text,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ExpandTracer {
|
impl<T: SpannedTypeName + Debug> ExpandTracer<T> {
|
||||||
pub fn print(&self, source: Text) -> PrintTracer {
|
pub fn print(&self, source: Text) -> PrintTracer {
|
||||||
let root = self.frame_stack[0].to_tree_frame(&source);
|
let root = self.frame_stack[0].to_tree_frame(&source);
|
||||||
|
|
||||||
PrintTracer { root, source }
|
PrintTracer {
|
||||||
|
root,
|
||||||
|
desc: self.desc,
|
||||||
|
source,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn new(source: Text) -> ExpandTracer {
|
pub fn new(desc: &'static str, source: Text) -> ExpandTracer<T> {
|
||||||
let root = ExprFrame {
|
let root = ExprFrame {
|
||||||
description: "Trace",
|
description: "Trace",
|
||||||
children: vec![],
|
children: vec![],
|
||||||
|
token: None,
|
||||||
error: None,
|
error: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
ExpandTracer {
|
ExpandTracer {
|
||||||
|
desc,
|
||||||
frame_stack: vec![root],
|
frame_stack: vec![root],
|
||||||
source,
|
source,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn current_frame(&mut self) -> &mut ExprFrame {
|
fn current_frame(&mut self) -> &mut ExprFrame<T> {
|
||||||
let frames = &mut self.frame_stack;
|
let frames = &mut self.frame_stack;
|
||||||
let last = frames.len() - 1;
|
let last = frames.len() - 1;
|
||||||
&mut frames[last]
|
&mut frames[last]
|
||||||
}
|
}
|
||||||
|
|
||||||
fn pop_frame(&mut self) -> ExprFrame {
|
fn pop_frame(&mut self) -> ExprFrame<T> {
|
||||||
let result = self.frame_stack.pop().expect("Can't pop root tracer frame");
|
let result = self.frame_stack.pop().expect("Can't pop root tracer frame");
|
||||||
|
|
||||||
if self.frame_stack.is_empty() {
|
if self.frame_stack.is_empty() {
|
||||||
@ -290,10 +395,11 @@ impl ExpandTracer {
|
|||||||
result
|
result
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn start(&mut self, description: &'static str) {
|
pub fn start(&mut self, description: &'static str, token: Option<SpannedToken>) {
|
||||||
let frame = ExprFrame {
|
let frame = ExprFrame {
|
||||||
description,
|
description,
|
||||||
children: vec![],
|
children: vec![],
|
||||||
|
token,
|
||||||
error: None,
|
error: None,
|
||||||
};
|
};
|
||||||
|
|
||||||
@ -301,8 +407,36 @@ impl ExpandTracer {
|
|||||||
self.debug();
|
self.debug();
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn add_expr(&mut self, shape: Expression) {
|
pub fn add_return(&mut self, value: T) {
|
||||||
self.current_frame().add_expr(shape);
|
self.current_frame().add_return(value);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn add_shape(&mut self, shape: TraceShape) {
|
||||||
|
self.current_frame().add_shape(shape);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn add_err_shape(&mut self, shape: TraceShape) {
|
||||||
|
self.current_frame().add_err_shape(shape);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn finish(&mut self) {
|
||||||
|
loop {
|
||||||
|
if self.frame_stack.len() == 1 {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
let frame = self.pop_frame();
|
||||||
|
self.current_frame()
|
||||||
|
.children
|
||||||
|
.push(FrameChild::Frame(Box::new(frame)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn eof_frame(&mut self) {
|
||||||
|
let current = self.pop_frame();
|
||||||
|
self.current_frame()
|
||||||
|
.children
|
||||||
|
.push(FrameChild::Frame(Box::new(current)));
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn add_result(&mut self, result: impl PrettyDebugWithSource) {
|
pub fn add_result(&mut self, result: impl PrettyDebugWithSource) {
|
||||||
@ -316,7 +450,7 @@ impl ExpandTracer {
|
|||||||
let current = self.pop_frame();
|
let current = self.pop_frame();
|
||||||
self.current_frame()
|
self.current_frame()
|
||||||
.children
|
.children
|
||||||
.push(FrameChild::Frame(current));
|
.push(FrameChild::Frame(Box::new(current)));
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn failed(&mut self, error: &ParseError) {
|
pub fn failed(&mut self, error: &ParseError) {
|
||||||
@ -324,7 +458,7 @@ impl ExpandTracer {
|
|||||||
current.error = Some(error.clone());
|
current.error = Some(error.clone());
|
||||||
self.current_frame()
|
self.current_frame()
|
||||||
.children
|
.children
|
||||||
.push(FrameChild::Frame(current));
|
.push(FrameChild::Frame(Box::new(current)));
|
||||||
}
|
}
|
||||||
|
|
||||||
fn debug(&self) {
|
fn debug(&self) {
|
||||||
@ -342,6 +476,7 @@ impl ExpandTracer {
|
|||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
pub struct PrintTracer {
|
pub struct PrintTracer {
|
||||||
|
desc: &'static str,
|
||||||
root: TreeFrame,
|
root: TreeFrame,
|
||||||
source: Text,
|
source: Text,
|
||||||
}
|
}
|
||||||
@ -350,7 +485,7 @@ impl TreeItem for PrintTracer {
|
|||||||
type Child = TreeChild;
|
type Child = TreeChild;
|
||||||
|
|
||||||
fn write_self<W: io::Write>(&self, f: &mut W, style: &Style) -> io::Result<()> {
|
fn write_self<W: io::Write>(&self, f: &mut W, style: &Style) -> io::Result<()> {
|
||||||
write!(f, "{}", style.paint("Expansion Trace"))
|
write!(f, "{}", style.paint(self.desc))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn children(&self) -> Cow<[Self::Child]> {
|
fn children(&self) -> Cow<[Self::Child]> {
|
||||||
|
56
crates/nu-parser/src/hir/tokens_iterator/into_shapes.rs
Normal file
@ -0,0 +1,56 @@

use crate::hir::syntax_shape::flat_shape::{FlatShape, ShapeResult};
use nu_source::{Span, Spanned, SpannedItem};

pub struct FlatShapes {
    shapes: Vec<ShapeResult>,
}

impl<'a> IntoIterator for &'a FlatShapes {
    type Item = &'a ShapeResult;
    type IntoIter = std::slice::Iter<'a, ShapeResult>;

    fn into_iter(self) -> Self::IntoIter {
        self.shapes.iter()
    }
}

pub trait IntoShapes: 'static {
    fn into_shapes(self, span: Span) -> FlatShapes;
}

impl IntoShapes for FlatShape {
    fn into_shapes(self, span: Span) -> FlatShapes {
        FlatShapes {
            shapes: vec![ShapeResult::Success(self.spanned(span))],
        }
    }
}

impl IntoShapes for Vec<Spanned<FlatShape>> {
    fn into_shapes(self, _span: Span) -> FlatShapes {
        FlatShapes {
            shapes: self.into_iter().map(ShapeResult::Success).collect(),
        }
    }
}

impl IntoShapes for Vec<ShapeResult> {
    fn into_shapes(self, _span: Span) -> FlatShapes {
        FlatShapes { shapes: self }
    }
}

impl IntoShapes for () {
    fn into_shapes(self, _span: Span) -> FlatShapes {
        FlatShapes { shapes: vec![] }
    }
}

impl IntoShapes for Option<FlatShape> {
    fn into_shapes(self, span: Span) -> FlatShapes {
        match self {
            Option::None => ().into_shapes(span),
            Option::Some(shape) => shape.into_shapes(span),
        }
    }
}
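The trait above exists so callers can hand back whatever shape payload they naturally produce. A hedged usage sketch (not part of the commit; example_shapes is illustrative and assumes the items defined above are in scope):

// Illustrative only: each IntoShapes impl normalizes a different payload into
// the same FlatShapes list that the tracer and coloring code consume.
use nu_source::Span;

fn example_shapes(span: Span) -> Vec<FlatShapes> {
    vec![
        FlatShape::Int.into_shapes(span),           // one shape -> one Success entry
        Some(FlatShape::Decimal).into_shapes(span), // Option -> zero or one entry
        ().into_shapes(span),                       // unit -> an empty list
    ]
}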
30
crates/nu-parser/src/hir/tokens_iterator/pattern.rs
Normal file
@ -0,0 +1,30 @@

use crate::parse::token_tree::{ParseErrorFn, SpannedToken, TokenType};
use nu_errors::ParseError;
use std::borrow::Cow;

pub struct Pattern<T> {
    parts: Vec<Box<dyn TokenType<Output = T>>>,
}

impl<T> TokenType for Pattern<T> {
    type Output = T;

    fn desc(&self) -> Cow<'static, str> {
        Cow::Borrowed("pattern")
    }

    fn extract_token_value(
        &self,
        token: &SpannedToken,
        err: ParseErrorFn<Self::Output>,
    ) -> Result<Self::Output, ParseError> {
        for part in &self.parts {
            match part.extract_token_value(token, err) {
                Err(_) => {}
                Ok(result) => return Ok(result),
            }
        }

        err()
    }
}
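Pattern::extract_token_value walks its boxed TokenType parts in order and returns the first success, only calling the error callback when every alternative fails. A standalone sketch of that first-success strategy, using plain closures as stand-ins for the crate's parser types:

// Standalone sketch; TryParse and first_success are illustrative, not nu-parser APIs.
type TryParse = Box<dyn Fn(&str) -> Result<i64, ()>>;

fn first_success(parts: &[TryParse], input: &str) -> Result<i64, String> {
    for part in parts {
        if let Ok(value) = part(input) {
            return Ok(value);
        }
    }
    Err(format!("no alternative matched {:?}", input))
}

fn main() {
    let parts: Vec<TryParse> = vec![
        Box::new(|s: &str| s.parse::<i64>().map_err(|_| ())), // decimal literal
        Box::new(|s: &str| {
            s.strip_prefix("0x")
                .and_then(|hex| i64::from_str_radix(hex, 16).ok())
                .ok_or(())
        }), // hex literal
    ];

    assert_eq!(first_success(&parts, "42"), Ok(42));
    assert_eq!(first_success(&parts, "0x2a"), Ok(42));
    assert!(first_success(&parts, "cpu").is_err());
}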
105
crates/nu-parser/src/hir/tokens_iterator/state.rs
Normal file
@ -0,0 +1,105 @@

use crate::hir::syntax_shape::flat_shape::ShapeResult;
use crate::hir::syntax_shape::ExpandContext;
use crate::hir::tokens_iterator::TokensIterator;
use crate::parse::token_tree::SpannedToken;

use getset::Getters;
use nu_errors::ParseError;
use nu_protocol::SpannedTypeName;
use nu_source::Span;
use std::sync::Arc;

#[derive(Getters, Debug, Clone)]
pub struct TokensIteratorState<'content> {
    pub(crate) tokens: &'content [SpannedToken],
    pub(crate) span: Span,
    pub(crate) index: usize,
    pub(crate) seen: indexmap::IndexSet<usize>,
    #[get = "pub"]
    pub(crate) shapes: Vec<ShapeResult>,
    pub(crate) errors: indexmap::IndexMap<Span, Vec<String>>,
    pub(crate) context: Arc<ExpandContext<'content>>,
}

#[derive(Debug)]
pub struct Peeked<'content, 'me> {
    pub(crate) node: Option<&'content SpannedToken>,
    pub(crate) iterator: &'me mut TokensIterator<'content>,
    pub(crate) from: usize,
    pub(crate) to: usize,
}

impl<'content, 'me> Peeked<'content, 'me> {
    pub fn commit(&mut self) -> Option<&'content SpannedToken> {
        let Peeked {
            node,
            iterator,
            from,
            to,
        } = self;

        let node = (*node)?;
        iterator.commit(*from, *to);
        Some(node)
    }

    pub fn rollback(self) {}

    pub fn not_eof(self, expected: &str) -> Result<PeekedNode<'content, 'me>, ParseError> {
        match self.node {
            None => Err(ParseError::unexpected_eof(
                expected.to_string(),
                self.iterator.eof_span(),
            )),
            Some(node) => Ok(PeekedNode {
                node,
                iterator: self.iterator,
                from: self.from,
                to: self.to,
            }),
        }
    }

    pub fn type_error(&self, expected: &'static str) -> ParseError {
        peek_error(self.node, self.iterator.eof_span(), expected)
    }
}

#[derive(Debug)]
pub struct PeekedNode<'content, 'me> {
    pub(crate) node: &'content SpannedToken,
    pub(crate) iterator: &'me mut TokensIterator<'content>,
    from: usize,
    to: usize,
}

impl<'content, 'me> PeekedNode<'content, 'me> {
    pub fn commit(self) -> &'content SpannedToken {
        let PeekedNode {
            node,
            iterator,
            from,
            to,
        } = self;

        iterator.commit(from, to);
        node
    }

    pub fn rollback(self) {}

    pub fn type_error(&self, expected: &'static str) -> ParseError {
        peek_error(Some(self.node), self.iterator.eof_span(), expected)
    }
}

pub fn peek_error(
    node: Option<&SpannedToken>,
    eof_span: Span,
    expected: &'static str,
) -> ParseError {
    match node {
        None => ParseError::unexpected_eof(expected, eof_span),
        Some(node) => ParseError::mismatch(expected, node.spanned_type_name()),
    }
}
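Peeked and PeekedNode implement a checkpoint protocol: a peek records the from/to indices and only advances the shared iterator when commit is called, while rollback simply drops the checkpoint. A standalone sketch of the same commit-or-rollback cursor, with an illustrative Cursor type standing in for TokensIterator:

// Standalone sketch of the peek/commit idea; Cursor and Peek are illustrative
// stand-ins, not nu-parser types.
struct Cursor<'a> {
    items: &'a [&'a str],
    index: usize,
}

struct Peek<'a, 'me> {
    node: Option<&'a str>,
    cursor: &'me mut Cursor<'a>,
    to: usize,
}

impl<'a> Cursor<'a> {
    fn peek<'me>(&'me mut self) -> Peek<'a, 'me> {
        let node = self.items.get(self.index).copied();
        let to = self.index + 1;
        Peek { node, cursor: self, to }
    }
}

impl<'a, 'me> Peek<'a, 'me> {
    // Advance the underlying cursor only when the caller accepts the token.
    fn commit(self) -> Option<&'a str> {
        let node = self.node?;
        self.cursor.index = self.to;
        Some(node)
    }

    // Dropping the checkpoint leaves the cursor untouched.
    fn rollback(self) {}
}

fn main() {
    let items = ["ls", "|", "sort-by"];
    let mut cursor = Cursor { items: &items, index: 0 };

    cursor.peek().rollback();                       // index is still 0
    assert_eq!(cursor.peek().commit(), Some("ls")); // index advances to 1
    assert_eq!(cursor.index, 1);
}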
@ -3,12 +3,17 @@ use crate::parse::token_tree_builder::TokenTreeBuilder as b;
|
|||||||
use crate::Span;
|
use crate::Span;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn supplies_tokens() -> Result<(), Box<dyn std::error::Error>> {
|
fn supplies_tokens() -> Result<(), Box<dyn std::error::Error>> {
|
||||||
let tokens = b::token_list(vec![b::var("it"), b::op("."), b::bare("cpu")]);
|
let tokens = b::token_list(vec![b::it_var(), b::op("."), b::bare("cpu")]);
|
||||||
let (tokens, _) = b::build(tokens);
|
let (tokens, _) = b::build(tokens);
|
||||||
|
|
||||||
let tokens = tokens.expect_list();
|
let tokens = tokens.expect_list();
|
||||||
let mut iterator = TokensIterator::all(tokens, Span::unknown());
|
let mut iterator = TokensIterator::new(tokens, Span::unknown());
|
||||||
|
|
||||||
iterator.next()?.expect_var();
|
iterator.next()?.expect_var();
|
||||||
iterator.next()?.expect_dot();
|
iterator.next()?.expect_dot();
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
#![allow(clippy::large_enum_variant, clippy::type_complexity)]
|
#[macro_use]
|
||||||
|
pub mod macros;
|
||||||
|
|
||||||
pub mod commands;
|
pub mod commands;
|
||||||
pub mod hir;
|
pub mod hir;
|
||||||
@ -8,23 +9,64 @@ pub mod parse_command;
|
|||||||
pub use crate::commands::classified::{
|
pub use crate::commands::classified::{
|
||||||
external::ExternalCommand, internal::InternalCommand, ClassifiedCommand, ClassifiedPipeline,
|
external::ExternalCommand, internal::InternalCommand, ClassifiedCommand, ClassifiedPipeline,
|
||||||
};
|
};
|
||||||
pub use crate::hir::syntax_shape::flat_shape::FlatShape;
|
pub use crate::hir::syntax_shape::flat_shape::{FlatShape, ShapeResult};
|
||||||
pub use crate::hir::syntax_shape::{
|
pub use crate::hir::syntax_shape::{ExpandContext, ExpandSyntax, PipelineShape, SignatureRegistry};
|
||||||
expand_syntax, ExpandContext, ExpandSyntax, PipelineShape, SignatureRegistry,
|
|
||||||
};
|
|
||||||
pub use crate::hir::tokens_iterator::TokensIterator;
|
pub use crate::hir::tokens_iterator::TokensIterator;
|
||||||
pub use crate::parse::files::Files;
|
pub use crate::parse::files::Files;
|
||||||
pub use crate::parse::flag::Flag;
|
pub use crate::parse::flag::Flag;
|
||||||
pub use crate::parse::operator::{CompareOperator, EvaluationOperator};
|
pub use crate::parse::operator::{CompareOperator, EvaluationOperator};
|
||||||
pub use crate::parse::parser::Number;
|
pub use crate::parse::parser::Number;
|
||||||
pub use crate::parse::parser::{module, pipeline};
|
pub use crate::parse::parser::{module, pipeline};
|
||||||
pub use crate::parse::token_tree::{Delimiter, TokenNode};
|
pub use crate::parse::token_tree::{Delimiter, SpannedToken, Token};
|
||||||
pub use crate::parse::token_tree_builder::TokenTreeBuilder;
|
pub use crate::parse::token_tree_builder::TokenTreeBuilder;
|
||||||
|
|
||||||
|
use log::log_enabled;
|
||||||
use nu_errors::ShellError;
|
use nu_errors::ShellError;
|
||||||
use nu_source::nom_input;
|
use nu_protocol::{errln, outln};
|
||||||
|
use nu_source::{nom_input, HasSpan, Text};
|
||||||
|
|
||||||
pub fn parse(input: &str) -> Result<TokenNode, ShellError> {
|
pub fn pipeline_shapes(line: &str, expand_context: ExpandContext) -> Vec<ShapeResult> {
|
||||||
|
let tokens = parse_pipeline(line);
|
||||||
|
|
||||||
|
match tokens {
|
||||||
|
Err(_) => vec![],
|
||||||
|
Ok(v) => {
|
||||||
|
let pipeline = match v.as_pipeline() {
|
||||||
|
Err(_) => return vec![],
|
||||||
|
Ok(v) => v,
|
||||||
|
};
|
||||||
|
|
||||||
|
let tokens = vec![Token::Pipeline(pipeline).into_spanned(v.span())];
|
||||||
|
let mut tokens = TokensIterator::new(&tokens[..], expand_context, v.span());
|
||||||
|
|
||||||
|
let shapes = {
|
||||||
|
// We just constructed a token list that only contains a pipeline, so it can't fail
|
||||||
|
let result = tokens.expand_infallible(PipelineShape);
|
||||||
|
|
||||||
|
if let Some(failure) = result.failed {
|
||||||
|
errln!(
|
||||||
|
"BUG: PipelineShape didn't find a pipeline :: {:#?}",
|
||||||
|
failure
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
tokens.finish_tracer();
|
||||||
|
|
||||||
|
tokens.state().shapes()
|
||||||
|
};
|
||||||
|
|
||||||
|
if log_enabled!(target: "nu::expand_syntax", log::Level::Debug) {
|
||||||
|
outln!("");
|
||||||
|
let _ = ptree::print_tree(&tokens.expand_tracer().clone().print(Text::from(line)));
|
||||||
|
outln!("");
|
||||||
|
}
|
||||||
|
|
||||||
|
shapes.clone()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn parse_pipeline(input: &str) -> Result<SpannedToken, ShellError> {
|
||||||
let _ = pretty_env_logger::try_init();
|
let _ = pretty_env_logger::try_init();
|
||||||
|
|
||||||
match pipeline(nom_input(input)) {
|
match pipeline(nom_input(input)) {
|
||||||
@ -33,7 +75,9 @@ pub fn parse(input: &str) -> Result<TokenNode, ShellError> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn parse_script(input: &str) -> Result<TokenNode, ShellError> {
|
pub use parse_pipeline as parse;
|
||||||
|
|
||||||
|
pub fn parse_script(input: &str) -> Result<SpannedToken, ShellError> {
|
||||||
let _ = pretty_env_logger::try_init();
|
let _ = pretty_env_logger::try_init();
|
||||||
|
|
||||||
match module(nom_input(input)) {
|
match module(nom_input(input)) {
|
||||||
|
9
crates/nu-parser/src/macros.rs
Normal file
@ -0,0 +1,9 @@

#[macro_export]
macro_rules! return_ok {
    ($expr:expr) => {
        match $expr {
            Ok(val) => return Ok(val),
            Err(_) => {}
        }
    };
}
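return_ok! short-circuits the surrounding function with the first Ok value and otherwise falls through to the next alternative. A usage sketch (the macro body is copied from above; the surrounding function is illustrative only, not part of the crate):

#[macro_export]
macro_rules! return_ok {
    ($expr:expr) => {
        match $expr {
            Ok(val) => return Ok(val),
            Err(_) => {}
        }
    };
}

// Try several interpretations of a word, keeping the first one that succeeds.
fn parse_flag_or_number(word: &str) -> Result<String, String> {
    return_ok!(word.strip_prefix("--").map(|s| s.to_string()).ok_or(()));
    return_ok!(word.parse::<i64>().map(|n| n.to_string()).map_err(|_| ()));
    Err(format!("unrecognized word: {}", word))
}

fn main() {
    assert_eq!(parse_flag_or_number("--json"), Ok("json".to_string()));
    assert_eq!(parse_flag_or_number("42"), Ok("42".to_string()));
    assert!(parse_flag_or_number("^ls").is_err());
}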
@ -2,11 +2,11 @@ pub(crate) mod call_node;
|
|||||||
pub(crate) mod comment;
|
pub(crate) mod comment;
|
||||||
pub(crate) mod files;
|
pub(crate) mod files;
|
||||||
pub(crate) mod flag;
|
pub(crate) mod flag;
|
||||||
|
pub(crate) mod number;
|
||||||
pub(crate) mod operator;
|
pub(crate) mod operator;
|
||||||
pub(crate) mod parser;
|
pub(crate) mod parser;
|
||||||
pub(crate) mod pipeline;
|
pub(crate) mod pipeline;
|
||||||
pub(crate) mod token_tree;
|
pub(crate) mod token_tree;
|
||||||
pub(crate) mod token_tree_builder;
|
pub(crate) mod token_tree_builder;
|
||||||
pub(crate) mod tokens;
|
|
||||||
pub(crate) mod unit;
|
pub(crate) mod unit;
|
||||||
pub(crate) mod util;
|
pub(crate) mod util;
|
||||||
|
@ -1,13 +1,13 @@
|
|||||||
use crate::TokenNode;
|
use crate::parse::token_tree::SpannedToken;
|
||||||
use getset::Getters;
|
use getset::Getters;
|
||||||
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource};
|
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource};
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters)]
|
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters)]
|
||||||
pub struct CallNode {
|
pub struct CallNode {
|
||||||
#[get = "pub(crate)"]
|
#[get = "pub(crate)"]
|
||||||
head: Box<TokenNode>,
|
head: Box<SpannedToken>,
|
||||||
#[get = "pub(crate)"]
|
#[get = "pub(crate)"]
|
||||||
children: Option<Vec<TokenNode>>,
|
children: Option<Vec<SpannedToken>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PrettyDebugWithSource for CallNode {
|
impl PrettyDebugWithSource for CallNode {
|
||||||
@ -29,7 +29,7 @@ impl PrettyDebugWithSource for CallNode {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl CallNode {
|
impl CallNode {
|
||||||
pub fn new(head: Box<TokenNode>, children: Vec<TokenNode>) -> CallNode {
|
pub fn new(head: Box<SpannedToken>, children: Vec<SpannedToken>) -> CallNode {
|
||||||
if children.is_empty() {
|
if children.is_empty() {
|
||||||
CallNode {
|
CallNode {
|
||||||
head,
|
head,
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use getset::Getters;
|
use getset::Getters;
|
||||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span};
|
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Span};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
|
|
||||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
|
||||||
@ -12,15 +12,13 @@ pub enum CommentKind {
|
|||||||
pub struct Comment {
|
pub struct Comment {
|
||||||
pub(crate) kind: CommentKind,
|
pub(crate) kind: CommentKind,
|
||||||
pub(crate) text: Span,
|
pub(crate) text: Span,
|
||||||
pub(crate) span: Span,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Comment {
|
impl Comment {
|
||||||
pub fn line(text: impl Into<Span>, outer: impl Into<Span>) -> Comment {
|
pub fn line(text: impl Into<Span>) -> Comment {
|
||||||
Comment {
|
Comment {
|
||||||
kind: CommentKind::Line,
|
kind: CommentKind::Line,
|
||||||
text: text.into(),
|
text: text.into(),
|
||||||
span: outer.into(),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -34,9 +32,3 @@ impl PrettyDebugWithSource for Comment {
|
|||||||
prefix + b::description(self.text.slice(source))
|
prefix + b::description(self.text.slice(source))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl HasSpan for Comment {
|
|
||||||
fn span(&self) -> Span {
|
|
||||||
self.span
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
@ -15,7 +15,6 @@ pub enum FlagKind {
|
|||||||
pub struct Flag {
|
pub struct Flag {
|
||||||
pub(crate) kind: FlagKind,
|
pub(crate) kind: FlagKind,
|
||||||
pub(crate) name: Span,
|
pub(crate) name: Span,
|
||||||
pub(crate) span: Span,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PrettyDebugWithSource for Flag {
|
impl PrettyDebugWithSource for Flag {
|
||||||
@ -30,10 +29,10 @@ impl PrettyDebugWithSource for Flag {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl Flag {
|
impl Flag {
|
||||||
pub fn color(&self) -> Spanned<FlatShape> {
|
pub fn color(&self, span: impl Into<Span>) -> Spanned<FlatShape> {
|
||||||
match self.kind {
|
match self.kind {
|
||||||
FlagKind::Longhand => FlatShape::Flag.spanned(self.span),
|
FlagKind::Longhand => FlatShape::Flag.spanned(span.into()),
|
||||||
FlagKind::Shorthand => FlatShape::ShorthandFlag.spanned(self.span),
|
FlagKind::Shorthand => FlatShape::ShorthandFlag.spanned(span.into()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
70
crates/nu-parser/src/parse/number.rs
Normal file
@ -0,0 +1,70 @@

use crate::hir::syntax_shape::FlatShape;
use crate::parse::parser::Number;
use bigdecimal::BigDecimal;
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Text};
use num_bigint::BigInt;
use std::str::FromStr;

#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum RawNumber {
    Int(Span),
    Decimal(Span),
}

impl HasSpan for RawNumber {
    fn span(&self) -> Span {
        match self {
            RawNumber::Int(span) => *span,
            RawNumber::Decimal(span) => *span,
        }
    }
}

impl PrettyDebugWithSource for RawNumber {
    fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
        match self {
            RawNumber::Int(span) => b::primitive(span.slice(source)),
            RawNumber::Decimal(span) => b::primitive(span.slice(source)),
        }
    }
}

impl RawNumber {
    pub fn as_flat_shape(&self) -> FlatShape {
        match self {
            RawNumber::Int(_) => FlatShape::Int,
            RawNumber::Decimal(_) => FlatShape::Decimal,
        }
    }

    pub fn int(span: impl Into<Span>) -> RawNumber {
        let span = span.into();

        RawNumber::Int(span)
    }

    pub fn decimal(span: impl Into<Span>) -> RawNumber {
        let span = span.into();

        RawNumber::Decimal(span)
    }

    pub(crate) fn to_number(self, source: &Text) -> Number {
        match self {
            RawNumber::Int(tag) => {
                if let Ok(big_int) = BigInt::from_str(tag.slice(source)) {
                    Number::Int(big_int)
                } else {
                    unreachable!("Internal error: could not parse text as BigInt as expected")
                }
            }
            RawNumber::Decimal(tag) => {
                if let Ok(big_decimal) = BigDecimal::from_str(tag.slice(source)) {
                    Number::Decimal(big_decimal)
                } else {
                    unreachable!("Internal error: could not parse text as BigDecimal as expected")
                }
            }
        }
    }
}
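RawNumber only records which span of the source holds an int or a decimal; the text is parsed into a value lazily in to_number. A simplified standalone sketch of that span-plus-lazy-parse idea, using std numeric types in place of BigInt/BigDecimal and byte offsets in place of Span (all names here are illustrative):

// Simplified stand-in for RawNumber::to_number: spans index into the source
// text and are only parsed into values on demand.
#[derive(Debug, Clone, Copy)]
enum RawNum {
    Int { start: usize, end: usize },
    Decimal { start: usize, end: usize },
}

#[derive(Debug, PartialEq)]
enum Num {
    Int(i64),
    Decimal(f64),
}

impl RawNum {
    fn to_number(self, source: &str) -> Num {
        match self {
            RawNum::Int { start, end } => Num::Int(
                source[start..end].parse().expect("span was lexed as an int"),
            ),
            RawNum::Decimal { start, end } => Num::Decimal(
                source[start..end].parse().expect("span was lexed as a decimal"),
            ),
        }
    }
}

fn main() {
    let source = "ls | take 10.5";
    let raw = RawNum::Decimal { start: 10, end: 14 };
    assert_eq!(raw.to_number(source), Num::Decimal(10.5));
}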
@ -1,8 +1,8 @@
|
|||||||
#![allow(unused)]
|
#![allow(unused)]
|
||||||
|
|
||||||
use crate::parse::{
|
use crate::parse::{
|
||||||
call_node::*, flag::*, operator::*, pipeline::*, token_tree::*, token_tree_builder::*,
|
call_node::*, flag::*, number::*, operator::*, pipeline::*, token_tree::*,
|
||||||
tokens::*, unit::*,
|
token_tree_builder::*, unit::*,
|
||||||
};
|
};
|
||||||
use nom;
|
use nom;
|
||||||
use nom::branch::*;
|
use nom::branch::*;
|
||||||
@ -36,7 +36,7 @@ use std::str::FromStr;
|
|||||||
macro_rules! cmp_operator {
|
macro_rules! cmp_operator {
|
||||||
($name:tt : $token:tt ) => {
|
($name:tt : $token:tt ) => {
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn $name(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn $name(input: NomSpan) -> IResult<NomSpan, $crate::parse::token_tree::SpannedToken> {
|
||||||
let start = input.offset;
|
let start = input.offset;
|
||||||
let (input, tag) = tag($token)(input)?;
|
let (input, tag) = tag($token)(input)?;
|
||||||
let end = input.offset;
|
let end = input.offset;
|
||||||
@ -52,7 +52,7 @@ macro_rules! cmp_operator {
|
|||||||
macro_rules! eval_operator {
|
macro_rules! eval_operator {
|
||||||
($name:tt : $token:tt ) => {
|
($name:tt : $token:tt ) => {
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn $name(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn $name(input: NomSpan) -> IResult<NomSpan, $crate::parse::token_tree::SpannedToken> {
|
||||||
let start = input.offset;
|
let start = input.offset;
|
||||||
let (input, tag) = tag($token)(input)?;
|
let (input, tag) = tag($token)(input)?;
|
||||||
let end = input.offset;
|
let end = input.offset;
|
||||||
@ -209,7 +209,7 @@ impl Into<Number> for BigInt {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn number(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn number(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
let (input, number) = raw_number(input)?;
|
let (input, number) = raw_number(input)?;
|
||||||
|
|
||||||
Ok((
|
Ok((
|
||||||
@ -218,12 +218,36 @@ pub fn number(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
|||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[tracable_parser]
|
||||||
|
pub fn int_member(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
|
let start = input.offset;
|
||||||
|
let (input, head) = digit1(input)?;
|
||||||
|
|
||||||
|
match input.fragment.chars().next() {
|
||||||
|
None | Some('.') => Ok((
|
||||||
|
input,
|
||||||
|
Token::Number(RawNumber::int((start, input.offset)))
|
||||||
|
.into_spanned((start, input.offset)),
|
||||||
|
)),
|
||||||
|
other if is_boundary(other) => Ok((
|
||||||
|
input,
|
||||||
|
Token::Number(RawNumber::int((start, input.offset)))
|
||||||
|
.into_spanned((start, input.offset)),
|
||||||
|
)),
|
||||||
|
_ => Err(nom::Err::Error(nom::error::make_error(
|
||||||
|
input,
|
||||||
|
nom::error::ErrorKind::Tag,
|
||||||
|
))),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn raw_number(input: NomSpan) -> IResult<NomSpan, RawNumber> {
|
pub fn raw_number(input: NomSpan) -> IResult<NomSpan, RawNumber> {
|
||||||
let anchoral = input;
|
let anchoral = input;
|
||||||
let start = input.offset;
|
let start = input.offset;
|
||||||
let (input, neg) = opt(tag("-"))(input)?;
|
let (input, neg) = opt(tag("-"))(input)?;
|
||||||
let (input, head) = digit1(input)?;
|
let (input, head) = digit1(input)?;
|
||||||
|
let after_int_head = input;
|
||||||
|
|
||||||
match input.fragment.chars().next() {
|
match input.fragment.chars().next() {
|
||||||
None => return Ok((input, RawNumber::int(Span::new(start, input.offset)))),
|
None => return Ok((input, RawNumber::int(Span::new(start, input.offset)))),
|
||||||
@ -255,7 +279,17 @@ pub fn raw_number(input: NomSpan) -> IResult<NomSpan, RawNumber> {
|
|||||||
Err(_) => return Ok((input, RawNumber::int(Span::new(start, input.offset)))),
|
Err(_) => return Ok((input, RawNumber::int(Span::new(start, input.offset)))),
|
||||||
};
|
};
|
||||||
|
|
||||||
let (input, tail) = digit1(input)?;
|
let tail_digits_result: IResult<NomSpan, _> = digit1(input);
|
||||||
|
|
||||||
|
let (input, tail) = match tail_digits_result {
|
||||||
|
Ok((input, tail)) => (input, tail),
|
||||||
|
Err(_) => {
|
||||||
|
return Ok((
|
||||||
|
after_int_head,
|
||||||
|
RawNumber::int((start, after_int_head.offset)),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
let end = input.offset;
|
let end = input.offset;
|
||||||
|
|
||||||
@ -272,14 +306,14 @@ pub fn raw_number(input: NomSpan) -> IResult<NomSpan, RawNumber> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn operator(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn operator(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
let (input, operator) = alt((gte, lte, neq, gt, lt, eq, cont, ncont))(input)?;
|
let (input, operator) = alt((gte, lte, neq, gt, lt, eq, cont, ncont))(input)?;
|
||||||
|
|
||||||
Ok((input, operator))
|
Ok((input, operator))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn dq_string(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn dq_string(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
let start = input.offset;
|
let start = input.offset;
|
||||||
let (input, _) = char('"')(input)?;
|
let (input, _) = char('"')(input)?;
|
||||||
let start1 = input.offset;
|
let start1 = input.offset;
|
||||||
@ -294,7 +328,7 @@ pub fn dq_string(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn sq_string(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn sq_string(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
let start = input.offset;
|
let start = input.offset;
|
||||||
let (input, _) = char('\'')(input)?;
|
let (input, _) = char('\'')(input)?;
|
||||||
let start1 = input.offset;
|
let start1 = input.offset;
|
||||||
@ -310,12 +344,12 @@ pub fn sq_string(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn string(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn string(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
alt((sq_string, dq_string))(input)
|
alt((sq_string, dq_string))(input)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn external(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn external(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
let start = input.offset;
|
let start = input.offset;
|
||||||
let (input, _) = tag("^")(input)?;
|
let (input, _) = tag("^")(input)?;
|
||||||
let (input, bare) = take_while(is_file_char)(input)?;
|
let (input, bare) = take_while(is_file_char)(input)?;
|
||||||
@ -373,7 +407,7 @@ pub fn matches(cond: fn(char) -> bool) -> impl Fn(NomSpan) -> IResult<NomSpan, N
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn pattern(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn pattern(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
word(
|
word(
|
||||||
start_pattern,
|
start_pattern,
|
||||||
matches(is_glob_char),
|
matches(is_glob_char),
|
||||||
@ -387,7 +421,7 @@ pub fn start_pattern(input: NomSpan) -> IResult<NomSpan, NomSpan> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn filename(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn filename(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
let start_pos = input.offset;
|
let start_pos = input.offset;
|
||||||
|
|
||||||
let (mut input, mut saw_special) = match start_file_char(input) {
|
let (mut input, mut saw_special) = match start_file_char(input) {
|
||||||
@ -495,7 +529,7 @@ pub fn start_filename(input: NomSpan) -> IResult<NomSpan, NomSpan> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn member(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn bare_member(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
word(
|
word(
|
||||||
matches(is_start_member_char),
|
matches(is_start_member_char),
|
||||||
matches(is_member_char),
|
matches(is_member_char),
|
||||||
@ -503,13 +537,22 @@ pub fn member(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
|||||||
)(input)
|
)(input)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[tracable_parser]
|
||||||
|
pub fn garbage_member(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
|
word(
|
||||||
|
matches(is_garbage_member_char),
|
||||||
|
matches(is_garbage_member_char),
|
||||||
|
TokenTreeBuilder::spanned_garbage,
|
||||||
|
)(input)
|
||||||
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn ident(input: NomSpan) -> IResult<NomSpan, Tag> {
|
pub fn ident(input: NomSpan) -> IResult<NomSpan, Tag> {
|
||||||
word(matches(is_id_start), matches(is_id_continue), Tag::from)(input)
|
word(matches(is_id_start), matches(is_id_continue), Tag::from)(input)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn external_word(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn external_word(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
let start = input.offset;
|
let start = input.offset;
|
||||||
let (input, _) = take_while1(is_external_word_char)(input)?;
|
let (input, _) = take_while1(is_external_word_char)(input)?;
|
||||||
let end = input.offset;
|
let end = input.offset;
|
||||||
@ -517,22 +560,48 @@ pub fn external_word(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
|||||||
Ok((input, TokenTreeBuilder::spanned_external_word((start, end))))
|
Ok((input, TokenTreeBuilder::spanned_external_word((start, end))))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
enum OneOf<T, U> {
|
||||||
|
First(T),
|
||||||
|
Second(U),
|
||||||
|
}
|
||||||
|
|
||||||
|
trait SubParser<'a, T>: Sized + Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, T> {}
|
||||||
|
|
||||||
|
impl<'a, T, U> SubParser<'a, U> for T where T: Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, U> {}
|
||||||
|
|
||||||
|
fn one_of<'a, T, U>(
|
||||||
|
first: impl SubParser<'a, T>,
|
||||||
|
second: impl SubParser<'a, U>,
|
||||||
|
) -> impl SubParser<'a, OneOf<T, U>> {
|
||||||
|
move |input: NomSpan<'a>| -> IResult<NomSpan, OneOf<T, U>> {
|
||||||
|
let first_result = first(input);
|
||||||
|
|
||||||
|
match first_result {
|
||||||
|
Ok((input, val)) => Ok((input, OneOf::First(val))),
|
||||||
|
Err(_) => {
|
||||||
|
let (input, val) = second(input)?;
|
||||||
|
Ok((input, OneOf::Second(val)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
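one_of runs the first sub-parser and falls back to the second only on failure, tagging the result with which branch matched so var can distinguish $it from an ordinary identifier. A nom-free standalone sketch of the same either-combinator (names are illustrative, not the crate's API):

// Standalone sketch of the one_of fallback combinator; plain closures stand in
// for nom parsers.
enum Either<T, U> {
    First(T),
    Second(U),
}

fn one_of<T, U>(
    first: impl Fn(&str) -> Result<T, ()>,
    second: impl Fn(&str) -> Result<U, ()>,
) -> impl Fn(&str) -> Result<Either<T, U>, ()> {
    move |input: &str| match first(input) {
        Ok(value) => Ok(Either::First(value)),
        Err(_) => second(input).map(Either::Second),
    }
}

fn main() {
    // "it" parses as the special it-variable, anything else as a plain identifier.
    let var = one_of(
        |s: &str| if s == "it" { Ok(()) } else { Err(()) },
        |s: &str| Ok::<String, ()>(s.to_string()),
    );

    match var("it") {
        Ok(Either::First(())) => println!("special $it variable"),
        Ok(Either::Second(name)) => println!("ordinary variable {}", name),
        Err(_) => println!("not a variable"),
    }
}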
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn var(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn var(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
let start = input.offset;
|
let start = input.offset;
|
||||||
let (input, _) = tag("$")(input)?;
|
let (input, _) = tag("$")(input)?;
|
||||||
let (input, bare) = ident(input)?;
|
let (input, name) = one_of(tag("it"), ident)(input)?;
|
||||||
let end = input.offset;
|
let end = input.offset;
|
||||||
|
|
||||||
Ok((
|
match name {
|
||||||
input,
|
OneOf::First(it) => Ok((input, TokenTreeBuilder::spanned_it_var(it, (start, end)))),
|
||||||
TokenTreeBuilder::spanned_var(bare, Span::new(start, end)),
|
OneOf::Second(name) => Ok((input, TokenTreeBuilder::spanned_var(name, (start, end)))),
|
||||||
))
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn tight<'a>(
|
fn tight<'a>(
|
||||||
parser: impl Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, Vec<TokenNode>>,
|
parser: impl Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, Vec<SpannedToken>>,
|
||||||
) -> impl Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, Vec<TokenNode>> {
|
) -> impl Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, Vec<SpannedToken>> {
|
||||||
move |input: NomSpan| {
|
move |input: NomSpan| {
|
||||||
let mut result = vec![];
|
let mut result = vec![];
|
||||||
let (input, head) = parser(input)?;
|
let (input, head) = parser(input)?;
|
||||||
@ -560,7 +629,7 @@ fn tight<'a>(
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn flag(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn flag(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
let start = input.offset;
|
let start = input.offset;
|
||||||
let (input, _) = tag("--")(input)?;
|
let (input, _) = tag("--")(input)?;
|
||||||
let (input, bare) = filename(input)?;
|
let (input, bare) = filename(input)?;
|
||||||
@ -573,7 +642,7 @@ pub fn flag(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn shorthand(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn shorthand(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
let start = input.offset;
|
let start = input.offset;
|
||||||
let (input, _) = tag("-")(input)?;
|
let (input, _) = tag("-")(input)?;
|
||||||
let (input, bare) = filename(input)?;
|
let (input, bare) = filename(input)?;
|
||||||
@ -586,14 +655,14 @@ pub fn shorthand(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn leaf(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn leaf(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
let (input, node) = alt((number, string, operator, flag, shorthand, var, external))(input)?;
|
let (input, node) = alt((number, string, operator, flag, shorthand, var, external))(input)?;
|
||||||
|
|
||||||
Ok((input, node))
|
Ok((input, node))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<TokenNode>>> {
|
pub fn token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<SpannedToken>>> {
|
||||||
let start = input.offset;
|
let start = input.offset;
|
||||||
let mut node_list = vec![];
|
let mut node_list = vec![];
|
||||||
|
|
||||||
@ -658,7 +727,7 @@ pub fn token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<TokenNode>>> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn spaced_token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<TokenNode>>> {
|
pub fn spaced_token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<SpannedToken>>> {
|
||||||
let start = input.offset;
|
let start = input.offset;
|
||||||
let (input, pre_ws) = opt(any_space)(input)?;
|
let (input, pre_ws) = opt(any_space)(input)?;
|
||||||
let (input, items) = token_list(input)?;
|
let (input, items) = token_list(input)?;
|
||||||
@ -679,10 +748,10 @@ pub fn spaced_token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<TokenNo
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn make_token_list(
|
fn make_token_list(
|
||||||
first: Vec<TokenNode>,
|
first: Vec<SpannedToken>,
|
||||||
list: Vec<(Vec<TokenNode>, Vec<TokenNode>)>,
|
list: Vec<(Vec<SpannedToken>, Vec<SpannedToken>)>,
|
||||||
sp_right: Option<TokenNode>,
|
sp_right: Option<SpannedToken>,
|
||||||
) -> Vec<TokenNode> {
|
) -> Vec<SpannedToken> {
|
||||||
let mut nodes = vec![];
|
let mut nodes = vec![];
|
||||||
|
|
||||||
nodes.extend(first);
|
nodes.extend(first);
|
||||||
@ -700,7 +769,7 @@ fn make_token_list(
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn separator(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn separator(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
let left = input.offset;
|
let left = input.offset;
|
||||||
let (input, ws1) = alt((tag(";"), tag("\n")))(input)?;
|
let (input, ws1) = alt((tag(";"), tag("\n")))(input)?;
|
||||||
let right = input.offset;
|
let right = input.offset;
|
||||||
@ -709,7 +778,7 @@ pub fn separator(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn whitespace(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn whitespace(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
let left = input.offset;
|
let left = input.offset;
|
||||||
let (input, ws1) = space1(input)?;
|
let (input, ws1) = space1(input)?;
|
||||||
let right = input.offset;
|
let right = input.offset;
|
||||||
@ -718,7 +787,7 @@ pub fn whitespace(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn any_space(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
|
pub fn any_space(input: NomSpan) -> IResult<NomSpan, Vec<SpannedToken>> {
|
||||||
let left = input.offset;
|
let left = input.offset;
|
||||||
let (input, tokens) = many1(alt((whitespace, separator, comment)))(input)?;
|
let (input, tokens) = many1(alt((whitespace, separator, comment)))(input)?;
|
||||||
let right = input.offset;
|
let right = input.offset;
|
||||||
@ -727,7 +796,7 @@ pub fn any_space(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn comment(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn comment(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
let left = input.offset;
|
let left = input.offset;
|
||||||
let (input, start) = tag("#")(input)?;
|
let (input, start) = tag("#")(input)?;
|
||||||
let (input, rest) = not_line_ending(input)?;
|
let (input, rest) = not_line_ending(input)?;
|
||||||
@ -744,7 +813,7 @@ pub fn comment(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
|||||||
pub fn delimited(
|
pub fn delimited(
|
||||||
input: NomSpan,
|
input: NomSpan,
|
||||||
delimiter: Delimiter,
|
delimiter: Delimiter,
|
||||||
) -> IResult<NomSpan, (Span, Span, Spanned<Vec<TokenNode>>)> {
|
) -> IResult<NomSpan, (Span, Span, Spanned<Vec<SpannedToken>>)> {
|
||||||
let left = input.offset;
|
let left = input.offset;
|
||||||
let (input, open_span) = tag(delimiter.open())(input)?;
|
let (input, open_span) = tag(delimiter.open())(input)?;
|
||||||
let (input, inner_items) = opt(spaced_token_list)(input)?;
|
let (input, inner_items) = opt(spaced_token_list)(input)?;
|
||||||
@ -768,7 +837,7 @@ pub fn delimited(
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn delimited_paren(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn delimited_paren(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
let (input, (left, right, tokens)) = delimited(input, Delimiter::Paren)?;
|
let (input, (left, right, tokens)) = delimited(input, Delimiter::Paren)?;
|
||||||
|
|
||||||
Ok((
|
Ok((
|
||||||
@ -778,7 +847,7 @@ pub fn delimited_paren(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn delimited_square(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn delimited_square(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
let (input, (left, right, tokens)) = delimited(input, Delimiter::Square)?;
|
let (input, (left, right, tokens)) = delimited(input, Delimiter::Square)?;
|
||||||
|
|
||||||
Ok((
|
Ok((
|
||||||
@ -788,7 +857,7 @@ pub fn delimited_square(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn delimited_brace(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn delimited_brace(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
let (input, (left, right, tokens)) = delimited(input, Delimiter::Brace)?;
|
let (input, (left, right, tokens)) = delimited(input, Delimiter::Brace)?;
|
||||||
|
|
||||||
Ok((
|
Ok((
|
||||||
@ -810,7 +879,7 @@ pub fn raw_call(input: NomSpan) -> IResult<NomSpan, Spanned<CallNode>> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn range_continuation(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
|
pub fn range_continuation(input: NomSpan) -> IResult<NomSpan, Vec<SpannedToken>> {
|
||||||
let original = input;
|
let original = input;
|
||||||
|
|
||||||
let mut result = vec![];
|
let mut result = vec![];
|
||||||
@ -824,7 +893,7 @@ pub fn range_continuation(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn dot_member(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
|
pub fn dot_member(input: NomSpan) -> IResult<NomSpan, Vec<SpannedToken>> {
|
||||||
let (input, dot_result) = dot(input)?;
|
let (input, dot_result) = dot(input)?;
|
||||||
let (input, member_result) = any_member(input)?;
|
let (input, member_result) = any_member(input)?;
|
||||||
|
|
||||||
@ -832,12 +901,12 @@ pub fn dot_member(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn any_member(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn any_member(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
alt((number, string, member))(input)
|
alt((int_member, string, bare_member, garbage_member))(input)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn tight_node(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
|
pub fn tight_node(input: NomSpan) -> IResult<NomSpan, Vec<SpannedToken>> {
|
||||||
alt((
|
alt((
|
||||||
tight(to_list(leaf)),
|
tight(to_list(leaf)),
|
||||||
tight(to_list(filename)),
|
tight(to_list(filename)),
|
||||||
@ -851,8 +920,8 @@ pub fn tight_node(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn to_list(
|
fn to_list(
|
||||||
parser: impl Fn(NomSpan) -> IResult<NomSpan, TokenNode>,
|
parser: impl Fn(NomSpan) -> IResult<NomSpan, SpannedToken>,
|
||||||
) -> impl Fn(NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
|
) -> impl Fn(NomSpan) -> IResult<NomSpan, Vec<SpannedToken>> {
|
||||||
move |input| {
|
move |input| {
|
||||||
let (input, next) = parser(input)?;
|
let (input, next) = parser(input)?;
|
||||||
|
|
||||||
@ -861,17 +930,18 @@ fn to_list(
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn nodes(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn nodes(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
let (input, tokens) = token_list(input)?;
|
let (input, tokens) = token_list(input)?;
|
||||||
|
let span = tokens.span;
|
||||||
|
|
||||||
Ok((
|
Ok((
|
||||||
input,
|
input,
|
||||||
TokenTreeBuilder::spanned_token_list(tokens.item, tokens.span),
|
TokenTreeBuilder::spanned_pipeline(vec![PipelineElement::new(None, tokens)], span),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn pipeline(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn pipeline(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
let start = input.offset;
|
let start = input.offset;
|
||||||
let (input, head) = spaced_token_list(input)?;
|
let (input, head) = spaced_token_list(input)?;
|
||||||
let (input, items) = many0(tuple((tag("|"), spaced_token_list)))(input)?;
|
let (input, items) = many0(tuple((tag("|"), spaced_token_list)))(input)?;
|
||||||
@ -900,7 +970,7 @@ pub fn pipeline(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
|||||||
}
|
}
|
||||||
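
As a rough orientation for the `pipeline` parser above: it reads one leading token list and then any number of `|`-prefixed token lists via `many0(tuple((tag("|"), spaced_token_list)))`. The dependency-free sketch below only mimics the shape of that result, an optional pipe span plus a spanned segment per element; `Segment` and `split_pipeline` are illustrative names and are not part of nu-parser.

```rust
// Hypothetical stand-in types; nu-parser uses PipelineElement and Span instead.
#[derive(Debug)]
struct Segment {
    pipe: Option<(usize, usize)>, // span of the leading `|`, if any
    text: (usize, usize),         // span of the segment body
}

// Split a source line into pipe-delimited segments, recording byte offsets,
// roughly the shape that `many0(tuple((tag("|"), spaced_token_list)))` accumulates.
fn split_pipeline(source: &str) -> Vec<Segment> {
    let mut segments = Vec::new();
    let mut start = 0;
    let mut pipe = None;

    for (idx, ch) in source.char_indices() {
        if ch == '|' {
            segments.push(Segment { pipe, text: (start, idx) });
            pipe = Some((idx, idx + 1));
            start = idx + 1;
        }
    }
    segments.push(Segment { pipe, text: (start, source.len()) });
    segments
}

fn main() {
    let segments = split_pipeline("^echo 1 | ^cat");
    assert_eq!(segments.len(), 2);
    assert_eq!(segments[0].text, (0, 8));       // "^echo 1 "
    assert_eq!(segments[1].pipe, Some((8, 9))); // the `|`
    assert_eq!(segments[1].text, (9, 14));      // " ^cat"
    println!("{:?}", segments);
}
```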
|
|
||||||
#[tracable_parser]
|
#[tracable_parser]
|
||||||
pub fn module(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
pub fn module(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
|
||||||
let (input, tokens) = spaced_token_list(input)?;
|
let (input, tokens) = spaced_token_list(input)?;
|
||||||
|
|
||||||
if input.input_len() != 0 {
|
if input.input_len() != 0 {
|
||||||
@ -999,9 +1069,17 @@ fn is_file_char(c: char) -> bool {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||

fn is_garbage_member_char(c: char) -> bool {
    match c {
        c if c.is_whitespace() => false,
        '.' => false,
        _ => true,
    }
}

||||||
fn is_start_member_char(c: char) -> bool {
|
fn is_start_member_char(c: char) -> bool {
|
||||||
match c {
|
match c {
|
||||||
_ if c.is_alphanumeric() => true,
|
_ if c.is_alphabetic() => true,
|
||||||
'_' => true,
|
'_' => true,
|
||||||
'-' => true,
|
'-' => true,
|
||||||
_ => false,
|
_ => false,
|
||||||
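
The two character predicates above back the error-correcting member parser: `any_member` now tries `int_member`, `string`, `bare_member`, and finally `garbage_member`, so a path segment that is neither an integer nor a bare word is kept as a garbage token instead of failing the whole parse (this is what the `"nu.0xATYKARNU.baz"` test later in this diff exercises). Below is a small stand-alone sketch of that classification idea; `Member` and `classify_member` are made-up names, and the bare-word rule is simplified compared to the real nom-based parsers.

```rust
#[derive(Debug, PartialEq)]
enum Member {
    Int(i64),
    Bare(String),
    Garbage(String),
}

// Simplified version of the start-of-member predicate from the diff above.
fn is_start_member_char(c: char) -> bool {
    c.is_alphabetic() || c == '_' || c == '-'
}

// Classify one dot-separated path segment the way the error-correcting member
// parser does: integers and bare words parse normally, anything else is
// preserved as garbage so the rest of the path can still be parsed and colored.
fn classify_member(segment: &str) -> Member {
    if let Ok(n) = segment.parse::<i64>() {
        return Member::Int(n);
    }
    if segment.chars().next().map(is_start_member_char).unwrap_or(false)
        && segment.chars().all(|c| c.is_alphanumeric() || c == '_' || c == '-')
    {
        return Member::Bare(segment.to_string());
    }
    Member::Garbage(segment.to_string())
}

fn main() {
    let members: Vec<Member> = "nu.0xATYKARNU.baz".split('.').map(classify_member).collect();
    assert_eq!(
        members,
        vec![
            Member::Bare("nu".into()),
            Member::Garbage("0xATYKARNU".into()),
            Member::Bare("baz".into()),
        ]
    );
    println!("{:?}", members);
}
```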
@ -1263,7 +1341,7 @@ mod tests {
|
|||||||
fn test_variable() {
|
fn test_variable() {
|
||||||
equal_tokens! {
|
equal_tokens! {
|
||||||
<nodes>
|
<nodes>
|
||||||
"$it" -> b::token_list(vec![b::var("it")])
|
"$it" -> b::token_list(vec![b::it_var()])
|
||||||
}
|
}
|
||||||
|
|
||||||
equal_tokens! {
|
equal_tokens! {
|
||||||
@ -1354,12 +1432,33 @@ mod tests {
|
|||||||
|
|
||||||
equal_tokens! {
|
equal_tokens! {
|
||||||
<nodes>
|
<nodes>
|
||||||
"$it.print" -> b::token_list(vec![b::var("it"), b::dot(), b::bare("print")])
|
"$it.print" -> b::token_list(vec![b::it_var(), b::dot(), b::bare("print")])
|
||||||
}
|
}
|
||||||
|
|
||||||
equal_tokens! {
|
equal_tokens! {
|
||||||
<nodes>
|
<nodes>
|
||||||
"$it.0" -> b::token_list(vec![b::var("it"), b::dot(), b::int(0)])
|
r#"nu.0xATYKARNU.baz"# -> b::token_list(vec![
|
||||||
|
b::bare("nu"),
|
||||||
|
b::dot(),
|
||||||
|
b::garbage("0xATYKARNU"),
|
||||||
|
b::dot(),
|
||||||
|
b::bare("baz")
|
||||||
|
])
|
||||||
|
}
|
||||||
|
|
||||||
|
equal_tokens! {
|
||||||
|
<nodes>
|
||||||
|
"1.b" -> b::token_list(vec![b::int(1), b::dot(), b::bare("b")])
|
||||||
|
}
|
||||||
|
|
||||||
|
equal_tokens! {
|
||||||
|
<nodes>
|
||||||
|
"$it.0" -> b::token_list(vec![b::it_var(), b::dot(), b::int(0)])
|
||||||
|
}
|
||||||
|
|
||||||
|
equal_tokens! {
|
||||||
|
<nodes>
|
||||||
|
"fortune_tellers.2.name" -> b::token_list(vec![b::bare("fortune_tellers"), b::dot(), b::int(2), b::dot(), b::bare("name")])
|
||||||
}
|
}
|
||||||
|
|
||||||
equal_tokens! {
|
equal_tokens! {
|
||||||
@ -1386,7 +1485,7 @@ mod tests {
|
|||||||
vec![
|
vec![
|
||||||
b::parens(vec![
|
b::parens(vec![
|
||||||
b::sp(),
|
b::sp(),
|
||||||
b::var("it"),
|
b::it_var(),
|
||||||
b::dot(),
|
b::dot(),
|
||||||
b::bare("is"),
|
b::bare("is"),
|
||||||
b::dot(),
|
b::dot(),
|
||||||
@ -1407,7 +1506,7 @@ mod tests {
|
|||||||
<nodes>
|
<nodes>
|
||||||
r#"$it."are PAS".0"# -> b::token_list(
|
r#"$it."are PAS".0"# -> b::token_list(
|
||||||
vec![
|
vec![
|
||||||
b::var("it"),
|
b::it_var(),
|
||||||
b::dot(),
|
b::dot(),
|
||||||
b::string("are PAS"),
|
b::string("are PAS"),
|
||||||
b::dot(),
|
b::dot(),
|
||||||
@ -1445,7 +1544,7 @@ mod tests {
|
|||||||
fn test_smoke_single_command_it() {
|
fn test_smoke_single_command_it() {
|
||||||
equal_tokens! {
|
equal_tokens! {
|
||||||
<nodes>
|
<nodes>
|
||||||
"echo $it" -> b::token_list(vec![b::bare("echo"), b::sp(), b::var("it")])
|
"echo $it" -> b::token_list(vec![b::bare("echo"), b::sp(), b::it_var()])
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -1533,6 +1632,17 @@ mod tests {
|
|||||||
]
|
]
|
||||||
])
|
])
|
||||||
}
|
}
|
||||||

        equal_tokens! {
            "^echo 1 | ^cat" -> b::pipeline(vec![
                vec![
                    b::external_command("echo"), b::sp(), b::int(1), b::sp()
                ],
                vec![
                    b::sp(), b::external_command("cat")
                ]
            ])
        }
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
@ -1631,7 +1741,7 @@ mod tests {
|
|||||||
// b::bare("where"),
|
// b::bare("where"),
|
||||||
// vec![
|
// vec![
|
||||||
// b::sp(),
|
// b::sp(),
|
||||||
// b::var("it"),
|
// b::it_var(),
|
||||||
// b::sp(),
|
// b::sp(),
|
||||||
// b::op("!="),
|
// b::op("!="),
|
||||||
// b::sp(),
|
// b::sp(),
|
||||||
@ -1654,7 +1764,7 @@ mod tests {
|
|||||||
// vec![
|
// vec![
|
||||||
// b::sp(),
|
// b::sp(),
|
||||||
// b::braced(vec![
|
// b::braced(vec![
|
||||||
// b::path(b::var("it"), vec![b::member("size")]),
|
// b::path(b::it_var(), vec![b::member("size")]),
|
||||||
// b::sp(),
|
// b::sp(),
|
||||||
// b::op(">"),
|
// b::op(">"),
|
||||||
// b::sp(),
|
// b::sp(),
|
||||||
@ -1669,10 +1779,13 @@ mod tests {
|
|||||||
// }
|
// }
|
||||||
|
|
||||||
fn apply(
|
fn apply(
|
||||||
f: impl Fn(NomSpan) -> Result<(NomSpan, TokenNode), nom::Err<(NomSpan, nom::error::ErrorKind)>>,
|
f: impl Fn(
|
||||||
|
NomSpan,
|
||||||
|
)
|
||||||
|
-> Result<(NomSpan, SpannedToken), nom::Err<(NomSpan, nom::error::ErrorKind)>>,
|
||||||
desc: &str,
|
desc: &str,
|
||||||
string: &str,
|
string: &str,
|
||||||
) -> TokenNode {
|
) -> SpannedToken {
|
||||||
let result = f(nom_input(string));
|
let result = f(nom_input(string));
|
||||||
|
|
||||||
match result {
|
match result {
|
||||||
@ -1693,20 +1806,15 @@ mod tests {
|
|||||||
|
|
||||||
fn delimited(
|
fn delimited(
|
||||||
delimiter: Spanned<Delimiter>,
|
delimiter: Spanned<Delimiter>,
|
||||||
children: Vec<TokenNode>,
|
children: Vec<SpannedToken>,
|
||||||
left: usize,
|
left: usize,
|
||||||
right: usize,
|
right: usize,
|
||||||
) -> TokenNode {
|
) -> SpannedToken {
|
||||||
let start = Span::for_char(left);
|
let start = Span::for_char(left);
|
||||||
let end = Span::for_char(right);
|
let end = Span::for_char(right);
|
||||||
|
|
||||||
let node = DelimitedNode::new(delimiter.item, (start, end), children);
|
let node = DelimitedNode::new(delimiter.item, (start, end), children);
|
||||||
let spanned = node.spanned(Span::new(left, right));
|
Token::Delimited(node).into_spanned((left, right))
|
||||||
TokenNode::Delimited(spanned)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn token(token: UnspannedToken, left: usize, right: usize) -> TokenNode {
|
|
||||||
TokenNode::Token(token.into_token(Span::new(left, right)))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build<T>(block: CurriedNode<T>) -> T {
|
fn build<T>(block: CurriedNode<T>) -> T {
|
||||||
@ -1714,7 +1822,7 @@ mod tests {
|
|||||||
block(&mut builder)
|
block(&mut builder)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn build_token(block: CurriedToken) -> TokenNode {
|
fn build_token(block: CurriedToken) -> SpannedToken {
|
||||||
TokenTreeBuilder::build(block).0
|
TokenTreeBuilder::build(block).0
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,23 +1,32 @@
|
|||||||
use crate::TokenNode;
|
use crate::{SpannedToken, Token};
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use getset::Getters;
|
use getset::Getters;
|
||||||
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned};
|
use nu_source::{
|
||||||
|
b, DebugDocBuilder, HasSpan, IntoSpanned, PrettyDebugWithSource, Span, Spanned, SpannedItem,
|
||||||
|
};
|
||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Getters, new)]
|
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Getters, new)]
|
||||||
pub struct Pipeline {
|
pub struct Pipeline {
|
||||||
#[get = "pub"]
|
#[get = "pub"]
|
||||||
pub(crate) parts: Vec<PipelineElement>,
|
pub(crate) parts: Vec<PipelineElement>,
|
||||||
pub(crate) span: Span,
|
}
|
||||||
|
|
||||||
|
impl IntoSpanned for Pipeline {
|
||||||
|
type Output = Spanned<Pipeline>;
|
||||||
|
|
||||||
|
fn into_spanned(self, span: impl Into<Span>) -> Self::Output {
|
||||||
|
self.spanned(span.into())
|
||||||
|
}
|
||||||
}
|
}
|
||||||
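
The new `IntoSpanned` impl above lets a `Pipeline` be paired with the span it covers, producing a `Spanned<Pipeline>` through nu_source's `spanned` helper. A simplified, self-contained version of that wrapper pattern is sketched below; the `Span`, `Spanned`, `SpannedItem`, and `IntoSpanned` definitions here are stand-ins for the nu_source originals (which also accept `impl Into<Span>`), and the `parts: Vec<String>` field is only a placeholder for `Vec<PipelineElement>`.

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
struct Span { start: usize, end: usize }

#[derive(Debug)]
struct Spanned<T> {
    item: T,
    span: Span,
}

// Stand-in for nu_source::SpannedItem: anything can be paired with a span.
trait SpannedItem: Sized {
    fn spanned(self, span: Span) -> Spanned<Self> {
        Spanned { item: self, span }
    }
}

impl<T: Sized> SpannedItem for T {}

// Stand-in for nu_source::IntoSpanned: a type chooses its own spanned output.
trait IntoSpanned {
    type Output;
    fn into_spanned(self, span: Span) -> Self::Output;
}

#[derive(Debug)]
struct Pipeline {
    parts: Vec<String>, // placeholder for Vec<PipelineElement>
}

impl IntoSpanned for Pipeline {
    type Output = Spanned<Pipeline>;

    fn into_spanned(self, span: Span) -> Self::Output {
        self.spanned(span)
    }
}

fn main() {
    let pipeline = Pipeline { parts: vec!["ls".into(), "get name".into()] };
    let spanned = pipeline.into_spanned(Span { start: 0, end: 14 });
    assert_eq!(spanned.span, Span { start: 0, end: 14 });
    println!("{:?}", spanned);
}
```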
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
|
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
|
||||||
pub struct Tokens {
|
pub struct Tokens {
|
||||||
pub(crate) tokens: Vec<TokenNode>,
|
pub(crate) tokens: Vec<SpannedToken>,
|
||||||
pub(crate) span: Span,
|
pub(crate) span: Span,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Tokens {
|
impl Tokens {
|
||||||
pub fn iter(&self) -> impl Iterator<Item = &TokenNode> {
|
pub fn iter(&self) -> impl Iterator<Item = &SpannedToken> {
|
||||||
self.tokens.iter()
|
self.tokens.iter()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -38,7 +47,7 @@ impl HasSpan for PipelineElement {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl PipelineElement {
|
impl PipelineElement {
|
||||||
pub fn new(pipe: Option<Span>, tokens: Spanned<Vec<TokenNode>>) -> PipelineElement {
|
pub fn new(pipe: Option<Span>, tokens: Spanned<Vec<SpannedToken>>) -> PipelineElement {
|
||||||
PipelineElement {
|
PipelineElement {
|
||||||
pipe,
|
pipe,
|
||||||
tokens: Tokens {
|
tokens: Tokens {
|
||||||
@ -48,7 +57,7 @@ impl PipelineElement {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn tokens(&self) -> &[TokenNode] {
|
pub fn tokens(&self) -> &[SpannedToken] {
|
||||||
&self.tokens.tokens
|
&self.tokens.tokens
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -65,9 +74,9 @@ impl PrettyDebugWithSource for Pipeline {
|
|||||||
impl PrettyDebugWithSource for PipelineElement {
|
impl PrettyDebugWithSource for PipelineElement {
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||||
b::intersperse(
|
b::intersperse(
|
||||||
self.tokens.iter().map(|token| match token {
|
self.tokens.iter().map(|token| match token.unspanned() {
|
||||||
TokenNode::Whitespace(_) => b::blank(),
|
Token::Whitespace => b::blank(),
|
||||||
token => token.pretty_debug(source),
|
_ => token.pretty_debug(source),
|
||||||
}),
|
}),
|
||||||
b::space(),
|
b::space(),
|
||||||
)
|
)
|
||||||
|
@ -1,162 +1,275 @@
|
|||||||
use crate::parse::{call_node::*, comment::*, flag::*, operator::*, pipeline::*, tokens::*};
|
#![allow(clippy::type_complexity)]
|
||||||
|
use crate::parse::{call_node::*, comment::*, flag::*, number::*, operator::*, pipeline::*};
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use getset::Getters;
|
use getset::Getters;
|
||||||
use nu_errors::{ParseError, ShellError};
|
use nu_errors::{ParseError, ShellError};
|
||||||
use nu_protocol::ShellTypeName;
|
use nu_protocol::{ShellTypeName, SpannedTypeName};
|
||||||
use nu_source::{
|
use nu_source::{
|
||||||
b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem, Tagged,
|
b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem, Text,
|
||||||
TaggedItem, Text,
|
|
||||||
};
|
};
|
||||||
use std::fmt;
|
use std::borrow::Cow;
|
||||||
|
use std::ops::Deref;
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)]
|
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)]
|
||||||
pub enum TokenNode {
|
pub enum Token {
|
||||||
Token(Token),
|
Number(RawNumber),
|
||||||
|
CompareOperator(CompareOperator),
|
||||||
|
EvaluationOperator(EvaluationOperator),
|
||||||
|
String(Span),
|
||||||
|
Variable(Span),
|
||||||
|
ItVariable(Span),
|
||||||
|
ExternalCommand(Span),
|
||||||
|
ExternalWord,
|
||||||
|
GlobPattern,
|
||||||
|
Bare,
|
||||||
|
Garbage,
|
||||||
|
|
||||||
Call(Spanned<CallNode>),
|
Call(CallNode),
|
||||||
Nodes(Spanned<Vec<TokenNode>>),
|
Delimited(DelimitedNode),
|
||||||
Delimited(Spanned<DelimitedNode>),
|
|
||||||
Pipeline(Pipeline),
|
Pipeline(Pipeline),
|
||||||
Flag(Flag),
|
Flag(Flag),
|
||||||
Comment(Comment),
|
Comment(Comment),
|
||||||
Whitespace(Span),
|
Whitespace,
|
||||||
Separator(Span),
|
Separator,
|
||||||
|
|
||||||
Error(Spanned<ShellError>),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PrettyDebugWithSource for TokenNode {
|
macro_rules! token_type {
|
||||||
|
(struct $name:tt (desc: $desc:tt) -> $out:ty { |$span:ident, $pat:pat| => $do:expr }) => {
|
||||||
|
pub struct $name;
|
||||||
|
|
||||||
|
impl TokenType for $name {
|
||||||
|
type Output = $out;
|
||||||
|
|
||||||
|
fn desc(&self) -> Cow<'static, str> {
|
||||||
|
Cow::Borrowed($desc)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn extract_token_value(
|
||||||
|
&self,
|
||||||
|
token: &SpannedToken,
|
||||||
|
err: ParseErrorFn<$out>,
|
||||||
|
) -> Result<$out, ParseError> {
|
||||||
|
let $span = token.span();
|
||||||
|
|
||||||
|
match *token.unspanned() {
|
||||||
|
$pat => Ok($do),
|
||||||
|
_ => err(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
(struct $name:tt (desc: $desc:tt) -> $out:ty { $pat:pat => $do:expr }) => {
|
||||||
|
pub struct $name;
|
||||||
|
|
||||||
|
impl TokenType for $name {
|
||||||
|
type Output = $out;
|
||||||
|
|
||||||
|
fn desc(&self) -> Cow<'static, str> {
|
||||||
|
Cow::Borrowed($desc)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn extract_token_value(
|
||||||
|
&self,
|
||||||
|
token: &SpannedToken,
|
||||||
|
err: ParseErrorFn<$out>,
|
||||||
|
) -> Result<$out, ParseError> {
|
||||||
|
match token.unspanned().clone() {
|
||||||
|
$pat => Ok($do),
|
||||||
|
_ => err(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
pub type ParseErrorFn<'a, T> = &'a dyn Fn() -> Result<T, ParseError>;
|
||||||
|
|
||||||
|
token_type!(struct IntType (desc: "integer") -> RawNumber {
|
||||||
|
Token::Number(number @ RawNumber::Int(_)) => number
|
||||||
|
});
|
||||||
|
|
||||||
|
token_type!(struct DecimalType (desc: "decimal") -> RawNumber {
|
||||||
|
Token::Number(number @ RawNumber::Decimal(_)) => number
|
||||||
|
});
|
||||||
|
|
||||||
|
token_type!(struct StringType (desc: "string") -> (Span, Span) {
|
||||||
|
|outer, Token::String(inner)| => (inner, outer)
|
||||||
|
});
|
||||||
|
|
||||||
|
token_type!(struct BareType (desc: "word") -> Span {
|
||||||
|
|span, Token::Bare| => span
|
||||||
|
});
|
||||||
|
|
||||||
|
token_type!(struct DotType (desc: "dot") -> Span {
|
||||||
|
|span, Token::EvaluationOperator(EvaluationOperator::Dot)| => span
|
||||||
|
});
|
||||||
|
|
||||||
|
token_type!(struct DotDotType (desc: "dotdot") -> Span {
|
||||||
|
|span, Token::EvaluationOperator(EvaluationOperator::DotDot)| => span
|
||||||
|
});
|
||||||
|
|
||||||
|
token_type!(struct CompareOperatorType (desc: "compare operator") -> (Span, CompareOperator) {
|
||||||
|
|span, Token::CompareOperator(operator)| => (span, operator)
|
||||||
|
});
|
||||||
|
|
||||||
|
token_type!(struct ExternalWordType (desc: "external word") -> Span {
|
||||||
|
|span, Token::ExternalWord| => span
|
||||||
|
});
|
||||||
|
|
||||||
|
token_type!(struct ExternalCommandType (desc: "external command") -> (Span, Span) {
|
||||||
|
|outer, Token::ExternalCommand(inner)| => (inner, outer)
|
||||||
|
});
|
||||||
|
|
||||||
|
token_type!(struct CommentType (desc: "comment") -> (Comment, Span) {
|
||||||
|
|outer, Token::Comment(comment)| => (comment, outer)
|
||||||
|
});
|
||||||
|
|
||||||
|
token_type!(struct SeparatorType (desc: "separator") -> Span {
|
||||||
|
|span, Token::Separator| => span
|
||||||
|
});
|
||||||
|
|
||||||
|
token_type!(struct WhitespaceType (desc: "whitespace") -> Span {
|
||||||
|
|span, Token::Whitespace| => span
|
||||||
|
});
|
||||||
|
|
||||||
|
token_type!(struct WordType (desc: "word") -> Span {
|
||||||
|
|span, Token::Bare| => span
|
||||||
|
});
|
||||||
|
|
||||||
|
token_type!(struct ItVarType (desc: "$it") -> (Span, Span) {
|
||||||
|
|outer, Token::ItVariable(inner)| => (inner, outer)
|
||||||
|
});
|
||||||
|
|
||||||
|
token_type!(struct VarType (desc: "variable") -> (Span, Span) {
|
||||||
|
|outer, Token::Variable(inner)| => (inner, outer)
|
||||||
|
});
|
||||||
|
|
||||||
|
token_type!(struct PipelineType (desc: "pipeline") -> Pipeline {
|
||||||
|
Token::Pipeline(pipeline) => pipeline
|
||||||
|
});
|
||||||
|
|
||||||
|
token_type!(struct BlockType (desc: "block") -> DelimitedNode {
|
||||||
|
Token::Delimited(block @ DelimitedNode { delimiter: Delimiter::Brace, .. }) => block
|
||||||
|
});
|
||||||
|
|
||||||
|
token_type!(struct SquareType (desc: "square") -> DelimitedNode {
|
||||||
|
Token::Delimited(square @ DelimitedNode { delimiter: Delimiter::Square, .. }) => square
|
||||||
|
});
|
||||||
|
|
||||||
|
pub trait TokenType {
|
||||||
|
type Output;
|
||||||
|
|
||||||
|
fn desc(&self) -> Cow<'static, str>;
|
||||||
|
|
||||||
|
fn extract_token_value(
|
||||||
|
&self,
|
||||||
|
token: &SpannedToken,
|
||||||
|
err: ParseErrorFn<Self::Output>,
|
||||||
|
) -> Result<Self::Output, ParseError>;
|
||||||
|
}
|
||||||
|
|
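
The `token_type!` macro above stamps out one tiny extractor type per token shape; each implements `TokenType` with a human-readable `desc` (used for "expected ..." errors and completion hints) and an `extract_token_value` that either pulls the payload out of a `SpannedToken` or reports the mismatch. The sketch below reproduces the pattern in miniature; the `Span`/`Token`/`SpannedToken` definitions, the comma-separated matcher syntax, and the `Option`-returning `extract` are simplifications for illustration, not the real nu-parser signatures.

```rust
use std::borrow::Cow;

#[derive(Debug, Clone, Copy, PartialEq)]
struct Span { start: usize, end: usize }

#[derive(Debug, PartialEq)]
enum Token {
    Bare,
    Int(i64),
}

#[derive(Debug)]
struct SpannedToken {
    unspanned: Token,
    span: Span,
}

trait TokenType {
    type Output;
    fn desc(&self) -> Cow<'static, str>;
    fn extract(&self, token: &SpannedToken) -> Option<Self::Output>;
}

// Generate one extractor struct per token shape, mirroring `token_type!`.
// `$span` is bound to the token's span so the success expression can use it.
macro_rules! token_type {
    (struct $name:ident (desc: $desc:tt) -> $out:ty { $span:ident, $pat:pat => $do:expr }) => {
        struct $name;

        impl TokenType for $name {
            type Output = $out;

            fn desc(&self) -> Cow<'static, str> {
                Cow::Borrowed($desc)
            }

            fn extract(&self, token: &SpannedToken) -> Option<$out> {
                let $span = token.span;
                match token.unspanned {
                    $pat => Some($do),
                    _ => None,
                }
            }
        }
    };
}

token_type!(struct BareType (desc: "word") -> Span { span, Token::Bare => span });
token_type!(struct IntType (desc: "integer") -> i64 { _span, Token::Int(i) => i });

fn main() {
    let word = SpannedToken { unspanned: Token::Bare, span: Span { start: 0, end: 4 } };
    let number = SpannedToken { unspanned: Token::Int(1), span: Span { start: 5, end: 6 } };

    assert_eq!(BareType.extract(&word), Some(Span { start: 0, end: 4 }));
    assert_eq!(IntType.extract(&word), None);
    assert_eq!(IntType.extract(&number), Some(1));
    println!("expected a {}", IntType.desc());
}
```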
||||||
|
impl Token {
|
||||||
|
pub fn into_spanned(self, span: impl Into<Span>) -> SpannedToken {
|
||||||
|
SpannedToken {
|
||||||
|
unspanned: self,
|
||||||
|
span: span.into(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters)]
|
||||||
|
pub struct SpannedToken {
|
||||||
|
#[get = "pub"]
|
||||||
|
unspanned: Token,
|
||||||
|
span: Span,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Deref for SpannedToken {
|
||||||
|
type Target = Token;
|
||||||
|
fn deref(&self) -> &Self::Target {
|
||||||
|
&self.unspanned
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HasSpan for SpannedToken {
|
||||||
|
fn span(&self) -> Span {
|
||||||
|
self.span
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ShellTypeName for SpannedToken {
|
||||||
|
fn type_name(&self) -> &'static str {
|
||||||
|
self.unspanned.type_name()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PrettyDebugWithSource for SpannedToken {
|
||||||
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(token) => token.pretty_debug(source),
|
Token::Number(number) => number.pretty_debug(source),
|
||||||
TokenNode::Call(call) => call.pretty_debug(source),
|
Token::CompareOperator(operator) => operator.pretty_debug(source),
|
||||||
TokenNode::Nodes(nodes) => b::intersperse(
|
Token::EvaluationOperator(operator) => operator.pretty_debug(source),
|
||||||
nodes.iter().map(|node| node.pretty_debug(source)),
|
Token::String(_) | Token::GlobPattern | Token::Bare => {
|
||||||
b::space(),
|
b::primitive(self.span.slice(source))
|
||||||
),
|
}
|
||||||
TokenNode::Delimited(delimited) => delimited.pretty_debug(source),
|
Token::Variable(_) => b::var(self.span.slice(source)),
|
||||||
TokenNode::Pipeline(pipeline) => pipeline.pretty_debug(source),
|
Token::ItVariable(_) => b::keyword(self.span.slice(source)),
|
||||||
TokenNode::Flag(flag) => flag.pretty_debug(source),
|
Token::ExternalCommand(_) => b::description(self.span.slice(source)),
|
||||||
TokenNode::Whitespace(space) => b::typed(
|
Token::ExternalWord => b::description(self.span.slice(source)),
|
||||||
|
Token::Call(call) => call.pretty_debug(source),
|
||||||
|
Token::Delimited(delimited) => delimited.pretty_debug(source),
|
||||||
|
Token::Pipeline(pipeline) => pipeline.pretty_debug(source),
|
||||||
|
Token::Flag(flag) => flag.pretty_debug(source),
|
||||||
|
Token::Garbage => b::error(self.span.slice(source)),
|
||||||
|
Token::Whitespace => b::typed(
|
||||||
"whitespace",
|
"whitespace",
|
||||||
b::description(format!("{:?}", space.slice(source))),
|
b::description(format!("{:?}", self.span.slice(source))),
|
||||||
),
|
),
|
||||||
TokenNode::Separator(span) => b::typed(
|
Token::Separator => b::typed(
|
||||||
"separator",
|
"separator",
|
||||||
b::description(format!("{:?}", span.slice(source))),
|
b::description(format!("{:?}", self.span.slice(source))),
|
||||||
),
|
),
|
||||||
TokenNode::Comment(comment) => {
|
Token::Comment(comment) => {
|
||||||
b::typed("comment", b::description(comment.text.slice(source)))
|
b::typed("comment", b::description(comment.text.slice(source)))
|
||||||
}
|
}
|
||||||
TokenNode::Error(_) => b::error("error"),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ShellTypeName for TokenNode {
|
impl ShellTypeName for Token {
|
||||||
fn type_name(&self) -> &'static str {
|
fn type_name(&self) -> &'static str {
|
||||||
match self {
|
match self {
|
||||||
TokenNode::Token(t) => t.type_name(),
|
Token::Number(_) => "number",
|
||||||
TokenNode::Nodes(_) => "nodes",
|
Token::CompareOperator(_) => "comparison operator",
|
||||||
TokenNode::Call(_) => "command",
|
Token::EvaluationOperator(EvaluationOperator::Dot) => "dot",
|
||||||
TokenNode::Delimited(d) => d.type_name(),
|
Token::EvaluationOperator(EvaluationOperator::DotDot) => "dot dot",
|
||||||
TokenNode::Pipeline(_) => "pipeline",
|
Token::String(_) => "string",
|
||||||
TokenNode::Flag(_) => "flag",
|
Token::Variable(_) => "variable",
|
||||||
TokenNode::Whitespace(_) => "whitespace",
|
Token::ItVariable(_) => "it variable",
|
||||||
TokenNode::Separator(_) => "separator",
|
Token::ExternalCommand(_) => "external command",
|
||||||
TokenNode::Comment(_) => "comment",
|
Token::ExternalWord => "external word",
|
||||||
TokenNode::Error(_) => "error",
|
Token::GlobPattern => "glob pattern",
|
||||||
|
Token::Bare => "word",
|
||||||
|
Token::Call(_) => "command",
|
||||||
|
Token::Delimited(d) => d.type_name(),
|
||||||
|
Token::Pipeline(_) => "pipeline",
|
||||||
|
Token::Flag(_) => "flag",
|
||||||
|
Token::Garbage => "garbage",
|
||||||
|
Token::Whitespace => "whitespace",
|
||||||
|
Token::Separator => "separator",
|
||||||
|
Token::Comment(_) => "comment",
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub struct DebugTokenNode<'a> {
|
impl From<&SpannedToken> for Span {
|
||||||
node: &'a TokenNode,
|
fn from(token: &SpannedToken) -> Span {
|
||||||
source: &'a Text,
|
token.span
|
||||||
}
|
|
||||||
|
|
||||||
impl fmt::Debug for DebugTokenNode<'_> {
|
|
||||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
|
||||||
match self.node {
|
|
||||||
TokenNode::Token(t) => write!(f, "{:?}", t.debug(self.source)),
|
|
||||||
TokenNode::Call(s) => {
|
|
||||||
write!(f, "(")?;
|
|
||||||
|
|
||||||
write!(f, "{}", s.head().debug(self.source))?;
|
|
||||||
|
|
||||||
if let Some(children) = s.children() {
|
|
||||||
for child in children {
|
|
||||||
write!(f, "{}", child.debug(self.source))?;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
write!(f, ")")
|
|
||||||
}
|
|
||||||
|
|
||||||
TokenNode::Delimited(d) => {
|
|
||||||
write!(
|
|
||||||
f,
|
|
||||||
"{}",
|
|
||||||
match d.delimiter {
|
|
||||||
Delimiter::Brace => "{",
|
|
||||||
Delimiter::Paren => "(",
|
|
||||||
Delimiter::Square => "[",
|
|
||||||
}
|
|
||||||
)?;
|
|
||||||
|
|
||||||
for child in d.children() {
|
|
||||||
write!(f, "{:?}", child.old_debug(self.source))?;
|
|
||||||
}
|
|
||||||
|
|
||||||
write!(
|
|
||||||
f,
|
|
||||||
"{}",
|
|
||||||
match d.delimiter {
|
|
||||||
Delimiter::Brace => "}",
|
|
||||||
Delimiter::Paren => ")",
|
|
||||||
Delimiter::Square => "]",
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
TokenNode::Pipeline(pipeline) => write!(f, "{}", pipeline.debug(self.source)),
|
|
||||||
TokenNode::Error(_) => write!(f, "<error>"),
|
|
||||||
rest => write!(f, "{}", rest.span().slice(self.source)),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<&TokenNode> for Span {
|
impl SpannedToken {
|
||||||
fn from(token: &TokenNode) -> Span {
|
|
||||||
token.span()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl HasSpan for TokenNode {
|
|
||||||
fn span(&self) -> Span {
|
|
||||||
match self {
|
|
||||||
TokenNode::Token(t) => t.span,
|
|
||||||
TokenNode::Nodes(t) => t.span,
|
|
||||||
TokenNode::Call(s) => s.span,
|
|
||||||
TokenNode::Delimited(s) => s.span,
|
|
||||||
TokenNode::Pipeline(s) => s.span,
|
|
||||||
TokenNode::Flag(s) => s.span,
|
|
||||||
TokenNode::Whitespace(s) => *s,
|
|
||||||
TokenNode::Separator(s) => *s,
|
|
||||||
TokenNode::Comment(c) => c.span(),
|
|
||||||
TokenNode::Error(s) => s.span,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TokenNode {
|
|
||||||
pub fn tagged_type_name(&self) -> Tagged<&'static str> {
|
|
||||||
self.type_name().tagged(self.span())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn old_debug<'a>(&'a self, source: &'a Text) -> DebugTokenNode<'a> {
|
|
||||||
DebugTokenNode { node: self, source }
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn as_external_arg(&self, source: &Text) -> String {
|
pub fn as_external_arg(&self, source: &Text) -> String {
|
||||||
self.span().slice(source).to_string()
|
self.span().slice(source).to_string()
|
||||||
}
|
}
|
||||||
@ -166,145 +279,105 @@ impl TokenNode {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_variable(&self) -> Result<(Span, Span), ShellError> {
|
pub fn get_variable(&self) -> Result<(Span, Span), ShellError> {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::Variable(inner_span) => Ok((self.span(), *inner_span)),
|
||||||
unspanned: UnspannedToken::Variable(inner_span),
|
_ => Err(ShellError::type_error("variable", self.spanned_type_name())),
|
||||||
span: outer_span,
|
|
||||||
}) => Ok((*outer_span, *inner_span)),
|
|
||||||
_ => Err(ShellError::type_error(
|
|
||||||
"variable",
|
|
||||||
self.type_name().spanned(self.span()),
|
|
||||||
)),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_bare(&self) -> bool {
|
pub fn is_bare(&self) -> bool {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::Bare => true,
|
||||||
unspanned: UnspannedToken::Bare,
|
|
||||||
..
|
|
||||||
}) => true,
|
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_string(&self) -> bool {
|
pub fn is_string(&self) -> bool {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::String(_) => true,
|
||||||
unspanned: UnspannedToken::String(_),
|
|
||||||
..
|
|
||||||
}) => true,
|
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_number(&self) -> bool {
|
pub fn is_number(&self) -> bool {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::Number(_) => true,
|
||||||
unspanned: UnspannedToken::Number(_),
|
|
||||||
..
|
|
||||||
}) => true,
|
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn as_string(&self) -> Option<(Span, Span)> {
|
pub fn as_string(&self) -> Option<(Span, Span)> {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::String(inner_span) => Some((self.span(), *inner_span)),
|
||||||
unspanned: UnspannedToken::String(inner_span),
|
|
||||||
span: outer_span,
|
|
||||||
}) => Some((*outer_span, *inner_span)),
|
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_pattern(&self) -> bool {
|
pub fn is_pattern(&self) -> bool {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::GlobPattern => true,
|
||||||
unspanned: UnspannedToken::GlobPattern,
|
|
||||||
..
|
|
||||||
}) => true,
|
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_word(&self) -> bool {
|
pub fn is_word(&self) -> bool {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::Bare => true,
|
||||||
unspanned: UnspannedToken::Bare,
|
|
||||||
..
|
|
||||||
}) => true,
|
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_int(&self) -> bool {
|
pub fn is_int(&self) -> bool {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::Number(RawNumber::Int(_)) => true,
|
||||||
unspanned: UnspannedToken::Number(RawNumber::Int(_)),
|
|
||||||
..
|
|
||||||
}) => true,
|
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_dot(&self) -> bool {
|
pub fn is_dot(&self) -> bool {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::EvaluationOperator(EvaluationOperator::Dot) => true,
|
||||||
unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot),
|
|
||||||
..
|
|
||||||
}) => true,
|
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn as_block(&self) -> Option<(Spanned<&[TokenNode]>, (Span, Span))> {
|
pub fn as_block(&self) -> Option<(Spanned<&[SpannedToken]>, (Span, Span))> {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Delimited(Spanned {
|
Token::Delimited(DelimitedNode {
|
||||||
item:
|
delimiter,
|
||||||
DelimitedNode {
|
children,
|
||||||
delimiter,
|
spans,
|
||||||
children,
|
}) if *delimiter == Delimiter::Brace => {
|
||||||
spans,
|
Some(((&children[..]).spanned(self.span()), *spans))
|
||||||
},
|
|
||||||
span,
|
|
||||||
}) if *delimiter == Delimiter::Brace => Some(((&children[..]).spanned(*span), *spans)),
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn is_external(&self) -> bool {
|
|
||||||
match self {
|
|
||||||
TokenNode::Token(Token {
|
|
||||||
unspanned: UnspannedToken::ExternalCommand(..),
|
|
||||||
..
|
|
||||||
}) => true,
|
|
||||||
_ => false,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option<Flag> {
|
|
||||||
match self {
|
|
||||||
TokenNode::Flag(flag @ Flag { .. }) if value == flag.name().slice(source) => {
|
|
||||||
Some(*flag)
|
|
||||||
}
|
}
|
||||||
_ => None,
|
_ => None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn is_external(&self) -> bool {
|
||||||
|
match self.unspanned() {
|
||||||
|
Token::ExternalCommand(..) => true,
|
||||||
|
_ => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option<Flag> {
|
||||||
|
match self.unspanned() {
|
||||||
|
Token::Flag(flag @ Flag { .. }) if value == flag.name().slice(source) => Some(*flag),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn as_pipeline(&self) -> Result<Pipeline, ParseError> {
|
pub fn as_pipeline(&self) -> Result<Pipeline, ParseError> {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Pipeline(pipeline) => Ok(pipeline.clone()),
|
Token::Pipeline(pipeline) => Ok(pipeline.clone()),
|
||||||
other => Err(ParseError::mismatch(
|
_ => Err(ParseError::mismatch("pipeline", self.spanned_type_name())),
|
||||||
"pipeline",
|
|
||||||
other.type_name().spanned(other.span()),
|
|
||||||
)),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn is_whitespace(&self) -> bool {
|
pub fn is_whitespace(&self) -> bool {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Whitespace(_) => true,
|
Token::Whitespace => true,
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -315,7 +388,13 @@ impl TokenNode {
|
|||||||
pub struct DelimitedNode {
|
pub struct DelimitedNode {
|
||||||
pub(crate) delimiter: Delimiter,
|
pub(crate) delimiter: Delimiter,
|
||||||
pub(crate) spans: (Span, Span),
|
pub(crate) spans: (Span, Span),
|
||||||
pub(crate) children: Vec<TokenNode>,
|
pub(crate) children: Vec<SpannedToken>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl HasSpan for DelimitedNode {
|
||||||
|
fn span(&self) -> Span {
|
||||||
|
self.spans.0.until(self.spans.1)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
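
`DelimitedNode` now knows its own span by joining the opening and closing delimiter spans (`self.spans.0.until(self.spans.1)`), and the pretty-printer earlier recovers token text with `span.slice(source)`. The sketch below shows those two helpers as they appear to behave from their use here, with a local `Span` type; the exact nu_source signatures are assumptions, not copied from the crate.

```rust
#[derive(Debug, Clone, Copy, PartialEq)]
struct Span { start: usize, end: usize }

impl Span {
    fn new(start: usize, end: usize) -> Span {
        Span { start, end }
    }

    // Join two spans: everything from the start of `self` to the end of `other`,
    // e.g. an opening `(` span joined with the closing `)` span.
    fn until(&self, other: Span) -> Span {
        Span::new(self.start, other.end)
    }

    // Recover the text a span covers by slicing the original source.
    fn slice<'a>(&self, source: &'a str) -> &'a str {
        &source[self.start..self.end]
    }
}

fn main() {
    let source = "(echo hi)";
    let open = Span::new(0, 1);
    let close = Span::new(8, 9);

    let whole = open.until(close);
    assert_eq!(whole, Span::new(0, 9));
    assert_eq!(whole.slice(source), "(echo hi)");
    assert_eq!(Span::new(1, 5).slice(source), "echo");
}
```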
|
||||||
impl PrettyDebugWithSource for DelimitedNode {
|
impl PrettyDebugWithSource for DelimitedNode {
|
||||||
@ -369,79 +448,68 @@ impl Delimiter {
|
|||||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
|
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
|
||||||
#[get = "pub(crate)"]
|
#[get = "pub(crate)"]
|
||||||
pub struct PathNode {
|
pub struct PathNode {
|
||||||
head: Box<TokenNode>,
|
head: Box<SpannedToken>,
|
||||||
tail: Vec<TokenNode>,
|
tail: Vec<SpannedToken>,
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
impl TokenNode {
|
impl SpannedToken {
|
||||||
pub fn expect_external(&self) -> Span {
|
pub fn expect_external(&self) -> Span {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::ExternalCommand(span) => *span,
|
||||||
unspanned: UnspannedToken::ExternalCommand(span),
|
_ => panic!(
|
||||||
..
|
|
||||||
}) => *span,
|
|
||||||
other => panic!(
|
|
||||||
"Only call expect_external if you checked is_external first, found {:?}",
|
"Only call expect_external if you checked is_external first, found {:?}",
|
||||||
other
|
self
|
||||||
),
|
),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn expect_string(&self) -> (Span, Span) {
|
pub fn expect_string(&self) -> (Span, Span) {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::String(inner_span) => (self.span(), *inner_span),
|
||||||
unspanned: UnspannedToken::String(inner_span),
|
|
||||||
span: outer_span,
|
|
||||||
}) => (*outer_span, *inner_span),
|
|
||||||
other => panic!("Expected string, found {:?}", other),
|
other => panic!("Expected string, found {:?}", other),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn expect_list(&self) -> Spanned<&[TokenNode]> {
|
pub fn expect_list(&self) -> Spanned<Vec<SpannedToken>> {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Nodes(token_nodes) => token_nodes[..].spanned(token_nodes.span),
|
Token::Pipeline(pipeline) => pipeline
|
||||||
other => panic!("Expected list, found {:?}", other),
|
.parts()
|
||||||
|
.iter()
|
||||||
|
.flat_map(|part| part.tokens())
|
||||||
|
.cloned()
|
||||||
|
.collect::<Vec<SpannedToken>>()
|
||||||
|
.spanned(self.span()),
|
||||||
|
_ => panic!("Expected list, found {:?}", self),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
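
With the dedicated `Nodes` variant gone, a plain token list is now represented as a single-element pipeline, so the test helper `expect_list` above flattens every pipeline element's tokens back into one vector with `flat_map`. A minimal sketch of that flattening, using plain `String` tokens and made-up `Pipeline`/`PipelineElement`/`all_tokens` stand-ins:

```rust
// Stand-ins for the real types: a pipeline is a list of elements,
// each element carrying its own token list.
struct PipelineElement {
    tokens: Vec<String>,
}

struct Pipeline {
    parts: Vec<PipelineElement>,
}

impl Pipeline {
    // Flatten every element's tokens into one list, as expect_list now does.
    fn all_tokens(&self) -> Vec<String> {
        self.parts
            .iter()
            .flat_map(|part| part.tokens.iter())
            .cloned()
            .collect()
    }
}

fn main() {
    let pipeline = Pipeline {
        parts: vec![
            PipelineElement { tokens: vec!["ls".into()] },
            PipelineElement { tokens: vec!["get".into(), "name".into()] },
        ],
    };

    assert_eq!(pipeline.all_tokens(), vec!["ls", "get", "name"]);
}
```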
|
||||||
pub fn expect_pattern(&self) -> Span {
|
pub fn expect_pattern(&self) -> Span {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::GlobPattern => self.span(),
|
||||||
unspanned: UnspannedToken::GlobPattern,
|
_ => panic!("Expected pattern, found {:?}", self),
|
||||||
span: outer_span,
|
|
||||||
}) => *outer_span,
|
|
||||||
other => panic!("Expected pattern, found {:?}", other),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn expect_var(&self) -> (Span, Span) {
|
pub fn expect_var(&self) -> (Span, Span) {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::Variable(inner_span) => (self.span(), *inner_span),
|
||||||
unspanned: UnspannedToken::Variable(inner_span),
|
Token::ItVariable(inner_span) => (self.span(), *inner_span),
|
||||||
span: outer_span,
|
|
||||||
}) => (*outer_span, *inner_span),
|
|
||||||
other => panic!("Expected var, found {:?}", other),
|
other => panic!("Expected var, found {:?}", other),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn expect_dot(&self) -> Span {
|
pub fn expect_dot(&self) -> Span {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::EvaluationOperator(EvaluationOperator::Dot) => self.span(),
|
||||||
unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot),
|
|
||||||
span,
|
|
||||||
}) => *span,
|
|
||||||
other => panic!("Expected dot, found {:?}", other),
|
other => panic!("Expected dot, found {:?}", other),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn expect_bare(&self) -> Span {
|
pub fn expect_bare(&self) -> Span {
|
||||||
match self {
|
match self.unspanned() {
|
||||||
TokenNode::Token(Token {
|
Token::Bare => self.span(),
|
||||||
unspanned: UnspannedToken::Bare,
|
_ => panic!("Expected bare, found {:?}", self),
|
||||||
span,
|
|
||||||
}) => *span,
|
|
||||||
other => panic!("Expected bare, found {:?}", other),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,10 +1,10 @@
|
|||||||
use crate::parse::call_node::CallNode;
|
use crate::parse::call_node::CallNode;
|
||||||
use crate::parse::comment::Comment;
|
use crate::parse::comment::Comment;
|
||||||
use crate::parse::flag::{Flag, FlagKind};
|
use crate::parse::flag::{Flag, FlagKind};
|
||||||
|
use crate::parse::number::RawNumber;
|
||||||
use crate::parse::operator::{CompareOperator, EvaluationOperator};
|
use crate::parse::operator::{CompareOperator, EvaluationOperator};
|
||||||
use crate::parse::pipeline::{Pipeline, PipelineElement};
|
use crate::parse::pipeline::{Pipeline, PipelineElement};
|
||||||
use crate::parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
|
use crate::parse::token_tree::{DelimitedNode, Delimiter, SpannedToken, Token};
|
||||||
use crate::parse::tokens::{RawNumber, UnspannedToken};
|
|
||||||
use bigdecimal::BigDecimal;
|
use bigdecimal::BigDecimal;
|
||||||
use nu_source::{Span, Spanned, SpannedItem};
|
use nu_source::{Span, Spanned, SpannedItem};
|
||||||
use num_bigint::BigInt;
|
use num_bigint::BigInt;
|
||||||
@ -21,11 +21,11 @@ impl TokenTreeBuilder {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub type CurriedToken = Box<dyn FnOnce(&mut TokenTreeBuilder) -> TokenNode + 'static>;
|
pub type CurriedToken = Box<dyn FnOnce(&mut TokenTreeBuilder) -> SpannedToken + 'static>;
|
||||||
pub type CurriedCall = Box<dyn FnOnce(&mut TokenTreeBuilder) -> Spanned<CallNode> + 'static>;
|
pub type CurriedCall = Box<dyn FnOnce(&mut TokenTreeBuilder) -> Spanned<CallNode> + 'static>;
|
||||||
|
|
||||||
impl TokenTreeBuilder {
|
impl TokenTreeBuilder {
|
||||||
pub fn build(block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) {
|
pub fn build(block: impl FnOnce(&mut Self) -> SpannedToken) -> (SpannedToken, String) {
|
||||||
let mut builder = TokenTreeBuilder::new();
|
let mut builder = TokenTreeBuilder::new();
|
||||||
let node = block(&mut builder);
|
let node = block(&mut builder);
|
||||||
(node, builder.output)
|
(node, builder.output)
|
||||||
@ -77,8 +77,8 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_pipeline(input: Vec<PipelineElement>, span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_pipeline(input: Vec<PipelineElement>, span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Pipeline(Pipeline::new(input, span.into()))
|
Token::Pipeline(Pipeline::new(input)).into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn token_list(input: Vec<CurriedToken>) -> CurriedToken {
|
pub fn token_list(input: Vec<CurriedToken>) -> CurriedToken {
|
||||||
@ -91,8 +91,28 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_token_list(input: Vec<TokenNode>, span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_token_list(input: Vec<SpannedToken>, span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Nodes(input.spanned(span.into()))
|
let span = span.into();
|
||||||
|
Token::Pipeline(Pipeline::new(vec![PipelineElement::new(
|
||||||
|
None,
|
||||||
|
input.spanned(span),
|
||||||
|
)]))
|
||||||
|
.into_spanned(span)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn garbage(input: impl Into<String>) -> CurriedToken {
|
||||||
|
let input = input.into();
|
||||||
|
|
||||||
|
Box::new(move |b| {
|
||||||
|
let (start, end) = b.consume(&input);
|
||||||
|
b.pos = end;
|
||||||
|
|
||||||
|
TokenTreeBuilder::spanned_garbage(Span::new(start, end))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn spanned_garbage(span: impl Into<Span>) -> SpannedToken {
|
||||||
|
Token::Garbage.into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn op(input: impl Into<CompareOperator>) -> CurriedToken {
|
pub fn op(input: impl Into<CompareOperator>) -> CurriedToken {
|
||||||
@ -107,8 +127,11 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_cmp_op(input: impl Into<CompareOperator>, span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_cmp_op(
|
||||||
TokenNode::Token(UnspannedToken::CompareOperator(input.into()).into_token(span))
|
input: impl Into<CompareOperator>,
|
||||||
|
span: impl Into<Span>,
|
||||||
|
) -> SpannedToken {
|
||||||
|
Token::CompareOperator(input.into()).into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn dot() -> CurriedToken {
|
pub fn dot() -> CurriedToken {
|
||||||
@ -134,8 +157,8 @@ impl TokenTreeBuilder {
|
|||||||
pub fn spanned_eval_op(
|
pub fn spanned_eval_op(
|
||||||
input: impl Into<EvaluationOperator>,
|
input: impl Into<EvaluationOperator>,
|
||||||
span: impl Into<Span>,
|
span: impl Into<Span>,
|
||||||
) -> TokenNode {
|
) -> SpannedToken {
|
||||||
TokenNode::Token(UnspannedToken::EvaluationOperator(input.into()).into_token(span))
|
Token::EvaluationOperator(input.into()).into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn string(input: impl Into<String>) -> CurriedToken {
|
pub fn string(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -154,8 +177,8 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_string(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_string(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Token(UnspannedToken::String(input.into()).into_token(span))
|
Token::String(input.into()).into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn bare(input: impl Into<String>) -> CurriedToken {
|
pub fn bare(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -169,8 +192,8 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_bare(span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_bare(span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Token(UnspannedToken::Bare.into_token(span))
|
Token::Bare.into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn pattern(input: impl Into<String>) -> CurriedToken {
|
pub fn pattern(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -184,8 +207,8 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_pattern(input: impl Into<Span>) -> TokenNode {
|
pub fn spanned_pattern(input: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Token(UnspannedToken::GlobPattern.into_token(input))
|
Token::GlobPattern.into_spanned(input)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn external_word(input: impl Into<String>) -> CurriedToken {
|
pub fn external_word(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -199,8 +222,8 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_external_word(input: impl Into<Span>) -> TokenNode {
|
pub fn spanned_external_word(input: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Token(UnspannedToken::ExternalWord.into_token(input))
|
Token::ExternalWord.into_spanned(input)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn external_command(input: impl Into<String>) -> CurriedToken {
|
pub fn external_command(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -218,8 +241,11 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_external_command(inner: impl Into<Span>, outer: impl Into<Span>) -> TokenNode {
|
pub fn spanned_external_command(
|
||||||
TokenNode::Token(UnspannedToken::ExternalCommand(inner.into()).into_token(outer))
|
inner: impl Into<Span>,
|
||||||
|
outer: impl Into<Span>,
|
||||||
|
) -> SpannedToken {
|
||||||
|
Token::ExternalCommand(inner.into()).into_spanned(outer)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn int(input: impl Into<BigInt>) -> CurriedToken {
|
pub fn int(input: impl Into<BigInt>) -> CurriedToken {
|
||||||
@ -250,8 +276,8 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_number(input: impl Into<RawNumber>, span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_number(input: impl Into<RawNumber>, span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Token(UnspannedToken::Number(input.into()).into_token(span))
|
Token::Number(input.into()).into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn var(input: impl Into<String>) -> CurriedToken {
|
pub fn var(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -265,8 +291,21 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_var(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_var(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Token(UnspannedToken::Variable(input.into()).into_token(span))
|
Token::Variable(input.into()).into_spanned(span)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn it_var() -> CurriedToken {
|
||||||
|
Box::new(move |b| {
|
||||||
|
let (start, _) = b.consume("$");
|
||||||
|
let (inner_start, end) = b.consume("it");
|
||||||
|
|
||||||
|
TokenTreeBuilder::spanned_it_var(Span::new(inner_start, end), Span::new(start, end))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn spanned_it_var(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
||||||
|
Token::ItVariable(input.into()).into_spanned(span)
|
||||||
}
|
}
|
||||||
|
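
Builders such as `it_var` above work by consuming literal text into the builder's output and receiving the `(start, end)` byte offsets back, which then become the token's inner and outer spans (`consume("$")` followed by `consume("it")` for `$it`). Below is a minimal stand-alone version of that position tracking; `TestBuilder` is an illustrative name, not the real `TokenTreeBuilder`.

```rust
// Minimal stand-in for the test TokenTreeBuilder: it appends consumed text to
// an output string and reports the byte offsets each piece occupied.
struct TestBuilder {
    output: String,
    pos: usize,
}

impl TestBuilder {
    fn new() -> TestBuilder {
        TestBuilder { output: String::new(), pos: 0 }
    }

    // Append `input` to the output and return the (start, end) offsets it covers.
    fn consume(&mut self, input: &str) -> (usize, usize) {
        let start = self.pos;
        self.output.push_str(input);
        self.pos += input.len();
        (start, self.pos)
    }
}

fn main() {
    let mut b = TestBuilder::new();

    // Building "$it": the `$` sigil plus the inner name, as it_var does.
    let (outer_start, _) = b.consume("$");
    let (inner_start, end) = b.consume("it");

    assert_eq!(b.output, "$it");
    assert_eq!((outer_start, end), (0, 3)); // outer span covers "$it"
    assert_eq!((inner_start, end), (1, 3)); // inner span covers "it"
}
```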
|
||||||
pub fn flag(input: impl Into<String>) -> CurriedToken {
|
pub fn flag(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -280,8 +319,9 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_flag(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_flag(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Flag(Flag::new(FlagKind::Longhand, input.into(), span.into()))
|
let span = span.into();
|
||||||
|
Token::Flag(Flag::new(FlagKind::Longhand, input.into())).into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn shorthand(input: impl Into<String>) -> CurriedToken {
|
pub fn shorthand(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -295,8 +335,10 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_shorthand(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_shorthand(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into(), span.into()))
|
let span = span.into();
|
||||||
|
|
||||||
|
Token::Flag(Flag::new(FlagKind::Shorthand, input.into())).into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn call(head: CurriedToken, input: Vec<CurriedToken>) -> CurriedCall {
|
pub fn call(head: CurriedToken, input: Vec<CurriedToken>) -> CurriedCall {
|
||||||
@ -316,7 +358,7 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_call(input: Vec<TokenNode>, span: impl Into<Span>) -> Spanned<CallNode> {
|
pub fn spanned_call(input: Vec<SpannedToken>, span: impl Into<Span>) -> Spanned<CallNode> {
|
||||||
if input.is_empty() {
|
if input.is_empty() {
|
||||||
panic!("BUG: spanned call (TODO)")
|
panic!("BUG: spanned call (TODO)")
|
||||||
}
|
}
|
||||||
@ -337,7 +379,7 @@ impl TokenTreeBuilder {
|
|||||||
input: Vec<CurriedToken>,
|
input: Vec<CurriedToken>,
|
||||||
_open: &str,
|
_open: &str,
|
||||||
_close: &str,
|
_close: &str,
|
||||||
) -> (Span, Span, Span, Vec<TokenNode>) {
|
) -> (Span, Span, Span, Vec<SpannedToken>) {
|
||||||
let (start_open_paren, end_open_paren) = self.consume("(");
|
let (start_open_paren, end_open_paren) = self.consume("(");
|
||||||
let mut output = vec![];
|
let mut output = vec![];
|
||||||
for item in input {
|
for item in input {
|
||||||
@ -362,13 +404,12 @@ impl TokenTreeBuilder {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_parens(
|
pub fn spanned_parens(
|
||||||
input: impl Into<Vec<TokenNode>>,
|
input: impl Into<Vec<SpannedToken>>,
|
||||||
spans: (Span, Span),
|
spans: (Span, Span),
|
||||||
span: impl Into<Span>,
|
span: impl Into<Span>,
|
||||||
) -> TokenNode {
|
) -> SpannedToken {
|
||||||
TokenNode::Delimited(
|
Token::Delimited(DelimitedNode::new(Delimiter::Paren, spans, input.into()))
|
||||||
DelimitedNode::new(Delimiter::Paren, spans, input.into()).spanned(span.into()),
|
.into_spanned(span.into())
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn square(input: Vec<CurriedToken>) -> CurriedToken {
|
pub fn square(input: Vec<CurriedToken>) -> CurriedToken {
|
||||||
@ -380,13 +421,12 @@ impl TokenTreeBuilder {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_square(
|
pub fn spanned_square(
|
||||||
input: impl Into<Vec<TokenNode>>,
|
input: impl Into<Vec<SpannedToken>>,
|
||||||
spans: (Span, Span),
|
spans: (Span, Span),
|
||||||
span: impl Into<Span>,
|
span: impl Into<Span>,
|
||||||
) -> TokenNode {
|
) -> SpannedToken {
|
||||||
TokenNode::Delimited(
|
Token::Delimited(DelimitedNode::new(Delimiter::Square, spans, input.into()))
|
||||||
DelimitedNode::new(Delimiter::Square, spans, input.into()).spanned(span.into()),
|
.into_spanned(span)
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn braced(input: Vec<CurriedToken>) -> CurriedToken {
|
pub fn braced(input: Vec<CurriedToken>) -> CurriedToken {
|
||||||
@ -398,19 +438,18 @@ impl TokenTreeBuilder {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_brace(
|
pub fn spanned_brace(
|
||||||
input: impl Into<Vec<TokenNode>>,
|
input: impl Into<Vec<SpannedToken>>,
|
||||||
spans: (Span, Span),
|
spans: (Span, Span),
|
||||||
span: impl Into<Span>,
|
span: impl Into<Span>,
|
||||||
) -> TokenNode {
|
) -> SpannedToken {
|
||||||
TokenNode::Delimited(
|
Token::Delimited(DelimitedNode::new(Delimiter::Brace, spans, input.into()))
|
||||||
DelimitedNode::new(Delimiter::Brace, spans, input.into()).spanned(span.into()),
|
.into_spanned(span)
|
||||||
)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn sp() -> CurriedToken {
|
pub fn sp() -> CurriedToken {
|
||||||
Box::new(|b| {
|
Box::new(|b| {
|
||||||
let (start, end) = b.consume(" ");
|
let (start, end) = b.consume(" ");
|
||||||
TokenNode::Whitespace(Span::new(start, end))
|
Token::Whitespace.into_spanned((start, end))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -423,8 +462,8 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_ws(span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_ws(span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Whitespace(span.into())
|
Token::Whitespace.into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn sep(input: impl Into<String>) -> CurriedToken {
|
pub fn sep(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -436,8 +475,8 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_sep(span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_sep(span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Separator(span.into())
|
Token::Separator.into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn comment(input: impl Into<String>) -> CurriedToken {
|
pub fn comment(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -453,8 +492,10 @@ impl TokenTreeBuilder {
|
|||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn spanned_comment(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
|
pub fn spanned_comment(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
|
||||||
TokenNode::Comment(Comment::line(input, span))
|
let span = span.into();
|
||||||
|
|
||||||
|
Token::Comment(Comment::line(input)).into_spanned(span)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn consume(&mut self, input: &str) -> (usize, usize) {
|
fn consume(&mut self, input: &str) -> (usize, usize) {
|
||||||
@@ -1,89 +1,109 @@
 use crate::hir::syntax_shape::{
-color_fallible_syntax, color_syntax, expand_expr, flat_shape::FlatShape, spaced,
-BackoffColoringMode, ColorSyntax, MaybeSpaceShape,
+BackoffColoringMode, ExpandSyntax, MaybeSpaceShape, MaybeWhitespaceEof,
 };
+use crate::hir::SpannedExpression;
 use crate::TokensIterator;
 use crate::{
-hir::{self, ExpandContext, NamedArguments},
+hir::{self, NamedArguments},
 Flag,
 };
 use log::trace;
-use nu_source::{PrettyDebugWithSource, Span, Spanned, SpannedItem, Text};
 
 use nu_errors::{ArgumentError, ParseError};
-use nu_protocol::{NamedType, PositionalType, Signature};
+use nu_protocol::{NamedType, PositionalType, Signature, SyntaxShape};
+use nu_source::{HasFallibleSpan, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem};
 
+type OptionalHeadTail = (Option<Vec<hir::SpannedExpression>>, Option<NamedArguments>);
 
 pub fn parse_command_tail(
 config: &Signature,
-context: &ExpandContext,
 tail: &mut TokensIterator,
 command_span: Span,
-) -> Result<Option<(Option<Vec<hir::Expression>>, Option<NamedArguments>)>, ParseError> {
+) -> Result<Option<OptionalHeadTail>, ParseError> {
 let mut named = NamedArguments::new();
-trace_remaining("nodes", &tail, context.source());
+let mut found_error: Option<ParseError> = None;
+let mut rest_signature = config.clone();
 
+trace!(target: "nu::parse::trace_remaining", "");
 
+trace_remaining("nodes", &tail);
 
 for (name, kind) in &config.named {
-trace!(target: "nu::parse", "looking for {} : {:?}", name, kind);
+trace!(target: "nu::parse::trace_remaining", "looking for {} : {:?}", name, kind);
 
+tail.move_to(0);
 
 match &kind.0 {
 NamedType::Switch => {
-let flag = extract_switch(name, tail, context.source());
+let switch = extract_switch(name, tail);
 
-named.insert_switch(name, flag);
+match switch {
+None => named.insert_switch(name, None),
+Some((_, flag)) => {
+named.insert_switch(name, Some(*flag));
+rest_signature.remove_named(name);
+tail.color_shape(flag.color(flag.span));
+}
+}
 }
 NamedType::Help => {
-let flag = extract_switch(name, tail, context.source());
+let switch = extract_switch(name, tail);
 
-named.insert_switch(name, flag);
+match switch {
-if flag.is_some() {
+None => named.insert_switch(name, None),
-return Ok(Some((None, Some(named))));
+Some((_, flag)) => {
+named.insert_switch(name, Some(*flag));
+return Ok(Some((None, Some(named))));
+}
 }
 }
 NamedType::Mandatory(syntax_type) => {
-match extract_mandatory(config, name, tail, context.source(), command_span) {
+match extract_mandatory(config, name, tail, command_span) {
-Err(err) => return Err(err), // produce a correct diagnostic
+Err(err) => {
+// remember this error, but continue coloring
+found_error = Some(err);
+}
 Ok((pos, flag)) => {
-tail.move_to(pos);
+let result = expand_flag(tail, *syntax_type, flag, pos);
 
-if tail.at_end() {
+match result {
-return Err(ParseError::argument_error(
+Ok(expr) => {
-config.name.clone().spanned(flag.span),
+named.insert_mandatory(name, expr);
-ArgumentError::MissingValueForName(name.to_string()),
+rest_signature.remove_named(name);
-));
+}
+Err(_) => {
+found_error = Some(ParseError::argument_error(
+config.name.clone().spanned(flag.span),
+ArgumentError::MissingValueForName(name.to_string()),
+))
+}
 }
 
-let expr = expand_expr(&spaced(*syntax_type), tail, context)?;
-tail.restart();
-named.insert_mandatory(name, expr);
 }
 }
 }
 NamedType::Optional(syntax_type) => {
-match extract_optional(name, tail, context.source()) {
+match extract_optional(name, tail) {
-Err(err) => return Err(err), // produce a correct diagnostic
+Err(err) => {
+// remember this error, but continue coloring
+found_error = Some(err);
+}
 Ok(Some((pos, flag))) => {
-tail.move_to(pos);
+let result = expand_flag(tail, *syntax_type, flag, pos);
 
-if tail.at_end() {
+match result {
-return Err(ParseError::argument_error(
+Ok(expr) => {
-config.name.clone().spanned(flag.span),
+named.insert_optional(name, Some(expr));
-ArgumentError::MissingValueForName(name.to_string()),
+rest_signature.remove_named(name);
-));
+}
+Err(_) => {
+found_error = Some(ParseError::argument_error(
+config.name.clone().spanned(flag.span),
+ArgumentError::MissingValueForName(name.to_string()),
+))
+}
 }
 
-let expr = expand_expr(&spaced(*syntax_type), tail, context);
 
-match expr {
-Err(_) => named.insert_optional(name, None),
-Ok(expr) => named.insert_optional(name, Some(expr)),
-}
 
-tail.restart();
 }
 
 Ok(None) => {
-tail.restart();
 named.insert_optional(name, None);
 }
 }
@@ -91,56 +111,88 @@ pub fn parse_command_tail(
 };
 }
 
-trace_remaining("after named", &tail, context.source());
+trace_remaining("after named", &tail);
 
 let mut positional = vec![];
 
 for arg in &config.positional {
-trace!(target: "nu::parse", "Processing positional {:?}", arg);
+trace!(target: "nu::parse::trace_remaining", "Processing positional {:?}", arg);
 
-match &arg.0 {
+tail.move_to(0);
-PositionalType::Mandatory(..) => {
 
-if tail.at_end_possible_ws() {
+let result = expand_spaced_expr(arg.0.syntax_type(), tail);
-return Err(ParseError::argument_error(
 
-config.name.clone().spanned(command_span),
+match result {
-ArgumentError::MissingMandatoryPositional(arg.0.name().to_string()),
+Err(_) => match &arg.0 {
-));
+PositionalType::Mandatory(..) => {
-}
+if found_error.is_none() {
-}
+found_error = Some(ParseError::argument_error(
+config.name.clone().spanned(command_span),
+ArgumentError::MissingMandatoryPositional(arg.0.name().to_string()),
+));
+}
 
-PositionalType::Optional(..) => {
-if tail.at_end_possible_ws() {
 break;
 }
 
+PositionalType::Optional(..) => {
+if tail.expand_syntax(MaybeWhitespaceEof).is_ok() {
+break;
+}
+}
+},
+Ok(result) => {
+rest_signature.shift_positional();
+positional.push(result);
 }
 }
 
-let result = expand_expr(&spaced(arg.0.syntax_type()), tail, context)?;
-positional.push(result);
 }
 
-trace_remaining("after positional", &tail, context.source());
+trace_remaining("after positional", &tail);
 
 if let Some((syntax_type, _)) = config.rest_positional {
 let mut out = vec![];
 
 loop {
-if tail.at_end_possible_ws() {
+if found_error.is_some() {
 break;
 }
 
-let next = expand_expr(&spaced(syntax_type), tail, context)?;
+tail.move_to(0);
 
-out.push(next);
+trace_remaining("start rest", &tail);
+eat_any_whitespace(tail);
+trace_remaining("after whitespace", &tail);
 
+if tail.at_end() {
+break;
+}
 
+match tail.expand_syntax(syntax_type) {
+Err(err) => found_error = Some(err),
+Ok(next) => out.push(next),
+};
 }
 
 positional.extend(out);
 }
 
-trace_remaining("after rest", &tail, context.source());
+eat_any_whitespace(tail);
 
-trace!(target: "nu::parse", "Constructed positional={:?} named={:?}", positional, named);
+// Consume any remaining tokens with backoff coloring mode
+tail.expand_infallible(BackoffColoringMode::new(rest_signature.allowed()));
 
+// This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring
+// this solution.
+tail.sort_shapes();
 
+if let Some(err) = found_error {
+return Err(err);
+}
 
+trace_remaining("after rest", &tail);
 
+trace!(target: "nu::parse::trace_remaining", "Constructed positional={:?} named={:?}", positional, named);
 
 let positional = if positional.is_empty() {
 None
@@ -156,237 +208,72 @@ pub fn parse_command_tail(
 Some(named)
 };
 
-trace!(target: "nu::parse", "Normalized positional={:?} named={:?}", positional, named);
+trace!(target: "nu::parse::trace_remaining", "Normalized positional={:?} named={:?}", positional, named);
 
 Ok(Some((positional, named)))
 }
 
-#[derive(Debug)]
+fn eat_any_whitespace(tail: &mut TokensIterator) {
-struct ColoringArgs {
+loop {
-vec: Vec<Option<Vec<Spanned<FlatShape>>>>,
+match tail.expand_infallible(MaybeSpaceShape) {
-}
+None => break,
+Some(_) => continue,
-impl ColoringArgs {
-fn new(len: usize) -> ColoringArgs {
-let vec = vec![None; len];
-ColoringArgs { vec }
-}
 
-fn insert(&mut self, pos: usize, shapes: Vec<Spanned<FlatShape>>) {
-self.vec[pos] = Some(shapes);
-}
 
-fn spread_shapes(self, shapes: &mut Vec<Spanned<FlatShape>>) {
-for item in self.vec {
-match item {
-None => {}
-Some(vec) => {
-shapes.extend(vec);
-}
-}
 }
 }
 }
 
-#[derive(Debug, Copy, Clone)]
+fn expand_flag(
-pub struct CommandTailShape;
+token_nodes: &mut TokensIterator,
+syntax_type: SyntaxShape,
+flag: Spanned<Flag>,
+pos: usize,
+) -> Result<SpannedExpression, ()> {
+token_nodes.color_shape(flag.color(flag.span));
 
-impl ColorSyntax for CommandTailShape {
+let result = token_nodes.atomic_parse(|token_nodes| {
-type Info = ();
+token_nodes.move_to(pos);
-type Input = Signature;
 
-fn name(&self) -> &'static str {
+if token_nodes.at_end() {
-"CommandTailShape"
+return Err(ParseError::unexpected_eof("flag", Span::unknown()));
-}
 
-fn color_syntax<'a, 'b>(
-&self,
-signature: &Signature,
-token_nodes: &'b mut TokensIterator<'a>,
-context: &ExpandContext,
-) -> Self::Info {
-use nu_protocol::SyntaxShape;
 
-let mut args = ColoringArgs::new(token_nodes.len());
-trace_remaining("nodes", &token_nodes, context.source());
 
-fn insert_flag(
-token_nodes: &mut TokensIterator,
-syntax_type: SyntaxShape,
-args: &mut ColoringArgs,
-flag: Flag,
-pos: usize,
-context: &ExpandContext,
-) {
-let (_, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| {
-token_nodes.color_shape(flag.color());
-token_nodes.move_to(pos);
 
-if token_nodes.at_end() {
-return Ok(());
-}
 
-// We still want to color the flag even if the following tokens don't match, so don't
-// propagate the error to the parent atomic block if it fails
-let _ = token_nodes.atomic(|token_nodes| {
-// We can live with unmatched syntax after a mandatory flag
-color_syntax(&MaybeSpaceShape, token_nodes, context);
 
-// If the part after a mandatory flag isn't present, that's ok, but we
-// should roll back any whitespace we chomped
-color_fallible_syntax(&syntax_type, token_nodes, context)?;
 
-Ok(())
-});
 
-Ok(())
-});
 
-args.insert(pos, shapes);
-token_nodes.restart();
 }
 
-for (name, kind) in &signature.named {
+let expr = expand_spaced_expr(syntax_type, token_nodes)?;
-trace!(target: "nu::color_syntax", "looking for {} : {:?}", name, kind);
 
-match &kind.0 {
+Ok(expr)
-NamedType::Switch | NamedType::Help => {
+});
-if let Some((pos, flag)) =
-token_nodes.extract(|t| t.as_flag(name, context.source()))
-{
-args.insert(pos, vec![flag.color()])
-}
-}
-NamedType::Mandatory(syntax_type) => {
-match extract_mandatory(
-signature,
-name,
-token_nodes,
-context.source(),
-Span::unknown(),
-) {
-Err(_) => {
-// The mandatory flag didn't exist at all, so there's nothing to color
-}
-Ok((pos, flag)) => {
-insert_flag(token_nodes, *syntax_type, &mut args, flag, pos, context)
-}
-}
-}
-NamedType::Optional(syntax_type) => {
-match extract_optional(name, token_nodes, context.source()) {
-Err(_) => {
-// The optional flag didn't exist at all, so there's nothing to color
-}
-Ok(Some((pos, flag))) => {
-insert_flag(token_nodes, *syntax_type, &mut args, flag, pos, context)
-}
 
-Ok(None) => {
+let expr = result.map_err(|_| ())?;
-token_nodes.restart();
+Ok(expr)
-}
-}
-}
-};
-}
 
-trace_remaining("after named", &token_nodes, context.source());
 
-for arg in &signature.positional {
-trace!("Processing positional {:?}", arg);
 
-match &arg.0 {
-PositionalType::Mandatory(..) => {
-if token_nodes.at_end() {
-break;
-}
-}
 
-PositionalType::Optional(..) => {
-if token_nodes.at_end() {
-break;
-}
-}
-}
 
-let pos = token_nodes.pos(false);
 
-match pos {
-None => break,
-Some(pos) => {
-// We can live with an unmatched positional argument. Hopefully it will be
-// matched by a future token
-let (_, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| {
-color_syntax(&MaybeSpaceShape, token_nodes, context);
 
-// If no match, we should roll back any whitespace we chomped
-color_fallible_syntax(&arg.0.syntax_type(), token_nodes, context)?;
 
-Ok(())
-});
 
-args.insert(pos, shapes);
-}
-}
-}
 
-trace_remaining("after positional", &token_nodes, context.source());
 
-if let Some((syntax_type, _)) = signature.rest_positional {
-loop {
-if token_nodes.at_end_possible_ws() {
-break;
-}
 
-let pos = token_nodes.pos(false);
 
-match pos {
-None => break,
-Some(pos) => {
-// If any arguments don't match, we'll fall back to backoff coloring mode
-let (result, shapes) = token_nodes.atomic_returning_shapes(|token_nodes| {
-color_syntax(&MaybeSpaceShape, token_nodes, context);
 
-// If no match, we should roll back any whitespace we chomped
-color_fallible_syntax(&syntax_type, token_nodes, context)?;
 
-Ok(())
-});
 
-args.insert(pos, shapes);
 
-match result {
-Err(_) => break,
-Ok(_) => continue,
-}
-}
-}
-}
-}
 
-token_nodes.silently_mutate_shapes(|shapes| args.spread_shapes(shapes));
 
-// Consume any remaining tokens with backoff coloring mode
-color_syntax(&BackoffColoringMode, token_nodes, context);
 
-// This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring
-// this solution.
-token_nodes.sort_shapes()
-}
 }
 
-fn extract_switch(name: &str, tokens: &mut hir::TokensIterator<'_>, source: &Text) -> Option<Flag> {
+fn expand_spaced_expr<
-tokens.extract(|t| t.as_flag(name, source)).map(|f| f.1)
+T: HasFallibleSpan + PrettyDebugWithSource + Clone + std::fmt::Debug + 'static,
+>(
+syntax: impl ExpandSyntax<Output = Result<T, ParseError>>,
+token_nodes: &mut TokensIterator,
+) -> Result<T, ParseError> {
+token_nodes.atomic_parse(|token_nodes| {
+token_nodes.expand_infallible(MaybeSpaceShape);
+token_nodes.expand_syntax(syntax)
+})
+}
 
+fn extract_switch(
+name: &str,
+tokens: &mut hir::TokensIterator<'_>,
+) -> Option<(usize, Spanned<Flag>)> {
+let source = tokens.source();
+tokens.extract(|t| t.as_flag(name, &source).map(|flag| flag.spanned(t.span())))
 }
 
 fn extract_mandatory(
 config: &Signature,
 name: &str,
 tokens: &mut hir::TokensIterator<'_>,
-source: &Text,
 span: Span,
-) -> Result<(usize, Flag), ParseError> {
+) -> Result<(usize, Spanned<Flag>), ParseError> {
-let flag = tokens.extract(|t| t.as_flag(name, source));
+let source = tokens.source();
+let flag = tokens.extract(|t| t.as_flag(name, &source).map(|flag| flag.spanned(t.span())));
 
 match flag {
 None => Err(ParseError::argument_error(
@@ -404,9 +291,9 @@ fn extract_mandatory(
 fn extract_optional(
 name: &str,
 tokens: &mut hir::TokensIterator<'_>,
-source: &Text,
-) -> Result<Option<(usize, Flag)>, ParseError> {
+) -> Result<Option<(usize, Spanned<Flag>)>, ParseError> {
-let flag = tokens.extract(|t| t.as_flag(name, source));
+let source = tokens.source();
+let flag = tokens.extract(|t| t.as_flag(name, &source).map(|flag| flag.spanned(t.span())));
 
 match flag {
 None => Ok(None),
@@ -417,15 +304,24 @@ fn extract_optional(
 }
 }
 
-pub fn trace_remaining(desc: &'static str, tail: &hir::TokensIterator<'_>, source: &Text) {
+pub fn trace_remaining(desc: &'static str, tail: &hir::TokensIterator<'_>) {
+let offset = tail.clone().span_at_cursor();
+let source = tail.source();
 
 trace!(
-target: "nu::parse",
+target: "nu::parse::trace_remaining",
-"{} = {:?}",
+"{} = {}",
 desc,
 itertools::join(
 tail.debug_remaining()
 .iter()
-.map(|i| format!("%{}%", i.debug(source))),
+.map(|val| {
+if val.span().start() == offset.start() {
+format!("<|> %{}%", val.debug(&source))
+} else {
+format!("%{}%", val.debug(&source))
+}
+}),
 " "
 )
 );
@@ -109,6 +109,35 @@ pub struct Signature {
 pub is_filter: bool,
 }
 
+impl Signature {
+pub fn shift_positional(&mut self) {
+self.positional = Vec::from(&self.positional[1..]);
+}
+
+pub fn remove_named(&mut self, name: &str) {
+self.named.remove(name);
+}
+
+pub fn allowed(&self) -> Vec<String> {
+let mut allowed = indexmap::IndexSet::new();
+
+for (name, _) in &self.named {
+allowed.insert(format!("--{}", name));
+}
+
+for (ty, _) in &self.positional {
+let shape = ty.syntax_type();
+allowed.insert(shape.display());
+}
+
+if let Some((shape, _)) = &self.rest_positional {
+allowed.insert(shape.display());
+}
+
+allowed.into_iter().collect()
+}
+}
 
 impl PrettyDebugWithSource for Signature {
 /// Prepare a Signature for pretty-printing
 fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
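A minimal usage sketch (not part of the diff itself): the hint list produced by the new `Signature::allowed` is what `parse_command_tail` hands to the error-correcting colorer, so leftover tokens still get colored and an "expected one of ..." style hint can be surfaced later. `rest_signature` and `tail` are assumed to be the pruned signature clone and the tokens iterator from that function.

    // Sketch, assuming `rest_signature: Signature` and `tail: &mut TokensIterator`.
    let hints: Vec<String> = rest_signature.allowed();
    tail.expand_infallible(BackoffColoringMode::new(hints));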
@@ -30,16 +30,16 @@ impl PrettyDebug for SyntaxShape {
 /// Prepare SyntaxShape for pretty-printing
 fn pretty(&self) -> DebugDocBuilder {
 b::kind(match self {
-SyntaxShape::Any => "any shape",
+SyntaxShape::Any => "any",
-SyntaxShape::String => "string shape",
+SyntaxShape::String => "string",
-SyntaxShape::Member => "member shape",
+SyntaxShape::Member => "member",
-SyntaxShape::ColumnPath => "column path shape",
+SyntaxShape::ColumnPath => "column path",
-SyntaxShape::Number => "number shape",
+SyntaxShape::Number => "number",
-SyntaxShape::Range => "range shape",
+SyntaxShape::Range => "range",
-SyntaxShape::Int => "integer shape",
+SyntaxShape::Int => "integer",
-SyntaxShape::Path => "file path shape",
+SyntaxShape::Path => "file path",
-SyntaxShape::Pattern => "pattern shape",
+SyntaxShape::Pattern => "pattern",
-SyntaxShape::Block => "block shape",
+SyntaxShape::Block => "block",
 })
 }
 }
@@ -6,10 +6,11 @@ mod tracable;
 
 pub use self::meta::{
 span_for_spanned_list, tag_for_tagged_list, AnchorLocation, HasFallibleSpan, HasSpan, HasTag,
-Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem,
+IntoSpanned, Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem,
 };
 pub use self::pretty::{
-b, DebugDoc, DebugDocBuilder, PrettyDebug, PrettyDebugWithSource, ShellAnnotation,
+b, DebugDoc, DebugDocBuilder, PrettyDebug, PrettyDebugRefineKind, PrettyDebugWithSource,
+ShellAnnotation,
 };
 pub use self::term_colored::TermColored;
 pub use self::text::Text;
@@ -490,6 +490,10 @@ impl Span {
 }
 }
 
+pub fn contains(&self, pos: usize) -> bool {
+self.start <= pos && self.end >= pos
+}
 
 pub fn since(&self, other: impl Into<Span>) -> Span {
 let other = other.into();
 
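Worth noting about the new `Span::contains` above: the end offset is treated as inclusive, so a cursor sitting exactly on `span.end` still counts as inside the span. A small illustrative check:

    assert!(Span::new(3, 7).contains(7));
    assert!(!Span::new(3, 7).contains(8));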
@@ -568,29 +572,66 @@ impl language_reporting::ReportingSpan for Span {
 }
 }
 
-pub trait HasSpan: PrettyDebugWithSource {
+pub trait IntoSpanned {
-fn span(&self) -> Span;
+type Output: HasFallibleSpan;
 
+fn into_spanned(self, span: impl Into<Span>) -> Self::Output;
 }
 
-pub trait HasFallibleSpan: PrettyDebugWithSource {
+impl<T: HasFallibleSpan> IntoSpanned for T {
-fn maybe_span(&self) -> Option<Span>;
+type Output = T;
-}
+fn into_spanned(self, _span: impl Into<Span>) -> Self::Output {
+self
-impl<T: HasSpan> HasFallibleSpan for T {
-fn maybe_span(&self) -> Option<Span> {
-Some(HasSpan::span(self))
 }
 }
 
-impl<T> HasSpan for Spanned<T>
+pub trait HasSpan {
+fn span(&self) -> Span;
+}
 
+impl<T, E> HasSpan for Result<T, E>
 where
-Spanned<T>: PrettyDebugWithSource,
+T: HasSpan,
 {
+fn span(&self) -> Span {
+match self {
+Result::Ok(val) => val.span(),
+Result::Err(_) => Span::unknown(),
+}
+}
+}
 
+impl<T> HasSpan for Spanned<T> {
 fn span(&self) -> Span {
 self.span
 }
 }
 
+pub trait HasFallibleSpan {
+fn maybe_span(&self) -> Option<Span>;
+}
 
+impl HasFallibleSpan for bool {
+fn maybe_span(&self) -> Option<Span> {
+None
+}
+}
 
+impl HasFallibleSpan for () {
+fn maybe_span(&self) -> Option<Span> {
+None
+}
+}
 
+impl<T> HasFallibleSpan for T
+where
+T: HasSpan,
+{
+fn maybe_span(&self) -> Option<Span> {
+Some(HasSpan::span(self))
+}
+}
 
 impl PrettyDebugWithSource for Option<Span> {
 fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
 match self {
@@ -609,8 +650,8 @@ impl HasFallibleSpan for Option<Span> {
 impl PrettyDebugWithSource for Span {
 fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
 b::typed(
-"spanned",
+"span",
-b::keyword("for") + b::space() + b::description(format!("{:?}", source)),
+b::keyword("for") + b::space() + b::description(format!("{:?}", self.slice(source))),
 )
 }
 }
@@ -628,15 +669,12 @@ where
 fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
 match self {
 None => b::description("nothing"),
-Some(v) => v.pretty_debug(source),
+Some(v) => v.pretty_debug(v.span.slice(source)),
 }
 }
 }
 
-impl<T> HasFallibleSpan for Option<Spanned<T>>
+impl<T> HasFallibleSpan for Option<Spanned<T>> {
-where
-Spanned<T>: PrettyDebugWithSource,
-{
 fn maybe_span(&self) -> Option<Span> {
 match self {
 None => None,
@@ -657,10 +695,7 @@ where
 }
 }
 
-impl<T> HasFallibleSpan for Option<Tagged<T>>
+impl<T> HasFallibleSpan for Option<Tagged<T>> {
-where
-Tagged<T>: PrettyDebugWithSource,
-{
 fn maybe_span(&self) -> Option<Span> {
 match self {
 None => None,
@@ -669,10 +704,7 @@ where
 }
 }
 
-impl<T> HasSpan for Tagged<T>
+impl<T> HasSpan for Tagged<T> {
-where
-Tagged<T>: PrettyDebugWithSource,
-{
 fn span(&self) -> Span {
 self.tag.span
 }
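A small sketch of how the reworked span traits compose (the wrapping call appears verbatim in the TokenTreeBuilder changes earlier in this commit; the blanket impl is the one shown in this hunk). The `SpannedToken` annotation is an assumption based on the builder code, since `Token`'s own `IntoSpanned` impl is not part of this hunk.

    // A bare token is wrapped into a spanned token...
    let ws: SpannedToken = Token::Whitespace.into_spanned(Span::new(0, 1));
    // ...while a value that already carries a (fallible) span passes through
    // unchanged, courtesy of the blanket impl<T: HasFallibleSpan> IntoSpanned for T.
    let same = ws.into_spanned(Span::new(5, 9)); // still spans 0..1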
@@ -1,3 +1,4 @@
+use crate::meta::Spanned;
 use crate::term_colored::TermColored;
 use crate::text::Text;
 use derive_new::new;
@@ -98,6 +99,21 @@ pub struct DebugDocBuilder {
 pub inner: PrettyDebugDocBuilder,
 }
 
+impl PrettyDebug for bool {
+fn pretty(&self) -> DebugDocBuilder {
+match self {
+true => b::primitive("true"),
+false => b::primitive("false"),
+}
+}
+}
 
+impl PrettyDebug for () {
+fn pretty(&self) -> DebugDocBuilder {
+b::primitive("nothing")
+}
+}
 
 impl PrettyDebug for DebugDocBuilder {
 fn pretty(&self) -> DebugDocBuilder {
 self.clone()
@@ -156,7 +172,7 @@ impl DebugDocBuilder {
 }
 
 pub fn typed(kind: &str, value: DebugDocBuilder) -> DebugDocBuilder {
-b::delimit("(", b::kind(kind) + b::space() + value.group(), ")").group()
+b::kind(kind) + b::delimit("[", value.group(), "]")
 }
 
 pub fn subtyped(
@@ -340,9 +356,23 @@ pub struct DebugDoc {
 pub inner: PrettyDebugDoc,
 }
 
+#[derive(Debug, Copy, Clone)]
+pub enum PrettyDebugRefineKind {
+ContextFree,
+WithContext,
+}
 
 pub trait PrettyDebugWithSource: Sized {
 fn pretty_debug(&self, source: &str) -> DebugDocBuilder;
 
+fn refined_pretty_debug(
+&self,
+_refine: PrettyDebugRefineKind,
+source: &str,
+) -> DebugDocBuilder {
+self.pretty_debug(source)
+}
 
 // This is a transitional convenience method
 fn debug(&self, source: impl Into<Text>) -> String
 where
@@ -359,12 +389,27 @@ pub trait PrettyDebugWithSource: Sized {
 }
 }
 
+impl<T: PrettyDebug> PrettyDebug for Spanned<T> {
+fn pretty(&self) -> DebugDocBuilder {
+self.item.pretty()
+}
+}
 
 impl<T: PrettyDebug> PrettyDebugWithSource for T {
 fn pretty_debug(&self, _source: &str) -> DebugDocBuilder {
 self.pretty()
 }
 }
 
+impl<T: PrettyDebugWithSource, E> PrettyDebugWithSource for Result<T, E> {
+fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
+match self {
+Err(_) => b::error("error"),
+Ok(val) => val.pretty_debug(source),
+}
+}
+}
 
 pub struct DebuggableWithSource<T: PrettyDebugWithSource> {
 inner: T,
 source: Text,
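A short sketch of the new refinement hook: a caller can ask for a context-free rendering, and because the default method just delegates to `pretty_debug`, existing implementors keep working unchanged. `value` and `source` are placeholders here, not names from the diff.

    // Falls back to the ordinary pretty_debug output unless a type overrides it.
    let doc = value.refined_pretty_debug(PrettyDebugRefineKind::ContextFree, source);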
@@ -38,7 +38,7 @@ macro_rules! nu {
 });
 
 let mut process = match Command::new($crate::fs::executable_path())
-.env_clear()
+// .env_clear()
 .env("PATH", dummies)
 .stdin(Stdio::piped())
 .stdout(Stdio::piped())
@@ -53,19 +53,26 @@ macro_rules! nu {
 .write_all(commands.as_bytes())
 .expect("couldn't write to stdin");
 
 let output = process
 .wait_with_output()
 .expect("couldn't read from stdout");
 
-let out = String::from_utf8_lossy(&output.stdout);
+let out = $crate::macros::read_std(&output.stdout);
-let out = out.lines().skip(1).collect::<Vec<_>>().join("\n");
+let err = $crate::macros::read_std(&output.stderr);
-let out = out.replace("\r\n", "");
 
-let out = out.replace("\n", "");
+println!("=== stderr\n{}", err);
 
 out
 }};
 }
 
+pub fn read_std(std: &[u8]) -> String {
+let out = String::from_utf8_lossy(std);
+let out = out.lines().skip(1).collect::<Vec<_>>().join("\n");
+let out = out.replace("\r\n", "");
+out.replace("\n", "")
+}
 
 #[macro_export]
 macro_rules! nu_error {
 (cwd: $cwd:expr, $path:expr, $($part:expr),*) => {{
@@ -106,7 +113,7 @@ macro_rules! nu_error {
 });
 
 let mut process = Command::new($crate::fs::executable_path())
-.env_clear()
+// .env_clear()
 .env("PATH", dummies)
 .stdout(Stdio::piped())
 .stdin(Stdio::piped())
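A quick sanity sketch of the extracted helper (assuming `read_std` is in scope): it keeps the old normalization, dropping the banner line and stripping newlines, but is now shared by the stdout and stderr paths of the test macros.

    let out = read_std(b"welcome banner\nactual output\r\n");
    assert_eq!(out, "actual output");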
 src/cli.rs | 64
@@ -8,9 +8,10 @@ use crate::data::config;
 use crate::git::current_branch;
 use crate::prelude::*;
 use nu_errors::ShellError;
+use nu_parser::hir::Expression;
 use nu_parser::{
-expand_syntax, hir, ClassifiedCommand, ClassifiedPipeline, InternalCommand, PipelineShape,
+hir, ClassifiedCommand, ClassifiedPipeline, InternalCommand, PipelineShape, SpannedToken,
-TokenNode, TokensIterator,
+TokensIterator,
 };
 use nu_protocol::{Signature, UntaggedValue, Value};
 
@@ -60,16 +61,16 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel
 let name = params.name.clone();
 let fname = fname.to_string();
 
-if context.get_command(&name)?.is_some() {
+if context.get_command(&name).is_some() {
 trace!("plugin {:?} already loaded.", &name);
 } else if params.is_filter {
-context.add_commands(vec![whole_stream_command(
+context.add_commands(vec![whole_stream_command(PluginCommand::new(
-PluginCommand::new(name, fname, params),
+name, fname, params,
-)])?;
+))]);
 } else {
 context.add_commands(vec![whole_stream_command(PluginSink::new(
 name, fname, params,
-))])?;
+))]);
 }
 Ok(())
 }
@@ -346,7 +347,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
 whole_stream_command(FromXML),
 whole_stream_command(FromYAML),
 whole_stream_command(FromYML),
-])?;
+]);
 
 cfg_if::cfg_if! {
 if #[cfg(data_processing_primitives)] {
@@ -355,7 +356,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
 whole_stream_command(EvaluateBy),
 whole_stream_command(TSortBy),
 whole_stream_command(MapMaxBy),
-])?;
+]);
 }
 }
 
@@ -363,7 +364,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
 {
 context.add_commands(vec![whole_stream_command(
 crate::commands::clip::clipboard::Clip,
-)])?;
+)]);
 }
 }
 
@@ -402,7 +403,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
 continue;
 }
 
-let cwd = context.shell_manager.path()?;
+let cwd = context.shell_manager.path();
 
 rl.set_helper(Some(crate::shell::Helper::new(context.clone())));
 
@@ -479,7 +480,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
 
 context.with_host(|host| {
 print_err(err, host, &Text::from(line.clone()));
-})?;
+});
 
 context.maybe_print_errors(Text::from(line.clone()));
 }
@@ -501,7 +502,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
 let _ = rl.save_history(&History::path());
 std::process::exit(0);
 } else {
-context.with_host(|host| host.stdout("CTRL-C pressed (again to quit)"))?;
+context.with_host(|host| host.stdout("CTRL-C pressed (again to quit)"));
 ctrlcbreak = true;
 continue;
 }
@@ -606,26 +607,33 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
 debug!("=== Parsed ===");
 debug!("{:#?}", result);
 
-let mut pipeline = match classify_pipeline(&result, ctx, &Text::from(line)) {
+let mut pipeline = classify_pipeline(&result, ctx, &Text::from(line));
-Ok(pipeline) => pipeline,
 
-Err(err) => return LineResult::Error(line.to_string(), err),
+if let Some(failure) = pipeline.failed {
+return LineResult::Error(line.to_string(), failure.into());
+}
 
+let should_push = match pipeline.commands.list.last() {
+Some(ClassifiedCommand::External(_)) => false,
+_ => true,
 };
 
-match pipeline.commands.list.last() {
+if should_push {
-Some(ClassifiedCommand::External(_)) => {}
+pipeline
-_ => pipeline
 .commands
 .list
 .push(ClassifiedCommand::Internal(InternalCommand {
 name: "autoview".to_string(),
 name_tag: Tag::unknown(),
 args: hir::Call::new(
-Box::new(hir::Expression::synthetic_string("autoview")),
+Box::new(
+Expression::synthetic_string("autoview").into_expr(Span::unknown()),
+),
 None,
 None,
 Span::unknown(),
 ),
-})),
+}));
 }
 
 // Check the config to see if we need to update the path
@@ -650,19 +658,15 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
 }
 
 pub fn classify_pipeline(
-pipeline: &TokenNode,
+pipeline: &SpannedToken,
 context: &Context,
 source: &Text,
-) -> Result<ClassifiedPipeline, ShellError> {
+) -> ClassifiedPipeline {
 let pipeline_list = vec![pipeline.clone()];
-let mut iterator = TokensIterator::all(&pipeline_list, source.clone(), pipeline.span());
+let expand_context = context.expand_context(source);
+let mut iterator = TokensIterator::new(&pipeline_list, expand_context, pipeline.span());
 
-let result = expand_syntax(
+let result = iterator.expand_infallible(PipelineShape);
-&PipelineShape,
-&mut iterator,
-&context.expand_context(source)?,
-)
-.map_err(|err| err.into());
 
 if log_enabled!(target: "nu::expand_syntax", log::Level::Debug) {
 outln!("");
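For reference, the new calling convention in one place (this mirrors the `process_line` change above): classification is now infallible at the call site, and parse failures travel on the pipeline itself instead of an outer `Result`.

    let pipeline = classify_pipeline(&result, ctx, &Text::from(line));
    if let Some(failure) = pipeline.failed {
        return LineResult::Error(line.to_string(), failure.into());
    }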
@@ -70,7 +70,7 @@ pub fn autoview(
 }
 }
 };
-if let Some(table) = table? {
+if let Some(table) = table {
 let mut new_output_stream: OutputStream = stream.to_output_stream();
 let mut finished = false;
 let mut current_idx = 0;
@@ -100,7 +100,7 @@ pub fn autoview(
 let first = &input[0];
 
 let mut host = context.host.clone();
-let mut host = host.lock();
+let host = host.lock();
 
 crate::cli::print_err(first.value.expect_error(), &*host, &context.source);
 return;
@@ -108,13 +108,12 @@ pub fn autoview(
 
 let mut command_args = raw.with_input(input);
 let mut named_args = NamedArguments::new();
-named_args.insert_optional("start_number", Some(Expression::number(current_idx, Tag::unknown())));
+named_args.insert_optional("start_number", Some(Expression::number(current_idx).into_expr(Span::unknown())));
 command_args.call_info.args.named = Some(named_args);
 
 let result = table.run(command_args, &context.commands);
 result.collect::<Vec<_>>().await;
 
 if finished {
 break;
 } else {
@@ -130,7 +129,7 @@ pub fn autoview(
 value: UntaggedValue::Primitive(Primitive::String(ref s)),
 tag: Tag { anchor, span },
 } if anchor.is_some() => {
-if let Some(text) = text? {
+if let Some(text) = text {
 let mut stream = VecDeque::new();
 stream.push_back(UntaggedValue::string(s).into_value(Tag { anchor, span }));
 let result = text.run(raw.with_input(stream.into()), &context.commands);
@@ -149,7 +148,7 @@ pub fn autoview(
 value: UntaggedValue::Primitive(Primitive::Line(ref s)),
 tag: Tag { anchor, span },
 } if anchor.is_some() => {
-if let Some(text) = text? {
+if let Some(text) = text {
 let mut stream = VecDeque::new();
 stream.push_back(UntaggedValue::string(s).into_value(Tag { anchor, span }));
 let result = text.run(raw.with_input(stream.into()), &context.commands);
@@ -184,7 +183,7 @@ pub fn autoview(
 }
 
 Value { value: UntaggedValue::Primitive(Primitive::Binary(ref b)), .. } => {
-if let Some(binary) = binary? {
+if let Some(binary) = binary {
 let mut stream = VecDeque::new();
 stream.push_back(x);
 let result = binary.run(raw.with_input(stream.into()), &context.commands);
@@ -199,7 +198,7 @@ pub fn autoview(
 yield Err(e);
 }
 Value { value: ref item, .. } => {
-if let Some(table) = table? {
+if let Some(table) = table {
 let mut stream = VecDeque::new();
 stream.push_back(x);
 let result = table.run(raw.with_input(stream.into()), &context.commands);
@@ -102,7 +102,7 @@ async fn run_with_iterator_arg(
 input: Option<InputStream>,
 is_last: bool,
 ) -> Result<Option<InputStream>, ShellError> {
-let path = context.shell_manager.path()?;
+let path = context.shell_manager.path();
 
 let mut inputs: InputStream = if let Some(input) = input {
 trace_stream!(target: "nu::trace_stream::external::it", "input" = input)
@@ -180,7 +180,7 @@ async fn run_with_stdin(
 input: Option<InputStream>,
 is_last: bool,
 ) -> Result<Option<InputStream>, ShellError> {
-let path = context.shell_manager.path()?;
+let path = context.shell_manager.path();
 
 let mut inputs: InputStream = if let Some(input) = input {
 trace_stream!(target: "nu::trace_stream::external::stdin", "input" = input)
@@ -47,18 +47,18 @@ pub(crate) async fn run_internal_command(
 match item {
 Ok(ReturnSuccess::Action(action)) => match action {
 CommandAction::ChangePath(path) => {
-context.shell_manager.set_path(path)?;
+context.shell_manager.set_path(path);
 }
 CommandAction::Exit => std::process::exit(0), // TODO: save history.txt
 CommandAction::Error(err) => {
-context.error(err)?;
+context.error(err);
 break;
 }
 CommandAction::AutoConvert(tagged_contents, extension) => {
 let contents_tag = tagged_contents.tag.clone();
 let command_name = format!("from-{}", extension);
 let command = command.clone();
-if let Some(converter) = context.registry.get_command(&command_name)? {
+if let Some(converter) = context.registry.get_command(&command_name) {
 let new_args = RawCommandArgs {
 host: context.host.clone(),
 ctrl_c: context.ctrl_c.clone(),
@@ -100,43 +100,39 @@ pub(crate) async fn run_internal_command(
 value: UntaggedValue::Primitive(Primitive::String(cmd)),
 tag,
 } => {
-let result = context.shell_manager.insert_at_current(Box::new(
+context.shell_manager.insert_at_current(Box::new(
 HelpShell::for_command(
 UntaggedValue::string(cmd).into_value(tag),
 &context.registry(),
 )?,
 ));
 
-result?
 }
 _ => {
-let result = context.shell_manager.insert_at_current(Box::new(
+context.shell_manager.insert_at_current(Box::new(
 HelpShell::index(&context.registry())?,
 ));
 
-result?
 }
 }
 }
 CommandAction::EnterValueShell(value) => {
 context
 .shell_manager
-.insert_at_current(Box::new(ValueShell::new(value)))?;
+.insert_at_current(Box::new(ValueShell::new(value)));
 }
 CommandAction::EnterShell(location) => {
 context.shell_manager.insert_at_current(Box::new(
 FilesystemShell::with_location(location, context.registry().clone()),
-))?;
+));
 }
 CommandAction::PreviousShell => {
-context.shell_manager.prev()?;
+context.shell_manager.prev();
 }
 CommandAction::NextShell => {
-context.shell_manager.next()?;
+context.shell_manager.next();
 }
 CommandAction::LeaveShell => {
-context.shell_manager.remove_at_current()?;
+context.shell_manager.remove_at_current();
-if context.shell_manager.is_empty()? {
+if context.shell_manager.is_empty() {
 std::process::exit(0); // TODO: save history.txt
 }
 }
@@ -154,7 +150,7 @@ pub(crate) async fn run_internal_command(
 let mut buffer = termcolor::Buffer::ansi();
 
 let _ = doc.render_raw(
-context.with_host(|host| host.width() - 5)?,
+context.with_host(|host| host.width() - 5),
 &mut nu_source::TermColored::new(&mut buffer),
 );
 
@@ -164,7 +160,7 @@ pub(crate) async fn run_internal_command(
 }
 
 Err(err) => {
-context.error(err)?;
+context.error(err);
 break;
 }
 }
@@ -29,6 +29,9 @@ pub(crate) async fn run_pipeline(
 return Err(ShellError::unimplemented("Expression-only commands"))
 }
 
+(Some(ClassifiedCommand::Error(err)), _) => return Err(err.into()),
+(_, Some(ClassifiedCommand::Error(err))) => return Err(err.clone().into()),
 
 (Some(ClassifiedCommand::Internal(left)), _) => {
 run_internal_command(left, ctx, input, Text::from(line)).await?
 }
@@ -236,7 +236,7 @@ pub struct RunnableContext {
 }
 
 impl RunnableContext {
-pub fn get_command(&self, name: &str) -> Result<Option<Arc<Command>>, ShellError> {
+pub fn get_command(&self, name: &str) -> Option<Arc<Command>> {
 self.commands.get_command(name)
 }
 }
@@ -51,7 +51,7 @@ impl PerItemCommand for Enter {
 if spec.len() == 2 {
 let (_, command) = (spec[0], spec[1]);
 
-if registry.has(command)? {
+if registry.has(command) {
 return Ok(vec![Ok(ReturnSuccess::Action(
 CommandAction::EnterHelpShell(
 UntaggedValue::string(command).into_value(Tag::unknown()),
@@ -74,7 +74,7 @@ impl PerItemCommand for Enter {
 // If it's a file, attempt to open the file as a value and enter it
 let cwd = raw_args.shell_manager.path();
 
-let full_path = std::path::PathBuf::from(cwd?);
+let full_path = std::path::PathBuf::from(cwd);
 
 let (file_extension, contents, contents_tag) =
 crate::commands::open::fetch(
@@ -90,7 +90,7 @@ impl PerItemCommand for Enter {
 if let Some(extension) = file_extension {
 let command_name = format!("from-{}", extension);
 if let Some(converter) =
-registry.get_command(&command_name)?
+registry.get_command(&command_name)
 {
 let new_args = RawCommandArgs {
 host: raw_args.host,
@@ -153,7 +153,8 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
 }
 _ => yield ReturnSuccess::value(x),
 }
-Err(_) => {
+Err(err) => {
+println!("{:?}", err);
 yield Err(ShellError::labeled_error_with_secondary(
 "Could not parse as SQLite",
 "input cannot be parsed as SQLite",
@@ -41,12 +41,12 @@ impl PerItemCommand for Help {
 }) => {
 let mut help = VecDeque::new();
 if document == "commands" {
-let mut sorted_names = registry.names()?;
+let mut sorted_names = registry.names();
 sorted_names.sort();
 for cmd in sorted_names {
 let mut short_desc = TaggedDictBuilder::new(tag.clone());
 let value = command_dict(
-registry.get_command(&cmd)?.ok_or_else(|| {
+registry.get_command(&cmd).ok_or_else(|| {
 ShellError::labeled_error(
 format!("Could not load {}", cmd),
 "could not load command",
@@ -72,7 +72,7 @@ impl PerItemCommand for Help {
 
 help.push_back(ReturnSuccess::value(short_desc.into_value()));
 }
-} else if let Some(command) = registry.get_command(document)? {
+} else if let Some(command) = registry.get_command(document) {
 return Ok(
 get_help(&command.name(), &command.usage(), command.signature()).into(),
 );
@@ -40,7 +40,7 @@ impl PerItemCommand for Open {
 
 fn run(call_info: &CallInfo, raw_args: &RawCommandArgs) -> Result<OutputStream, ShellError> {
 let shell_manager = &raw_args.shell_manager;
-let cwd = PathBuf::from(shell_manager.path()?);
+let cwd = PathBuf::from(shell_manager.path());
 let full_path = cwd;
 
 let path = call_info.args.nth(0).ok_or_else(|| {
@@ -130,7 +130,7 @@ fn save(
 }: RunnableContext,
 raw_args: RawCommandArgs,
 ) -> Result<OutputStream, ShellError> {
-let mut full_path = PathBuf::from(shell_manager.path()?);
+let mut full_path = PathBuf::from(shell_manager.path());
 let name_tag = name.clone();
 
 let stream = async_stream! {
@@ -179,7 +179,7 @@ fn save(
 break if !save_raw {
 if let Some(extension) = full_path.extension() {
 let command_name = format!("to-{}", extension.to_string_lossy());
-if let Some(converter) = registry.get_command(&command_name)? {
+if let Some(converter) = registry.get_command(&command_name) {
 let new_args = RawCommandArgs {
 host,
 ctrl_c,
@@ -32,16 +32,7 @@ fn shells(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream
 let mut shells_out = VecDeque::new();
 let tag = args.call_info.name_tag;
 
-for (index, shell) in args
+for (index, shell) in args.shell_manager.shells.lock().iter().enumerate() {
-.shell_manager
-.shells
-.lock()
-.map_err(|_| {
-ShellError::labeled_error("Could not list shells", "could not list shells", &tag)
-})?
-.iter()
-.enumerate()
-{
 let mut dict = TaggedDictBuilder::new(&tag);
 
 if index == (*args.shell_manager.current_shell).load(Ordering::SeqCst) {
@@ -5,7 +5,6 @@ use futures::StreamExt;
 use futures_util::pin_mut;
 use nu_errors::ShellError;
 use nu_protocol::{ReturnSuccess, ReturnValue, Signature, UntaggedValue};
-use nu_source::PrettyDebug;
 
 pub struct What;
 
@@ -14,11 +13,11 @@ pub struct WhatArgs {}
 
 impl WholeStreamCommand for What {
 fn name(&self) -> &str {
-"what?"
+"describe"
 }
 
 fn signature(&self) -> Signature {
-Signature::build("what?")
+Signature::build("describe")
 }
 
 fn usage(&self) -> &str {
@@ -43,7 +42,7 @@ pub fn what(
 pin_mut!(values);
|
||||||
|
|
||||||
while let Some(row) = values.next().await {
|
while let Some(row) = values.next().await {
|
||||||
let name = value::format_leaf(&row).plain_string(100000);
|
let name = value::format_type(&row, 100);
|
||||||
yield ReturnSuccess::value(UntaggedValue::string(name).into_value(Tag::unknown_anchor(row.tag.span)));
|
yield ReturnSuccess::value(UntaggedValue::string(name).into_value(Tag::unknown_anchor(row.tag.span)));
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
@ -95,7 +95,7 @@ fn which(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let builtin = commands.has(&item)?;
|
let builtin = commands.has(&item);
|
||||||
if builtin {
|
if builtin {
|
||||||
yield ReturnSuccess::value(entry_builtin!(item, application.tag.clone()));
|
yield ReturnSuccess::value(entry_builtin!(item, application.tag.clone()));
|
||||||
}
|
}
|
||||||
@ -128,7 +128,7 @@ fn which(
|
|||||||
if let Ok(path) = ichwh::which(&item).await {
|
if let Ok(path) = ichwh::which(&item).await {
|
||||||
yield ReturnSuccess::value(entry_path!(item, path.into(), application.tag.clone()));
|
yield ReturnSuccess::value(entry_path!(item, path.into(), application.tag.clone()));
|
||||||
}
|
}
|
||||||
} else if commands.has(&item)? {
|
} else if commands.has(&item) {
|
||||||
yield ReturnSuccess::value(entry_builtin!(item, application.tag.clone()));
|
yield ReturnSuccess::value(entry_builtin!(item, application.tag.clone()));
|
||||||
} else if let Ok(path) = ichwh::which(&item).await {
|
} else if let Ok(path) = ichwh::which(&item).await {
|
||||||
yield ReturnSuccess::value(entry_path!(item, path.into(), application.tag.clone()));
|
yield ReturnSuccess::value(entry_path!(item, path.into(), application.tag.clone()));
|
||||||
src/context.rs (159 changed lines)
@@ -5,37 +5,29 @@ use crate::stream::{InputStream, OutputStream};
 use indexmap::IndexMap;
 use nu_errors::ShellError;
 use nu_parser::{hir, hir::syntax_shape::ExpandContext, hir::syntax_shape::SignatureRegistry};
-use nu_protocol::{errln, Signature};
+use nu_protocol::Signature;
 use nu_source::{Tag, Text};
+use parking_lot::Mutex;
 use std::error::Error;
 use std::sync::atomic::AtomicBool;
-use std::sync::{Arc, Mutex};
+use std::sync::Arc;

-#[derive(Clone)]
+#[derive(Debug, Clone)]
 pub struct CommandRegistry {
 registry: Arc<Mutex<IndexMap<String, Arc<Command>>>>,
 }

 impl SignatureRegistry for CommandRegistry {
-fn has(&self, name: &str) -> Result<bool, ShellError> {
-if let Ok(registry) = self.registry.lock() {
-Ok(registry.contains_key(name))
-} else {
-Err(ShellError::untagged_runtime_error(format!(
-"Could not load from registry: {}",
-name
-)))
-}
+fn has(&self, name: &str) -> bool {
+let registry = self.registry.lock();
+registry.contains_key(name)
 }
-fn get(&self, name: &str) -> Result<Option<Signature>, ShellError> {
-if let Ok(registry) = self.registry.lock() {
-Ok(registry.get(name).map(|command| command.signature()))
-} else {
-Err(ShellError::untagged_runtime_error(format!(
-"Could not get from registry: {}",
-name
-)))
-}
+fn get(&self, name: &str) -> Option<Signature> {
+let registry = self.registry.lock();
+registry.get(name).map(|command| command.signature())
+}
+fn clone_box(&self) -> Box<dyn SignatureRegistry> {
+Box::new(self.clone())
 }
 }

@@ -54,53 +46,32 @@ impl CommandRegistry {
 }
 }

-pub(crate) fn get_command(&self, name: &str) -> Result<Option<Arc<Command>>, ShellError> {
-let registry = self.registry.lock().map_err(|_| {
-ShellError::untagged_runtime_error("Internal error: get_command could not get mutex")
-})?;
-
-Ok(registry.get(name).cloned())
+pub(crate) fn get_command(&self, name: &str) -> Option<Arc<Command>> {
+let registry = self.registry.lock();
+registry.get(name).cloned()
 }

 pub(crate) fn expect_command(&self, name: &str) -> Result<Arc<Command>, ShellError> {
-self.get_command(name)?.ok_or_else(|| {
+self.get_command(name).ok_or_else(|| {
 ShellError::untagged_runtime_error(format!("Could not load command: {}", name))
 })
 }

-pub(crate) fn has(&self, name: &str) -> Result<bool, ShellError> {
-let registry = self.registry.lock().map_err(|_| {
-ShellError::untagged_runtime_error("Internal error: has could not get mutex")
-})?;
-
-Ok(registry.contains_key(name))
+pub(crate) fn has(&self, name: &str) -> bool {
+let registry = self.registry.lock();
+registry.contains_key(name)
 }

-pub(crate) fn insert(
-&mut self,
-name: impl Into<String>,
-command: Arc<Command>,
-) -> Result<(), ShellError> {
-let mut registry = self.registry.lock().map_err(|_| {
-ShellError::untagged_runtime_error("Internal error: insert could not get mutex")
-})?;
-
+pub(crate) fn insert(&mut self, name: impl Into<String>, command: Arc<Command>) {
+let mut registry = self.registry.lock();
 registry.insert(name.into(), command);
-Ok(())
 }

-pub(crate) fn names(&self) -> Result<Vec<String>, ShellError> {
-let registry = self.registry.lock().map_err(|_| {
-ShellError::untagged_runtime_error("Internal error: names could not get mutex")
-})?;
-Ok(registry.keys().cloned().collect())
-}
-
-pub(crate) fn snapshot(&self) -> Result<IndexMap<String, Arc<Command>>, ShellError> {
-let registry = self.registry.lock().map_err(|_| {
-ShellError::untagged_runtime_error("Internal error: names could not get mutex")
-})?;
-Ok(registry.clone())
+pub(crate) fn names(&self) -> Vec<String> {
+let registry = self.registry.lock();
+registry.keys().cloned().collect()
 }
 }

@@ -121,12 +92,12 @@ impl Context {
 pub(crate) fn expand_context<'context>(
 &'context self,
 source: &'context Text,
-) -> Result<ExpandContext<'context>, ShellError> {
-Ok(ExpandContext::new(
+) -> ExpandContext {
+ExpandContext::new(
 Box::new(self.registry.clone()),
 source,
-self.shell_manager.homedir()?,
-))
+self.shell_manager.homedir(),
+)
 }

 pub(crate) fn basic() -> Result<Context, Box<dyn Error>> {
@@ -142,73 +113,47 @@ impl Context {
 })
 }

-pub(crate) fn error(&mut self, error: ShellError) -> Result<(), ShellError> {
+pub(crate) fn error(&mut self, error: ShellError) {
 self.with_errors(|errors| errors.push(error))
 }

 pub(crate) fn maybe_print_errors(&mut self, source: Text) -> bool {
 let errors = self.current_errors.clone();
-let errors = errors.lock();
+let mut errors = errors.lock();

 let host = self.host.clone();
 let host = host.lock();

-let result: bool;
-
-match (errors, host) {
-(Err(err), _) => {
-errln!(
-"Unexpected error attempting to acquire the lock of the current errors: {:?}",
-err
-);
-result = false;
-}
-(Ok(mut errors), host) => {
-if errors.len() > 0 {
-let error = errors[0].clone();
-*errors = vec![];
-
-crate::cli::print_err(error, &*host, &source);
-result = true;
-} else {
-result = false;
-}
-}
-};
-
-result
-}
-
-pub(crate) fn with_host<T>(
-&mut self,
-block: impl FnOnce(&mut dyn Host) -> T,
-) -> Result<T, ShellError> {
-let mut host = self.host.lock();
-Ok(block(&mut *host))
-}
-
-pub(crate) fn with_errors<T>(
-&mut self,
-block: impl FnOnce(&mut Vec<ShellError>) -> T,
-) -> Result<T, ShellError> {
-if let Ok(mut errors) = self.current_errors.lock() {
-Ok(block(&mut *errors))
-} else {
-Err(ShellError::untagged_runtime_error(
-"Internal error: could not lock host in with_errors",
-))
-}
-}
-
-pub fn add_commands(&mut self, commands: Vec<Arc<Command>>) -> Result<(), ShellError> {
+if errors.len() > 0 {
+let error = errors[0].clone();
+*errors = vec![];
+
+crate::cli::print_err(error, &*host, &source);
+true
+} else {
+false
+}
+}
+
+pub(crate) fn with_host<T>(&mut self, block: impl FnOnce(&mut dyn Host) -> T) -> T {
+let mut host = self.host.lock();
+
+block(&mut *host)
+}
+
+pub(crate) fn with_errors<T>(&mut self, block: impl FnOnce(&mut Vec<ShellError>) -> T) -> T {
+let mut errors = self.current_errors.lock();
+
+block(&mut *errors)
+}
+
+pub fn add_commands(&mut self, commands: Vec<Arc<Command>>) {
 for command in commands {
-self.registry.insert(command.name().to_string(), command)?;
+self.registry.insert(command.name().to_string(), command);
 }
-
-Ok(())
 }

-pub(crate) fn get_command(&self, name: &str) -> Result<Option<Arc<Command>>, ShellError> {
+pub(crate) fn get_command(&self, name: &str) -> Option<Arc<Command>> {
 self.registry.get_command(name)
 }

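Note on the src/context.rs hunks above: the registry swaps std::sync::Mutex for parking_lot::Mutex, whose lock() cannot fail (and cannot be poisoned), which is why every Result-wrapped signature and "could not get mutex" branch disappears. Below is a minimal, illustrative sketch of that difference, assuming only the parking_lot crate (the same dependency this commit uses) and stand-in types rather than the real CommandRegistry.

use std::collections::HashMap;
use std::sync::Arc;

// With std::sync::Mutex, lock() returns a Result because the lock can be poisoned,
// so the caller has to plumb an error type through.
fn std_mutex_lookup(
    map: &Arc<std::sync::Mutex<HashMap<String, u32>>>,
    key: &str,
) -> Result<Option<u32>, String> {
    let guard = map.lock().map_err(|_| "could not lock registry".to_string())?;
    Ok(guard.get(key).copied())
}

// With parking_lot::Mutex, lock() always succeeds, so the signature can stay infallible.
fn parking_lot_lookup(map: &Arc<parking_lot::Mutex<HashMap<String, u32>>>, key: &str) -> Option<u32> {
    map.lock().get(key).copied()
}

fn main() {
    let std_map = Arc::new(std::sync::Mutex::new(HashMap::from([("ls".to_string(), 1)])));
    let pl_map = Arc::new(parking_lot::Mutex::new(HashMap::from([("ls".to_string(), 1)])));
    assert_eq!(std_mutex_lookup(&std_map, "ls").unwrap(), Some(1));
    assert_eq!(parking_lot_lookup(&pl_map, "ls"), Some(1));
}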
@@ -29,7 +29,7 @@ pub struct Operation {

 #[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Clone, Hash, Serialize, Deserialize, new)]
 pub struct Block {
-pub(crate) expressions: Vec<hir::Expression>,
+pub(crate) expressions: Vec<hir::SpannedExpression>,
 pub(crate) source: Text,
 pub(crate) tag: Tag,
 }
@@ -4,7 +4,7 @@ use crate::evaluate::operator::apply_operator;
 use crate::prelude::*;
 use log::trace;
 use nu_errors::{ArgumentError, ShellError};
-use nu_parser::hir::{self, Expression, RawExpression};
+use nu_parser::hir::{self, Expression, SpannedExpression};
 use nu_protocol::{
 ColumnPath, Evaluate, Primitive, RangeInclusion, Scope, TaggedDictBuilder, UnspannedPathMember,
 UntaggedValue, Value,
@@ -12,7 +12,7 @@ use nu_protocol::{
 use nu_source::Text;

 pub(crate) fn evaluate_baseline_expr(
-expr: &Expression,
+expr: &SpannedExpression,
 registry: &CommandRegistry,
 scope: &Scope,
 source: &Text,
@@ -22,19 +22,19 @@ pub(crate) fn evaluate_baseline_expr(
 anchor: None,
 };
 match &expr.expr {
-RawExpression::Literal(literal) => Ok(evaluate_literal(literal, source)),
-RawExpression::ExternalWord => Err(ShellError::argument_error(
+Expression::Literal(literal) => Ok(evaluate_literal(literal, expr.span, source)),
+Expression::ExternalWord => Err(ShellError::argument_error(
 "Invalid external word".spanned(tag.span),
 ArgumentError::InvalidExternalWord,
 )),
-RawExpression::FilePath(path) => Ok(UntaggedValue::path(path.clone()).into_value(tag)),
-RawExpression::Synthetic(hir::Synthetic::String(s)) => {
+Expression::FilePath(path) => Ok(UntaggedValue::path(path.clone()).into_value(tag)),
+Expression::Synthetic(hir::Synthetic::String(s)) => {
 Ok(UntaggedValue::string(s).into_untagged_value())
 }
-RawExpression::Variable(var) => evaluate_reference(var, scope, source, tag),
-RawExpression::Command(_) => evaluate_command(tag, scope, source),
-RawExpression::ExternalCommand(external) => evaluate_external(external, scope, source),
-RawExpression::Binary(binary) => {
+Expression::Variable(var) => evaluate_reference(var, scope, source, tag),
+Expression::Command(_) => evaluate_command(tag, scope, source),
+Expression::ExternalCommand(external) => evaluate_external(external, scope, source),
+Expression::Binary(binary) => {
 let left = evaluate_baseline_expr(binary.left(), registry, scope, source)?;
 let right = evaluate_baseline_expr(binary.right(), registry, scope, source)?;

@@ -48,7 +48,7 @@ pub(crate) fn evaluate_baseline_expr(
 )),
 }
 }
-RawExpression::Range(range) => {
+Expression::Range(range) => {
 let left = range.left();
 let right = range.right();

@@ -68,7 +68,7 @@ pub(crate) fn evaluate_baseline_expr(

 Ok(UntaggedValue::range(left, right).into_value(tag))
 }
-RawExpression::List(list) => {
+Expression::List(list) => {
 let mut exprs = vec![];

 for expr in list {
@@ -78,13 +78,13 @@ pub(crate) fn evaluate_baseline_expr(

 Ok(UntaggedValue::Table(exprs).into_value(tag))
 }
-RawExpression::Block(block) => Ok(UntaggedValue::Block(Evaluate::new(Block::new(
+Expression::Block(block) => Ok(UntaggedValue::Block(Evaluate::new(Block::new(
 block.clone(),
 source.clone(),
 tag.clone(),
 )))
 .into_value(&tag)),
-RawExpression::Path(path) => {
+Expression::Path(path) => {
 let value = evaluate_baseline_expr(path.head(), registry, scope, source)?;
 let mut item = value;

@@ -122,37 +122,29 @@ pub(crate) fn evaluate_baseline_expr(

 Ok(item.value.into_value(tag))
 }
-RawExpression::Boolean(_boolean) => unimplemented!(),
+Expression::Boolean(_boolean) => unimplemented!(),
 }
 }

-fn evaluate_literal(literal: &hir::Literal, source: &Text) -> Value {
-match &literal.literal {
-hir::RawLiteral::ColumnPath(path) => {
+fn evaluate_literal(literal: &hir::Literal, span: Span, source: &Text) -> Value {
+match &literal {
+hir::Literal::ColumnPath(path) => {
 let members = path
 .iter()
 .map(|member| member.to_path_member(source))
 .collect();

 UntaggedValue::Primitive(Primitive::ColumnPath(ColumnPath::new(members)))
-.into_value(&literal.span)
+.into_value(span)
 }
-hir::RawLiteral::Number(int) => match int {
-nu_parser::Number::Int(i) => UntaggedValue::int(i.clone()).into_value(literal.span),
-nu_parser::Number::Decimal(d) => {
-UntaggedValue::decimal(d.clone()).into_value(literal.span)
-}
+hir::Literal::Number(int) => match int {
+nu_parser::Number::Int(i) => UntaggedValue::int(i.clone()).into_value(span),
+nu_parser::Number::Decimal(d) => UntaggedValue::decimal(d.clone()).into_value(span),
 },
-hir::RawLiteral::Size(int, unit) => unit.compute(&int).into_value(literal.span),
-hir::RawLiteral::String(tag) => {
-UntaggedValue::string(tag.slice(source)).into_value(literal.span)
-}
-hir::RawLiteral::GlobPattern(pattern) => {
-UntaggedValue::pattern(pattern).into_value(literal.span)
-}
-hir::RawLiteral::Bare => {
-UntaggedValue::string(literal.span.slice(source)).into_value(literal.span)
-}
+hir::Literal::Size(int, unit) => unit.compute(&int).into_value(span),
+hir::Literal::String(tag) => UntaggedValue::string(tag.slice(source)).into_value(span),
+hir::Literal::GlobPattern(pattern) => UntaggedValue::pattern(pattern).into_value(span),
+hir::Literal::Bare => UntaggedValue::string(span.slice(source)).into_value(span),
 }
 }

@@ -69,6 +69,7 @@ macro_rules! trace_out_stream {
 }

 pub(crate) use nu_protocol::{errln, outln};
+use nu_source::HasFallibleSpan;

 pub(crate) use crate::commands::command::{
 CallInfoExt, CommandArgs, PerItemCommand, RawCommandArgs, RunnableContext,
@@ -131,12 +132,12 @@ where
 fn to_input_stream(self) -> InputStream {
 InputStream {
 values: self
-.map(|item| {
-if let Ok(result) = item.into() {
-result
-} else {
-unreachable!("Internal errors: to_input_stream in inconsistent state")
-}
+.map(|item| match item.into() {
+Ok(result) => result,
+Err(err) => match HasFallibleSpan::maybe_span(&err) {
+Some(span) => nu_protocol::UntaggedValue::Error(err).into_value(span),
+None => nu_protocol::UntaggedValue::Error(err).into_untagged_value(),
+},
 })
 .boxed(),
 }
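The to_input_stream hunk above replaces the old unreachable! panic with an in-band conversion: a failed item becomes an error value, carrying the error's span when HasFallibleSpan::maybe_span can produce one. A rough, self-contained sketch of that pattern follows; Span, Value, and AppError are stand-ins, not the real nu types.

#[derive(Debug, Clone, Copy)]
struct Span { start: usize, end: usize }

#[derive(Debug)]
enum Value {
    Int(i64),
    Error { message: String, span: Option<Span> },
}

#[derive(Debug)]
struct AppError { message: String, span: Option<Span> }

fn to_values(items: Vec<Result<i64, AppError>>) -> Vec<Value> {
    items
        .into_iter()
        .map(|item| match item {
            Ok(n) => Value::Int(n),
            // Instead of panicking, keep the error in-band and preserve its span when present.
            Err(err) => Value::Error { message: err.message, span: err.span },
        })
        .collect()
}

fn main() {
    let out = to_values(vec![
        Ok(1),
        Err(AppError { message: "bad token".into(), span: Some(Span { start: 3, end: 8 }) }),
    ]);
    println!("{:?}", out);
}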
@@ -1,13 +1,16 @@
 use crate::context::CommandRegistry;

 use derive_new::new;
+use nu_parser::ExpandContext;
 use nu_source::{HasSpan, Text};
 use rustyline::completion::{Completer, FilenameCompleter};
+use std::path::PathBuf;

 #[derive(new)]
 pub(crate) struct NuCompleter {
 pub file_completer: FilenameCompleter,
 pub commands: CommandRegistry,
+pub homedir: Option<PathBuf>,
 }

 impl NuCompleter {
@@ -17,7 +20,15 @@ impl NuCompleter {
 pos: usize,
 context: &rustyline::Context,
 ) -> rustyline::Result<(usize, Vec<rustyline::completion::Pair>)> {
-let commands: Vec<String> = self.commands.names().unwrap_or_else(|_| vec![]);
+let text = Text::from(line);
+let expand_context =
+ExpandContext::new(Box::new(self.commands.clone()), &text, self.homedir.clone());
+
+#[allow(unused)]
+// smarter completions
+let shapes = nu_parser::pipeline_shapes(line, expand_context);
+
+let commands: Vec<String> = self.commands.names();

 let line_chars: Vec<_> = line[..pos].chars().collect();

@@ -100,8 +111,6 @@ impl NuCompleter {
 if let Ok(val) = nu_parser::parse(&line_copy) {
 let source = Text::from(line);
 let pipeline_list = vec![val.clone()];
-let mut iterator =
-nu_parser::TokensIterator::all(&pipeline_list, source.clone(), val.span());

 let expand_context = nu_parser::ExpandContext {
 homedir: None,
@@ -109,10 +118,12 @@ impl NuCompleter {
 source: &source,
 };

-let result =
-nu_parser::expand_syntax(&nu_parser::PipelineShape, &mut iterator, &expand_context);
+let mut iterator =
+nu_parser::TokensIterator::new(&pipeline_list, expand_context, val.span());

-if let Ok(result) = result {
+let result = iterator.expand_infallible(nu_parser::PipelineShape);
+
+if result.failed.is_none() {
 for command in result.commands.list {
 if let nu_parser::ClassifiedCommand::Internal(nu_parser::InternalCommand {
 args,
@@ -10,6 +10,7 @@ use crate::shell::completer::NuCompleter;
 use crate::shell::shell::Shell;
 use crate::utils::FileStructure;
 use nu_errors::ShellError;
+use nu_parser::ExpandContext;
 use nu_protocol::{Primitive, ReturnSuccess, UntaggedValue};
 use rustyline::completion::FilenameCompleter;
 use rustyline::hint::{Hinter, HistoryHinter};
@@ -38,6 +39,7 @@ impl Clone for FilesystemShell {
 completer: NuCompleter {
 file_completer: FilenameCompleter::new(),
 commands: self.completer.commands.clone(),
+homedir: self.homedir(),
 },
 hinter: HistoryHinter {},
 }
@@ -54,6 +56,7 @@ impl FilesystemShell {
 completer: NuCompleter {
 file_completer: FilenameCompleter::new(),
 commands,
+homedir: dirs::home_dir(),
 },
 hinter: HistoryHinter {},
 })
@@ -67,6 +70,7 @@ impl FilesystemShell {
 completer: NuCompleter {
 file_completer: FilenameCompleter::new(),
 commands,
+homedir: dirs::home_dir(),
 },
 hinter: HistoryHinter {},
 }
@@ -1131,7 +1135,13 @@ impl Shell for FilesystemShell {
 self.completer.complete(line, pos, ctx)
 }

-fn hint(&self, line: &str, pos: usize, ctx: &rustyline::Context<'_>) -> Option<String> {
+fn hint(
+&self,
+line: &str,
+pos: usize,
+ctx: &rustyline::Context<'_>,
+_expand_context: ExpandContext,
+) -> Option<String> {
 self.hinter.hint(line, pos, ctx)
 }
 }
@@ -8,6 +8,7 @@ use crate::data::command_dict;
 use crate::prelude::*;
 use crate::shell::shell::Shell;
 use nu_errors::ShellError;
+use nu_parser::ExpandContext;
 use nu_protocol::{
 Primitive, ReturnSuccess, ShellTypeName, TaggedDictBuilder, UntaggedValue, Value,
 };
@@ -25,25 +26,28 @@ impl HelpShell {
 let mut cmds = TaggedDictBuilder::new(Tag::unknown());
 let mut specs = Vec::new();

-let snapshot = registry.snapshot()?;
-
-for (name, cmd) in snapshot.iter() {
-let mut spec = TaggedDictBuilder::new(Tag::unknown());
-let value = command_dict(cmd.clone(), Tag::unknown());
-
-spec.insert_untagged("name", name.to_string());
-spec.insert_untagged(
-"description",
-value
-.get_data_by_key("usage".spanned_unknown())
-.ok_or_else(|| {
-ShellError::untagged_runtime_error("Internal error: expected to find usage")
-})?
-.as_string()?,
-);
-spec.insert_value("details", value);
-
-specs.push(spec.into_value());
+for cmd in registry.names() {
+if let Some(cmd_value) = registry.get_command(&cmd) {
+let mut spec = TaggedDictBuilder::new(Tag::unknown());
+let value = command_dict(cmd_value, Tag::unknown());
+
+spec.insert_untagged("name", cmd);
+spec.insert_untagged(
+"description",
+value
+.get_data_by_key("usage".spanned_unknown())
+.ok_or_else(|| {
+ShellError::untagged_runtime_error(
+"Internal error: expected to find usage",
+)
+})?
+.as_string()?,
+);
+spec.insert_value("details", value);
+
+specs.push(spec.into_value());
+} else {
+}
 }

 cmds.insert_untagged("help", UntaggedValue::Table(specs));
@@ -240,7 +244,13 @@ impl Shell for HelpShell {
 Ok((replace_pos, completions))
 }

-fn hint(&self, _line: &str, _pos: usize, _ctx: &rustyline::Context<'_>) -> Option<String> {
+fn hint(
+&self,
+_line: &str,
+_pos: usize,
+_ctx: &rustyline::Context<'_>,
+_context: ExpandContext,
+) -> Option<String> {
 None
 }
 }
@@ -1,10 +1,9 @@
 use crate::context::Context;
-use ansi_term::Color;
-use log::{log_enabled, trace};
-use nu_parser::hir::syntax_shape::color_fallible_syntax;
-use nu_parser::{FlatShape, PipelineShape, TokenNode, TokensIterator};
-use nu_protocol::outln;
-use nu_source::{nom_input, HasSpan, Spanned, Tag, Tagged, Text};
+use ansi_term::{Color, Style};
+use log::log_enabled;
+use nu_parser::{FlatShape, PipelineShape, ShapeResult, Token, TokensIterator};
+use nu_protocol::{errln, outln};
+use nu_source::{nom_input, HasSpan, Tag, Tagged, Text};
 use rustyline::completion::Completer;
 use rustyline::error::ReadlineError;
 use rustyline::highlight::Highlighter;
@@ -39,10 +38,10 @@ impl Completer for Helper {

 impl Hinter for Helper {
 fn hint(&self, line: &str, pos: usize, ctx: &rustyline::Context<'_>) -> Option<String> {
-match self.context.shell_manager.hint(line, pos, ctx) {
-Ok(output) => output,
-Err(e) => Some(format!("{}", e)),
-}
+let text = Text::from(line);
+self.context
+.shell_manager
+.hint(line, pos, ctx, self.context.expand_context(&text))
 }
 }

@@ -71,53 +70,47 @@ impl Highlighter for Helper {
 match tokens {
 Err(_) => Cow::Borrowed(line),
 Ok((_rest, v)) => {
-let mut out = String::new();
 let pipeline = match v.as_pipeline() {
 Err(_) => return Cow::Borrowed(line),
 Ok(v) => v,
 };

-let tokens = vec![TokenNode::Pipeline(pipeline)];
-let mut tokens = TokensIterator::all(&tokens[..], Text::from(line), v.span());
-
 let text = Text::from(line);
-match self.context.expand_context(&text) {
-Ok(expand_context) => {
-let shapes = {
-// We just constructed a token list that only contains a pipeline, so it can't fail
-if let Err(err) =
-color_fallible_syntax(&PipelineShape, &mut tokens, &expand_context)
-{
-let error_msg = format!("{}", err);
-return Cow::Owned(error_msg);
-}
-tokens.with_color_tracer(|_, tracer| tracer.finish());
-
-tokens.state().shapes()
-};
-
-trace!(target: "nu::color_syntax", "{:#?}", tokens.color_tracer());
-
-if log_enabled!(target: "nu::color_syntax", log::Level::Debug) {
-outln!("");
-let _ = ptree::print_tree(
-&tokens.color_tracer().clone().print(Text::from(line)),
-);
-outln!("");
-}
-
-for shape in shapes {
-let styled = paint_flat_shape(&shape, line);
-out.push_str(&styled);
-}
-
-Cow::Owned(out)
-}
-Err(err) => {
-let error_msg = format!("{}", err);
-Cow::Owned(error_msg)
-}
-}
+let expand_context = self.context.expand_context(&text);
+
+let tokens = vec![Token::Pipeline(pipeline).into_spanned(v.span())];
+let mut tokens = TokensIterator::new(&tokens[..], expand_context, v.span());
+
+let shapes = {
+// We just constructed a token list that only contains a pipeline, so it can't fail
+let result = tokens.expand_infallible(PipelineShape);
+
+if let Some(failure) = result.failed {
+errln!(
+"BUG: PipelineShape didn't find a pipeline :: {:#?}",
+failure
+);
+}
+
+tokens.finish_tracer();
+
+tokens.state().shapes()
+};
+
+if log_enabled!(target: "nu::expand_syntax", log::Level::Debug) {
+outln!("");
+let _ =
+ptree::print_tree(&tokens.expand_tracer().clone().print(Text::from(line)));
+outln!("");
+}
+
+let mut painter = Painter::new();
+
+for shape in shapes {
+painter.paint_shape(&shape, line);
+}
+
+Cow::Owned(painter.into_string())
 }
 }
 }
@@ -139,45 +132,75 @@ fn vec_tag<T>(input: Vec<Tagged<T>>) -> Option<Tag> {
 })
 }

-fn paint_flat_shape(flat_shape: &Spanned<FlatShape>, line: &str) -> String {
-let style = match &flat_shape.item {
-FlatShape::OpenDelimiter(_) => Color::White.normal(),
-FlatShape::CloseDelimiter(_) => Color::White.normal(),
-FlatShape::ItVariable => Color::Purple.bold(),
-FlatShape::Variable => Color::Purple.normal(),
-FlatShape::CompareOperator => Color::Yellow.normal(),
-FlatShape::DotDot => Color::Yellow.bold(),
-FlatShape::Dot => Color::White.normal(),
-FlatShape::InternalCommand => Color::Cyan.bold(),
-FlatShape::ExternalCommand => Color::Cyan.normal(),
-FlatShape::ExternalWord => Color::Green.bold(),
-FlatShape::BareMember => Color::Yellow.bold(),
-FlatShape::StringMember => Color::Yellow.bold(),
-FlatShape::String => Color::Green.normal(),
-FlatShape::Path => Color::Cyan.normal(),
-FlatShape::GlobPattern => Color::Cyan.bold(),
-FlatShape::Word => Color::Green.normal(),
-FlatShape::Pipe => Color::Purple.bold(),
-FlatShape::Flag => Color::Blue.bold(),
-FlatShape::ShorthandFlag => Color::Blue.bold(),
-FlatShape::Int => Color::Purple.bold(),
-FlatShape::Decimal => Color::Purple.bold(),
-FlatShape::Whitespace | FlatShape::Separator => Color::White.normal(),
-FlatShape::Comment => Color::Green.bold(),
-FlatShape::Error => Color::Red.bold(),
-FlatShape::Size { number, unit } => {
-let number = number.slice(line);
-let unit = unit.slice(line);
-return format!(
-"{}{}",
-Color::Purple.bold().paint(number),
-Color::Cyan.bold().paint(unit)
-);
-}
-};
-
-let body = flat_shape.span.slice(line);
-style.paint(body).to_string()
-}
+struct Painter {
+current: Style,
+buffer: String,
+}
+
+impl Painter {
+fn new() -> Painter {
+Painter {
+current: Style::default(),
+buffer: String::new(),
+}
+}
+
+fn into_string(self) -> String {
+self.buffer
+}
+
+fn paint_shape(&mut self, shape: &ShapeResult, line: &str) {
+let style = match &shape {
+ShapeResult::Success(shape) => match shape.item {
+FlatShape::OpenDelimiter(_) => Color::White.normal(),
+FlatShape::CloseDelimiter(_) => Color::White.normal(),
+FlatShape::ItVariable | FlatShape::Keyword => Color::Purple.bold(),
+FlatShape::Variable | FlatShape::Identifier => Color::Purple.normal(),
+FlatShape::Type => Color::Blue.bold(),
+FlatShape::CompareOperator => Color::Yellow.normal(),
+FlatShape::DotDot => Color::Yellow.bold(),
+FlatShape::Dot => Style::new().fg(Color::White).on(Color::Black),
+FlatShape::InternalCommand => Color::Cyan.bold(),
+FlatShape::ExternalCommand => Color::Cyan.normal(),
+FlatShape::ExternalWord => Color::Green.bold(),
+FlatShape::BareMember => Color::Yellow.bold(),
+FlatShape::StringMember => Color::Yellow.bold(),
+FlatShape::String => Color::Green.normal(),
+FlatShape::Path => Color::Cyan.normal(),
+FlatShape::GlobPattern => Color::Cyan.bold(),
+FlatShape::Word => Color::Green.normal(),
+FlatShape::Pipe => Color::Purple.bold(),
+FlatShape::Flag => Color::Blue.bold(),
+FlatShape::ShorthandFlag => Color::Blue.bold(),
+FlatShape::Int => Color::Purple.bold(),
+FlatShape::Decimal => Color::Purple.bold(),
+FlatShape::Whitespace | FlatShape::Separator => Color::White.normal(),
+FlatShape::Comment => Color::Green.bold(),
+FlatShape::Garbage => Style::new().fg(Color::White).on(Color::Red),
+FlatShape::Size { number, unit } => {
+let number = number.slice(line);
+let unit = unit.slice(line);
+
+self.paint(Color::Purple.bold(), number);
+self.paint(Color::Cyan.bold(), unit);
+return;
+}
+},
+ShapeResult::Fallback { shape, .. } => match shape.item {
+FlatShape::Whitespace | FlatShape::Separator => Color::White.normal(),
+_ => Style::new().fg(Color::White).on(Color::Red),
+},
+};
+
+self.paint(style, shape.span().slice(line));
+}
+
+fn paint(&mut self, style: Style, body: &str) {
+let infix = self.current.infix(style);
+self.current = style;
+self.buffer
+.push_str(&format!("{}{}", infix, style.paint(body)));
+}
+}

 impl rustyline::Helper for Helper {}
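The new Painter in the highlighter hunk above replaces the per-shape paint_flat_shape helper: it remembers the previously emitted ansi_term::Style, writes the infix transition before each newly styled span, and paints fallback (error-corrected) shapes white on red. A trimmed-down sketch of that idea follows, assuming only the ansi_term crate that the file already imports; the span types are omitted and plain &str bodies are used instead.

use ansi_term::{Color, Style};

struct Painter {
    // Style of the most recently painted span.
    current: Style,
    buffer: String,
}

impl Painter {
    fn new() -> Painter {
        Painter { current: Style::default(), buffer: String::new() }
    }

    fn paint(&mut self, style: Style, body: &str) {
        // `infix` renders the escape codes for the transition from the previous style
        // to the next one, mirroring the pattern used in the diff.
        let infix = self.current.infix(style);
        self.current = style;
        self.buffer.push_str(&format!("{}{}", infix, style.paint(body)));
    }

    fn into_string(self) -> String {
        self.buffer
    }
}

fn main() {
    let mut painter = Painter::new();
    painter.paint(Color::Cyan.bold(), "ls");      // e.g. an internal command shape
    painter.paint(Style::default(), " ");         // whitespace
    painter.paint(Color::Green.normal(), "*.rs"); // e.g. a word / pattern shape
    println!("{}", painter.into_string());
}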
@@ -7,6 +7,7 @@ use crate::commands::rm::RemoveArgs;
 use crate::prelude::*;
 use crate::stream::OutputStream;
 use nu_errors::ShellError;
+use nu_parser::ExpandContext;
 use std::path::PathBuf;

 pub trait Shell: std::fmt::Debug {
@@ -34,5 +35,11 @@ pub trait Shell: std::fmt::Debug {
 ctx: &rustyline::Context<'_>,
 ) -> Result<(usize, Vec<rustyline::completion::Pair>), rustyline::error::ReadlineError>;

-fn hint(&self, _line: &str, _pos: usize, _ctx: &rustyline::Context<'_>) -> Option<String>;
+fn hint(
+&self,
+_line: &str,
+_pos: usize,
+_ctx: &rustyline::Context<'_>,
+_context: ExpandContext,
+) -> Option<String>;
 }
@@ -9,10 +9,12 @@ use crate::shell::filesystem_shell::FilesystemShell;
 use crate::shell::shell::Shell;
 use crate::stream::OutputStream;
 use nu_errors::ShellError;
+use nu_parser::ExpandContext;
+use parking_lot::Mutex;
 use std::error::Error;
 use std::path::PathBuf;
 use std::sync::atomic::{AtomicUsize, Ordering};
-use std::sync::{Arc, Mutex};
+use std::sync::Arc;

 #[derive(Clone, Debug)]
 pub struct ShellManager {
@@ -30,95 +32,53 @@ impl ShellManager {
 })
 }

-pub fn insert_at_current(&mut self, shell: Box<dyn Shell + Send>) -> Result<(), ShellError> {
-if let Ok(mut shells) = self.shells.lock() {
-shells.push(shell);
-} else {
-return Err(ShellError::untagged_runtime_error(
-"Internal error: could not lock shells ring buffer",
-));
-}
-
-let shells_len = if let Ok(shells) = self.shells.lock() {
-shells.len()
-} else {
-return Err(ShellError::untagged_runtime_error(
-"Internal error: could not lock shells ring buffer",
-));
-};
-
-self.current_shell.store(shells_len - 1, Ordering::SeqCst);
-self.set_path(self.path()?)
+pub fn insert_at_current(&mut self, shell: Box<dyn Shell + Send>) {
+self.shells.lock().push(shell);
+self.current_shell
+.store(self.shells.lock().len() - 1, Ordering::SeqCst);
+self.set_path(self.path());
 }

 pub fn current_shell(&self) -> usize {
 self.current_shell.load(Ordering::SeqCst)
 }

-pub fn remove_at_current(&mut self) -> Result<(), ShellError> {
+pub fn remove_at_current(&mut self) {
 {
-if let Ok(mut shells) = self.shells.lock() {
+let mut shells = self.shells.lock();
 if shells.len() > 0 {
 if self.current_shell() == shells.len() - 1 {
 shells.pop();
 let new_len = shells.len();
 if new_len > 0 {
 self.current_shell.store(new_len - 1, Ordering::SeqCst);
-} else {
-return Ok(());
-}
 } else {
-shells.remove(self.current_shell());
+return;
 }
+} else {
+shells.remove(self.current_shell());
 }
-} else {
-return Err(ShellError::untagged_runtime_error(
-"Internal error: could not lock shells ring buffer",
-));
 }
 }
-self.set_path(self.path()?)
+self.set_path(self.path())
 }

-pub fn is_empty(&self) -> Result<bool, ShellError> {
-if let Ok(shells) = self.shells.lock() {
-Ok(shells.is_empty())
-} else {
-Err(ShellError::untagged_runtime_error(
-"Internal error: could not lock shells ring buffer (is_empty)",
-))
-}
+pub fn is_empty(&self) -> bool {
+self.shells.lock().is_empty()
 }

-pub fn path(&self) -> Result<String, ShellError> {
-if let Ok(shells) = self.shells.lock() {
-Ok(shells[self.current_shell()].path())
-} else {
-Err(ShellError::untagged_runtime_error(
-"Internal error: could not lock shells ring buffer (path)",
-))
-}
+pub fn path(&self) -> String {
+self.shells.lock()[self.current_shell()].path()
 }

 pub fn pwd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result<OutputStream, ShellError> {
-if let Ok(shells) = self.shells.lock() {
-shells[self.current_shell()].pwd(args)
-} else {
-Err(ShellError::untagged_runtime_error(
-"Internal error: could not lock shells ring buffer (pwd)",
-))
-}
+let env = self.shells.lock();
+
+env[self.current_shell()].pwd(args)
 }

-pub fn set_path(&mut self, path: String) -> Result<(), ShellError> {
-if let Ok(mut shells) = self.shells.lock() {
-shells[self.current_shell()].set_path(path);
-Ok(())
-} else {
-Err(ShellError::untagged_runtime_error(
-"Internal error: could not lock shells ring buffer (set_path)",
-))
-}
+pub fn set_path(&mut self, path: String) {
+self.shells.lock()[self.current_shell()].set_path(path)
 }

 pub fn complete(
@@ -127,14 +87,7 @@ impl ShellManager {
 pos: usize,
 ctx: &rustyline::Context<'_>,
 ) -> Result<(usize, Vec<rustyline::completion::Pair>), rustyline::error::ReadlineError> {
-if let Ok(shells) = self.shells.lock() {
-shells[self.current_shell()].complete(line, pos, ctx)
-} else {
-Err(rustyline::error::ReadlineError::Io(std::io::Error::new(
-std::io::ErrorKind::Other,
-"Internal error: could not lock shells ring buffer (complete)",
-)))
-}
+self.shells.lock()[self.current_shell()].complete(line, pos, ctx)
 }

 pub fn hint(
@@ -142,62 +95,41 @@ impl ShellManager {
 line: &str,
 pos: usize,
 ctx: &rustyline::Context<'_>,
-) -> Result<Option<String>, ShellError> {
-if let Ok(shells) = self.shells.lock() {
-Ok(shells[self.current_shell()].hint(line, pos, ctx))
-} else {
-Err(ShellError::untagged_runtime_error(
-"Internal error: could not lock shells ring buffer (hint)",
-))
-}
+context: ExpandContext,
+) -> Option<String> {
+self.shells.lock()[self.current_shell()].hint(line, pos, ctx, context)
 }

-pub fn next(&mut self) -> Result<(), ShellError> {
+pub fn next(&mut self) {
 {
-if let Ok(shells) = self.shells.lock() {
-let shell_len = shells.len();
-if self.current_shell() == (shell_len - 1) {
-self.current_shell.store(0, Ordering::SeqCst);
-} else {
-self.current_shell
-.store(self.current_shell() + 1, Ordering::SeqCst);
-}
+let shell_len = self.shells.lock().len();
+if self.current_shell() == (shell_len - 1) {
+self.current_shell.store(0, Ordering::SeqCst);
 } else {
-return Err(ShellError::untagged_runtime_error(
-"Internal error: could not lock shells ring buffer (next)",
-));
+self.current_shell
+.store(self.current_shell() + 1, Ordering::SeqCst);
 }
 }
-self.set_path(self.path()?)
+self.set_path(self.path())
 }

-pub fn prev(&mut self) -> Result<(), ShellError> {
+pub fn prev(&mut self) {
 {
-if let Ok(shells) = self.shells.lock() {
-let shell_len = shells.len();
-if self.current_shell() == 0 {
-self.current_shell.store(shell_len - 1, Ordering::SeqCst);
-} else {
-self.current_shell
-.store(self.current_shell() - 1, Ordering::SeqCst);
-}
+let shell_len = self.shells.lock().len();
+if self.current_shell() == 0 {
+self.current_shell.store(shell_len - 1, Ordering::SeqCst);
 } else {
-return Err(ShellError::untagged_runtime_error(
-"Internal error: could not lock shells ring buffer (prev)",
-));
+self.current_shell
+.store(self.current_shell() - 1, Ordering::SeqCst);
 }
 }
-self.set_path(self.path()?)
+self.set_path(self.path())
 }

-pub fn homedir(&self) -> Result<Option<PathBuf>, ShellError> {
-if let Ok(shells) = self.shells.lock() {
-Ok(shells[self.current_shell()].homedir())
-} else {
-Err(ShellError::untagged_runtime_error(
-"Internal error: could not lock shells ring buffer (homedir)",
-))
-}
+pub fn homedir(&self) -> Option<PathBuf> {
+let env = self.shells.lock();
+
+env[self.current_shell()].homedir()
 }

 pub fn ls(
@@ -205,23 +137,15 @@ impl ShellManager {
 args: LsArgs,
 context: &RunnablePerItemContext,
 ) -> Result<OutputStream, ShellError> {
-if let Ok(shells) = self.shells.lock() {
-shells[self.current_shell()].ls(args, context)
-} else {
-Err(ShellError::untagged_runtime_error(
-"Internal error: could not lock shells ring buffer (ls)",
-))
-}
+let env = self.shells.lock();
+
+env[self.current_shell()].ls(args, context)
 }

 pub fn cd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result<OutputStream, ShellError> {
-if let Ok(shells) = self.shells.lock() {
-shells[self.current_shell()].cd(args)
-} else {
-Err(ShellError::untagged_runtime_error(
-"Internal error: could not lock shells ring buffer (cd)",
-))
-}
+let env = self.shells.lock();
+
+env[self.current_shell()].cd(args)
 }

 pub fn cp(
@@ -231,17 +155,8 @@ impl ShellManager {
 ) -> Result<OutputStream, ShellError> {
 let shells = self.shells.lock();

-match shells {
-Ok(x) => {
-let path = x[self.current_shell()].path();
-x[self.current_shell()].cp(args, context.name.clone(), &path)
-}
-Err(e) => Err(ShellError::labeled_error(
-format!("Internal error: could not lock {}", e),
-"Internal error: could not lock",
-&context.name,
-)),
-}
+let path = shells[self.current_shell()].path();
+shells[self.current_shell()].cp(args, context.name.clone(), &path)
 }

 pub fn rm(
@@ -251,17 +166,8 @@ impl ShellManager {
 ) -> Result<OutputStream, ShellError> {
 let shells = self.shells.lock();

-match shells {
-Ok(x) => {
-let path = x[self.current_shell()].path();
-x[self.current_shell()].rm(args, context.name.clone(), &path)
-}
-Err(e) => Err(ShellError::labeled_error(
-format!("Internal error: could not lock {}", e),
-"Internal error: could not lock",
-&context.name,
-)),
-}
+let path = shells[self.current_shell()].path();
+shells[self.current_shell()].rm(args, context.name.clone(), &path)
 }

 pub fn mkdir(
@@ -271,17 +177,8 @@ impl ShellManager {
 ) -> Result<OutputStream, ShellError> {
 let shells = self.shells.lock();

-match shells {
-Ok(x) => {
-let path = x[self.current_shell()].path();
-x[self.current_shell()].mkdir(args, context.name.clone(), &path)
-}
-Err(e) => Err(ShellError::labeled_error(
-format!("Internal error: could not lock {}", e),
-"Internal error: could not lock",
-&context.name,
-)),
-}
+let path = shells[self.current_shell()].path();
+shells[self.current_shell()].mkdir(args, context.name.clone(), &path)
 }

 pub fn mv(
@@ -291,16 +188,7 @@ impl ShellManager {
 ) -> Result<OutputStream, ShellError> {
 let shells = self.shells.lock();

-match shells {
-Ok(x) => {
-let path = x[self.current_shell()].path();
-x[self.current_shell()].mv(args, context.name.clone(), &path)
-}
-Err(e) => Err(ShellError::labeled_error(
-format!("Internal error: could not lock {}", e),
-"Internal error: could not lock",
-&context.name,
-)),
-}
+let path = shells[self.current_shell()].path();
+shells[self.current_shell()].mv(args, context.name.clone(), &path)
 }
 }
@@ -8,6 +8,7 @@ use crate::prelude::*;
 use crate::shell::shell::Shell;
 use crate::utils::ValueStructure;
 use nu_errors::ShellError;
+use nu_parser::ExpandContext;
 use nu_protocol::{ReturnSuccess, ShellTypeName, UntaggedValue, Value};
 use std::ffi::OsStr;
 use std::path::{Path, PathBuf};
@@ -280,7 +281,13 @@ impl Shell for ValueShell {
 Ok((replace_pos, completions))
 }

-fn hint(&self, _line: &str, _pos: usize, _ctx: &rustyline::Context<'_>) -> Option<String> {
+fn hint(
+&self,
+_line: &str,
+_pos: usize,
+_ctx: &rustyline::Context<'_>,
+_context: ExpandContext,
+) -> Option<String> {
 None
 }
 }
@@ -139,7 +139,8 @@ fn parses_sqlite() {
 | get table_values
 | nth 2
 | get x
-| echo $it"#
+| echo $it
+"#
 ));

 assert_eq!(actual, "hello");
@@ -62,8 +62,8 @@ fn complex_nested_columns() {
 cwd: dirs.test(), pipeline(
 r#"
 open los_tres_caballeros.json
-| pick nu.0xATYKARNU nu.committers.name nu.releases.version
-| where $it."nu.releases.version" > "0.8"
+| pick nu."0xATYKARNU" nu.committers.name nu.releases.version
+| where "nu.releases.version" > "0.8"
 | get "nu.releases.version"
 | echo $it
 "#