Restructure and streamline token expansion (#1123)

The purpose of this commit is to streamline the token expansion code by removing aspects of the code that are no longer relevant, removing pointless duplication, and eliminating the need to pass the same arguments to `expand_syntax`.

The first big-picture change in this commit is that instead of a handful of `expand_` functions, which take a TokensIterator and ExpandContext, a smaller number of methods on the `TokensIterator` do the same job.

The second big-picture change is fully eliminating the coloring traits, making coloring a responsibility of the base expansion implementations. This also means that the coloring tracer is merged into the expansion tracer, so you can follow a single expansion and see how the expansion process produced colored tokens.

One side effect of this change is that the expander itself is marginally more error-correcting. The error correction works by switching from structured expansion to `BackoffColoringMode` when an unexpected token is found, which guarantees that all spans of the source are colored, but may not be the most optimal error recovery strategy. That said, because `BackoffColoringMode` only extends as far as a closing delimiter (`)`, `]`, `}`) or pipe (`|`), it results in a fairly granular correction strategy.

The current code still produces an `Err` (plus a complete list of colored shapes) from the parsing process if any errors are encountered, but this could easily be addressed now that the underlying expansion is error-correcting.

This commit also colors any spans that are syntax errors in red, and causes the parser to include additional information about which tokens were expected at any point where an error was encountered, so that completions and hinting can be more robust in the future.

Co-authored-by: Jonathan Turner <jonathandturner@users.noreply.github.com>
Co-authored-by: Andrés N. Robalino <andres@androbtech.com>
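The recovery strategy described above can be pictured with a short, self-contained sketch. This is illustrative only, not the crate's actual `BackoffColoringMode` (the `backoff` function and its types are made up for the example): sweep the unexpected input as a single garbage span to be colored as an error, stop at a closing delimiter or pipe, and let structured expansion resume from there.

    // Illustrative sketch only (assumed names, not the real BackoffColoringMode):
    // when expansion hits an unexpected token, mark everything up to the next
    // recovery boundary as one garbage span so every part of the source still
    // ends up colored.
    fn is_recovery_boundary(c: char) -> bool {
        matches!(c, ')' | ']' | '}' | '|')
    }

    // Returns (garbage_len, rest): how many bytes to color as an error, and the
    // remaining input starting at the boundary where structured expansion resumes.
    fn backoff(source: &str) -> (usize, &str) {
        let garbage_len = source
            .char_indices()
            .find(|&(_, c)| is_recovery_boundary(c))
            .map(|(i, _)| i)
            .unwrap_or(source.len());
        (garbage_len, &source[garbage_len..])
    }

    fn main() {
        // "0xATYKARNU" is the same garbage input exercised by the new parser tests.
        let (garbage, rest) = backoff("0xATYKARNU bad stuff | to-json");
        assert_eq!(garbage, "0xATYKARNU bad stuff ".len());
        assert_eq!(rest, "| to-json");
        println!("color 0..{} as an error, resume at {:?}", garbage, rest);
    }

Because the sweep stops at `)`, `]`, `}`, or `|`, a single bad token inside a block or pipeline stage only poisons that stage, which is what makes the correction fairly granular.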
commit 7efb31a4e4
parent c8dd7838a8
committed by Andrés N. Robalino
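Before the diff itself, a minimal standalone sketch of the token shape the change converges on: a plain `Token` enum paired with a `SpannedToken` wrapper, built via `into_spanned` and inspected via `unspanned()`, as introduced in `token_tree.rs` below. The `Span` struct here is a simplified stand-in; only the method names mirror the real code.

    // Standalone sketch; Span is a simplified stand-in, while the method names
    // mirror the Token/SpannedToken pair added in token_tree.rs.
    #[derive(Debug, Clone, Copy)]
    struct Span {
        start: usize,
        end: usize,
    }

    #[derive(Debug, Clone)]
    enum Token {
        Bare,
        Whitespace,
    }

    #[derive(Debug, Clone)]
    struct SpannedToken {
        unspanned: Token,
        span: Span,
    }

    impl Token {
        // Mirrors Token::into_spanned: attach a span to a bare token value.
        fn into_spanned(self, span: Span) -> SpannedToken {
            SpannedToken { unspanned: self, span }
        }
    }

    impl SpannedToken {
        // Mirrors SpannedToken::unspanned(): look at the token without its span.
        fn unspanned(&self) -> &Token {
            &self.unspanned
        }

        fn is_whitespace(&self) -> bool {
            match self.unspanned() {
                Token::Whitespace => true,
                _ => false,
            }
        }
    }

    fn main() {
        let word = Token::Bare.into_spanned(Span { start: 0, end: 4 });
        let space = Token::Whitespace.into_spanned(Span { start: 4, end: 5 });
        assert!(!word.is_whitespace());
        assert!(space.is_whitespace());
        println!("{:?} at {}..{}", word.unspanned(), word.span.start, word.span.end);
    }

In the diff, the old `TokenNode` tree and `UnspannedToken` are folded into this single `Token`/`SpannedToken` pair.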
@@ -1,13 +1,13 @@
use crate::TokenNode;
use crate::parse::token_tree::SpannedToken;
use getset::Getters;
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource};

#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters)]
pub struct CallNode {
#[get = "pub(crate)"]
head: Box<TokenNode>,
head: Box<SpannedToken>,
#[get = "pub(crate)"]
children: Option<Vec<TokenNode>>,
children: Option<Vec<SpannedToken>>,
}

impl PrettyDebugWithSource for CallNode {
@@ -29,7 +29,7 @@ impl PrettyDebugWithSource for CallNode {
}

impl CallNode {
pub fn new(head: Box<TokenNode>, children: Vec<TokenNode>) -> CallNode {
pub fn new(head: Box<SpannedToken>, children: Vec<SpannedToken>) -> CallNode {
if children.is_empty() {
CallNode {
head,
@@ -1,6 +1,6 @@
use derive_new::new;
use getset::Getters;
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span};
use nu_source::{b, DebugDocBuilder, PrettyDebugWithSource, Span};
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
@@ -12,15 +12,13 @@ pub enum CommentKind {
pub struct Comment {
pub(crate) kind: CommentKind,
pub(crate) text: Span,
pub(crate) span: Span,
}

impl Comment {
pub fn line(text: impl Into<Span>, outer: impl Into<Span>) -> Comment {
pub fn line(text: impl Into<Span>) -> Comment {
Comment {
kind: CommentKind::Line,
text: text.into(),
span: outer.into(),
}
}
}
@@ -34,9 +32,3 @@ impl PrettyDebugWithSource for Comment {
prefix + b::description(self.text.slice(source))
}
}

impl HasSpan for Comment {
fn span(&self) -> Span {
self.span
}
}
@@ -15,7 +15,6 @@ pub enum FlagKind {
pub struct Flag {
pub(crate) kind: FlagKind,
pub(crate) name: Span,
pub(crate) span: Span,
}

impl PrettyDebugWithSource for Flag {
@@ -30,10 +29,10 @@ impl PrettyDebugWithSource for Flag {
}

impl Flag {
pub fn color(&self) -> Spanned<FlatShape> {
pub fn color(&self, span: impl Into<Span>) -> Spanned<FlatShape> {
match self.kind {
FlagKind::Longhand => FlatShape::Flag.spanned(self.span),
FlagKind::Shorthand => FlatShape::ShorthandFlag.spanned(self.span),
FlagKind::Longhand => FlatShape::Flag.spanned(span.into()),
FlagKind::Shorthand => FlatShape::ShorthandFlag.spanned(span.into()),
}
}
}
crates/nu-parser/src/parse/number.rs (new file, 70 lines)
@@ -0,0 +1,70 @@
use crate::hir::syntax_shape::FlatShape;
use crate::parse::parser::Number;
use bigdecimal::BigDecimal;
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Text};
use num_bigint::BigInt;
use std::str::FromStr;

#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum RawNumber {
Int(Span),
Decimal(Span),
}

impl HasSpan for RawNumber {
fn span(&self) -> Span {
match self {
RawNumber::Int(span) => *span,
RawNumber::Decimal(span) => *span,
}
}
}

impl PrettyDebugWithSource for RawNumber {
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
match self {
RawNumber::Int(span) => b::primitive(span.slice(source)),
RawNumber::Decimal(span) => b::primitive(span.slice(source)),
}
}
}

impl RawNumber {
pub fn as_flat_shape(&self) -> FlatShape {
match self {
RawNumber::Int(_) => FlatShape::Int,
RawNumber::Decimal(_) => FlatShape::Decimal,
}
}

pub fn int(span: impl Into<Span>) -> RawNumber {
let span = span.into();

RawNumber::Int(span)
}

pub fn decimal(span: impl Into<Span>) -> RawNumber {
let span = span.into();

RawNumber::Decimal(span)
}

pub(crate) fn to_number(self, source: &Text) -> Number {
match self {
RawNumber::Int(tag) => {
if let Ok(big_int) = BigInt::from_str(tag.slice(source)) {
Number::Int(big_int)
} else {
unreachable!("Internal error: could not parse text as BigInt as expected")
}
}
RawNumber::Decimal(tag) => {
if let Ok(big_decimal) = BigDecimal::from_str(tag.slice(source)) {
Number::Decimal(big_decimal)
} else {
unreachable!("Internal error: could not parse text as BigDecimal as expected")
}
}
}
}
}
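As a quick illustration of the `RawNumber::to_number` flow added in the new file above, a standalone sketch with stand-ins (plain `(usize, usize)` offsets instead of `Span`, and `i64`/`f64` instead of `BigInt`/`BigDecimal`):

    // Standalone sketch of the to_number flow; (usize, usize) offsets stand in
    // for Span, and i64/f64 stand in for BigInt/BigDecimal.
    #[derive(Debug, Clone, Copy)]
    enum RawNumber {
        Int((usize, usize)),
        Decimal((usize, usize)),
    }

    #[derive(Debug, PartialEq)]
    enum Number {
        Int(i64),
        Decimal(f64),
    }

    impl RawNumber {
        // Like RawNumber::to_number above: slice the source with the stored span
        // and parse text the lexer has already validated as digits.
        fn to_number(self, source: &str) -> Number {
            match self {
                RawNumber::Int((start, end)) => Number::Int(source[start..end].parse().unwrap()),
                RawNumber::Decimal((start, end)) => {
                    Number::Decimal(source[start..end].parse().unwrap())
                }
            }
        }
    }

    fn main() {
        let source = "ls | where size > 10.5";
        assert_eq!(RawNumber::Int((18, 20)).to_number(source), Number::Int(10));
        assert_eq!(RawNumber::Decimal((18, 22)).to_number(source), Number::Decimal(10.5));
    }

The real implementation treats a parse failure at this point as unreachable, because the lexer only produces number spans it has already validated.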
@@ -1,8 +1,8 @@
#![allow(unused)]

use crate::parse::{
call_node::*, flag::*, operator::*, pipeline::*, token_tree::*, token_tree_builder::*,
tokens::*, unit::*,
call_node::*, flag::*, number::*, operator::*, pipeline::*, token_tree::*,
token_tree_builder::*, unit::*,
};
use nom;
use nom::branch::*;
@@ -36,7 +36,7 @@ use std::str::FromStr;
macro_rules! cmp_operator {
($name:tt : $token:tt ) => {
#[tracable_parser]
pub fn $name(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn $name(input: NomSpan) -> IResult<NomSpan, $crate::parse::token_tree::SpannedToken> {
let start = input.offset;
let (input, tag) = tag($token)(input)?;
let end = input.offset;
@@ -52,7 +52,7 @@ macro_rules! cmp_operator {
macro_rules! eval_operator {
($name:tt : $token:tt ) => {
#[tracable_parser]
pub fn $name(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn $name(input: NomSpan) -> IResult<NomSpan, $crate::parse::token_tree::SpannedToken> {
let start = input.offset;
let (input, tag) = tag($token)(input)?;
let end = input.offset;
@@ -209,7 +209,7 @@ impl Into<Number> for BigInt {
}

#[tracable_parser]
pub fn number(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn number(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
let (input, number) = raw_number(input)?;

Ok((
@@ -218,12 +218,36 @@ pub fn number(input: NomSpan) -> IResult<NomSpan, TokenNode> {
))
}

#[tracable_parser]
pub fn int_member(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
let start = input.offset;
let (input, head) = digit1(input)?;

match input.fragment.chars().next() {
None | Some('.') => Ok((
input,
Token::Number(RawNumber::int((start, input.offset)))
.into_spanned((start, input.offset)),
)),
other if is_boundary(other) => Ok((
input,
Token::Number(RawNumber::int((start, input.offset)))
.into_spanned((start, input.offset)),
)),
_ => Err(nom::Err::Error(nom::error::make_error(
input,
nom::error::ErrorKind::Tag,
))),
}
}

#[tracable_parser]
pub fn raw_number(input: NomSpan) -> IResult<NomSpan, RawNumber> {
let anchoral = input;
let start = input.offset;
let (input, neg) = opt(tag("-"))(input)?;
let (input, head) = digit1(input)?;
let after_int_head = input;

match input.fragment.chars().next() {
None => return Ok((input, RawNumber::int(Span::new(start, input.offset)))),
@@ -255,7 +279,17 @@ pub fn raw_number(input: NomSpan) -> IResult<NomSpan, RawNumber> {
Err(_) => return Ok((input, RawNumber::int(Span::new(start, input.offset)))),
};

let (input, tail) = digit1(input)?;
let tail_digits_result: IResult<NomSpan, _> = digit1(input);

let (input, tail) = match tail_digits_result {
Ok((input, tail)) => (input, tail),
Err(_) => {
return Ok((
after_int_head,
RawNumber::int((start, after_int_head.offset)),
))
}
};

let end = input.offset;

@@ -272,14 +306,14 @@ pub fn raw_number(input: NomSpan) -> IResult<NomSpan, RawNumber> {
}

#[tracable_parser]
pub fn operator(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn operator(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
let (input, operator) = alt((gte, lte, neq, gt, lt, eq, cont, ncont))(input)?;

Ok((input, operator))
}

#[tracable_parser]
pub fn dq_string(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn dq_string(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
let start = input.offset;
let (input, _) = char('"')(input)?;
let start1 = input.offset;
@@ -294,7 +328,7 @@ pub fn dq_string(input: NomSpan) -> IResult<NomSpan, TokenNode> {
}

#[tracable_parser]
pub fn sq_string(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn sq_string(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
let start = input.offset;
let (input, _) = char('\'')(input)?;
let start1 = input.offset;
@@ -310,12 +344,12 @@ pub fn sq_string(input: NomSpan) -> IResult<NomSpan, TokenNode> {
}

#[tracable_parser]
pub fn string(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn string(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
alt((sq_string, dq_string))(input)
}

#[tracable_parser]
pub fn external(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn external(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
let start = input.offset;
let (input, _) = tag("^")(input)?;
let (input, bare) = take_while(is_file_char)(input)?;
@@ -373,7 +407,7 @@ pub fn matches(cond: fn(char) -> bool) -> impl Fn(NomSpan) -> IResult<NomSpan, N
}

#[tracable_parser]
pub fn pattern(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn pattern(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
word(
start_pattern,
matches(is_glob_char),
@@ -387,7 +421,7 @@ pub fn start_pattern(input: NomSpan) -> IResult<NomSpan, NomSpan> {
}

#[tracable_parser]
pub fn filename(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn filename(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
let start_pos = input.offset;

let (mut input, mut saw_special) = match start_file_char(input) {
@@ -495,7 +529,7 @@ pub fn start_filename(input: NomSpan) -> IResult<NomSpan, NomSpan> {
}

#[tracable_parser]
pub fn member(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn bare_member(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
word(
matches(is_start_member_char),
matches(is_member_char),
@@ -503,13 +537,22 @@ pub fn member(input: NomSpan) -> IResult<NomSpan, TokenNode> {
)(input)
}

#[tracable_parser]
pub fn garbage_member(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
word(
matches(is_garbage_member_char),
matches(is_garbage_member_char),
TokenTreeBuilder::spanned_garbage,
)(input)
}

#[tracable_parser]
pub fn ident(input: NomSpan) -> IResult<NomSpan, Tag> {
word(matches(is_id_start), matches(is_id_continue), Tag::from)(input)
}

#[tracable_parser]
pub fn external_word(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn external_word(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
let start = input.offset;
let (input, _) = take_while1(is_external_word_char)(input)?;
let end = input.offset;
@@ -517,22 +560,48 @@ pub fn external_word(input: NomSpan) -> IResult<NomSpan, TokenNode> {
Ok((input, TokenTreeBuilder::spanned_external_word((start, end))))
}

enum OneOf<T, U> {
First(T),
Second(U),
}

trait SubParser<'a, T>: Sized + Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, T> {}

impl<'a, T, U> SubParser<'a, U> for T where T: Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, U> {}

fn one_of<'a, T, U>(
first: impl SubParser<'a, T>,
second: impl SubParser<'a, U>,
) -> impl SubParser<'a, OneOf<T, U>> {
move |input: NomSpan<'a>| -> IResult<NomSpan, OneOf<T, U>> {
let first_result = first(input);

match first_result {
Ok((input, val)) => Ok((input, OneOf::First(val))),
Err(_) => {
let (input, val) = second(input)?;
Ok((input, OneOf::Second(val)))
}
}
}
}

#[tracable_parser]
pub fn var(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn var(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
let start = input.offset;
let (input, _) = tag("$")(input)?;
let (input, bare) = ident(input)?;
let (input, name) = one_of(tag("it"), ident)(input)?;
let end = input.offset;

Ok((
input,
TokenTreeBuilder::spanned_var(bare, Span::new(start, end)),
))
match name {
OneOf::First(it) => Ok((input, TokenTreeBuilder::spanned_it_var(it, (start, end)))),
OneOf::Second(name) => Ok((input, TokenTreeBuilder::spanned_var(name, (start, end)))),
}
}

fn tight<'a>(
parser: impl Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, Vec<TokenNode>>,
) -> impl Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, Vec<TokenNode>> {
parser: impl Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, Vec<SpannedToken>>,
) -> impl Fn(NomSpan<'a>) -> IResult<NomSpan<'a>, Vec<SpannedToken>> {
move |input: NomSpan| {
let mut result = vec![];
let (input, head) = parser(input)?;
@@ -560,7 +629,7 @@ fn tight<'a>(
}

#[tracable_parser]
pub fn flag(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn flag(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
let start = input.offset;
let (input, _) = tag("--")(input)?;
let (input, bare) = filename(input)?;
@@ -573,7 +642,7 @@ pub fn flag(input: NomSpan) -> IResult<NomSpan, TokenNode> {
}

#[tracable_parser]
pub fn shorthand(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn shorthand(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
let start = input.offset;
let (input, _) = tag("-")(input)?;
let (input, bare) = filename(input)?;
@@ -586,14 +655,14 @@ pub fn shorthand(input: NomSpan) -> IResult<NomSpan, TokenNode> {
}

#[tracable_parser]
pub fn leaf(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn leaf(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
let (input, node) = alt((number, string, operator, flag, shorthand, var, external))(input)?;

Ok((input, node))
}

#[tracable_parser]
pub fn token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<TokenNode>>> {
pub fn token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<SpannedToken>>> {
let start = input.offset;
let mut node_list = vec![];

@@ -658,7 +727,7 @@ pub fn token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<TokenNode>>> {
}

#[tracable_parser]
pub fn spaced_token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<TokenNode>>> {
pub fn spaced_token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<SpannedToken>>> {
let start = input.offset;
let (input, pre_ws) = opt(any_space)(input)?;
let (input, items) = token_list(input)?;
@@ -679,10 +748,10 @@ pub fn spaced_token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<TokenNo
}

fn make_token_list(
first: Vec<TokenNode>,
list: Vec<(Vec<TokenNode>, Vec<TokenNode>)>,
sp_right: Option<TokenNode>,
) -> Vec<TokenNode> {
first: Vec<SpannedToken>,
list: Vec<(Vec<SpannedToken>, Vec<SpannedToken>)>,
sp_right: Option<SpannedToken>,
) -> Vec<SpannedToken> {
let mut nodes = vec![];

nodes.extend(first);
@@ -700,7 +769,7 @@ fn make_token_list(
}

#[tracable_parser]
pub fn separator(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn separator(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
let left = input.offset;
let (input, ws1) = alt((tag(";"), tag("\n")))(input)?;
let right = input.offset;
@@ -709,7 +778,7 @@ pub fn separator(input: NomSpan) -> IResult<NomSpan, TokenNode> {
}

#[tracable_parser]
pub fn whitespace(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn whitespace(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
let left = input.offset;
let (input, ws1) = space1(input)?;
let right = input.offset;
@@ -718,7 +787,7 @@ pub fn whitespace(input: NomSpan) -> IResult<NomSpan, TokenNode> {
}

#[tracable_parser]
pub fn any_space(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
pub fn any_space(input: NomSpan) -> IResult<NomSpan, Vec<SpannedToken>> {
let left = input.offset;
let (input, tokens) = many1(alt((whitespace, separator, comment)))(input)?;
let right = input.offset;
@@ -727,7 +796,7 @@ pub fn any_space(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
}

#[tracable_parser]
pub fn comment(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn comment(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
let left = input.offset;
let (input, start) = tag("#")(input)?;
let (input, rest) = not_line_ending(input)?;
@@ -744,7 +813,7 @@ pub fn comment(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn delimited(
input: NomSpan,
delimiter: Delimiter,
) -> IResult<NomSpan, (Span, Span, Spanned<Vec<TokenNode>>)> {
) -> IResult<NomSpan, (Span, Span, Spanned<Vec<SpannedToken>>)> {
let left = input.offset;
let (input, open_span) = tag(delimiter.open())(input)?;
let (input, inner_items) = opt(spaced_token_list)(input)?;
@@ -768,7 +837,7 @@ pub fn delimited(
}

#[tracable_parser]
pub fn delimited_paren(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn delimited_paren(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
let (input, (left, right, tokens)) = delimited(input, Delimiter::Paren)?;

Ok((
@@ -778,7 +847,7 @@ pub fn delimited_paren(input: NomSpan) -> IResult<NomSpan, TokenNode> {
}

#[tracable_parser]
pub fn delimited_square(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn delimited_square(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
let (input, (left, right, tokens)) = delimited(input, Delimiter::Square)?;

Ok((
@@ -788,7 +857,7 @@ pub fn delimited_square(input: NomSpan) -> IResult<NomSpan, TokenNode> {
}

#[tracable_parser]
pub fn delimited_brace(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn delimited_brace(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
let (input, (left, right, tokens)) = delimited(input, Delimiter::Brace)?;

Ok((
@@ -810,7 +879,7 @@ pub fn raw_call(input: NomSpan) -> IResult<NomSpan, Spanned<CallNode>> {
}

#[tracable_parser]
pub fn range_continuation(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
pub fn range_continuation(input: NomSpan) -> IResult<NomSpan, Vec<SpannedToken>> {
let original = input;

let mut result = vec![];
@@ -824,7 +893,7 @@ pub fn range_continuation(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
}

#[tracable_parser]
pub fn dot_member(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
pub fn dot_member(input: NomSpan) -> IResult<NomSpan, Vec<SpannedToken>> {
let (input, dot_result) = dot(input)?;
let (input, member_result) = any_member(input)?;

@@ -832,12 +901,12 @@ pub fn dot_member(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
}

#[tracable_parser]
pub fn any_member(input: NomSpan) -> IResult<NomSpan, TokenNode> {
alt((number, string, member))(input)
pub fn any_member(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
alt((int_member, string, bare_member, garbage_member))(input)
}

#[tracable_parser]
pub fn tight_node(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
pub fn tight_node(input: NomSpan) -> IResult<NomSpan, Vec<SpannedToken>> {
alt((
tight(to_list(leaf)),
tight(to_list(filename)),
@@ -851,8 +920,8 @@ pub fn tight_node(input: NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
}

fn to_list(
parser: impl Fn(NomSpan) -> IResult<NomSpan, TokenNode>,
) -> impl Fn(NomSpan) -> IResult<NomSpan, Vec<TokenNode>> {
parser: impl Fn(NomSpan) -> IResult<NomSpan, SpannedToken>,
) -> impl Fn(NomSpan) -> IResult<NomSpan, Vec<SpannedToken>> {
move |input| {
let (input, next) = parser(input)?;

@@ -861,17 +930,18 @@ fn to_list(
}

#[tracable_parser]
pub fn nodes(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn nodes(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
let (input, tokens) = token_list(input)?;
let span = tokens.span;

Ok((
input,
TokenTreeBuilder::spanned_token_list(tokens.item, tokens.span),
TokenTreeBuilder::spanned_pipeline(vec![PipelineElement::new(None, tokens)], span),
))
}

#[tracable_parser]
pub fn pipeline(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn pipeline(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
let start = input.offset;
let (input, head) = spaced_token_list(input)?;
let (input, items) = many0(tuple((tag("|"), spaced_token_list)))(input)?;
@@ -900,7 +970,7 @@ pub fn pipeline(input: NomSpan) -> IResult<NomSpan, TokenNode> {
}

#[tracable_parser]
pub fn module(input: NomSpan) -> IResult<NomSpan, TokenNode> {
pub fn module(input: NomSpan) -> IResult<NomSpan, SpannedToken> {
let (input, tokens) = spaced_token_list(input)?;

if input.input_len() != 0 {
@@ -999,9 +1069,17 @@ fn is_file_char(c: char) -> bool {
}
}

fn is_garbage_member_char(c: char) -> bool {
match c {
c if c.is_whitespace() => false,
'.' => false,
_ => true,
}
}

fn is_start_member_char(c: char) -> bool {
match c {
_ if c.is_alphanumeric() => true,
_ if c.is_alphabetic() => true,
'_' => true,
'-' => true,
_ => false,
@@ -1263,7 +1341,7 @@ mod tests {
fn test_variable() {
equal_tokens! {
<nodes>
"$it" -> b::token_list(vec![b::var("it")])
"$it" -> b::token_list(vec![b::it_var()])
}

equal_tokens! {
@@ -1354,12 +1432,33 @@ mod tests {

equal_tokens! {
<nodes>
"$it.print" -> b::token_list(vec![b::var("it"), b::dot(), b::bare("print")])
"$it.print" -> b::token_list(vec![b::it_var(), b::dot(), b::bare("print")])
}

equal_tokens! {
<nodes>
"$it.0" -> b::token_list(vec![b::var("it"), b::dot(), b::int(0)])
r#"nu.0xATYKARNU.baz"# -> b::token_list(vec![
b::bare("nu"),
b::dot(),
b::garbage("0xATYKARNU"),
b::dot(),
b::bare("baz")
])
}

equal_tokens! {
<nodes>
"1.b" -> b::token_list(vec![b::int(1), b::dot(), b::bare("b")])
}

equal_tokens! {
<nodes>
"$it.0" -> b::token_list(vec![b::it_var(), b::dot(), b::int(0)])
}

equal_tokens! {
<nodes>
"fortune_tellers.2.name" -> b::token_list(vec![b::bare("fortune_tellers"), b::dot(), b::int(2), b::dot(), b::bare("name")])
}

equal_tokens! {
@@ -1386,7 +1485,7 @@ mod tests {
vec![
b::parens(vec![
b::sp(),
b::var("it"),
b::it_var(),
b::dot(),
b::bare("is"),
b::dot(),
@@ -1407,7 +1506,7 @@ mod tests {
<nodes>
r#"$it."are PAS".0"# -> b::token_list(
vec![
b::var("it"),
b::it_var(),
b::dot(),
b::string("are PAS"),
b::dot(),
@@ -1445,7 +1544,7 @@ mod tests {
fn test_smoke_single_command_it() {
equal_tokens! {
<nodes>
"echo $it" -> b::token_list(vec![b::bare("echo"), b::sp(), b::var("it")])
"echo $it" -> b::token_list(vec![b::bare("echo"), b::sp(), b::it_var()])
}
}

@@ -1533,6 +1632,17 @@ mod tests {
]
])
}

equal_tokens! {
"^echo 1 | ^cat" -> b::pipeline(vec![
vec![
b::external_command("echo"), b::sp(), b::int(1), b::sp()
],
vec![
b::sp(), b::external_command("cat")
]
])
}
}

#[test]
@@ -1631,7 +1741,7 @@ mod tests {
// b::bare("where"),
// vec![
// b::sp(),
// b::var("it"),
// b::it_var(),
// b::sp(),
// b::op("!="),
// b::sp(),
@@ -1654,7 +1764,7 @@ mod tests {
// vec![
// b::sp(),
// b::braced(vec![
// b::path(b::var("it"), vec![b::member("size")]),
// b::path(b::it_var(), vec![b::member("size")]),
// b::sp(),
// b::op(">"),
// b::sp(),
@@ -1669,10 +1779,13 @@ mod tests {
// }

fn apply(
f: impl Fn(NomSpan) -> Result<(NomSpan, TokenNode), nom::Err<(NomSpan, nom::error::ErrorKind)>>,
f: impl Fn(
NomSpan,
)
-> Result<(NomSpan, SpannedToken), nom::Err<(NomSpan, nom::error::ErrorKind)>>,
desc: &str,
string: &str,
) -> TokenNode {
) -> SpannedToken {
let result = f(nom_input(string));

match result {
@@ -1693,20 +1806,15 @@ mod tests {

fn delimited(
delimiter: Spanned<Delimiter>,
children: Vec<TokenNode>,
children: Vec<SpannedToken>,
left: usize,
right: usize,
) -> TokenNode {
) -> SpannedToken {
let start = Span::for_char(left);
let end = Span::for_char(right);

let node = DelimitedNode::new(delimiter.item, (start, end), children);
let spanned = node.spanned(Span::new(left, right));
TokenNode::Delimited(spanned)
}

fn token(token: UnspannedToken, left: usize, right: usize) -> TokenNode {
TokenNode::Token(token.into_token(Span::new(left, right)))
Token::Delimited(node).into_spanned((left, right))
}

fn build<T>(block: CurriedNode<T>) -> T {
@@ -1714,7 +1822,7 @@ mod tests {
block(&mut builder)
}

fn build_token(block: CurriedToken) -> TokenNode {
fn build_token(block: CurriedToken) -> SpannedToken {
TokenTreeBuilder::build(block).0
}
}
@@ -1,23 +1,32 @@
use crate::TokenNode;
use crate::{SpannedToken, Token};
use derive_new::new;
use getset::Getters;
use nu_source::{b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned};
use nu_source::{
b, DebugDocBuilder, HasSpan, IntoSpanned, PrettyDebugWithSource, Span, Spanned, SpannedItem,
};

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Getters, new)]
pub struct Pipeline {
#[get = "pub"]
pub(crate) parts: Vec<PipelineElement>,
pub(crate) span: Span,
}

impl IntoSpanned for Pipeline {
type Output = Spanned<Pipeline>;

fn into_spanned(self, span: impl Into<Span>) -> Self::Output {
self.spanned(span.into())
}
}

#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
pub struct Tokens {
pub(crate) tokens: Vec<TokenNode>,
pub(crate) tokens: Vec<SpannedToken>,
pub(crate) span: Span,
}

impl Tokens {
pub fn iter(&self) -> impl Iterator<Item = &TokenNode> {
pub fn iter(&self) -> impl Iterator<Item = &SpannedToken> {
self.tokens.iter()
}
}
@@ -38,7 +47,7 @@ impl HasSpan for PipelineElement {
}

impl PipelineElement {
pub fn new(pipe: Option<Span>, tokens: Spanned<Vec<TokenNode>>) -> PipelineElement {
pub fn new(pipe: Option<Span>, tokens: Spanned<Vec<SpannedToken>>) -> PipelineElement {
PipelineElement {
pipe,
tokens: Tokens {
@@ -48,7 +57,7 @@ impl PipelineElement {
}
}

pub fn tokens(&self) -> &[TokenNode] {
pub fn tokens(&self) -> &[SpannedToken] {
&self.tokens.tokens
}
}
@@ -65,9 +74,9 @@ impl PrettyDebugWithSource for Pipeline {
impl PrettyDebugWithSource for PipelineElement {
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
b::intersperse(
self.tokens.iter().map(|token| match token {
TokenNode::Whitespace(_) => b::blank(),
token => token.pretty_debug(source),
self.tokens.iter().map(|token| match token.unspanned() {
Token::Whitespace => b::blank(),
_ => token.pretty_debug(source),
}),
b::space(),
)
@@ -1,162 +1,275 @@
use crate::parse::{call_node::*, comment::*, flag::*, operator::*, pipeline::*, tokens::*};
#![allow(clippy::type_complexity)]
use crate::parse::{call_node::*, comment::*, flag::*, number::*, operator::*, pipeline::*};
use derive_new::new;
use getset::Getters;
use nu_errors::{ParseError, ShellError};
use nu_protocol::ShellTypeName;
use nu_protocol::{ShellTypeName, SpannedTypeName};
use nu_source::{
b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem, Tagged,
TaggedItem, Text,
b, DebugDocBuilder, HasSpan, PrettyDebugWithSource, Span, Spanned, SpannedItem, Text,
};
use std::fmt;
use std::borrow::Cow;
use std::ops::Deref;

#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub enum TokenNode {
Token(Token),
pub enum Token {
Number(RawNumber),
CompareOperator(CompareOperator),
EvaluationOperator(EvaluationOperator),
String(Span),
Variable(Span),
ItVariable(Span),
ExternalCommand(Span),
ExternalWord,
GlobPattern,
Bare,
Garbage,

Call(Spanned<CallNode>),
Nodes(Spanned<Vec<TokenNode>>),
Delimited(Spanned<DelimitedNode>),
Call(CallNode),
Delimited(DelimitedNode),
Pipeline(Pipeline),
Flag(Flag),
Comment(Comment),
Whitespace(Span),
Separator(Span),

Error(Spanned<ShellError>),
Whitespace,
Separator,
}

impl PrettyDebugWithSource for TokenNode {
macro_rules! token_type {
(struct $name:tt (desc: $desc:tt) -> $out:ty { |$span:ident, $pat:pat| => $do:expr }) => {
pub struct $name;

impl TokenType for $name {
type Output = $out;

fn desc(&self) -> Cow<'static, str> {
Cow::Borrowed($desc)
}

fn extract_token_value(
&self,
token: &SpannedToken,
err: ParseErrorFn<$out>,
) -> Result<$out, ParseError> {
let $span = token.span();

match *token.unspanned() {
$pat => Ok($do),
_ => err(),
}
}
}
};

(struct $name:tt (desc: $desc:tt) -> $out:ty { $pat:pat => $do:expr }) => {
pub struct $name;

impl TokenType for $name {
type Output = $out;

fn desc(&self) -> Cow<'static, str> {
Cow::Borrowed($desc)
}

fn extract_token_value(
&self,
token: &SpannedToken,
err: ParseErrorFn<$out>,
) -> Result<$out, ParseError> {
match token.unspanned().clone() {
$pat => Ok($do),
_ => err(),
}
}
}
};
}

pub type ParseErrorFn<'a, T> = &'a dyn Fn() -> Result<T, ParseError>;

token_type!(struct IntType (desc: "integer") -> RawNumber {
Token::Number(number @ RawNumber::Int(_)) => number
});

token_type!(struct DecimalType (desc: "decimal") -> RawNumber {
Token::Number(number @ RawNumber::Decimal(_)) => number
});

token_type!(struct StringType (desc: "string") -> (Span, Span) {
|outer, Token::String(inner)| => (inner, outer)
});

token_type!(struct BareType (desc: "word") -> Span {
|span, Token::Bare| => span
});

token_type!(struct DotType (desc: "dot") -> Span {
|span, Token::EvaluationOperator(EvaluationOperator::Dot)| => span
});

token_type!(struct DotDotType (desc: "dotdot") -> Span {
|span, Token::EvaluationOperator(EvaluationOperator::DotDot)| => span
});

token_type!(struct CompareOperatorType (desc: "compare operator") -> (Span, CompareOperator) {
|span, Token::CompareOperator(operator)| => (span, operator)
});

token_type!(struct ExternalWordType (desc: "external word") -> Span {
|span, Token::ExternalWord| => span
});

token_type!(struct ExternalCommandType (desc: "external command") -> (Span, Span) {
|outer, Token::ExternalCommand(inner)| => (inner, outer)
});

token_type!(struct CommentType (desc: "comment") -> (Comment, Span) {
|outer, Token::Comment(comment)| => (comment, outer)
});

token_type!(struct SeparatorType (desc: "separator") -> Span {
|span, Token::Separator| => span
});

token_type!(struct WhitespaceType (desc: "whitespace") -> Span {
|span, Token::Whitespace| => span
});

token_type!(struct WordType (desc: "word") -> Span {
|span, Token::Bare| => span
});

token_type!(struct ItVarType (desc: "$it") -> (Span, Span) {
|outer, Token::ItVariable(inner)| => (inner, outer)
});

token_type!(struct VarType (desc: "variable") -> (Span, Span) {
|outer, Token::Variable(inner)| => (inner, outer)
});

token_type!(struct PipelineType (desc: "pipeline") -> Pipeline {
Token::Pipeline(pipeline) => pipeline
});

token_type!(struct BlockType (desc: "block") -> DelimitedNode {
Token::Delimited(block @ DelimitedNode { delimiter: Delimiter::Brace, .. }) => block
});

token_type!(struct SquareType (desc: "square") -> DelimitedNode {
Token::Delimited(square @ DelimitedNode { delimiter: Delimiter::Square, .. }) => square
});

pub trait TokenType {
type Output;

fn desc(&self) -> Cow<'static, str>;

fn extract_token_value(
&self,
token: &SpannedToken,
err: ParseErrorFn<Self::Output>,
) -> Result<Self::Output, ParseError>;
}

impl Token {
pub fn into_spanned(self, span: impl Into<Span>) -> SpannedToken {
SpannedToken {
unspanned: self,
span: span.into(),
}
}
}

#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters)]
pub struct SpannedToken {
#[get = "pub"]
unspanned: Token,
span: Span,
}

impl Deref for SpannedToken {
type Target = Token;
fn deref(&self) -> &Self::Target {
&self.unspanned
}
}

impl HasSpan for SpannedToken {
fn span(&self) -> Span {
self.span
}
}

impl ShellTypeName for SpannedToken {
fn type_name(&self) -> &'static str {
self.unspanned.type_name()
}
}

impl PrettyDebugWithSource for SpannedToken {
fn pretty_debug(&self, source: &str) -> DebugDocBuilder {
match self {
TokenNode::Token(token) => token.pretty_debug(source),
TokenNode::Call(call) => call.pretty_debug(source),
TokenNode::Nodes(nodes) => b::intersperse(
nodes.iter().map(|node| node.pretty_debug(source)),
b::space(),
),
TokenNode::Delimited(delimited) => delimited.pretty_debug(source),
TokenNode::Pipeline(pipeline) => pipeline.pretty_debug(source),
TokenNode::Flag(flag) => flag.pretty_debug(source),
TokenNode::Whitespace(space) => b::typed(
match self.unspanned() {
Token::Number(number) => number.pretty_debug(source),
Token::CompareOperator(operator) => operator.pretty_debug(source),
Token::EvaluationOperator(operator) => operator.pretty_debug(source),
Token::String(_) | Token::GlobPattern | Token::Bare => {
b::primitive(self.span.slice(source))
}
Token::Variable(_) => b::var(self.span.slice(source)),
Token::ItVariable(_) => b::keyword(self.span.slice(source)),
Token::ExternalCommand(_) => b::description(self.span.slice(source)),
Token::ExternalWord => b::description(self.span.slice(source)),
Token::Call(call) => call.pretty_debug(source),
Token::Delimited(delimited) => delimited.pretty_debug(source),
Token::Pipeline(pipeline) => pipeline.pretty_debug(source),
Token::Flag(flag) => flag.pretty_debug(source),
Token::Garbage => b::error(self.span.slice(source)),
Token::Whitespace => b::typed(
"whitespace",
b::description(format!("{:?}", space.slice(source))),
b::description(format!("{:?}", self.span.slice(source))),
),
TokenNode::Separator(span) => b::typed(
Token::Separator => b::typed(
"separator",
b::description(format!("{:?}", span.slice(source))),
b::description(format!("{:?}", self.span.slice(source))),
),
TokenNode::Comment(comment) => {
Token::Comment(comment) => {
b::typed("comment", b::description(comment.text.slice(source)))
}
TokenNode::Error(_) => b::error("error"),
}
}
}

impl ShellTypeName for TokenNode {
impl ShellTypeName for Token {
fn type_name(&self) -> &'static str {
match self {
TokenNode::Token(t) => t.type_name(),
TokenNode::Nodes(_) => "nodes",
TokenNode::Call(_) => "command",
TokenNode::Delimited(d) => d.type_name(),
TokenNode::Pipeline(_) => "pipeline",
TokenNode::Flag(_) => "flag",
TokenNode::Whitespace(_) => "whitespace",
TokenNode::Separator(_) => "separator",
TokenNode::Comment(_) => "comment",
TokenNode::Error(_) => "error",
Token::Number(_) => "number",
Token::CompareOperator(_) => "comparison operator",
Token::EvaluationOperator(EvaluationOperator::Dot) => "dot",
Token::EvaluationOperator(EvaluationOperator::DotDot) => "dot dot",
Token::String(_) => "string",
Token::Variable(_) => "variable",
Token::ItVariable(_) => "it variable",
Token::ExternalCommand(_) => "external command",
Token::ExternalWord => "external word",
Token::GlobPattern => "glob pattern",
Token::Bare => "word",
Token::Call(_) => "command",
Token::Delimited(d) => d.type_name(),
Token::Pipeline(_) => "pipeline",
Token::Flag(_) => "flag",
Token::Garbage => "garbage",
Token::Whitespace => "whitespace",
Token::Separator => "separator",
Token::Comment(_) => "comment",
}
}
}

pub struct DebugTokenNode<'a> {
node: &'a TokenNode,
source: &'a Text,
}

impl fmt::Debug for DebugTokenNode<'_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match self.node {
TokenNode::Token(t) => write!(f, "{:?}", t.debug(self.source)),
TokenNode::Call(s) => {
write!(f, "(")?;

write!(f, "{}", s.head().debug(self.source))?;

if let Some(children) = s.children() {
for child in children {
write!(f, "{}", child.debug(self.source))?;
}
}

write!(f, ")")
}

TokenNode::Delimited(d) => {
write!(
f,
"{}",
match d.delimiter {
Delimiter::Brace => "{",
Delimiter::Paren => "(",
Delimiter::Square => "[",
}
)?;

for child in d.children() {
write!(f, "{:?}", child.old_debug(self.source))?;
}

write!(
f,
"{}",
match d.delimiter {
Delimiter::Brace => "}",
Delimiter::Paren => ")",
Delimiter::Square => "]",
}
)
}
TokenNode::Pipeline(pipeline) => write!(f, "{}", pipeline.debug(self.source)),
TokenNode::Error(_) => write!(f, "<error>"),
rest => write!(f, "{}", rest.span().slice(self.source)),
}
impl From<&SpannedToken> for Span {
fn from(token: &SpannedToken) -> Span {
token.span
}
}

impl From<&TokenNode> for Span {
fn from(token: &TokenNode) -> Span {
token.span()
}
}

impl HasSpan for TokenNode {
fn span(&self) -> Span {
match self {
TokenNode::Token(t) => t.span,
TokenNode::Nodes(t) => t.span,
TokenNode::Call(s) => s.span,
TokenNode::Delimited(s) => s.span,
TokenNode::Pipeline(s) => s.span,
TokenNode::Flag(s) => s.span,
TokenNode::Whitespace(s) => *s,
TokenNode::Separator(s) => *s,
TokenNode::Comment(c) => c.span(),
TokenNode::Error(s) => s.span,
}
}
}

impl TokenNode {
pub fn tagged_type_name(&self) -> Tagged<&'static str> {
self.type_name().tagged(self.span())
}

pub fn old_debug<'a>(&'a self, source: &'a Text) -> DebugTokenNode<'a> {
DebugTokenNode { node: self, source }
}

impl SpannedToken {
pub fn as_external_arg(&self, source: &Text) -> String {
self.span().slice(source).to_string()
}
@@ -166,145 +279,105 @@ impl TokenNode {
}

pub fn get_variable(&self) -> Result<(Span, Span), ShellError> {
match self {
TokenNode::Token(Token {
unspanned: UnspannedToken::Variable(inner_span),
span: outer_span,
}) => Ok((*outer_span, *inner_span)),
_ => Err(ShellError::type_error(
"variable",
self.type_name().spanned(self.span()),
)),
match self.unspanned() {
Token::Variable(inner_span) => Ok((self.span(), *inner_span)),
_ => Err(ShellError::type_error("variable", self.spanned_type_name())),
}
}

pub fn is_bare(&self) -> bool {
match self {
TokenNode::Token(Token {
unspanned: UnspannedToken::Bare,
..
}) => true,
match self.unspanned() {
Token::Bare => true,
_ => false,
}
}

pub fn is_string(&self) -> bool {
match self {
TokenNode::Token(Token {
unspanned: UnspannedToken::String(_),
..
}) => true,
match self.unspanned() {
Token::String(_) => true,
_ => false,
}
}

pub fn is_number(&self) -> bool {
match self {
TokenNode::Token(Token {
unspanned: UnspannedToken::Number(_),
..
}) => true,
match self.unspanned() {
Token::Number(_) => true,
_ => false,
}
}

pub fn as_string(&self) -> Option<(Span, Span)> {
match self {
TokenNode::Token(Token {
unspanned: UnspannedToken::String(inner_span),
span: outer_span,
}) => Some((*outer_span, *inner_span)),
match self.unspanned() {
Token::String(inner_span) => Some((self.span(), *inner_span)),
_ => None,
}
}

pub fn is_pattern(&self) -> bool {
match self {
TokenNode::Token(Token {
unspanned: UnspannedToken::GlobPattern,
..
}) => true,
match self.unspanned() {
Token::GlobPattern => true,
_ => false,
}
}

pub fn is_word(&self) -> bool {
match self {
TokenNode::Token(Token {
unspanned: UnspannedToken::Bare,
..
}) => true,
match self.unspanned() {
Token::Bare => true,
_ => false,
}
}

pub fn is_int(&self) -> bool {
match self {
TokenNode::Token(Token {
unspanned: UnspannedToken::Number(RawNumber::Int(_)),
..
}) => true,
match self.unspanned() {
Token::Number(RawNumber::Int(_)) => true,
_ => false,
}
}

pub fn is_dot(&self) -> bool {
match self {
TokenNode::Token(Token {
unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot),
..
}) => true,
match self.unspanned() {
Token::EvaluationOperator(EvaluationOperator::Dot) => true,
_ => false,
}
}

pub fn as_block(&self) -> Option<(Spanned<&[TokenNode]>, (Span, Span))> {
match self {
TokenNode::Delimited(Spanned {
item:
DelimitedNode {
delimiter,
children,
spans,
},
span,
}) if *delimiter == Delimiter::Brace => Some(((&children[..]).spanned(*span), *spans)),
_ => None,
}
}

pub fn is_external(&self) -> bool {
match self {
TokenNode::Token(Token {
unspanned: UnspannedToken::ExternalCommand(..),
..
}) => true,
_ => false,
}
}

pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option<Flag> {
match self {
TokenNode::Flag(flag @ Flag { .. }) if value == flag.name().slice(source) => {
Some(*flag)
pub fn as_block(&self) -> Option<(Spanned<&[SpannedToken]>, (Span, Span))> {
match self.unspanned() {
Token::Delimited(DelimitedNode {
delimiter,
children,
spans,
}) if *delimiter == Delimiter::Brace => {
Some(((&children[..]).spanned(self.span()), *spans))
}
_ => None,
}
}

pub fn is_external(&self) -> bool {
match self.unspanned() {
Token::ExternalCommand(..) => true,
_ => false,
}
}

pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option<Flag> {
match self.unspanned() {
Token::Flag(flag @ Flag { .. }) if value == flag.name().slice(source) => Some(*flag),
_ => None,
}
}

pub fn as_pipeline(&self) -> Result<Pipeline, ParseError> {
match self {
TokenNode::Pipeline(pipeline) => Ok(pipeline.clone()),
other => Err(ParseError::mismatch(
"pipeline",
other.type_name().spanned(other.span()),
)),
match self.unspanned() {
Token::Pipeline(pipeline) => Ok(pipeline.clone()),
_ => Err(ParseError::mismatch("pipeline", self.spanned_type_name())),
}
}

pub fn is_whitespace(&self) -> bool {
match self {
TokenNode::Whitespace(_) => true,
match self.unspanned() {
Token::Whitespace => true,
_ => false,
}
}
@@ -315,7 +388,13 @@ impl TokenNode {
pub struct DelimitedNode {
pub(crate) delimiter: Delimiter,
pub(crate) spans: (Span, Span),
pub(crate) children: Vec<TokenNode>,
pub(crate) children: Vec<SpannedToken>,
}

impl HasSpan for DelimitedNode {
fn span(&self) -> Span {
self.spans.0.until(self.spans.1)
}
}

impl PrettyDebugWithSource for DelimitedNode {
@@ -369,79 +448,68 @@ impl Delimiter {
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
#[get = "pub(crate)"]
pub struct PathNode {
head: Box<TokenNode>,
tail: Vec<TokenNode>,
head: Box<SpannedToken>,
tail: Vec<SpannedToken>,
}

#[cfg(test)]
impl TokenNode {
impl SpannedToken {
pub fn expect_external(&self) -> Span {
match self {
TokenNode::Token(Token {
unspanned: UnspannedToken::ExternalCommand(span),
..
}) => *span,
other => panic!(
match self.unspanned() {
Token::ExternalCommand(span) => *span,
_ => panic!(
"Only call expect_external if you checked is_external first, found {:?}",
other
self
),
}
}

pub fn expect_string(&self) -> (Span, Span) {
match self {
TokenNode::Token(Token {
unspanned: UnspannedToken::String(inner_span),
span: outer_span,
}) => (*outer_span, *inner_span),
match self.unspanned() {
Token::String(inner_span) => (self.span(), *inner_span),
other => panic!("Expected string, found {:?}", other),
}
}

pub fn expect_list(&self) -> Spanned<&[TokenNode]> {
match self {
TokenNode::Nodes(token_nodes) => token_nodes[..].spanned(token_nodes.span),
other => panic!("Expected list, found {:?}", other),
pub fn expect_list(&self) -> Spanned<Vec<SpannedToken>> {
match self.unspanned() {
Token::Pipeline(pipeline) => pipeline
.parts()
.iter()
.flat_map(|part| part.tokens())
.cloned()
.collect::<Vec<SpannedToken>>()
.spanned(self.span()),
_ => panic!("Expected list, found {:?}", self),
}
}

pub fn expect_pattern(&self) -> Span {
match self {
TokenNode::Token(Token {
unspanned: UnspannedToken::GlobPattern,
span: outer_span,
}) => *outer_span,
other => panic!("Expected pattern, found {:?}", other),
match self.unspanned() {
Token::GlobPattern => self.span(),
_ => panic!("Expected pattern, found {:?}", self),
}
}

pub fn expect_var(&self) -> (Span, Span) {
match self {
TokenNode::Token(Token {
unspanned: UnspannedToken::Variable(inner_span),
span: outer_span,
}) => (*outer_span, *inner_span),
match self.unspanned() {
Token::Variable(inner_span) => (self.span(), *inner_span),
Token::ItVariable(inner_span) => (self.span(), *inner_span),
other => panic!("Expected var, found {:?}", other),
}
}

pub fn expect_dot(&self) -> Span {
match self {
TokenNode::Token(Token {
unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::Dot),
span,
}) => *span,
match self.unspanned() {
Token::EvaluationOperator(EvaluationOperator::Dot) => self.span(),
other => panic!("Expected dot, found {:?}", other),
}
}

pub fn expect_bare(&self) -> Span {
match self {
TokenNode::Token(Token {
unspanned: UnspannedToken::Bare,
span,
}) => *span,
other => panic!("Expected bare, found {:?}", other),
match self.unspanned() {
Token::Bare => self.span(),
_ => panic!("Expected bare, found {:?}", self),
}
}
}
@ -1,10 +1,10 @@
|
||||
use crate::parse::call_node::CallNode;
|
||||
use crate::parse::comment::Comment;
|
||||
use crate::parse::flag::{Flag, FlagKind};
|
||||
use crate::parse::number::RawNumber;
|
||||
use crate::parse::operator::{CompareOperator, EvaluationOperator};
|
||||
use crate::parse::pipeline::{Pipeline, PipelineElement};
|
||||
use crate::parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
|
||||
use crate::parse::tokens::{RawNumber, UnspannedToken};
|
||||
use crate::parse::token_tree::{DelimitedNode, Delimiter, SpannedToken, Token};
|
||||
use bigdecimal::BigDecimal;
|
||||
use nu_source::{Span, Spanned, SpannedItem};
|
||||
use num_bigint::BigInt;
|
||||
@ -21,11 +21,11 @@ impl TokenTreeBuilder {
|
||||
}
|
||||
}
|
||||
|
||||
pub type CurriedToken = Box<dyn FnOnce(&mut TokenTreeBuilder) -> TokenNode + 'static>;
|
||||
pub type CurriedToken = Box<dyn FnOnce(&mut TokenTreeBuilder) -> SpannedToken + 'static>;
|
||||
pub type CurriedCall = Box<dyn FnOnce(&mut TokenTreeBuilder) -> Spanned<CallNode> + 'static>;
|
||||
|
||||
impl TokenTreeBuilder {
|
||||
pub fn build(block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) {
|
||||
pub fn build(block: impl FnOnce(&mut Self) -> SpannedToken) -> (SpannedToken, String) {
|
||||
let mut builder = TokenTreeBuilder::new();
|
||||
let node = block(&mut builder);
|
||||
(node, builder.output)
|
||||
@ -77,8 +77,8 @@ impl TokenTreeBuilder {
        })
    }

    pub fn spanned_pipeline(input: Vec<PipelineElement>, span: impl Into<Span>) -> TokenNode {
        TokenNode::Pipeline(Pipeline::new(input, span.into()))
    pub fn spanned_pipeline(input: Vec<PipelineElement>, span: impl Into<Span>) -> SpannedToken {
        Token::Pipeline(Pipeline::new(input)).into_spanned(span)
    }

    pub fn token_list(input: Vec<CurriedToken>) -> CurriedToken {
@ -91,8 +91,28 @@ impl TokenTreeBuilder {
        })
    }

    pub fn spanned_token_list(input: Vec<TokenNode>, span: impl Into<Span>) -> TokenNode {
        TokenNode::Nodes(input.spanned(span.into()))
    pub fn spanned_token_list(input: Vec<SpannedToken>, span: impl Into<Span>) -> SpannedToken {
        let span = span.into();
        Token::Pipeline(Pipeline::new(vec![PipelineElement::new(
            None,
            input.spanned(span),
        )]))
        .into_spanned(span)
    }

    pub fn garbage(input: impl Into<String>) -> CurriedToken {
        let input = input.into();

        Box::new(move |b| {
            let (start, end) = b.consume(&input);
            b.pos = end;

            TokenTreeBuilder::spanned_garbage(Span::new(start, end))
        })
    }

    pub fn spanned_garbage(span: impl Into<Span>) -> SpannedToken {
        Token::Garbage.into_spanned(span)
    }

    pub fn op(input: impl Into<CompareOperator>) -> CurriedToken {
@ -107,8 +127,11 @@ impl TokenTreeBuilder {
        })
    }

    pub fn spanned_cmp_op(input: impl Into<CompareOperator>, span: impl Into<Span>) -> TokenNode {
        TokenNode::Token(UnspannedToken::CompareOperator(input.into()).into_token(span))
    pub fn spanned_cmp_op(
        input: impl Into<CompareOperator>,
        span: impl Into<Span>,
    ) -> SpannedToken {
        Token::CompareOperator(input.into()).into_spanned(span)
    }

    pub fn dot() -> CurriedToken {
@ -134,8 +157,8 @@ impl TokenTreeBuilder {
    pub fn spanned_eval_op(
        input: impl Into<EvaluationOperator>,
        span: impl Into<Span>,
    ) -> TokenNode {
        TokenNode::Token(UnspannedToken::EvaluationOperator(input.into()).into_token(span))
    ) -> SpannedToken {
        Token::EvaluationOperator(input.into()).into_spanned(span)
    }

    pub fn string(input: impl Into<String>) -> CurriedToken {
@ -154,8 +177,8 @@ impl TokenTreeBuilder {
        })
    }

    pub fn spanned_string(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
        TokenNode::Token(UnspannedToken::String(input.into()).into_token(span))
    pub fn spanned_string(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
        Token::String(input.into()).into_spanned(span)
    }

    pub fn bare(input: impl Into<String>) -> CurriedToken {
@ -169,8 +192,8 @@ impl TokenTreeBuilder {
        })
    }

    pub fn spanned_bare(span: impl Into<Span>) -> TokenNode {
        TokenNode::Token(UnspannedToken::Bare.into_token(span))
    pub fn spanned_bare(span: impl Into<Span>) -> SpannedToken {
        Token::Bare.into_spanned(span)
    }

    pub fn pattern(input: impl Into<String>) -> CurriedToken {
@ -184,8 +207,8 @@ impl TokenTreeBuilder {
        })
    }

    pub fn spanned_pattern(input: impl Into<Span>) -> TokenNode {
        TokenNode::Token(UnspannedToken::GlobPattern.into_token(input))
    pub fn spanned_pattern(input: impl Into<Span>) -> SpannedToken {
        Token::GlobPattern.into_spanned(input)
    }

    pub fn external_word(input: impl Into<String>) -> CurriedToken {
@ -199,8 +222,8 @@ impl TokenTreeBuilder {
        })
    }

    pub fn spanned_external_word(input: impl Into<Span>) -> TokenNode {
        TokenNode::Token(UnspannedToken::ExternalWord.into_token(input))
    pub fn spanned_external_word(input: impl Into<Span>) -> SpannedToken {
        Token::ExternalWord.into_spanned(input)
    }

    pub fn external_command(input: impl Into<String>) -> CurriedToken {
@ -218,8 +241,11 @@ impl TokenTreeBuilder {
        })
    }

    pub fn spanned_external_command(inner: impl Into<Span>, outer: impl Into<Span>) -> TokenNode {
        TokenNode::Token(UnspannedToken::ExternalCommand(inner.into()).into_token(outer))
    pub fn spanned_external_command(
        inner: impl Into<Span>,
        outer: impl Into<Span>,
    ) -> SpannedToken {
        Token::ExternalCommand(inner.into()).into_spanned(outer)
    }

    pub fn int(input: impl Into<BigInt>) -> CurriedToken {
@ -250,8 +276,8 @@ impl TokenTreeBuilder {
        })
    }

    pub fn spanned_number(input: impl Into<RawNumber>, span: impl Into<Span>) -> TokenNode {
        TokenNode::Token(UnspannedToken::Number(input.into()).into_token(span))
    pub fn spanned_number(input: impl Into<RawNumber>, span: impl Into<Span>) -> SpannedToken {
        Token::Number(input.into()).into_spanned(span)
    }

    pub fn var(input: impl Into<String>) -> CurriedToken {
@ -265,8 +291,21 @@ impl TokenTreeBuilder {
        })
    }

    pub fn spanned_var(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
        TokenNode::Token(UnspannedToken::Variable(input.into()).into_token(span))
    pub fn spanned_var(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
        Token::Variable(input.into()).into_spanned(span)
    }

    pub fn it_var() -> CurriedToken {
        Box::new(move |b| {
            let (start, _) = b.consume("$");
            let (inner_start, end) = b.consume("it");

            TokenTreeBuilder::spanned_it_var(Span::new(inner_start, end), Span::new(start, end))
        })
    }

    pub fn spanned_it_var(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
        Token::ItVariable(input.into()).into_spanned(span)
    }

    pub fn flag(input: impl Into<String>) -> CurriedToken {
@ -280,8 +319,9 @@ impl TokenTreeBuilder {
        })
    }

    pub fn spanned_flag(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
        TokenNode::Flag(Flag::new(FlagKind::Longhand, input.into(), span.into()))
    pub fn spanned_flag(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
        let span = span.into();
        Token::Flag(Flag::new(FlagKind::Longhand, input.into())).into_spanned(span)
    }

    pub fn shorthand(input: impl Into<String>) -> CurriedToken {
@ -295,8 +335,10 @@ impl TokenTreeBuilder {
        })
    }

    pub fn spanned_shorthand(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
        TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into(), span.into()))
    pub fn spanned_shorthand(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
        let span = span.into();

        Token::Flag(Flag::new(FlagKind::Shorthand, input.into())).into_spanned(span)
    }

    pub fn call(head: CurriedToken, input: Vec<CurriedToken>) -> CurriedCall {
@ -316,7 +358,7 @@ impl TokenTreeBuilder {
        })
    }

    pub fn spanned_call(input: Vec<TokenNode>, span: impl Into<Span>) -> Spanned<CallNode> {
    pub fn spanned_call(input: Vec<SpannedToken>, span: impl Into<Span>) -> Spanned<CallNode> {
        if input.is_empty() {
            panic!("BUG: spanned call (TODO)")
        }
@ -337,7 +379,7 @@ impl TokenTreeBuilder {
        input: Vec<CurriedToken>,
        _open: &str,
        _close: &str,
    ) -> (Span, Span, Span, Vec<TokenNode>) {
    ) -> (Span, Span, Span, Vec<SpannedToken>) {
        let (start_open_paren, end_open_paren) = self.consume("(");
        let mut output = vec![];
        for item in input {
@ -362,13 +404,12 @@ impl TokenTreeBuilder {
    }

    pub fn spanned_parens(
        input: impl Into<Vec<TokenNode>>,
        input: impl Into<Vec<SpannedToken>>,
        spans: (Span, Span),
        span: impl Into<Span>,
    ) -> TokenNode {
        TokenNode::Delimited(
            DelimitedNode::new(Delimiter::Paren, spans, input.into()).spanned(span.into()),
        )
    ) -> SpannedToken {
        Token::Delimited(DelimitedNode::new(Delimiter::Paren, spans, input.into()))
            .into_spanned(span.into())
    }

    pub fn square(input: Vec<CurriedToken>) -> CurriedToken {
@ -380,13 +421,12 @@ impl TokenTreeBuilder {
    }

    pub fn spanned_square(
        input: impl Into<Vec<TokenNode>>,
        input: impl Into<Vec<SpannedToken>>,
        spans: (Span, Span),
        span: impl Into<Span>,
    ) -> TokenNode {
        TokenNode::Delimited(
            DelimitedNode::new(Delimiter::Square, spans, input.into()).spanned(span.into()),
        )
    ) -> SpannedToken {
        Token::Delimited(DelimitedNode::new(Delimiter::Square, spans, input.into()))
            .into_spanned(span)
    }

    pub fn braced(input: Vec<CurriedToken>) -> CurriedToken {
@ -398,19 +438,18 @@ impl TokenTreeBuilder {
    }

    pub fn spanned_brace(
        input: impl Into<Vec<TokenNode>>,
        input: impl Into<Vec<SpannedToken>>,
        spans: (Span, Span),
        span: impl Into<Span>,
    ) -> TokenNode {
        TokenNode::Delimited(
            DelimitedNode::new(Delimiter::Brace, spans, input.into()).spanned(span.into()),
        )
    ) -> SpannedToken {
        Token::Delimited(DelimitedNode::new(Delimiter::Brace, spans, input.into()))
            .into_spanned(span)
    }

    pub fn sp() -> CurriedToken {
        Box::new(|b| {
            let (start, end) = b.consume(" ");
            TokenNode::Whitespace(Span::new(start, end))
            Token::Whitespace.into_spanned((start, end))
        })
    }

@ -423,8 +462,8 @@ impl TokenTreeBuilder {
        })
    }

    pub fn spanned_ws(span: impl Into<Span>) -> TokenNode {
        TokenNode::Whitespace(span.into())
    pub fn spanned_ws(span: impl Into<Span>) -> SpannedToken {
        Token::Whitespace.into_spanned(span)
    }

    pub fn sep(input: impl Into<String>) -> CurriedToken {
@ -436,8 +475,8 @@ impl TokenTreeBuilder {
        })
    }

    pub fn spanned_sep(span: impl Into<Span>) -> TokenNode {
        TokenNode::Separator(span.into())
    pub fn spanned_sep(span: impl Into<Span>) -> SpannedToken {
        Token::Separator.into_spanned(span)
    }

    pub fn comment(input: impl Into<String>) -> CurriedToken {
@ -453,8 +492,10 @@ impl TokenTreeBuilder {
        })
    }

    pub fn spanned_comment(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
        TokenNode::Comment(Comment::line(input, span))
    pub fn spanned_comment(input: impl Into<Span>, span: impl Into<Span>) -> SpannedToken {
        let span = span.into();

        Token::Comment(Comment::line(input)).into_spanned(span)
    }

    fn consume(&mut self, input: &str) -> (usize, usize) {
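Taken together, the curried constructors above compose into whole token trees. A rough end-to-end sketch, illustrative only and not part of the diff, assuming `token_list`, `bare`, `sp`, and `pattern` keep the signatures shown in this file:

    // Build the tokens for `ls *.txt` and keep the source text the builder emitted.
    let (token, source) = TokenTreeBuilder::build(|b| {
        TokenTreeBuilder::token_list(vec![
            TokenTreeBuilder::bare("ls"),
            TokenTreeBuilder::sp(),
            TokenTreeBuilder::pattern("*.txt"),
        ])(b)
    });
    assert_eq!(source, "ls *.txt"); // the builder's output is the text it consumed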