mirror of
https://github.com/nushell/nushell.git
synced 2025-04-11 06:48:31 +02:00
This commit should finish the `coloring_in_tokens` feature, which moves the shape accumulator into the token stream. This allows rollbacks of the token stream to also roll back any shapes that were added. This commit also adds a much nicer syntax highlighter trace, which shows all of the paths the highlighter took to arrive at a particular coloring output. This change is fairly substantial, but really improves the understandability of the flow. I intend to update the normal parser with a similar tracing view. In general, this change also fleshes out the concept of "atomic" token stream operations. A good next step would be to try to make the parser more error-correcting, using the coloring infrastructure. A follow-up step would involve merging the parser and highlighter shapes themselves.
130 lines
3.9 KiB
Rust
130 lines
3.9 KiB
Rust
use crate::parser::hir::syntax_shape::{
|
|
expand_atom, expand_variable, parse_single_node, AtomicToken, ExpandContext, ExpandExpression,
|
|
ExpansionRule, FallibleColorSyntax, FlatShape, TestSyntax,
|
|
};
|
|
use crate::parser::hir::tokens_iterator::Peeked;
|
|
use crate::parser::{hir, hir::TokensIterator, RawToken, TokenNode};
|
|
use crate::prelude::*;
|
|
|
|
/// Zero-sized syntax shape for string tokens. The impls below give it
/// coloring (`FallibleColorSyntax`, in both `coloring_in_tokens` variants),
/// expansion to HIR (`ExpandExpression`), and peek-testing (`TestSyntax`).
#[derive(Debug, Copy, Clone)]
pub struct StringShape;
|
|
|
|
#[cfg(not(coloring_in_tokens))]
|
|
impl FallibleColorSyntax for StringShape {
|
|
type Info = ();
|
|
type Input = FlatShape;
|
|
|
|
fn color_syntax<'a, 'b>(
|
|
&self,
|
|
input: &FlatShape,
|
|
token_nodes: &'b mut TokensIterator<'a>,
|
|
context: &ExpandContext,
|
|
shapes: &mut Vec<Spanned<FlatShape>>,
|
|
) -> Result<(), ShellError> {
|
|
let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive());
|
|
|
|
let atom = match atom {
|
|
Err(_) => return Ok(()),
|
|
Ok(atom) => atom,
|
|
};
|
|
|
|
match atom {
|
|
Spanned {
|
|
item: AtomicToken::String { .. },
|
|
span,
|
|
} => shapes.push((*input).spanned(span)),
|
|
other => other.color_tokens(shapes),
|
|
}
|
|
|
|
Ok(())
|
|
}
|
|
}
|
|
|
|
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for StringShape {
    type Info = ();
    type Input = FlatShape;

    fn name(&self) -> &'static str {
        "StringShape"
    }

    /// Color the next atom as a string. This `coloring_in_tokens` variant
    /// records shapes on the token stream itself instead of an external
    /// vector, so rollbacks of the stream also roll back the shapes.
    fn color_syntax<'a, 'b>(
        &self,
        input: &FlatShape,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        // Permissive expansion that fails just colors nothing — this colorer
        // is fallible, so "no string here" is not an error.
        let atom = match expand_atom(token_nodes, "string", context, ExpansionRule::permissive()) {
            Ok(atom) => atom,
            Err(_) => return Ok(()),
        };

        match atom {
            // A genuine string takes the caller-supplied shape.
            Spanned {
                item: AtomicToken::String { .. },
                span,
            } => token_nodes.color_shape((*input).spanned(span)),
            // Anything else contributes its own token colors, applied
            // through the stream's shape accumulator.
            other => token_nodes.mutate_shapes(|shapes| other.color_tokens(shapes)),
        }

        Ok(())
    }
}
|
|
|
|
impl ExpandExpression for StringShape {
|
|
fn expand_expr<'a, 'b>(
|
|
&self,
|
|
token_nodes: &mut TokensIterator<'_>,
|
|
context: &ExpandContext,
|
|
) -> Result<hir::Expression, ShellError> {
|
|
parse_single_node(token_nodes, "String", |token, token_span, _| {
|
|
Ok(match token {
|
|
RawToken::GlobPattern => {
|
|
return Err(ShellError::type_error(
|
|
"String",
|
|
"glob pattern".tagged(token_span),
|
|
))
|
|
}
|
|
RawToken::Operator(..) => {
|
|
return Err(ShellError::type_error(
|
|
"String",
|
|
"operator".tagged(token_span),
|
|
))
|
|
}
|
|
RawToken::Variable(span) => expand_variable(span, token_span, &context.source),
|
|
RawToken::ExternalCommand(span) => {
|
|
hir::Expression::external_command(span, token_span)
|
|
}
|
|
RawToken::ExternalWord => {
|
|
return Err(ShellError::invalid_external_word(token_span))
|
|
}
|
|
RawToken::Number(_) => hir::Expression::bare(token_span),
|
|
RawToken::Bare => hir::Expression::bare(token_span),
|
|
RawToken::String(span) => hir::Expression::string(span, token_span),
|
|
})
|
|
})
|
|
}
|
|
}
|
|
|
|
impl TestSyntax for StringShape {
|
|
fn test<'a, 'b>(
|
|
&self,
|
|
token_nodes: &'b mut TokensIterator<'a>,
|
|
_context: &ExpandContext,
|
|
) -> Option<Peeked<'a, 'b>> {
|
|
let peeked = token_nodes.peek_any();
|
|
|
|
match peeked.node {
|
|
Some(TokenNode::Token(token)) => match token.item {
|
|
RawToken::String(_) => Some(peeked),
|
|
_ => None,
|
|
},
|
|
|
|
_ => None,
|
|
}
|
|
}
|
|
}
|