The original purpose of this PR was to modernize the external parser to
use the new Shape system.

This commit does include some of that change, but the more important
part of it is an improvement to the expansion trace. A previous commit
(6a7c00ea) added trace infrastructure to the syntax coloring feature;
this commit adds tracing to the expander.

The bulk of that work, in addition to the tree-builder logic, was an
overhaul of the formatter traits to make them more general-purpose and
more structured.

Some highlights:

- `ToDebug` was split into two traits (`ToDebug` and `DebugFormat`)
  because implementations needed to become trait objects, but a
  convenience method on `ToDebug` kept it from being object-safe.
- `DebugFormat`'s `fmt_debug` method now takes a `DebugFormatter` rather
  than a standard formatter, and `DebugFormatter` has a new (but still
  limited) facility for structured formatting (see the sketch below).
- Implementations of `ExpandSyntax` need to produce output that
  implements `DebugFormat`.

Unlike the highlighter changes, these changes are fairly focused on the
trace output, so they aren't behind a flag.
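A minimal sketch of the resulting pattern, modeled on the `impl FormatDebug for CommandSignature` that appears later in this file (the trait is spelled `FormatDebug` in the code below). The `Flag` type here is hypothetical, and `DebugFormatter`'s `say_str` helper is assumed to behave the way that impl uses it; this is illustration, not part of the commit:

    // Hypothetical example: a value that carries a Span into the expansion
    // trace. `FormatDebug`, `DebugFormatter`, and `Span` are this crate's
    // types; only the usage pattern is shown here.
    use std::fmt;

    struct Flag {
        name: Span,
    }

    impl FormatDebug for Flag {
        fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
            // Label a slice of the original source text so the trace prints
            // something readable (e.g. `flag: --json`) instead of raw spans.
            f.say_str("flag", self.name.slice(source))
        }
    }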
1737 lines · 52 KiB · Rust
mod block;
mod expression;
pub(crate) mod flat_shape;

use crate::cli::external_command;
use crate::commands::{
    classified::{ClassifiedPipeline, InternalCommand},
    ClassifiedCommand, Command,
};
use crate::parser::hir::expand_external_tokens::ExternalTokensShape;
use crate::parser::hir::syntax_shape::block::AnyBlockShape;
use crate::parser::hir::tokens_iterator::Peeked;
use crate::parser::parse_command::{parse_command_tail, CommandTailShape};
use crate::parser::{hir, hir::TokensIterator, Operator, RawToken, TokenNode};
use crate::prelude::*;
use derive_new::new;
use getset::Getters;
use serde::{Deserialize, Serialize};
use std::fmt;
use std::path::{Path, PathBuf};

pub(crate) use self::expression::atom::{expand_atom, AtomicToken, ExpansionRule};
pub(crate) use self::expression::delimited::{
    color_delimited_square, expand_delimited_square, DelimitedShape,
};
pub(crate) use self::expression::file_path::FilePathShape;
pub(crate) use self::expression::list::{BackoffColoringMode, ExpressionListShape};
pub(crate) use self::expression::number::{IntShape, NumberShape};
pub(crate) use self::expression::pattern::{BarePatternShape, PatternShape};
pub(crate) use self::expression::string::StringShape;
pub(crate) use self::expression::unit::UnitShape;
pub(crate) use self::expression::variable_path::{
    ColorableDotShape, ColumnPathShape, DotShape, ExpressionContinuation,
    ExpressionContinuationShape, MemberShape, PathTailShape, VariablePathShape,
};
pub(crate) use self::expression::{continue_expression, AnyExpressionShape};
pub(crate) use self::flat_shape::FlatShape;

#[cfg(not(coloring_in_tokens))]
use crate::parser::hir::tokens_iterator::debug::debug_tokens;
#[cfg(not(coloring_in_tokens))]
use crate::parser::parse::pipeline::Pipeline;
#[cfg(not(coloring_in_tokens))]
use log::{log_enabled, trace};

#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub enum SyntaxShape {
    Any,
    String,
    Member,
    ColumnPath,
    Number,
    Int,
    Path,
    Pattern,
    Block,
}

#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for SyntaxShape {
    type Info = ();
    type Input = ();

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        match self {
            SyntaxShape::Any => {
                color_fallible_syntax(&AnyExpressionShape, token_nodes, context, shapes)
            }
            SyntaxShape::Int => color_fallible_syntax(&IntShape, token_nodes, context, shapes),
            SyntaxShape::String => color_fallible_syntax_with(
                &StringShape,
                &FlatShape::String,
                token_nodes,
                context,
                shapes,
            ),
            SyntaxShape::Member => {
                color_fallible_syntax(&MemberShape, token_nodes, context, shapes)
            }
            SyntaxShape::ColumnPath => {
                color_fallible_syntax(&ColumnPathShape, token_nodes, context, shapes)
            }
            SyntaxShape::Number => {
                color_fallible_syntax(&NumberShape, token_nodes, context, shapes)
            }
            SyntaxShape::Path => {
                color_fallible_syntax(&FilePathShape, token_nodes, context, shapes)
            }
            SyntaxShape::Pattern => {
                color_fallible_syntax(&PatternShape, token_nodes, context, shapes)
            }
            SyntaxShape::Block => {
                color_fallible_syntax(&AnyBlockShape, token_nodes, context, shapes)
            }
        }
    }
}

#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for SyntaxShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "SyntaxShape"
    }

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        match self {
            SyntaxShape::Any => color_fallible_syntax(&AnyExpressionShape, token_nodes, context),
            SyntaxShape::Int => color_fallible_syntax(&IntShape, token_nodes, context),
            SyntaxShape::String => {
                color_fallible_syntax_with(&StringShape, &FlatShape::String, token_nodes, context)
            }
            SyntaxShape::Member => color_fallible_syntax(&MemberShape, token_nodes, context),
            SyntaxShape::ColumnPath => {
                color_fallible_syntax(&ColumnPathShape, token_nodes, context)
            }
            SyntaxShape::Number => color_fallible_syntax(&NumberShape, token_nodes, context),
            SyntaxShape::Path => color_fallible_syntax(&FilePathShape, token_nodes, context),
            SyntaxShape::Pattern => color_fallible_syntax(&PatternShape, token_nodes, context),
            SyntaxShape::Block => color_fallible_syntax(&AnyBlockShape, token_nodes, context),
        }
    }
}

impl ExpandExpression for SyntaxShape {
    fn name(&self) -> &'static str {
        match self {
            SyntaxShape::Any => "any",
            SyntaxShape::Int => "integer",
            SyntaxShape::String => "string",
            SyntaxShape::Member => "column name",
            SyntaxShape::ColumnPath => "column path",
            SyntaxShape::Number => "number",
            SyntaxShape::Path => "file path",
            SyntaxShape::Pattern => "glob pattern",
            SyntaxShape::Block => "block",
        }
    }

    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ParseError> {
        match self {
            SyntaxShape::Any => expand_expr(&AnyExpressionShape, token_nodes, context),
            SyntaxShape::Int => expand_expr(&IntShape, token_nodes, context),
            SyntaxShape::String => expand_expr(&StringShape, token_nodes, context),
            SyntaxShape::Member => {
                let syntax = expand_syntax(&MemberShape, token_nodes, context)?;
                Ok(syntax.to_expr())
            }
            SyntaxShape::ColumnPath => {
                let column_path = expand_syntax(&ColumnPathShape, token_nodes, context)?;

                let Tagged { item: members, tag } = column_path.path();

                Ok(hir::Expression::list(
                    members.into_iter().map(|s| s.to_expr()).collect(),
                    tag,
                ))
            }
            SyntaxShape::Number => expand_expr(&NumberShape, token_nodes, context),
            SyntaxShape::Path => expand_expr(&FilePathShape, token_nodes, context),
            SyntaxShape::Pattern => expand_expr(&PatternShape, token_nodes, context),
            SyntaxShape::Block => expand_expr(&AnyBlockShape, token_nodes, context),
        }
    }
}

impl std::fmt::Display for SyntaxShape {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            SyntaxShape::Any => write!(f, "Any"),
            SyntaxShape::String => write!(f, "String"),
            SyntaxShape::Int => write!(f, "Integer"),
            SyntaxShape::Member => write!(f, "Member"),
            SyntaxShape::ColumnPath => write!(f, "ColumnPath"),
            SyntaxShape::Number => write!(f, "Number"),
            SyntaxShape::Path => write!(f, "Path"),
            SyntaxShape::Pattern => write!(f, "Pattern"),
            SyntaxShape::Block => write!(f, "Block"),
        }
    }
}

#[derive(Getters, new)]
pub struct ExpandContext<'context> {
    #[get = "pub(crate)"]
    registry: &'context CommandRegistry,
    #[get = "pub(crate)"]
    source: &'context Text,
    homedir: Option<PathBuf>,
}

impl<'context> ExpandContext<'context> {
    pub(crate) fn homedir(&self) -> Option<&Path> {
        self.homedir.as_ref().map(|h| h.as_path())
    }

    #[cfg(test)]
    pub fn with_empty(source: &Text, callback: impl FnOnce(ExpandContext)) {
        let mut registry = CommandRegistry::new();
        registry.insert(
            "ls",
            crate::commands::whole_stream_command(crate::commands::LS),
        );

        callback(ExpandContext {
            registry: &registry,
            source,
            homedir: None,
        })
    }
}

pub trait TestSyntax: std::fmt::Debug + Copy {
    fn test<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Option<Peeked<'a, 'b>>;
}

pub trait ExpandExpression: std::fmt::Debug + Copy {
    fn name(&self) -> &'static str;

    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ParseError>;
}

#[cfg(coloring_in_tokens)]
pub trait FallibleColorSyntax: std::fmt::Debug + Copy {
    type Info;
    type Input;

    fn name(&self) -> &'static str;

    fn color_syntax<'a, 'b>(
        &self,
        input: &Self::Input,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Info, ShellError>;
}

#[cfg(not(coloring_in_tokens))]
pub trait FallibleColorSyntax: std::fmt::Debug + Copy {
    type Info;
    type Input;

    fn color_syntax<'a, 'b>(
        &self,
        input: &Self::Input,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<Self::Info, ShellError>;
}

#[cfg(not(coloring_in_tokens))]
pub trait ColorSyntax: std::fmt::Debug + Copy {
    type Info;
    type Input;

    fn color_syntax<'a, 'b>(
        &self,
        input: &Self::Input,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Self::Info;
}

#[cfg(coloring_in_tokens)]
pub trait ColorSyntax: std::fmt::Debug + Copy {
    type Info;
    type Input;

    fn name(&self) -> &'static str;

    fn color_syntax<'a, 'b>(
        &self,
        input: &Self::Input,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Self::Info;
}

pub(crate) trait ExpandSyntax: std::fmt::Debug + Copy {
    type Output: HasFallibleSpan + Clone + std::fmt::Debug + 'static;

    fn name(&self) -> &'static str;

    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError>;
}

pub(crate) fn expand_syntax<'a, 'b, T: ExpandSyntax>(
    shape: &T,
    token_nodes: &'b mut TokensIterator<'a>,
    context: &ExpandContext,
) -> Result<T::Output, ParseError> {
    token_nodes.expand_frame(shape.name(), |token_nodes| {
        shape.expand_syntax(token_nodes, context)
    })
}

pub(crate) fn expand_expr<'a, 'b, T: ExpandExpression>(
    shape: &T,
    token_nodes: &'b mut TokensIterator<'a>,
    context: &ExpandContext,
) -> Result<hir::Expression, ParseError> {
    token_nodes.expand_expr_frame(shape.name(), |token_nodes| {
        shape.expand_expr(token_nodes, context)
    })
}

#[cfg(coloring_in_tokens)]
pub fn color_syntax<'a, 'b, T: ColorSyntax<Info = U, Input = ()>, U>(
    shape: &T,
    token_nodes: &'b mut TokensIterator<'a>,
    context: &ExpandContext,
) -> ((), U) {
    (
        (),
        token_nodes.color_frame(shape.name(), |token_nodes| {
            shape.color_syntax(&(), token_nodes, context)
        }),
    )
}

#[cfg(not(coloring_in_tokens))]
pub fn color_syntax<'a, 'b, T: ColorSyntax<Info = U, Input = ()>, U>(
    shape: &T,
    token_nodes: &'b mut TokensIterator<'a>,
    context: &ExpandContext,
    shapes: &mut Vec<Spanned<FlatShape>>,
) -> ((), U) {
    trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes.state(), context.source));

    let len = shapes.len();
    let result = shape.color_syntax(&(), token_nodes, context, shapes);

    trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes.state(), context.source));

    if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
        trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>());

        if len < shapes.len() {
            for i in len..(shapes.len()) {
                trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]);
            }
        } else {
            trace!(target: "nu::color_syntax", "no new shapes");
        }
    }

    ((), result)
}

#[cfg(not(coloring_in_tokens))]
pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax<Info = U, Input = ()>, U>(
    shape: &T,
    token_nodes: &'b mut TokensIterator<'a>,
    context: &ExpandContext,
    shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<U, ShellError> {
    trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes.state(), context.source));

    if token_nodes.at_end() {
        trace!(target: "nu::color_syntax", "at eof");
        return Err(ShellError::unexpected_eof("coloring", Tag::unknown()));
    }

    let len = shapes.len();
    let result = shape.color_syntax(&(), token_nodes, context, shapes);

    trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes.state(), context.source));

    if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
        trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>());

        if len < shapes.len() {
            for i in len..(shapes.len()) {
                trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]);
            }
        } else {
            trace!(target: "nu::color_syntax", "no new shapes");
        }
    }

    result
}

#[cfg(coloring_in_tokens)]
pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax<Info = U, Input = ()>, U>(
    shape: &T,
    token_nodes: &'b mut TokensIterator<'a>,
    context: &ExpandContext,
) -> Result<U, ShellError> {
    token_nodes.color_fallible_frame(shape.name(), |token_nodes| {
        shape.color_syntax(&(), token_nodes, context)
    })
}

#[cfg(not(coloring_in_tokens))]
pub fn color_syntax_with<'a, 'b, T: ColorSyntax<Info = U, Input = I>, U, I>(
    shape: &T,
    input: &I,
    token_nodes: &'b mut TokensIterator<'a>,
    context: &ExpandContext,
    shapes: &mut Vec<Spanned<FlatShape>>,
) -> ((), U) {
    trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes.state(), context.source));

    let len = shapes.len();
    let result = shape.color_syntax(input, token_nodes, context, shapes);

    trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes.state(), context.source));

    if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
        trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>());

        if len < shapes.len() {
            for i in len..(shapes.len()) {
                trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]);
            }
        } else {
            trace!(target: "nu::color_syntax", "no new shapes");
        }
    }

    ((), result)
}

#[cfg(coloring_in_tokens)]
pub fn color_syntax_with<'a, 'b, T: ColorSyntax<Info = U, Input = I>, U, I>(
    shape: &T,
    input: &I,
    token_nodes: &'b mut TokensIterator<'a>,
    context: &ExpandContext,
) -> ((), U) {
    (
        (),
        token_nodes.color_frame(shape.name(), |token_nodes| {
            shape.color_syntax(input, token_nodes, context)
        }),
    )
}

#[cfg(not(coloring_in_tokens))]
pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax<Info = U, Input = I>, U, I>(
    shape: &T,
    input: &I,
    token_nodes: &'b mut TokensIterator<'a>,
    context: &ExpandContext,
    shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<U, ShellError> {
    token_nodes.color_fallible_frame(std::any::type_name::<T>(), |token_nodes| {
        shape.color_syntax(input, token_nodes, context, shapes)
    })
}

#[cfg(coloring_in_tokens)]
pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax<Info = U, Input = I>, U, I>(
    shape: &T,
    input: &I,
    token_nodes: &'b mut TokensIterator<'a>,
    context: &ExpandContext,
) -> Result<U, ShellError> {
    token_nodes.color_fallible_frame(shape.name(), |token_nodes| {
        shape.color_syntax(input, token_nodes, context)
    })
}

impl<T: ExpandExpression> ExpandSyntax for T {
    type Output = hir::Expression;

    fn name(&self) -> &'static str {
        ExpandExpression::name(self)
    }

    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        ExpandExpression::expand_expr(self, token_nodes, context)
    }
}

pub trait SkipSyntax: std::fmt::Debug + Copy {
    fn skip<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError>;
}

enum BarePathState {
    Initial,
    Seen(Span, Span),
    Error(ParseError),
}

impl BarePathState {
    pub fn seen(self, span: Span) -> BarePathState {
        match self {
            BarePathState::Initial => BarePathState::Seen(span, span),
            BarePathState::Seen(start, _) => BarePathState::Seen(start, span),
            BarePathState::Error(err) => BarePathState::Error(err),
        }
    }

    pub fn end(self, peeked: Peeked, reason: &'static str) -> BarePathState {
        match self {
            BarePathState::Initial => BarePathState::Error(peeked.type_error(reason)),
            BarePathState::Seen(start, end) => BarePathState::Seen(start, end),
            BarePathState::Error(err) => BarePathState::Error(err),
        }
    }

    pub fn into_bare(self) -> Result<Span, ParseError> {
        match self {
            BarePathState::Initial => unreachable!("into_bare in initial state"),
            BarePathState::Seen(start, end) => Ok(start.until(end)),
            BarePathState::Error(err) => Err(err),
        }
    }
}

pub fn expand_bare<'a, 'b>(
    token_nodes: &'b mut TokensIterator<'a>,
    _context: &ExpandContext,
    predicate: impl Fn(&TokenNode) -> bool,
) -> Result<Span, ParseError> {
    let mut state = BarePathState::Initial;

    loop {
        // Whitespace ends a word
        let mut peeked = token_nodes.peek_any();

        match peeked.node {
            None => {
                state = state.end(peeked, "word");
                break;
            }
            Some(node) => {
                if predicate(node) {
                    state = state.seen(node.span());
                    peeked.commit();
                } else {
                    state = state.end(peeked, "word");
                    break;
                }
            }
        }
    }

    state.into_bare()
}

#[derive(Debug, Copy, Clone)]
pub struct BarePathShape;

impl ExpandSyntax for BarePathShape {
    type Output = Span;

    fn name(&self) -> &'static str {
        "shorthand path"
    }

    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Span, ParseError> {
        expand_bare(token_nodes, context, |token| match token {
            TokenNode::Token(Spanned {
                item: RawToken::Bare,
                ..
            })
            | TokenNode::Token(Spanned {
                item: RawToken::Operator(Operator::Dot),
                ..
            }) => true,

            _ => false,
        })
    }
}

#[derive(Debug, Copy, Clone)]
pub struct BareShape;

#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for BareShape {
    type Info = ();
    type Input = FlatShape;

    fn color_syntax<'a, 'b>(
        &self,
        input: &FlatShape,
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        token_nodes
            .peek_any_token("word", |token| match token {
                // If it's a bare token, color it
                TokenNode::Token(Spanned {
                    item: RawToken::Bare,
                    span,
                }) => {
                    shapes.push((*input).spanned(*span));
                    Ok(())
                }

                // otherwise, fail
                other => Err(ParseError::mismatch("word", other.tagged_type_name())),
            })
            .map_err(|err| err.into())
    }
}

#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for BareShape {
    type Info = ();
    type Input = FlatShape;

    fn name(&self) -> &'static str {
        "BareShape"
    }

    fn color_syntax<'a, 'b>(
        &self,
        input: &FlatShape,
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
    ) -> Result<(), ShellError> {
        let span = token_nodes.peek_any_token("word", |token| match token {
            // If it's a bare token, color it
            TokenNode::Token(Spanned {
                item: RawToken::Bare,
                span,
            }) => Ok(span),

            // otherwise, fail
            other => Err(ParseError::mismatch("word", other.tagged_type_name())),
        })?;

        token_nodes.color_shape((*input).spanned(*span));

        Ok(())
    }
}

impl ExpandSyntax for BareShape {
    type Output = Spanned<String>;

    fn name(&self) -> &'static str {
        "word"
    }

    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let peeked = token_nodes.peek_any().not_eof("word")?;

        match peeked.node {
            TokenNode::Token(Spanned {
                item: RawToken::Bare,
                span,
            }) => {
                peeked.commit();
                Ok(span.spanned_string(context.source))
            }

            other => Err(ParseError::mismatch("word", other.tagged_type_name())),
        }
    }
}

impl TestSyntax for BareShape {
    fn test<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
    ) -> Option<Peeked<'a, 'b>> {
        let peeked = token_nodes.peek_any();

        match peeked.node {
            Some(token) if token.is_bare() => Some(peeked),
            _ => None,
        }
    }
}

#[derive(Debug, Clone)]
pub enum CommandSignature {
    Internal(Spanned<Arc<Command>>),
    LiteralExternal { outer: Span, inner: Span },
    External(Span),
    Expression(hir::Expression),
}

impl FormatDebug for CommandSignature {
    fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
        match self {
            CommandSignature::Internal(internal) => {
                f.say_str("internal", internal.span.slice(source))
            }
            CommandSignature::LiteralExternal { outer, .. } => {
                f.say_str("external", outer.slice(source))
            }
            CommandSignature::External(external) => {
                write!(f, "external:{}", external.slice(source))
            }
            CommandSignature::Expression(expr) => expr.fmt_debug(f, source),
        }
    }
}

impl HasSpan for CommandSignature {
    fn span(&self) -> Span {
        match self {
            CommandSignature::Internal(spanned) => spanned.span,
            CommandSignature::LiteralExternal { outer, .. } => *outer,
            CommandSignature::External(span) => *span,
            CommandSignature::Expression(expr) => expr.span,
        }
    }
}

impl CommandSignature {
    pub fn to_expression(&self) -> hir::Expression {
        match self {
            CommandSignature::Internal(command) => {
                let span = command.span;
                hir::RawExpression::Command(span).spanned(span)
            }
            CommandSignature::LiteralExternal { outer, inner } => {
                hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*inner))
                    .spanned(*outer)
            }
            CommandSignature::External(span) => {
                hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*span)).spanned(*span)
            }
            CommandSignature::Expression(expr) => expr.clone(),
        }
    }
}

#[derive(Debug, Copy, Clone)]
pub struct PipelineShape;

#[cfg(not(coloring_in_tokens))]
// The failure mode is if the head of the token stream is not a pipeline
impl FallibleColorSyntax for PipelineShape {
    type Info = ();
    type Input = ();

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        // Make sure we're looking at a pipeline
        let Pipeline { parts, .. } =
            token_nodes.peek_any_token("pipeline", |node| node.as_pipeline())?;

        // Enumerate the pipeline parts
        for part in parts {
            // If the pipeline part has a prefix `|`, emit a pipe to color
            if let Some(pipe) = part.pipe {
                shapes.push(FlatShape::Pipe.spanned(pipe));
            }

            // Create a new iterator containing the tokens in the pipeline part to color
            let mut token_nodes = TokensIterator::new(&part.tokens.item, part.span, false);

            color_syntax(&MaybeSpaceShape, &mut token_nodes, context, shapes);
            color_syntax(&CommandShape, &mut token_nodes, context, shapes);
        }

        Ok(())
    }
}

#[cfg(coloring_in_tokens)]
// The failure mode is if the head of the token stream is not a pipeline
impl FallibleColorSyntax for PipelineShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "PipelineShape"
    }

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        // Make sure we're looking at a pipeline
        let pipeline = token_nodes.peek_any_token("pipeline", |node| node.as_pipeline())?;

        let parts = &pipeline.parts[..];

        // Enumerate the pipeline parts
        for part in parts {
            // If the pipeline part has a prefix `|`, emit a pipe to color
            if let Some(pipe) = part.pipe {
                token_nodes.color_shape(FlatShape::Pipe.spanned(pipe))
            }

            let tokens: Spanned<&[TokenNode]> = (&part.item.tokens[..]).spanned(part.span);

            token_nodes.child(tokens, move |token_nodes| {
                color_syntax(&MaybeSpaceShape, token_nodes, context);
                color_syntax(&CommandShape, token_nodes, context);
            });
        }

        Ok(())
    }
}

#[cfg(coloring_in_tokens)]
impl ExpandSyntax for PipelineShape {
    type Output = ClassifiedPipeline;

    fn name(&self) -> &'static str {
        "pipeline"
    }

    fn expand_syntax<'content, 'me>(
        &self,
        iterator: &'me mut TokensIterator<'content>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let start = iterator.span_at_cursor();

        let peeked = iterator.peek_any().not_eof("pipeline")?;
        let pipeline = peeked.commit().as_pipeline()?;

        let parts = &pipeline.parts[..];

        let mut out = vec![];

        for part in parts {
            let tokens: Spanned<&[TokenNode]> = (&part.item.tokens[..]).spanned(part.span);

            let classified = iterator.child(tokens, move |token_nodes| {
                expand_syntax(&ClassifiedCommandShape, token_nodes, context)
            })?;

            out.push(classified);
        }

        let end = iterator.span_at_cursor();

        Ok(ClassifiedPipeline {
            commands: out.spanned(start.until(end)),
        })
    }
}

#[cfg(not(coloring_in_tokens))]
impl ExpandSyntax for PipelineShape {
    type Output = ClassifiedPipeline;

    fn name(&self) -> &'static str {
        "pipeline"
    }

    fn expand_syntax<'content, 'me>(
        &self,
        iterator: &'me mut TokensIterator<'content>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let start = iterator.span_at_cursor();

        let peeked = iterator.peek_any().not_eof("pipeline")?;
        let pipeline = peeked.commit().as_pipeline()?;

        let parts = &pipeline.parts[..];

        let mut out = vec![];

        for part in parts {
            let tokens: Spanned<&[TokenNode]> = (&part.item.tokens[..]).spanned(part.span);

            let classified = iterator.child(tokens, move |token_nodes| {
                expand_syntax(&ClassifiedCommandShape, token_nodes, context)
            })?;

            out.push(classified);
        }

        let end = iterator.span_at_cursor();

        Ok(ClassifiedPipeline {
            commands: out.spanned(start.until(end)),
        })
    }
}

pub enum CommandHeadKind {
    External,
    Internal(Signature),
}

#[derive(Debug, Copy, Clone)]
pub struct CommandHeadShape;

#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for CommandHeadShape {
    type Info = CommandHeadKind;
    type Input = ();

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<CommandHeadKind, ShellError> {
        // If we don't ultimately find a token, roll back
        token_nodes.atomic(|token_nodes| {
            // First, take a look at the next token
            let atom = expand_atom(
                token_nodes,
                "command head",
                context,
                ExpansionRule::permissive(),
            )?;

            match atom.item {
                // If the head is an explicit external command (^cmd), color it as an external command
                AtomicToken::ExternalCommand { .. } => {
                    shapes.push(FlatShape::ExternalCommand.spanned(atom.span));
                    Ok(CommandHeadKind::External)
                }

                // If the head is a word, it depends on whether it matches a registered internal command
                AtomicToken::Word { text } => {
                    let name = text.slice(context.source);

                    if context.registry.has(name) {
                        // If the registry has the command, color it as an internal command
                        shapes.push(FlatShape::InternalCommand.spanned(text));
                        let command = context.registry.expect_command(name);
                        Ok(CommandHeadKind::Internal(command.signature()))
                    } else {
                        // Otherwise, color it as an external command
                        shapes.push(FlatShape::ExternalCommand.spanned(text));
                        Ok(CommandHeadKind::External)
                    }
                }

                // Otherwise, we're not actually looking at a command
                _ => Err(ShellError::syntax_error(
                    "No command at the head".tagged(atom.span),
                )),
            }
        })
    }
}

#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for CommandHeadShape {
    type Info = CommandHeadKind;
    type Input = ();

    fn name(&self) -> &'static str {
        "CommandHeadShape"
    }

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<CommandHeadKind, ShellError> {
        // If we don't ultimately find a token, roll back
        token_nodes.atomic(|token_nodes| {
            // First, take a look at the next token
            let atom = expand_atom(
                token_nodes,
                "command head",
                context,
                ExpansionRule::permissive(),
            )?;

            match atom.item {
                // If the head is an explicit external command (^cmd), color it as an external command
                AtomicToken::ExternalCommand { .. } => {
                    token_nodes.color_shape(FlatShape::ExternalCommand.spanned(atom.span));
                    Ok(CommandHeadKind::External)
                }

                // If the head is a word, it depends on whether it matches a registered internal command
                AtomicToken::Word { text } => {
                    let name = text.slice(context.source);

                    if context.registry.has(name) {
                        // If the registry has the command, color it as an internal command
                        token_nodes.color_shape(FlatShape::InternalCommand.spanned(text));
                        let command = context.registry.expect_command(name);
                        Ok(CommandHeadKind::Internal(command.signature()))
                    } else {
                        // Otherwise, color it as an external command
                        token_nodes.color_shape(FlatShape::ExternalCommand.spanned(text));
                        Ok(CommandHeadKind::External)
                    }
                }

                // Otherwise, we're not actually looking at a command
                _ => Err(ShellError::syntax_error(
                    "No command at the head".tagged(atom.span),
                )),
            }
        })
    }
}

impl ExpandSyntax for CommandHeadShape {
    type Output = CommandSignature;

    fn name(&self) -> &'static str {
        "command head"
    }

    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<CommandSignature, ParseError> {
        let node =
            parse_single_node_skipping_ws(token_nodes, "command head1", |token, token_span, _| {
                Ok(match token {
                    RawToken::ExternalCommand(span) => CommandSignature::LiteralExternal {
                        outer: token_span,
                        inner: span,
                    },
                    RawToken::Bare => {
                        let name = token_span.slice(context.source);
                        if context.registry.has(name) {
                            let command = context.registry.expect_command(name);
                            CommandSignature::Internal(command.spanned(token_span))
                        } else {
                            CommandSignature::External(token_span)
                        }
                    }
                    _ => {
                        return Err(ShellError::type_error(
                            "command head2",
                            token.type_name().tagged(token_span),
                        ))
                    }
                })
            });

        match node {
            Ok(expr) => return Ok(expr),
            Err(_) => match expand_expr(&AnyExpressionShape, token_nodes, context) {
                Ok(expr) => return Ok(CommandSignature::Expression(expr)),
                Err(_) => Err(token_nodes.peek_non_ws().type_error("command head3")),
            },
        }
    }
}

#[derive(Debug, Copy, Clone)]
pub struct ClassifiedCommandShape;

impl ExpandSyntax for ClassifiedCommandShape {
    type Output = ClassifiedCommand;

    fn name(&self) -> &'static str {
        "classified command"
    }

    fn expand_syntax<'a, 'b>(
        &self,
        iterator: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let start = iterator.span_at_cursor();
        let head = expand_syntax(&CommandHeadShape, iterator, context)?;

        match &head {
            CommandSignature::Expression(expr) => {
                Err(ParseError::mismatch("command", expr.tagged_type_name()))
            }

            // If the command starts with `^`, treat it as an external command no matter what
            CommandSignature::External(name) => {
                let name_str = name.slice(&context.source);

                external_command(iterator, context, name_str.tagged(name))
            }

            CommandSignature::LiteralExternal { outer, inner } => {
                let name_str = inner.slice(&context.source);

                external_command(iterator, context, name_str.tagged(outer))
            }

            CommandSignature::Internal(command) => {
                let tail =
                    parse_command_tail(&command.signature(), &context, iterator, command.span)?;

                let (positional, named) = match tail {
                    None => (None, None),
                    Some((positional, named)) => (positional, named),
                };

                let end = iterator.span_at_cursor();

                let call = hir::Call {
                    head: Box::new(head.to_expression()),
                    positional,
                    named,
                }
                .spanned(start.until(end));

                Ok(ClassifiedCommand::Internal(InternalCommand::new(
                    command.item.name().to_string(),
                    Tag {
                        span: command.span,
                        anchor: None,
                    },
                    call,
                )))
            }
        }
    }
}

#[derive(Debug, Copy, Clone)]
pub struct InternalCommandHeadShape;

#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for InternalCommandHeadShape {
    type Info = ();
    type Input = ();

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        let peeked_head = token_nodes.peek_non_ws().not_eof("command head4");

        let peeked_head = match peeked_head {
            Err(_) => return Ok(()),
            Ok(peeked_head) => peeked_head,
        };

        let _expr = match peeked_head.node {
            TokenNode::Token(Spanned {
                item: RawToken::Bare,
                span,
            }) => shapes.push(FlatShape::Word.spanned(*span)),

            TokenNode::Token(Spanned {
                item: RawToken::String(_inner_tag),
                span,
            }) => shapes.push(FlatShape::String.spanned(*span)),

            _node => shapes.push(FlatShape::Error.spanned(peeked_head.node.span())),
        };

        peeked_head.commit();

        Ok(())
    }
}

#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for InternalCommandHeadShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "InternalCommandHeadShape"
    }

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
    ) -> Result<(), ShellError> {
        let peeked_head = token_nodes.peek_non_ws().not_eof("command head4");

        let peeked_head = match peeked_head {
            Err(_) => return Ok(()),
            Ok(peeked_head) => peeked_head,
        };

        let node = peeked_head.commit();

        let _expr = match node {
            TokenNode::Token(Spanned {
                item: RawToken::Bare,
                span,
            }) => token_nodes.color_shape(FlatShape::Word.spanned(*span)),

            TokenNode::Token(Spanned {
                item: RawToken::String(_inner_tag),
                span,
            }) => token_nodes.color_shape(FlatShape::String.spanned(*span)),

            _node => token_nodes.color_shape(FlatShape::Error.spanned(node.span())),
        };

        Ok(())
    }
}

impl ExpandExpression for InternalCommandHeadShape {
    fn name(&self) -> &'static str {
        "internal command head"
    }

    fn expand_expr(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        _context: &ExpandContext,
    ) -> Result<hir::Expression, ParseError> {
        let peeked_head = token_nodes.peek_non_ws().not_eof("command head")?;

        let expr = match peeked_head.node {
            TokenNode::Token(
                spanned @ Spanned {
                    item: RawToken::Bare,
                    ..
                },
            ) => spanned.map(|_| hir::RawExpression::Literal(hir::Literal::Bare)),

            TokenNode::Token(Spanned {
                item: RawToken::String(inner_span),
                span,
            }) => hir::RawExpression::Literal(hir::Literal::String(*inner_span)).spanned(*span),

            node => {
                return Err(ParseError::mismatch(
                    "command head",
                    node.tagged_type_name(),
                ))
            }
        };

        peeked_head.commit();

        Ok(expr)
    }
}

pub(crate) struct SingleError<'token> {
    expected: &'static str,
    node: &'token Spanned<RawToken>,
}

impl<'token> SingleError<'token> {
    pub(crate) fn error(&self) -> ParseError {
        ParseError::mismatch(self.expected, self.node.type_name().tagged(self.node.span))
    }
}

fn parse_single_node<'a, 'b, T>(
    token_nodes: &'b mut TokensIterator<'a>,
    expected: &'static str,
    callback: impl FnOnce(RawToken, Span, SingleError) -> Result<T, ParseError>,
) -> Result<T, ParseError> {
    token_nodes.peek_any_token(expected, |node| match node {
        TokenNode::Token(token) => callback(
            token.item,
            token.span,
            SingleError {
                expected,
                node: token,
            },
        ),

        other => Err(ParseError::mismatch(expected, other.tagged_type_name())),
    })
}

fn parse_single_node_skipping_ws<'a, 'b, T>(
    token_nodes: &'b mut TokensIterator<'a>,
    expected: &'static str,
    callback: impl FnOnce(RawToken, Span, SingleError) -> Result<T, ShellError>,
) -> Result<T, ShellError> {
    let peeked = token_nodes.peek_non_ws().not_eof(expected)?;

    let expr = match peeked.node {
        TokenNode::Token(token) => callback(
            token.item,
            token.span,
            SingleError {
                expected,
                node: token,
            },
        )?,

        other => return Err(ShellError::type_error(expected, other.tagged_type_name())),
    };

    peeked.commit();

    Ok(expr)
}

#[derive(Debug, Copy, Clone)]
pub struct WhitespaceShape;

#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for WhitespaceShape {
    type Info = ();
    type Input = ();

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        let peeked = token_nodes.peek_any().not_eof("whitespace");

        let peeked = match peeked {
            Err(_) => return Ok(()),
            Ok(peeked) => peeked,
        };

        let _tag = match peeked.node {
            TokenNode::Whitespace(span) => shapes.push(FlatShape::Whitespace.spanned(*span)),

            _other => return Ok(()),
        };

        peeked.commit();

        Ok(())
    }
}

#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for WhitespaceShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "WhitespaceShape"
    }

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
    ) -> Result<(), ShellError> {
        let peeked = token_nodes.peek_any().not_eof("whitespace");

        let peeked = match peeked {
            Err(_) => return Ok(()),
            Ok(peeked) => peeked,
        };

        let node = peeked.commit();

        let _ = match node {
            TokenNode::Whitespace(span) => {
                token_nodes.color_shape(FlatShape::Whitespace.spanned(*span))
            }

            _other => return Ok(()),
        };

        Ok(())
    }
}

impl ExpandSyntax for WhitespaceShape {
    type Output = Span;

    fn name(&self) -> &'static str {
        "whitespace"
    }

    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let peeked = token_nodes.peek_any().not_eof("whitespace")?;

        let span = match peeked.node {
            TokenNode::Whitespace(tag) => *tag,

            other => return Err(ParseError::mismatch("whitespace", other.tagged_type_name())),
        };

        peeked.commit();

        Ok(span)
    }
}

#[derive(Debug, Copy, Clone)]
pub struct SpacedExpression<T: ExpandExpression> {
    inner: T,
}

impl<T: ExpandExpression> ExpandExpression for SpacedExpression<T> {
    fn name(&self) -> &'static str {
        "spaced expression"
    }

    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ParseError> {
        // TODO: Make the name part of the trait
        let peeked = token_nodes.peek_any().not_eof("whitespace")?;

        match peeked.node {
            TokenNode::Whitespace(_) => {
                peeked.commit();
                expand_expr(&self.inner, token_nodes, context)
            }

            other => Err(ParseError::mismatch("whitespace", other.tagged_type_name())),
        }
    }
}

pub fn maybe_spaced<T: ExpandExpression>(inner: T) -> MaybeSpacedExpression<T> {
    MaybeSpacedExpression { inner }
}

#[derive(Debug, Copy, Clone)]
pub struct MaybeSpacedExpression<T: ExpandExpression> {
    inner: T,
}

#[derive(Debug, Copy, Clone)]
pub struct MaybeSpaceShape;

impl ExpandSyntax for MaybeSpaceShape {
    type Output = Option<Span>;

    fn name(&self) -> &'static str {
        "maybe space"
    }

    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
    ) -> Result<Self::Output, ParseError> {
        let peeked = token_nodes.peek_any().not_eof("whitespace");

        let span = match peeked {
            Err(_) => None,
            Ok(peeked) => {
                if let TokenNode::Whitespace(..) = peeked.node {
                    let node = peeked.commit();
                    Some(node.span())
                } else {
                    None
                }
            }
        };

        Ok(span)
    }
}

#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for MaybeSpaceShape {
    type Info = ();
    type Input = ();

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Self::Info {
        let peeked = token_nodes.peek_any().not_eof("whitespace");

        let peeked = match peeked {
            Err(_) => return,
            Ok(peeked) => peeked,
        };

        if let TokenNode::Whitespace(span) = peeked.node {
            peeked.commit();
            shapes.push(FlatShape::Whitespace.spanned(*span));
        }
    }
}

#[cfg(coloring_in_tokens)]
impl ColorSyntax for MaybeSpaceShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "MaybeSpaceShape"
    }

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
    ) -> Self::Info {
        let peeked = token_nodes.peek_any().not_eof("whitespace");

        let peeked = match peeked {
            Err(_) => return,
            Ok(peeked) => peeked,
        };

        if let TokenNode::Whitespace(span) = peeked.node {
            peeked.commit();
            token_nodes.color_shape(FlatShape::Whitespace.spanned(*span));
        }
    }
}

#[derive(Debug, Copy, Clone)]
pub struct SpaceShape;

#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for SpaceShape {
    type Info = ();
    type Input = ();

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        let peeked = token_nodes.peek_any().not_eof("whitespace")?;

        match peeked.node {
            TokenNode::Whitespace(span) => {
                peeked.commit();
                shapes.push(FlatShape::Whitespace.spanned(*span));
                Ok(())
            }

            other => Err(ShellError::type_error(
                "whitespace",
                other.tagged_type_name(),
            )),
        }
    }
}

#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for SpaceShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "SpaceShape"
    }

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
    ) -> Result<(), ShellError> {
        let peeked = token_nodes.peek_any().not_eof("whitespace")?;

        match peeked.node {
            TokenNode::Whitespace(span) => {
                peeked.commit();
                token_nodes.color_shape(FlatShape::Whitespace.spanned(*span));
                Ok(())
            }

            other => Err(ShellError::type_error(
                "whitespace",
                other.tagged_type_name(),
            )),
        }
    }
}

impl<T: ExpandExpression> ExpandExpression for MaybeSpacedExpression<T> {
    fn name(&self) -> &'static str {
        "maybe space"
    }

    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ParseError> {
        // TODO: Make the name part of the trait
        let peeked = token_nodes.peek_any().not_eof("whitespace")?;

        match peeked.node {
            TokenNode::Whitespace(_) => {
                peeked.commit();
                expand_expr(&self.inner, token_nodes, context)
            }

            _ => {
                peeked.rollback();
                expand_expr(&self.inner, token_nodes, context)
            }
        }
    }
}

pub fn spaced<T: ExpandExpression>(inner: T) -> SpacedExpression<T> {
    SpacedExpression { inner }
}

fn expand_variable(span: Span, token_span: Span, source: &Text) -> hir::Expression {
    if span.slice(source) == "it" {
        hir::Expression::it_variable(span, token_span)
    } else {
        hir::Expression::variable(span, token_span)
    }
}

#[derive(Debug, Copy, Clone)]
pub struct CommandShape;

#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for CommandShape {
    type Info = ();
    type Input = ();

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) {
        let kind = color_fallible_syntax(&CommandHeadShape, token_nodes, context, shapes);

        match kind {
            Err(_) => {
                // We didn't find a command, so we'll have to fall back to parsing this pipeline part
                // as a blob of undifferentiated expressions
                color_syntax(&ExpressionListShape, token_nodes, context, shapes);
            }

            Ok(CommandHeadKind::External) => {
                color_syntax(&ExternalTokensShape, token_nodes, context, shapes);
            }
            Ok(CommandHeadKind::Internal(signature)) => {
                color_syntax_with(&CommandTailShape, &signature, token_nodes, context, shapes);
            }
        };
    }
}

#[cfg(coloring_in_tokens)]
impl ColorSyntax for CommandShape {
    type Info = ();
    type Input = ();

    fn name(&self) -> &'static str {
        "CommandShape"
    }

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) {
        let kind = color_fallible_syntax(&CommandHeadShape, token_nodes, context);

        match kind {
            Err(_) => {
                // We didn't find a command, so we'll have to fall back to parsing this pipeline part
                // as a blob of undifferentiated expressions
                color_syntax(&ExpressionListShape, token_nodes, context);
            }

            Ok(CommandHeadKind::External) => {
                color_syntax(&ExternalTokensShape, token_nodes, context);
            }
            Ok(CommandHeadKind::Internal(signature)) => {
                color_syntax_with(&CommandTailShape, &signature, token_nodes, context);
            }
        };
    }
}