Remove the coloring_in_tokens feature flag

Stabilize the flagged coloring-in-tokens behavior and enable it unconditionally.
Yehuda Katz 2019-12-11 21:56:12 -08:00
parent 09f903c37a
commit e8800fdd0c
20 changed files with 39 additions and 2030 deletions
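
The bulk of the diff below deletes the `#[cfg(not(coloring_in_tokens))]` implementations and drops the now-redundant `#[cfg(coloring_in_tokens)]` attributes from the code that survives. As a rough, hypothetical sketch of that pattern (the names below are stand-ins, not the real nu-parser types):

```rust
// Hypothetical before/after sketch of removing a compile-time cfg flag.
trait ColorSyntax {
    fn color_syntax(&self);
}

#[derive(Debug, Copy, Clone)]
struct ExampleShape;

// Before this commit there were two impl blocks:
//   #[cfg(coloring_in_tokens)]      impl ColorSyntax for ExampleShape { ... }
//   #[cfg(not(coloring_in_tokens))] impl ColorSyntax for ExampleShape { ... }
// After the flag is removed, only the flagged ("new") body remains and it
// compiles unconditionally.
impl ColorSyntax for ExampleShape {
    fn color_syntax(&self) {
        // stabilized behavior: shapes are recorded on the tokens iterator
    }
}

fn main() {
    ExampleShape.color_syntax();
}
```

Everything tagged `#[cfg(not(coloring_in_tokens))]` in the hunks that follow is the old fallback path being deleted.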

View File

@ -42,10 +42,10 @@ steps:
echo "##vso[task.prependpath]$HOME/.cargo/bin"
rustup component add rustfmt --toolchain "stable"
displayName: Install Rust
- bash: RUSTFLAGS="-D warnings" cargo test --all --features=user-visible
- bash: RUSTFLAGS="-D warnings" cargo test --all --features=stable
condition: eq(variables['style'], 'unflagged')
displayName: Run tests
- bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all --features=user-visible
- bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all --features=stable
condition: eq(variables['style'], 'canary')
displayName: Run tests
- bash: cargo fmt --all -- --check
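
The canary job sets `NUSHELL_ENABLE_ALL_FLAGS=1` to turn on unstable cfg flags such as the one removed here. A minimal sketch of how such a switch can be wired, assuming a build script that emits rustc cfg directives (the real nu-build crate may differ):

```rust
// build.rs — hypothetical sketch, not the actual nu-build implementation.
use std::env;

fn main() {
    // When the canary switch is set, enable the unstable cfg flag so that
    // code gated with `#[cfg(coloring_in_tokens)]` is compiled and tested.
    if env::var("NUSHELL_ENABLE_ALL_FLAGS").is_ok() {
        println!("cargo:rustc-cfg=coloring_in_tokens");
    }
    // Rebuild when the switch changes between CI jobs or local runs.
    println!("cargo:rerun-if-env-changed=NUSHELL_ENABLE_ALL_FLAGS");
}
```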

View File

@ -1,7 +1,7 @@
image:
file: .gitpod.Dockerfile
tasks:
- init: cargo install nu --features=user-visible
- init: cargo install nu --features=stable
command: nu
github:
prebuilds:

View File

@ -136,7 +136,7 @@ semver = {version = "0.9.0", optional = true}
[features]
default = ["sys", "ps", "textview", "inc", "str"]
user-visible = ["sys", "ps", "starship-prompt", "textview", "binaryview", "match", "tree", "average", "sum"]
stable = ["sys", "ps", "starship-prompt", "textview", "binaryview", "match", "tree", "average", "sum"]
sys = ["heim", "battery"]
ps = ["heim", "futures-timer"]
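
Note the two gating mechanisms in play: `stable` (formerly `user-visible`) is an ordinary Cargo feature declared in `[features]`, while `coloring_in_tokens` was a bare rustc cfg flag with no Cargo.toml entry. A small, hypothetical illustration of how each kind of gate is consumed (neither function exists in nushell):

```rust
// Enabled with `cargo build --features=stable` (a Cargo feature).
#[cfg(feature = "stable")]
fn enabled_by_cargo_feature() {
    println!("built with --features=stable");
}

// Required `--cfg coloring_in_tokens` from a build script or RUSTFLAGS
// (a bare cfg flag). This is the kind of gate the commit deletes.
#[cfg(coloring_in_tokens)]
fn enabled_by_cfg_flag() {}

fn main() {
    #[cfg(feature = "stable")]
    enabled_by_cargo_feature();
}
```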

View File

@ -55,7 +55,7 @@ cargo install nu
You can also install Nu with all the bells and whistles (be sure to have installed the [dependencies](https://book.nushell.sh/en/installation#dependencies) for your platform):
```
cargo install nu --features=user-visible
cargo install nu --features=stable
```
## Docker

View File

@ -41,4 +41,5 @@ pretty_assertions = "0.6.1"
nu-build = { version = "0.1.0", path = "../nu-build" }
[features]
stable = []
trace = ["nom-tracable/trace"]

View File

@ -1,5 +1,3 @@
#[cfg(not(coloring_in_tokens))]
use crate::hir::syntax_shape::FlatShape;
use crate::{
hir::syntax_shape::{
color_syntax, expand_atom, expand_expr, expand_syntax, AtomicToken, ColorSyntax,
@ -69,33 +67,6 @@ impl ExpandSyntax for ExternalTokensShape {
}
}
#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for ExternalTokensShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Self::Info {
loop {
// Allow a space
color_syntax(&MaybeSpaceShape, token_nodes, context, shapes);
// Process an external expression. External expressions are mostly words, with a
// few exceptions (like $variables and path expansion rules)
match color_syntax(&ExternalExpressionShape, token_nodes, context, shapes).1 {
ExternalExpressionResult::Eof => break,
ExternalExpressionResult::Processed => continue,
}
}
}
}
#[cfg(coloring_in_tokens)]
impl ColorSyntax for ExternalTokensShape {
type Info = ();
type Input = ();
@ -295,7 +266,6 @@ impl ExpandExpression for ExternalContinuationShape {
}
}
#[cfg(coloring_in_tokens)]
impl ColorSyntax for ExternalExpressionShape {
type Info = ExternalExpressionResult;
type Input = ();
@ -334,34 +304,3 @@ pub enum ExternalExpressionResult {
Eof,
Processed,
}
#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for ExternalExpressionShape {
type Info = ExternalExpressionResult;
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> ExternalExpressionResult {
let atom = match expand_atom(
token_nodes,
"external word",
context,
ExpansionRule::permissive(),
) {
Err(_) => unreachable!("TODO: separate infallible expand_atom"),
Ok(AtomicToken {
unspanned: UnspannedAtomicToken::Eof { .. },
..
}) => return ExternalExpressionResult::Eof,
Ok(atom) => atom,
};
atom.color_tokens(shapes);
return ExternalExpressionResult::Processed;
}
}

View File

@ -43,62 +43,8 @@ pub(crate) use self::expression::variable_path::{
pub(crate) use self::expression::{continue_expression, AnyExpressionShape};
pub(crate) use self::flat_shape::FlatShape;
#[cfg(not(coloring_in_tokens))]
use crate::hir::tokens_iterator::debug::debug_tokens;
#[cfg(not(coloring_in_tokens))]
use crate::parse::pipeline::Pipeline;
#[cfg(not(coloring_in_tokens))]
use log::{log_enabled, trace};
use nu_protocol::SyntaxShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for SyntaxShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
match self {
SyntaxShape::Any => {
color_fallible_syntax(&AnyExpressionShape, token_nodes, context, shapes)
}
SyntaxShape::Int => color_fallible_syntax(&IntShape, token_nodes, context, shapes),
SyntaxShape::String => color_fallible_syntax_with(
&StringShape,
&FlatShape::String,
token_nodes,
context,
shapes,
),
SyntaxShape::Range => color_fallible_syntax(&RangeShape, token_nodes, context, shapes),
SyntaxShape::Member => {
color_fallible_syntax(&MemberShape, token_nodes, context, shapes)
}
SyntaxShape::ColumnPath => {
color_fallible_syntax(&ColumnPathShape, token_nodes, context, shapes)
}
SyntaxShape::Number => {
color_fallible_syntax(&NumberShape, token_nodes, context, shapes)
}
SyntaxShape::Path => {
color_fallible_syntax(&FilePathShape, token_nodes, context, shapes)
}
SyntaxShape::Pattern => {
color_fallible_syntax(&PatternShape, token_nodes, context, shapes)
}
SyntaxShape::Block => {
color_fallible_syntax(&AnyBlockShape, token_nodes, context, shapes)
}
}
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for SyntaxShape {
type Info = ();
type Input = ();
@ -220,7 +166,6 @@ pub trait ExpandExpression: std::fmt::Debug + Copy {
) -> Result<hir::Expression, ParseError>;
}
#[cfg(coloring_in_tokens)]
pub trait FallibleColorSyntax: std::fmt::Debug + Copy {
type Info;
type Input;
@ -235,35 +180,6 @@ pub trait FallibleColorSyntax: std::fmt::Debug + Copy {
) -> Result<Self::Info, ShellError>;
}
#[cfg(not(coloring_in_tokens))]
pub trait FallibleColorSyntax: std::fmt::Debug + Copy {
type Info;
type Input;
fn color_syntax<'a, 'b>(
&self,
input: &Self::Input,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<Self::Info, ShellError>;
}
#[cfg(not(coloring_in_tokens))]
pub trait ColorSyntax: std::fmt::Debug + Copy {
type Info;
type Input;
fn color_syntax<'a, 'b>(
&self,
input: &Self::Input,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Self::Info;
}
#[cfg(coloring_in_tokens)]
pub trait ColorSyntax: std::fmt::Debug + Copy {
type Info;
type Input;
@ -310,7 +226,6 @@ pub(crate) fn expand_expr<'a, 'b, T: ExpandExpression>(
})
}
#[cfg(coloring_in_tokens)]
pub fn color_syntax<'a, 'b, T: ColorSyntax<Info = U, Input = ()>, U>(
shape: &T,
token_nodes: &'b mut TokensIterator<'a>,
@ -324,70 +239,6 @@ pub fn color_syntax<'a, 'b, T: ColorSyntax<Info = U, Input = ()>, U>(
)
}
#[cfg(not(coloring_in_tokens))]
pub fn color_syntax<'a, 'b, T: ColorSyntax<Info = U, Input = ()>, U>(
shape: &T,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> ((), U) {
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes.state(), context.source));
let len = shapes.len();
let result = shape.color_syntax(&(), token_nodes, context, shapes);
trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes.state(), context.source));
if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>());
if len < shapes.len() {
for i in len..(shapes.len()) {
trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]);
}
} else {
trace!(target: "nu::color_syntax", "no new shapes");
}
}
((), result)
}
#[cfg(not(coloring_in_tokens))]
pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax<Info = U, Input = ()>, U>(
shape: &T,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<U, ShellError> {
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes.state(), context.source));
if token_nodes.at_end() {
trace!(target: "nu::color_syntax", "at eof");
return Err(ShellError::unexpected_eof("coloring", Tag::unknown()));
}
let len = shapes.len();
let result = shape.color_syntax(&(), token_nodes, context, shapes);
trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes.state(), context.source));
if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>());
if len < shapes.len() {
for i in len..(shapes.len()) {
trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]);
}
} else {
trace!(target: "nu::color_syntax", "no new shapes");
}
}
result
}
#[cfg(coloring_in_tokens)]
pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax<Info = U, Input = ()>, U>(
shape: &T,
token_nodes: &'b mut TokensIterator<'a>,
@ -398,37 +249,6 @@ pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax<Info = U, Input = ()
})
}
#[cfg(not(coloring_in_tokens))]
pub fn color_syntax_with<'a, 'b, T: ColorSyntax<Info = U, Input = I>, U, I>(
shape: &T,
input: &I,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> ((), U) {
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes.state(), context.source));
let len = shapes.len();
let result = shape.color_syntax(input, token_nodes, context, shapes);
trace!(target: "nu::color_syntax", "ok :: {:?}", debug_tokens(token_nodes.state(), context.source));
if log_enabled!(target: "nu::color_syntax", log::Level::Trace) {
trace!(target: "nu::color_syntax", "after {}", std::any::type_name::<T>());
if len < shapes.len() {
for i in len..(shapes.len()) {
trace!(target: "nu::color_syntax", "new shape :: {:?}", shapes[i]);
}
} else {
trace!(target: "nu::color_syntax", "no new shapes");
}
}
((), result)
}
#[cfg(coloring_in_tokens)]
pub fn color_syntax_with<'a, 'b, T: ColorSyntax<Info = U, Input = I>, U, I>(
shape: &T,
input: &I,
@ -443,20 +263,6 @@ pub fn color_syntax_with<'a, 'b, T: ColorSyntax<Info = U, Input = I>, U, I>(
)
}
#[cfg(not(coloring_in_tokens))]
pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax<Info = U, Input = I>, U, I>(
shape: &T,
input: &I,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<U, ShellError> {
token_nodes.color_fallible_frame(std::any::type_name::<T>(), |token_nodes| {
shape.color_syntax(input, token_nodes, context, shapes)
})
}
#[cfg(coloring_in_tokens)]
pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax<Info = U, Input = I>, U, I>(
shape: &T,
input: &I,
@ -588,40 +394,6 @@ impl ExpandSyntax for BarePathShape {
#[derive(Debug, Copy, Clone)]
pub struct BareShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for BareShape {
type Info = ();
type Input = FlatShape;
fn color_syntax<'a, 'b>(
&self,
input: &FlatShape,
token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
token_nodes
.peek_any_token("word", |token| match token {
// If it's a bare token, color it
TokenNode::Token(Token {
unspanned: UnspannedToken::Bare,
span,
}) => {
shapes.push((*input).spanned(*span));
Ok(())
}
// otherwise, fail
other => Err(ParseError::mismatch(
"word",
other.type_name().spanned(other.span()),
)),
})
.map_err(|err| err.into())
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for BareShape {
type Info = ();
type Input = FlatShape;
@ -780,43 +552,6 @@ impl CommandSignature {
#[derive(Debug, Copy, Clone)]
pub struct PipelineShape;
#[cfg(not(coloring_in_tokens))]
// The failure mode is if the head of the token stream is not a pipeline
impl FallibleColorSyntax for PipelineShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
// Make sure we're looking at a pipeline
let Pipeline { parts, .. } =
token_nodes.peek_any_token("pipeline", |node| node.as_pipeline())?;
// Enumerate the pipeline parts
for part in parts {
// If the pipeline part has a prefix `|`, emit a pipe to color
if let Some(pipe) = part.pipe {
shapes.push(FlatShape::Pipe.spanned(pipe));
}
// Create a new iterator containing the tokens in the pipeline part to color
let mut token_nodes =
TokensIterator::new(&part.tokens(), part.span(), context.source.clone(), false);
color_syntax(&MaybeSpaceShape, &mut token_nodes, context, shapes);
color_syntax(&CommandShape, &mut token_nodes, context, shapes);
}
Ok(())
}
}
#[cfg(coloring_in_tokens)]
// The failure mode is if the head of the token stream is not a pipeline
impl FallibleColorSyntax for PipelineShape {
type Info = ();
@ -856,46 +591,6 @@ impl FallibleColorSyntax for PipelineShape {
}
}
#[cfg(coloring_in_tokens)]
impl ExpandSyntax for PipelineShape {
type Output = ClassifiedPipeline;
fn name(&self) -> &'static str {
"pipeline"
}
fn expand_syntax<'content, 'me>(
&self,
iterator: &'me mut TokensIterator<'content>,
context: &ExpandContext,
) -> Result<Self::Output, ParseError> {
let start = iterator.span_at_cursor();
let peeked = iterator.peek_any().not_eof("pipeline")?;
let pipeline = peeked.commit().as_pipeline()?;
let parts = &pipeline.parts[..];
let mut out = vec![];
for part in parts {
let tokens: Spanned<&[TokenNode]> = part.tokens().spanned(part.span());
let classified =
iterator.child(tokens, context.source.clone(), move |token_nodes| {
expand_syntax(&ClassifiedCommandShape, token_nodes, context)
})?;
out.push(classified);
}
let end = iterator.span_at_cursor();
Ok(ClassifiedPipeline::commands(out, start.until(end)))
}
}
#[cfg(not(coloring_in_tokens))]
impl ExpandSyntax for PipelineShape {
type Output = ClassifiedPipeline;
@ -942,61 +637,6 @@ pub enum CommandHeadKind {
#[derive(Debug, Copy, Clone)]
pub struct CommandHeadShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for CommandHeadShape {
type Info = CommandHeadKind;
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<CommandHeadKind, ShellError> {
// If we don't ultimately find a token, roll back
token_nodes.atomic(|token_nodes| {
// First, take a look at the next token
let atom = expand_atom(
token_nodes,
"command head",
context,
ExpansionRule::permissive(),
)?;
match &atom.unspanned {
// If the head is an explicit external command (^cmd), color it as an external command
UnspannedAtomicToken::ExternalCommand { .. } => {
shapes.push(FlatShape::ExternalCommand.spanned(atom.span));
Ok(CommandHeadKind::External)
}
// If the head is a word, it depends on whether it matches a registered internal command
UnspannedAtomicToken::Word { text } => {
let name = text.slice(context.source);
if context.registry.has(name) {
// If the registry has the command, color it as an internal command
shapes.push(FlatShape::InternalCommand.spanned(text));
let signature = context.registry.get(name).unwrap();
Ok(CommandHeadKind::Internal(signature))
} else {
// Otherwise, color it as an external command
shapes.push(FlatShape::ExternalCommand.spanned(text));
Ok(CommandHeadKind::External)
}
}
// Otherwise, we're not actually looking at a command
_ => Err(ShellError::syntax_error(
"No command at the head".spanned(atom.span),
)),
}
})
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for CommandHeadShape {
type Info = CommandHeadKind;
type Input = ();
@ -1170,46 +810,6 @@ impl ExpandSyntax for ClassifiedCommandShape {
#[derive(Debug, Copy, Clone)]
pub struct InternalCommandHeadShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for InternalCommandHeadShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
let peeked_head = token_nodes.peek_non_ws().not_eof("command head4");
let peeked_head = match peeked_head {
Err(_) => return Ok(()),
Ok(peeked_head) => peeked_head,
};
let _expr = match peeked_head.node {
TokenNode::Token(Token {
unspanned: UnspannedToken::Bare,
span,
}) => shapes.push(FlatShape::Word.spanned(*span)),
TokenNode::Token(Token {
unspanned: UnspannedToken::String(_inner_tag),
span,
}) => shapes.push(FlatShape::String.spanned(*span)),
_node => shapes.push(FlatShape::Error.spanned(peeked_head.node.span())),
};
peeked_head.commit();
Ok(())
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for InternalCommandHeadShape {
type Info = ();
type Input = ();
@ -1358,38 +958,6 @@ fn parse_single_node_skipping_ws<'a, 'b, T>(
#[derive(Debug, Copy, Clone)]
pub struct WhitespaceShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for WhitespaceShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
let peeked = token_nodes.peek_any().not_eof("whitespace");
let peeked = match peeked {
Err(_) => return Ok(()),
Ok(peeked) => peeked,
};
let _tag = match peeked.node {
TokenNode::Whitespace(span) => shapes.push(FlatShape::Whitespace.spanned(*span)),
_other => return Ok(()),
};
peeked.commit();
Ok(())
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for WhitespaceShape {
type Info = ();
type Input = ();
@ -1530,33 +1098,6 @@ impl ExpandSyntax for MaybeSpaceShape {
}
}
#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for MaybeSpaceShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Self::Info {
let peeked = token_nodes.peek_any().not_eof("whitespace");
let peeked = match peeked {
Err(_) => return,
Ok(peeked) => peeked,
};
if let TokenNode::Whitespace(span) = peeked.node {
peeked.commit();
shapes.push(FlatShape::Whitespace.spanned(*span));
}
}
}
#[cfg(coloring_in_tokens)]
impl ColorSyntax for MaybeSpaceShape {
type Info = ();
type Input = ();
@ -1588,36 +1129,6 @@ impl ColorSyntax for MaybeSpaceShape {
#[derive(Debug, Copy, Clone)]
pub struct SpaceShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for SpaceShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
let peeked = token_nodes.peek_any().not_eof("whitespace")?;
match peeked.node {
TokenNode::Whitespace(span) => {
peeked.commit();
shapes.push(FlatShape::Whitespace.spanned(*span));
Ok(())
}
other => Err(ShellError::type_error(
"whitespace",
other.type_name().spanned(other.span()),
)),
}
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for SpaceShape {
type Info = ();
type Input = ();
@ -1691,38 +1202,6 @@ fn expand_variable(span: Span, token_span: Span, source: &Text) -> hir::Expressi
#[derive(Debug, Copy, Clone)]
pub struct CommandShape;
#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for CommandShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) {
let kind = color_fallible_syntax(&CommandHeadShape, token_nodes, context, shapes);
match kind {
Err(_) => {
// We didn't find a command, so we'll have to fall back to parsing this pipeline part
// as a blob of undifferentiated expressions
color_syntax(&ExpressionListShape, token_nodes, context, shapes);
}
Ok(CommandHeadKind::External) => {
color_syntax(&ExternalTokensShape, token_nodes, context, shapes);
}
Ok(CommandHeadKind::Internal(signature)) => {
color_syntax_with(&CommandTailShape, &signature, token_nodes, context, shapes);
}
};
}
}
#[cfg(coloring_in_tokens)]
impl ColorSyntax for CommandShape {
type Info = ();
type Input = ();
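
Both trait styles are visible in the hunks above: the deleted `#[cfg(not(coloring_in_tokens))]` variants thread a `shapes: &mut Vec<Spanned<FlatShape>>` buffer through every call, while the retained variants drop that parameter and record shapes on the `TokensIterator` itself (see `TokensIterator::color_shape` further down in this diff). A simplified, hypothetical contrast using stub types:

```rust
// Stub standing in for nu-parser's Spanned<FlatShape>.
#[derive(Debug, Clone, Copy)]
struct FlatShape;

// Stub standing in for nu-parser's TokensIterator.
struct TokensIterator {
    shapes: Vec<FlatShape>,
}

impl TokensIterator {
    // Retained style: the iterator owns the shape buffer.
    fn color_shape(&mut self, shape: FlatShape) {
        self.shapes.push(shape);
    }
}

// Deleted style: every coloring function took the output buffer explicitly.
fn color_word_old(shapes: &mut Vec<FlatShape>) {
    shapes.push(FlatShape);
}

fn main() {
    let mut iterator = TokensIterator { shapes: vec![] };
    iterator.color_shape(FlatShape);

    let mut shapes = vec![];
    color_word_old(&mut shapes);
    assert_eq!(iterator.shapes.len(), shapes.len());
}
```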

View File

@ -1,5 +1,3 @@
#[cfg(not(coloring_in_tokens))]
use crate::hir::syntax_shape::FlatShape;
use crate::{
hir,
hir::syntax_shape::{
@ -13,62 +11,10 @@ use crate::{
};
use nu_errors::{ParseError, ShellError};
use nu_source::Span;
#[cfg(not(coloring_in_tokens))]
use nu_source::Spanned;
#[derive(Debug, Copy, Clone)]
pub struct AnyBlockShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for AnyBlockShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
let block = token_nodes.peek_non_ws().not_eof("block");
let block = match block {
Err(_) => return Ok(()),
Ok(block) => block,
};
// is it just a block?
let block = block.node.as_block();
match block {
// If so, color it as a block
Some((children, spans)) => {
let mut token_nodes = TokensIterator::new(
children.item,
children.span,
context.source.clone(),
false,
);
color_syntax_with(
&DelimitedShape,
&(Delimiter::Brace, spans.0, spans.1),
&mut token_nodes,
context,
shapes,
);
return Ok(());
}
_ => {}
}
// Otherwise, look for a shorthand block. If none found, fail
color_fallible_syntax(&ShorthandBlock, token_nodes, context, shapes)
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for AnyBlockShape {
type Info = ();
type Input = ();
@ -149,39 +95,6 @@ impl ExpandExpression for AnyBlockShape {
#[derive(Debug, Copy, Clone)]
pub struct ShorthandBlock;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for ShorthandBlock {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
// Try to find a shorthand head. If none found, fail
color_fallible_syntax(&ShorthandPath, token_nodes, context, shapes)?;
loop {
// Check to see whether there's any continuation after the head expression
let result =
color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes);
match result {
// if no continuation was found, we're done
Err(_) => break,
// if a continuation was found, look for another one
Ok(_) => continue,
}
}
Ok(())
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for ShorthandBlock {
type Info = ();
type Input = ();
@ -239,52 +152,6 @@ impl ExpandExpression for ShorthandBlock {
#[derive(Debug, Copy, Clone)]
pub struct ShorthandPath;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for ShorthandPath {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
token_nodes.atomic(|token_nodes| {
let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context, shapes);
match variable {
Ok(_) => {
// if it's a variable path, that's the head part
return Ok(());
}
Err(_) => {
// otherwise, we'll try to find a member path
}
}
// look for a member (`<member>` -> `$it.<member>`)
color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
// Now that we've synthesized the head, of the path, proceed to expand the tail of the path
// like any other path.
let tail = color_fallible_syntax(&PathTailShape, token_nodes, context, shapes);
match tail {
Ok(_) => {}
Err(_) => {
// It's ok if there's no path tail; a single member is sufficient
}
}
Ok(())
})
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for ShorthandPath {
type Info = ();
type Input = ();
@ -376,100 +243,6 @@ impl ExpandExpression for ShorthandPath {
#[derive(Debug, Copy, Clone)]
pub struct ShorthandHeadShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for ShorthandHeadShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
use crate::parse::token_tree::TokenNode;
use crate::parse::tokens::{Token, UnspannedToken};
use nu_protocol::SpannedTypeName;
use nu_source::SpannedItem;
// A shorthand path must not be at EOF
let peeked = token_nodes.peek_non_ws().not_eof("shorthand path head")?;
match peeked.node {
// If the head of a shorthand path is a bare token, it expands to `$it.bare`
TokenNode::Token(Token {
unspanned: UnspannedToken::Bare,
span,
}) => {
peeked.commit();
shapes.push(FlatShape::BareMember.spanned(*span));
Ok(())
}
// If the head of a shorthand path is a string, it expands to `$it."some string"`
TokenNode::Token(Token {
unspanned: UnspannedToken::String(_),
span: outer,
}) => {
peeked.commit();
shapes.push(FlatShape::StringMember.spanned(*outer));
Ok(())
}
other => Err(ShellError::type_error(
"shorthand head",
other.spanned_type_name(),
)),
}
}
}
#[cfg(coloring_in_tokens)]
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for ShorthandHeadShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
// A shorthand path must not be at EOF
let peeked = token_nodes.peek_non_ws().not_eof("shorthand path head")?;
match peeked.node {
// If the head of a shorthand path is a bare token, it expands to `$it.bare`
TokenNode::Token(Spanned {
item: UnspannedToken::Bare,
span,
}) => {
peeked.commit();
shapes.push(FlatShape::BareMember.spanned(*span));
Ok(())
}
// If the head of a shorthand path is a string, it expands to `$it."some string"`
TokenNode::Token(Spanned {
item: UnspannedToken::String(_),
span: outer,
}) => {
peeked.commit();
shapes.push(FlatShape::StringMember.spanned(*outer));
Ok(())
}
other => Err(ShellError::type_error(
"shorthand head",
other.tagged_type_name(),
)),
}
}
}
impl ExpandExpression for ShorthandHeadShape {
fn name(&self) -> &'static str {
"shorthand head"

View File

@ -43,34 +43,6 @@ impl ExpandExpression for AnyExpressionShape {
}
}
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for AnyExpressionShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
// Look for an expression at the cursor
color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context, shapes)?;
match continue_coloring_expression(token_nodes, context, shapes) {
Err(_) => {
// it's fine for there to be no continuation
}
Ok(()) => {}
}
Ok(())
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for AnyExpressionShape {
type Info = ();
type Input = ();
@ -128,32 +100,6 @@ pub(crate) fn continue_expression(
}
}
#[cfg(not(coloring_in_tokens))]
pub(crate) fn continue_coloring_expression(
token_nodes: &mut TokensIterator<'_>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
// if there's not even one expression continuation, fail
color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes)?;
loop {
// Check to see whether there's any continuation after the head expression
let result =
color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes);
match result {
Err(_) => {
// We already saw one continuation, so just return
return Ok(());
}
Ok(_) => {}
}
}
}
#[cfg(coloring_in_tokens)]
pub(crate) fn continue_coloring_expression(
token_nodes: &mut TokensIterator<'_>,
context: &ExpandContext,
@ -221,66 +167,6 @@ impl ExpandExpression for AnyExpressionStartShape {
}
}
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for AnyExpressionStartShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
let atom = token_nodes.spanned(|token_nodes| {
expand_atom(
token_nodes,
"expression",
context,
ExpansionRule::permissive(),
)
});
let atom = match atom {
Spanned {
item: Err(_err),
span,
} => {
shapes.push(FlatShape::Error.spanned(span));
return Ok(());
}
Spanned {
item: Ok(value), ..
} => value,
};
match &atom.unspanned {
UnspannedAtomicToken::Size { number, unit } => shapes.push(
FlatShape::Size {
number: number.span(),
unit: unit.span.into(),
}
.spanned(atom.span),
),
UnspannedAtomicToken::SquareDelimited { nodes, spans } => {
color_delimited_square(*spans, &nodes, atom.span.into(), context, shapes)
}
UnspannedAtomicToken::Word { .. } => {
shapes.push(FlatShape::Word.spanned(atom.span));
}
_ => atom.color_tokens(shapes),
}
Ok(())
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for AnyExpressionStartShape {
type Info = ();
type Input = ();
@ -351,64 +237,6 @@ impl FallibleColorSyntax for AnyExpressionStartShape {
#[derive(Debug, Copy, Clone)]
pub struct BareTailShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for BareTailShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
let len = shapes.len();
loop {
let word = color_fallible_syntax_with(
&BareShape,
&FlatShape::Word,
token_nodes,
context,
shapes,
);
match word {
// if a word was found, continue
Ok(_) => continue,
// if a word wasn't found, try to find a dot
Err(_) => {}
}
// try to find a dot
let dot = color_fallible_syntax_with(
&ColorableDotShape,
&FlatShape::Word,
token_nodes,
context,
shapes,
);
match dot {
// if a dot was found, try to find another word
Ok(_) => continue,
// otherwise, we're done
Err(_) => break,
}
}
if shapes.len() > len {
Ok(())
} else {
Err(ShellError::syntax_error(
"No tokens matched BareTailShape".spanned_unknown(),
))
}
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for BareTailShape {
type Info = ();
type Input = ();

View File

@ -3,8 +3,6 @@ use crate::hir::syntax_shape::{
};
use crate::{hir, hir::TokensIterator, Delimiter, FlatShape};
use nu_errors::ParseError;
#[cfg(not(coloring_in_tokens))]
use nu_source::Spanned;
use nu_source::{Span, SpannedItem, Tag};
pub fn expand_delimited_square(
@ -22,21 +20,6 @@ pub fn expand_delimited_square(
))
}
#[cfg(not(coloring_in_tokens))]
pub fn color_delimited_square(
(open, close): (Span, Span),
children: &Vec<TokenNode>,
span: Span,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) {
shapes.push(FlatShape::OpenDelimiter(Delimiter::Square).spanned(open));
let mut tokens = TokensIterator::new(&children, span, context.source.clone(), false);
let _list = color_syntax(&ExpressionListShape, &mut tokens, context, shapes);
shapes.push(FlatShape::CloseDelimiter(Delimiter::Square).spanned(close));
}
#[cfg(coloring_in_tokens)]
pub fn color_delimited_square(
(open, close): (Span, Span),
token_nodes: &mut TokensIterator,
@ -51,24 +34,6 @@ pub fn color_delimited_square(
#[derive(Debug, Copy, Clone)]
pub struct DelimitedShape;
#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for DelimitedShape {
type Info = ();
type Input = (Delimiter, Span, Span);
fn color_syntax<'a, 'b>(
&self,
(delimiter, open, close): &(Delimiter, Span, Span),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Self::Info {
shapes.push(FlatShape::OpenDelimiter(*delimiter).spanned(*open));
color_syntax(&ExpressionListShape, token_nodes, context, shapes);
shapes.push(FlatShape::CloseDelimiter(*delimiter).spanned(*close));
}
}
#[cfg(coloring_in_tokens)]
impl ColorSyntax for DelimitedShape {
type Info = ();
type Input = (Delimiter, Span, Span);

View File

@ -11,46 +11,6 @@ use nu_source::SpannedItem;
#[derive(Debug, Copy, Clone)]
pub struct FilePathShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for FilePathShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<nu_source::Spanned<FlatShape>>,
) -> Result<(), ShellError> {
let atom = expand_atom(
token_nodes,
"file path",
context,
ExpansionRule::permissive(),
);
let atom = match atom {
Err(_) => return Ok(()),
Ok(atom) => atom,
};
match &atom.unspanned {
UnspannedAtomicToken::Word { .. }
| UnspannedAtomicToken::String { .. }
| UnspannedAtomicToken::Number { .. }
| UnspannedAtomicToken::Size { .. } => {
shapes.push(FlatShape::Path.spanned(atom.span));
}
_ => atom.color_tokens(shapes),
}
Ok(())
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for FilePathShape {
type Info = ();
type Input = ();

View File

@ -1,5 +1,3 @@
#[cfg(not(coloring_in_tokens))]
use crate::hir::syntax_shape::FlatShape;
use crate::{
hir,
hir::syntax_shape::{
@ -76,77 +74,6 @@ impl ExpandSyntax for ExpressionListShape {
}
}
#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for ExpressionListShape {
type Info = ();
type Input = ();
/// The intent of this method is to fully color an expression list shape infallibly.
/// This means that if we can't expand a token into an expression, we fall back to
/// a simpler coloring strategy.
///
/// This would apply to something like `where x >`, which includes an incomplete
/// binary operator. Since we will fail to process it as a binary operator, we'll
/// fall back to a simpler coloring and move on.
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) {
// We encountered a parsing error and will continue with simpler coloring ("backoff
// coloring mode")
let mut backoff = false;
// Consume any leading whitespace
color_syntax(&MaybeSpaceShape, token_nodes, context, shapes);
loop {
// If we reached the very end of the token stream, we're done
if token_nodes.at_end() {
return;
}
if backoff {
let len = shapes.len();
// If we previously encountered a parsing error, use backoff coloring mode
color_syntax(&SimplestExpression, token_nodes, context, shapes);
if len == shapes.len() && !token_nodes.at_end() {
// This should never happen, but if it does, a panic is better than an infinite loop
panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression")
}
} else {
// Try to color the head of the stream as an expression
match color_fallible_syntax(&AnyExpressionShape, token_nodes, context, shapes) {
// If no expression was found, switch to backoff coloring mode
Err(_) => {
backoff = true;
continue;
}
Ok(_) => {}
}
// If an expression was found, consume a space
match color_fallible_syntax(&SpaceShape, token_nodes, context, shapes) {
Err(_) => {
// If no space was found, we're either at the end or there's an error.
// Either way, switch to backoff coloring mode. If we're at the end
// it won't have any consequences.
backoff = true;
}
Ok(_) => {
// Otherwise, move on to the next expression
}
}
}
}
}
}
#[cfg(coloring_in_tokens)]
impl ColorSyntax for ExpressionListShape {
type Info = ();
type Input = ();
@ -223,35 +150,6 @@ impl ColorSyntax for ExpressionListShape {
#[derive(Debug, Copy, Clone)]
pub struct BackoffColoringMode;
#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for BackoffColoringMode {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &Self::Input,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Self::Info {
loop {
if token_nodes.at_end() {
break;
}
let len = shapes.len();
color_syntax(&SimplestExpression, token_nodes, context, shapes);
if len == shapes.len() && !token_nodes.at_end() {
// This shouldn't happen, but if it does, a panic is better than an infinite loop
panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, shapes);
}
}
}
}
#[cfg(coloring_in_tokens)]
impl ColorSyntax for BackoffColoringMode {
type Info = ();
type Input = ();
@ -288,33 +186,6 @@ impl ColorSyntax for BackoffColoringMode {
#[derive(Debug, Copy, Clone)]
pub struct SimplestExpression;
#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for SimplestExpression {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) {
let atom = expand_atom(
token_nodes,
"any token",
context,
ExpansionRule::permissive(),
);
match atom {
Err(_) => {}
Ok(atom) => atom.color_tokens(shapes),
}
}
}
#[cfg(coloring_in_tokens)]
impl ColorSyntax for SimplestExpression {
type Info = ();
type Input = ();
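
The doc comment on `ExpressionListShape::color_syntax` above describes the "backoff coloring" strategy: color complete expressions until something fails to parse, then fall back to coloring one token at a time so inputs like `where x >` still get highlighted. A toy, hypothetical sketch of that strategy (plain strings stand in for tokens and shapes):

```rust
// Toy model of backoff coloring; real shapes are Spanned<FlatShape>, not Strings.
fn try_color_expression(token: &str) -> Option<String> {
    // Stand-in for AnyExpressionShape: only bare alphanumeric words "parse".
    token
        .chars()
        .all(char::is_alphanumeric)
        .then(|| format!("expr:{token}"))
}

fn color_list(tokens: &[&str]) -> Vec<String> {
    let mut shapes = Vec::new();
    let mut backoff = false;
    for token in tokens {
        if backoff {
            // Backoff mode: simplest possible coloring, one token at a time.
            shapes.push(format!("simple:{token}"));
        } else if let Some(shape) = try_color_expression(token) {
            shapes.push(shape);
        } else {
            // A parse failure flips us into backoff mode for the rest.
            backoff = true;
            shapes.push(format!("simple:{token}"));
        }
    }
    shapes
}

fn main() {
    let shapes = color_list(&["where", "x", ">"]);
    println!("{shapes:?}"); // ["expr:where", "expr:x", "simple:>"]
}
```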

View File

@ -52,37 +52,6 @@ impl ExpandExpression for NumberShape {
}
}
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for NumberShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
let atom = token_nodes.spanned(|token_nodes| {
expand_atom(token_nodes, "number", context, ExpansionRule::permissive())
});
let atom = match atom {
Spanned { item: Err(_), span } => {
shapes.push(FlatShape::Error.spanned(span));
return Ok(());
}
Spanned { item: Ok(atom), .. } => atom,
};
atom.color_tokens(shapes);
Ok(())
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for NumberShape {
type Info = ();
type Input = ();
@ -152,37 +121,6 @@ impl ExpandExpression for IntShape {
}
}
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for IntShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
let atom = token_nodes.spanned(|token_nodes| {
expand_atom(token_nodes, "integer", context, ExpansionRule::permissive())
});
let atom = match atom {
Spanned { item: Err(_), span } => {
shapes.push(FlatShape::Error.spanned(span));
return Ok(());
}
Spanned { item: Ok(atom), .. } => atom,
};
atom.color_tokens(shapes);
Ok(())
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for IntShape {
type Info = ();
type Input = ();

View File

@ -6,45 +6,13 @@ use crate::parse::operator::EvaluationOperator;
use crate::parse::tokens::{Token, UnspannedToken};
use crate::{hir, hir::TokensIterator, TokenNode};
use nu_errors::{ParseError, ShellError};
#[cfg(coloring_in_tokens)]
use nu_protocol::ShellTypeName;
#[cfg(not(coloring_in_tokens))]
use nu_source::Spanned;
use nu_source::{Span, SpannedItem};
#[derive(Debug, Copy, Clone)]
pub struct PatternShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for PatternShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
use nu_protocol::SpannedTypeName;
token_nodes.atomic(|token_nodes| {
let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?;
match &atom.unspanned {
UnspannedAtomicToken::GlobPattern { .. } | UnspannedAtomicToken::Word { .. } => {
shapes.push(FlatShape::GlobPattern.spanned(atom.span));
Ok(())
}
_ => Err(ShellError::type_error("pattern", atom.spanned_type_name())),
}
})
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for PatternShape {
type Info = ();
type Input = ();

View File

@ -46,7 +46,6 @@ impl ExpandExpression for RangeShape {
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for RangeShape {
type Info = ();
type Input = ();
@ -71,32 +70,9 @@ impl FallibleColorSyntax for RangeShape {
}
}
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for RangeShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<nu_source::Spanned<FlatShape>>,
) -> Result<(), ShellError> {
token_nodes.atomic_parse(|token_nodes| {
color_fallible_syntax(&AnyExpressionShape, token_nodes, context, shapes)?;
color_fallible_syntax(&DotDotShape, token_nodes, context, shapes)?;
color_fallible_syntax(&AnyExpressionShape, token_nodes, context, shapes)
})?;
Ok(())
}
}
#[derive(Debug, Copy, Clone)]
struct DotDotShape;
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for DotDotShape {
type Info = ();
type Input = ();
@ -125,30 +101,3 @@ impl FallibleColorSyntax for DotDotShape {
}
}
}
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for DotDotShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &Self::Input,
token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext,
shapes: &mut Vec<nu_source::Spanned<FlatShape>>,
) -> Result<Self::Info, ShellError> {
let peeked = token_nodes.peek_any().not_eof("..")?;
match &peeked.node {
TokenNode::Token(Token {
unspanned: UnspannedToken::EvaluationOperator(EvaluationOperator::DotDot),
span,
}) => {
peeked.commit();
shapes.push(FlatShape::DotDot.spanned(span));
Ok(())
}
token => Err(ShellError::type_error("..", token.spanned_type_name())),
}
}
}

View File

@ -6,45 +6,11 @@ use crate::hir::tokens_iterator::Peeked;
use crate::parse::tokens::UnspannedToken;
use crate::{hir, hir::TokensIterator};
use nu_errors::{ParseError, ShellError};
#[cfg(not(coloring_in_tokens))]
use nu_source::Spanned;
use nu_source::SpannedItem;
#[derive(Debug, Copy, Clone)]
pub struct StringShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for StringShape {
type Info = ();
type Input = FlatShape;
fn color_syntax<'a, 'b>(
&self,
input: &FlatShape,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive());
let atom = match atom {
Err(_) => return Ok(()),
Ok(atom) => atom,
};
match atom {
AtomicToken {
unspanned: UnspannedAtomicToken::String { .. },
span,
} => shapes.push((*input).spanned(span)),
other => other.color_tokens(shapes),
}
Ok(())
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for StringShape {
type Info = ();
type Input = FlatShape;

View File

@ -57,48 +57,6 @@ impl ExpandExpression for VariablePathShape {
}
}
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for VariablePathShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
token_nodes.atomic(|token_nodes| {
// If the head of the token stream is not a variable, fail
color_fallible_syntax(&VariableShape, token_nodes, context, shapes)?;
loop {
// look for a dot at the head of a stream
let dot = color_fallible_syntax_with(
&ColorableDotShape,
&FlatShape::Dot,
token_nodes,
context,
shapes,
);
// if there's no dot, we're done
match dot {
Err(_) => break,
Ok(_) => {}
}
// otherwise, look for a member, and if you don't find one, fail
color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
}
Ok(())
})
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for VariablePathShape {
type Info = ();
type Input = ();
@ -144,40 +102,6 @@ impl FallibleColorSyntax for VariablePathShape {
#[derive(Debug, Copy, Clone)]
pub struct PathTailShape;
#[cfg(not(coloring_in_tokens))]
/// The failure mode of `PathTailShape` is a dot followed by a non-member
impl FallibleColorSyntax for PathTailShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
token_nodes.atomic(|token_nodes| loop {
let result = color_fallible_syntax_with(
&ColorableDotShape,
&FlatShape::Dot,
token_nodes,
context,
shapes,
);
match result {
Err(_) => return Ok(()),
Ok(_) => {}
}
// If we've seen a dot but not a member, fail
color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
})
}
}
#[cfg(coloring_in_tokens)]
/// The failure mode of `PathTailShape` is a dot followed by a non-member
impl FallibleColorSyntax for PathTailShape {
type Info = ();
@ -342,60 +266,6 @@ pub enum ContinuationInfo {
Infix,
}
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for ExpressionContinuationShape {
type Info = ContinuationInfo;
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<ContinuationInfo, ShellError> {
token_nodes.atomic(|token_nodes| {
// Try to expand a `.`
let dot = color_fallible_syntax_with(
&ColorableDotShape,
&FlatShape::Dot,
token_nodes,
context,
shapes,
);
match dot {
Ok(_) => {
// we found a dot, so let's keep looking for a member; if no member was found, fail
color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
Ok(ContinuationInfo::Dot)
}
Err(_) => {
let mut new_shapes = vec![];
let result = token_nodes.atomic(|token_nodes| {
// we didn't find a dot, so let's see if we're looking at an infix. If not found, fail
color_fallible_syntax(&InfixShape, token_nodes, context, &mut new_shapes)?;
// now that we've seen an infix shape, look for any expression. If not found, fail
color_fallible_syntax(
&AnyExpressionShape,
token_nodes,
context,
&mut new_shapes,
)?;
Ok(ContinuationInfo::Infix)
})?;
shapes.extend(new_shapes);
Ok(result)
}
}
})
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for ExpressionContinuationShape {
type Info = ContinuationInfo;
type Input = ();
@ -472,47 +342,6 @@ impl ExpandExpression for VariableShape {
}
}
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for VariableShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
use nu_protocol::SpannedTypeName;
let atom = expand_atom(
token_nodes,
"variable",
context,
ExpansionRule::permissive(),
);
let atom = match atom {
Err(err) => return Err(err.into()),
Ok(atom) => atom,
};
match &atom.unspanned {
UnspannedAtomicToken::Variable { .. } => {
shapes.push(FlatShape::Variable.spanned(atom.span));
Ok(())
}
UnspannedAtomicToken::ItVariable { .. } => {
shapes.push(FlatShape::ItVariable.spanned(atom.span));
Ok(())
}
_ => Err(ShellError::type_error("variable", atom.spanned_type_name())),
}
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for VariableShape {
type Info = ();
type Input = ();
@ -710,57 +539,6 @@ pub fn expand_column_path<'a, 'b>(
#[derive(Debug, Copy, Clone)]
pub struct ColumnPathShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for ColumnPathShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
// If there's not even one member shape, fail
color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
loop {
let checkpoint = token_nodes.checkpoint();
match color_fallible_syntax_with(
&ColorableDotShape,
&FlatShape::Dot,
checkpoint.iterator,
context,
shapes,
) {
Err(_) => {
// we already saw at least one member shape, so return successfully
return Ok(());
}
Ok(_) => {
match color_fallible_syntax(&MemberShape, checkpoint.iterator, context, shapes)
{
Err(_) => {
// we saw a dot but not a member (but we saw at least one member),
// so don't commit the dot but return successfully
return Ok(());
}
Ok(_) => {
// we saw a dot and a member, so commit it and continue on
checkpoint.commit();
}
}
}
}
}
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for ColumnPathShape {
type Info = ();
type Input = ();
@ -854,45 +632,6 @@ impl ExpandSyntax for ColumnPathShape {
#[derive(Debug, Copy, Clone)]
pub struct MemberShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for MemberShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
let bare = color_fallible_syntax_with(
&BareShape,
&FlatShape::BareMember,
token_nodes,
context,
shapes,
);
match bare {
Ok(_) => return Ok(()),
Err(_) => {
// If we don't have a bare word, we'll look for a string
}
}
// Look for a string token. If we don't find one, fail
color_fallible_syntax_with(
&StringShape,
&FlatShape::StringMember,
token_nodes,
context,
shapes,
)
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for MemberShape {
type Info = ();
type Input = ();
@ -1022,35 +761,6 @@ pub struct DotShape;
#[derive(Debug, Copy, Clone)]
pub struct ColorableDotShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for ColorableDotShape {
type Info = ();
type Input = FlatShape;
fn color_syntax<'a, 'b>(
&self,
input: &FlatShape,
token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
use nu_protocol::SpannedTypeName;
let peeked = token_nodes.peek_any().not_eof("dot")?;
match peeked.node {
node if node.is_dot() => {
peeked.commit();
shapes.push((*input).spanned(node.span()));
Ok(())
}
other => Err(ShellError::type_error("dot", other.spanned_type_name())),
}
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for ColorableDotShape {
type Info = ();
type Input = FlatShape;
@ -1123,52 +833,6 @@ impl ExpandSyntax for DotShape {
#[derive(Debug, Copy, Clone)]
pub struct InfixShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for InfixShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
outer_shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
let checkpoint = token_nodes.checkpoint();
let mut shapes = vec![];
// An infix operator must be prefixed by whitespace. If no whitespace was found, fail
color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context, &mut shapes)?;
// Parse the next TokenNode after the whitespace
parse_single_node(
checkpoint.iterator,
"infix operator",
|token, token_span, err| {
match token {
// If it's a comparison operator, it's a match
UnspannedToken::CompareOperator(_operator) => {
shapes.push(FlatShape::CompareOperator.spanned(token_span));
Ok(())
}
// Otherwise, it's not a match
_ => Err(err.error()),
}
},
)?;
// An infix operator must be followed by whitespace. If no whitespace was found, fail
color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context, &mut shapes)?;
outer_shapes.extend(shapes);
checkpoint.commit();
Ok(())
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for InfixShape {
type Info = ();
type Input = ();

View File

@ -1,7 +1,7 @@
pub(crate) mod debug;
use self::debug::{ColorTracer, ExpandTracer};
#[cfg(coloring_in_tokens)]
use crate::hir::syntax_shape::FlatShape;
use crate::hir::Expression;
use crate::TokenNode;
@ -11,8 +11,6 @@ use nu_errors::{ParseError, ShellError};
use nu_protocol::SpannedTypeName;
use nu_source::{HasFallibleSpan, HasSpan, Span, Spanned, SpannedItem, Tag, Text};
cfg_if::cfg_if! {
if #[cfg(coloring_in_tokens)] {
#[derive(Getters, Debug)]
pub struct TokensIteratorState<'content> {
tokens: &'content [TokenNode],
@ -23,17 +21,6 @@ cfg_if::cfg_if! {
#[get = "pub"]
shapes: Vec<Spanned<FlatShape>>,
}
} else {
#[derive(Getters, Debug)]
pub struct TokensIteratorState<'content> {
tokens: &'content [TokenNode],
span: Span,
skip_ws: bool,
index: usize,
seen: indexmap::IndexSet<usize>,
}
}
}
#[derive(Getters, MutGetters, Debug)]
pub struct TokensIterator<'content> {
@ -53,7 +40,7 @@ pub struct Checkpoint<'content, 'me> {
pub(crate) iterator: &'me mut TokensIterator<'content>,
index: usize,
seen: indexmap::IndexSet<usize>,
#[cfg(coloring_in_tokens)]
shape_start: usize,
committed: bool,
}
@ -71,7 +58,7 @@ impl<'content, 'me> std::ops::Drop for Checkpoint<'content, 'me> {
state.index = self.index;
state.seen = self.seen.clone();
#[cfg(coloring_in_tokens)]
state.shapes.truncate(self.shape_start);
}
}
@ -161,8 +148,6 @@ impl<'content> TokensIterator<'content> {
source: Text,
skip_ws: bool,
) -> TokensIterator<'content> {
cfg_if::cfg_if! {
if #[cfg(coloring_in_tokens)] {
TokensIterator {
state: TokensIteratorState {
tokens: items,
@ -175,21 +160,6 @@ impl<'content> TokensIterator<'content> {
color_tracer: ColorTracer::new(source.clone()),
expand_tracer: ExpandTracer::new(source.clone()),
}
} else {
TokensIterator {
state: TokensIteratorState {
tokens: items,
span,
skip_ws,
index: 0,
seen: indexmap::IndexSet::new(),
},
color_tracer: ColorTracer::new(source.clone()),
expand_tracer: ExpandTracer::new(source.clone()),
}
}
}
}
pub fn all(
@ -217,13 +187,11 @@ impl<'content> TokensIterator<'content> {
result.spanned(start.until(end))
}
#[cfg(coloring_in_tokens)]
pub fn color_shape(&mut self, shape: Spanned<FlatShape>) {
self.with_color_tracer(|_, tracer| tracer.add_shape(shape));
self.state.shapes.push(shape);
}
#[cfg(coloring_in_tokens)]
pub fn mutate_shapes(&mut self, block: impl FnOnce(&mut Vec<Spanned<FlatShape>>)) {
let new_shapes: Vec<Spanned<FlatShape>> = {
let shapes = &mut self.state.shapes;
@ -239,13 +207,11 @@ impl<'content> TokensIterator<'content> {
});
}
#[cfg(coloring_in_tokens)]
pub fn silently_mutate_shapes(&mut self, block: impl FnOnce(&mut Vec<Spanned<FlatShape>>)) {
let shapes = &mut self.state.shapes;
block(shapes);
}
#[cfg(coloring_in_tokens)]
pub fn sort_shapes(&mut self) {
// This is pretty dubious, but it works. We should look into a better algorithm that doesn't end up requiring
// this solution.
@ -255,7 +221,6 @@ impl<'content> TokensIterator<'content> {
.sort_by(|a, b| a.span.start().cmp(&b.span.start()));
}
#[cfg(coloring_in_tokens)]
pub fn child<'me, T>(
&'me mut self,
tokens: Spanned<&'me [TokenNode]>,
@ -271,8 +236,6 @@ impl<'content> TokensIterator<'content> {
let mut expand_tracer = ExpandTracer::new(source.clone());
std::mem::swap(&mut expand_tracer, &mut self.expand_tracer);
cfg_if::cfg_if! {
if #[cfg(coloring_in_tokens)] {
let mut iterator = TokensIterator {
state: TokensIteratorState {
tokens: tokens.item,
@ -285,20 +248,6 @@ impl<'content> TokensIterator<'content> {
color_tracer,
expand_tracer,
};
} else {
let mut iterator = TokensIterator {
state: TokensIteratorState {
tokens: tokens.item,
span: tokens.span,
skip_ws: false,
index: 0,
seen: indexmap::IndexSet::new(),
},
color_tracer,
expand_tracer,
};
}
}
let result = block(&mut iterator);
@ -309,39 +258,6 @@ impl<'content> TokensIterator<'content> {
result
}
#[cfg(not(coloring_in_tokens))]
pub fn child<'me, T>(
&'me mut self,
tokens: Spanned<&'me [TokenNode]>,
source: Text,
block: impl FnOnce(&mut TokensIterator<'me>) -> T,
) -> T {
let mut color_tracer = ColorTracer::new(source.clone());
std::mem::swap(&mut color_tracer, &mut self.color_tracer);
let mut expand_tracer = ExpandTracer::new(source.clone());
std::mem::swap(&mut expand_tracer, &mut self.expand_tracer);
let mut iterator = TokensIterator {
state: TokensIteratorState {
tokens: tokens.item,
span: tokens.span,
skip_ws: false,
index: 0,
seen: indexmap::IndexSet::new(),
},
color_tracer,
expand_tracer,
};
let result = block(&mut iterator);
std::mem::swap(&mut iterator.color_tracer, &mut self.color_tracer);
std::mem::swap(&mut iterator.expand_tracer, &mut self.expand_tracer);
result
}
pub fn with_color_tracer(
&mut self,
block: impl FnOnce(&mut TokensIteratorState, &mut ColorTracer),
@ -362,7 +278,6 @@ impl<'content> TokensIterator<'content> {
block(state, tracer)
}
#[cfg(coloring_in_tokens)]
pub fn color_frame<T>(
&mut self,
desc: &'static str,
@ -455,7 +370,7 @@ impl<'content> TokensIterator<'content> {
let state = &mut self.state;
let index = state.index;
#[cfg(coloring_in_tokens)]
let shape_start = state.shapes.len();
let seen = state.seen.clone();
@ -464,7 +379,7 @@ impl<'content> TokensIterator<'content> {
index,
seen,
committed: false,
#[cfg(coloring_in_tokens)]
shape_start,
}
}
@ -478,7 +393,7 @@ impl<'content> TokensIterator<'content> {
let state = &mut self.state;
let index = state.index;
#[cfg(coloring_in_tokens)]
let shape_start = state.shapes.len();
let seen = state.seen.clone();
@ -487,7 +402,7 @@ impl<'content> TokensIterator<'content> {
index,
seen,
committed: false,
#[cfg(coloring_in_tokens)]
shape_start,
};
@ -506,7 +421,7 @@ impl<'content> TokensIterator<'content> {
let state = &mut self.state;
let index = state.index;
#[cfg(coloring_in_tokens)]
let shape_start = state.shapes.len();
let seen = state.seen.clone();
@ -515,7 +430,7 @@ impl<'content> TokensIterator<'content> {
index,
seen,
committed: false,
#[cfg(coloring_in_tokens)]
shape_start,
};
@ -525,7 +440,6 @@ impl<'content> TokensIterator<'content> {
return Ok(value);
}
#[cfg(coloring_in_tokens)]
/// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
/// that you'll succeed.
pub fn atomic_returning_shapes<'me, T>(
@ -641,7 +555,7 @@ impl<'content> TokensIterator<'content> {
// index: state.index,
// seen: state.seen.clone(),
// skip_ws: state.skip_ws,
// #[cfg(coloring_in_tokens)]
//
// shapes: state.shapes.clone(),
// },
// color_tracer: self.color_tracer.clone(),
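
Besides plain `#[cfg(...)]` attributes, this file used `cfg_if::cfg_if!` to choose between two `TokensIteratorState` definitions; with the flag gone, the `if #[cfg(coloring_in_tokens)]` branch becomes the only definition and the wrapper is unwrapped. A minimal sketch of that collapse, assuming the `cfg-if` crate as a dependency (the struct below is a stand-in, not the real state type):

```rust
use cfg_if::cfg_if;

cfg_if! {
    if #[cfg(coloring_in_tokens)] {
        // Flagged variant: carries the shape buffer inline.
        pub struct State {
            pub shapes: Vec<u32>,
        }
    } else {
        // Unflagged variant: no shape buffer.
        pub struct State;
    }
}

// After the commit, the cfg_if! wrapper disappears and only the flagged
// definition remains, compiled unconditionally:
//
// pub struct State {
//     pub shapes: Vec<u32>,
// }

fn main() {
    #[cfg(coloring_in_tokens)]
    let state = State { shapes: Vec::new() };
    #[cfg(not(coloring_in_tokens))]
    let state = State;
    let _ = &state;
}
```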

View File

@ -183,198 +183,6 @@ impl ColoringArgs {
#[derive(Debug, Copy, Clone)]
pub struct CommandTailShape;
#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for CommandTailShape {
type Info = ();
type Input = Signature;
fn color_syntax<'a, 'b>(
&self,
signature: &Signature,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Self::Info {
let mut args = ColoringArgs::new(token_nodes.len());
for (name, kind) in &signature.named {
trace!(target: "nu::color_syntax", "looking for {} : {:?}", name, kind);
match &kind.0 {
NamedType::Switch => {
match token_nodes.extract(|t| t.as_flag(name, context.source())) {
Some((pos, flag)) => args.insert(pos, vec![flag.color()]),
None => {}
}
}
NamedType::Mandatory(syntax_type) => {
match extract_mandatory(
signature,
name,
token_nodes,
context.source(),
Span::unknown(),
) {
Err(_) => {
// The mandatory flag didn't exist at all, so there's nothing to color
}
Ok((pos, flag)) => {
let mut shapes = vec![flag.color()];
token_nodes.move_to(pos);
if token_nodes.at_end() {
args.insert(pos, shapes);
token_nodes.restart();
continue;
}
// We can live with unmatched syntax after a mandatory flag
let _ = token_nodes.atomic(|token_nodes| {
color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes);
// If the part after a mandatory flag isn't present, that's ok, but we
// should roll back any whitespace we chomped
color_fallible_syntax(
syntax_type,
token_nodes,
context,
&mut shapes,
)
});
args.insert(pos, shapes);
token_nodes.restart();
}
}
}
NamedType::Optional(syntax_type) => {
match extract_optional(name, token_nodes, context.source()) {
Err(_) => {
// The optional flag didn't exist at all, so there's nothing to color
}
Ok(Some((pos, flag))) => {
let mut shapes = vec![flag.color()];
token_nodes.move_to(pos);
if token_nodes.at_end() {
args.insert(pos, shapes);
token_nodes.restart();
continue;
}
// We can live with unmatched syntax after an optional flag
let _ = token_nodes.atomic(|token_nodes| {
color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes);
// If the part after a mandatory flag isn't present, that's ok, but we
// should roll back any whitespace we chomped
color_fallible_syntax(
syntax_type,
token_nodes,
context,
&mut shapes,
)
});
args.insert(pos, shapes);
token_nodes.restart();
}
Ok(None) => {
token_nodes.restart();
}
}
}
};
}
for arg in &signature.positional {
trace!("Processing positional {:?}", arg);
match arg.0 {
PositionalType::Mandatory(..) => {
if token_nodes.at_end() {
break;
}
}
PositionalType::Optional(..) => {
if token_nodes.at_end() {
break;
}
}
}
let mut shapes = vec![];
let pos = token_nodes.pos(false);
match pos {
None => break,
Some(pos) => {
// We can live with an unmatched positional argument. Hopefully it will be
// matched by a future token
let _ = token_nodes.atomic(|token_nodes| {
color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes);
// If no match, we should roll back any whitespace we chomped
color_fallible_syntax(
&arg.0.syntax_type(),
token_nodes,
context,
&mut shapes,
)?;
args.insert(pos, shapes);
Ok(())
});
}
}
}
if let Some((syntax_type, _)) = signature.rest_positional {
loop {
if token_nodes.at_end_possible_ws() {
break;
}
let pos = token_nodes.pos(false);
match pos {
None => break,
Some(pos) => {
let mut shapes = vec![];
// If any arguments don't match, we'll fall back to backoff coloring mode
let result = token_nodes.atomic(|token_nodes| {
color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes);
// If no match, we should roll back any whitespace we chomped
color_fallible_syntax(&syntax_type, token_nodes, context, &mut shapes)?;
args.insert(pos, shapes);
Ok(())
});
match result {
Err(_) => break,
Ok(_) => continue,
}
}
}
}
}
args.spread_shapes(shapes);
// Consume any remaining tokens with backoff coloring mode
color_syntax(&BackoffColoringMode, token_nodes, context, shapes);
shapes.sort_by(|a, b| a.span.start().cmp(&b.span.start()));
}
}
#[cfg(coloring_in_tokens)]
impl ColorSyntax for CommandTailShape {
type Info = ();
type Input = Signature;

View File

@ -80,20 +80,6 @@ impl Highlighter for Helper {
let text = Text::from(line);
let expand_context = self.context.expand_context(&text);
#[cfg(not(coloring_in_tokens))]
let shapes = {
let mut shapes = vec![];
color_fallible_syntax(
&PipelineShape,
&mut tokens,
&expand_context,
&mut shapes,
)
.unwrap();
shapes
};
#[cfg(coloring_in_tokens)]
let shapes = {
// We just constructed a token list that only contains a pipeline, so it can't fail
color_fallible_syntax(&PipelineShape, &mut tokens, &expand_context).unwrap();