Merge pull request #45 from wycats/highlighting

Highlighting
This commit is contained in:
Yehuda Katz 2019-05-30 22:46:53 -07:00 committed by GitHub
commit 3497e02629
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
7 changed files with 526 additions and 356 deletions

View File

@ -20,6 +20,10 @@ crate fn evaluate_expr(expr: &ast::Expression, scope: &Scope) -> Result<Value, S
match expr {
Expression::Leaf(l) => Ok(evaluate_leaf(l)),
Expression::Parenthesized(p) => evaluate_expr(&p.expr, scope),
Expression::Flag(f) => Err(ShellError::string(format!(
"can't evaluate the flag {}",
f.print()
))),
Expression::Block(b) => evaluate_block(&b, scope),
Expression::Path(p) => evaluate_path(&p, scope),
Expression::Binary(b) => evaluate_binary(b, scope),

View File

@ -14,7 +14,7 @@ use parser::PipelineParser;
pub fn parse(input: &str, _registry: &dyn CommandRegistry) -> Result<Pipeline, ShellError> {
let parser = PipelineParser::new();
let tokens = Lexer::new(input);
let tokens = Lexer::new(input, false);
match parser.parse(tokens) {
Ok(val) => Ok(val),

View File

@ -45,6 +45,7 @@ impl FromStr for Operator {
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq)]
pub enum Expression {
Leaf(Leaf),
Flag(Flag),
Parenthesized(Box<Parenthesized>),
Block(Box<Block>),
Binary(Box<Binary>),
@ -56,6 +57,7 @@ impl Expression {
crate fn print(&self) -> String {
match self {
Expression::Leaf(l) => l.print(),
Expression::Flag(f) => f.print(),
Expression::Parenthesized(p) => p.print(),
Expression::Block(b) => b.print(),
Expression::VariableReference(r) => r.print(),
@ -67,6 +69,7 @@ impl Expression {
crate fn as_external_arg(&self) -> String {
match self {
Expression::Leaf(l) => l.as_external_arg(),
Expression::Flag(f) => f.as_external_arg(),
Expression::Parenthesized(p) => p.as_external_arg(),
Expression::Block(b) => b.as_external_arg(),
Expression::VariableReference(r) => r.as_external_arg(),
@ -262,7 +265,7 @@ impl Binary {
}
}
#[derive(Debug, Clone)]
#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq)]
pub enum Flag {
Shorthand(String),
Longhand(String),
@ -270,12 +273,17 @@ pub enum Flag {
impl Flag {
#[allow(unused)]
fn print(&self) -> String {
crate fn print(&self) -> String {
match self {
Flag::Shorthand(s) => format!("-{}", s),
Flag::Longhand(s) => format!("--{}", s),
}
}
#[allow(unused)]
crate fn as_external_arg(&self) -> String {
self.print()
}
}
#[derive(new, Debug, Clone)]

View File

@ -92,7 +92,7 @@ impl TopToken {
Dollar => Token::Dollar,
Bare => Token::Bare,
Pipe => Token::Pipe,
Dot => Token::Dot,
Dot => Token::Bare,
OpenBrace => Token::OpenBrace,
CloseBrace => Token::CloseBrace,
OpenParen => Token::OpenParen,
@ -180,7 +180,7 @@ impl AfterVariableToken {
let result = match self {
END => return None,
Dot => Token::Dot,
Dot => Token::PathDot,
Whitespace => Token::Whitespace,
Error => unreachable!("Don't call to_token with the error variant"),
};
@ -340,6 +340,7 @@ impl SpannedToken<'source> {
pub enum Token {
Variable,
Dot,
PathDot,
Member,
Num,
SQString,
@ -375,14 +376,15 @@ pub enum Token {
crate struct Lexer<'source> {
lexer: logos::Lexer<TopToken, &'source str>,
first: bool,
// state: LexerState,
whitespace: bool, // state: LexerState
}
impl Lexer<'source> {
crate fn new(source: &str) -> Lexer<'_> {
crate fn new(source: &str, whitespace: bool) -> Lexer<'_> {
Lexer {
first: true,
lexer: logos::Logos::lexer(source),
whitespace
// state: LexerState::default(),
}
}
@ -400,7 +402,7 @@ impl Iterator for Lexer<'source> {
TopToken::Error => {
return Some(Err(lex_error(&self.lexer.range(), self.lexer.source)))
}
TopToken::Whitespace => return self.next(),
TopToken::Whitespace if !self.whitespace => return self.next(),
other => {
return spanned(other.to_token()?, self.lexer.slice(), &self.lexer.range())
}
@ -415,7 +417,7 @@ impl Iterator for Lexer<'source> {
match token {
TopToken::Error => return Some(Err(lex_error(&range, self.lexer.source))),
TopToken::Whitespace => return self.next(),
TopToken::Whitespace if !self.whitespace => return self.next(),
other => return spanned(other.to_token()?, slice, &range),
}
}
@ -429,7 +431,7 @@ impl Iterator for Lexer<'source> {
AfterMemberDot::Error => {
return Some(Err(lex_error(&range, self.lexer.source)))
}
AfterMemberDot::Whitespace => self.next(),
AfterMemberDot::Whitespace if !self.whitespace => self.next(),
other => return spanned(other.to_token()?, slice, &range),
}
}
@ -443,8 +445,7 @@ impl Iterator for Lexer<'source> {
AfterVariableToken::Error => {
return Some(Err(lex_error(&range, self.lexer.source)))
}
AfterVariableToken::Whitespace => self.next(),
AfterVariableToken::Whitespace if !self.whitespace => self.next(),
other => return spanned(other.to_token()?, slice, &range),
}
}
@ -508,7 +509,7 @@ mod tests {
use pretty_assertions::assert_eq;
fn assert_lex(source: &str, tokens: &[TestToken<'_>]) {
let lex = Lexer::new(source);
let lex = Lexer::new(source, false);
let mut current = 0;
let expected_tokens: Vec<SpannedToken> = tokens
@ -546,6 +547,7 @@ mod tests {
enum TokenDesc {
Ws,
Member,
PathDot,
Top(TopToken),
Var(VariableToken),
}
@ -575,6 +577,10 @@ mod tests {
TokenDesc::Ws => {
SpannedToken::new(Span::new(range), self.source, Token::Whitespace)
}
TokenDesc::PathDot => {
SpannedToken::new(Span::new(range), self.source, Token::PathDot)
}
}
}
}
@ -650,42 +656,45 @@ mod tests {
#[test]
fn test_tokenize_path() {
assert_lex("$var.bar", tokens![ "$" Var("var") "." Member("bar") ]);
assert_lex("$it.bar", tokens![ "$" Var("it") "." Member("bar") ]);
assert_lex("$var. bar", tokens![ "$" Var("var") "." SP Member("bar") ]);
assert_lex("$it. bar", tokens![ "$" Var("it") "." SP Member("bar") ]);
assert_lex("$var.bar", tokens![ "$" Var("var") "???." Member("bar") ]);
assert_lex("$it.bar", tokens![ "$" Var("it") "???." Member("bar") ]);
assert_lex(
"$var. bar",
tokens![ "$" Var("var") "???." SP Member("bar") ],
);
assert_lex("$it. bar", tokens![ "$" Var("it") "???." SP Member("bar") ]);
}
#[test]
fn test_tokenize_operator() {
assert_lex(
"$it.cpu > 10",
tokens![ "$" Var("it") "." Member("cpu") SP ">" SP Num("10") ],
tokens![ "$" Var("it") "???." Member("cpu") SP ">" SP Num("10") ],
);
assert_lex(
"$it.cpu < 10",
tokens![ "$" Var("it") "." Member("cpu") SP "<" SP Num("10") ],
tokens![ "$" Var("it") "???." Member("cpu") SP "<" SP Num("10") ],
);
assert_lex(
"$it.cpu >= 10",
tokens![ "$" Var("it") "." Member("cpu") SP ">=" SP Num("10") ],
tokens![ "$" Var("it") "???." Member("cpu") SP ">=" SP Num("10") ],
);
assert_lex(
"$it.cpu <= 10",
tokens![ "$" Var("it") "." Member("cpu") SP "<=" SP Num("10") ],
tokens![ "$" Var("it") "???." Member("cpu") SP "<=" SP Num("10") ],
);
assert_lex(
"$it.cpu == 10",
tokens![ "$" Var("it") "." Member("cpu") SP "==" SP Num("10") ],
tokens![ "$" Var("it") "???." Member("cpu") SP "==" SP Num("10") ],
);
assert_lex(
"$it.cpu != 10",
tokens![ "$" Var("it") "." Member("cpu") SP "!=" SP Num("10") ],
tokens![ "$" Var("it") "???." Member("cpu") SP "!=" SP Num("10") ],
);
}
@ -698,13 +707,18 @@ mod tests {
assert_lex(
"ls | where { $it.cpu > 10 }",
tokens![ Bare("ls") SP "|" SP Bare("where") SP "{" SP "$" Var("it") "." Member("cpu") SP ">" SP Num("10") SP "}" ],
tokens![ Bare("ls") SP "|" SP Bare("where") SP "{" SP "$" Var("it") "???." Member("cpu") SP ">" SP Num("10") SP "}" ],
);
assert_lex(
"open input2.json | from-json | select glossary",
tokens![ Bare("open") SP Bare("input2") "." Member("json") SP "|" SP Bare("from-json") SP "|" SP Bare("select") SP Bare("glossary") ],
tokens![ Bare("open") SP Bare("input2") "???." Member("json") SP "|" SP Bare("from-json") SP "|" SP Bare("select") SP Bare("glossary") ],
);
assert_lex(
"git add . -v",
tokens![ Bare("git") SP Bare("add") SP Bare(".") SP "-" Bare("v") ],
)
}
fn tok(name: &str, value: &'source str) -> TestToken<'source> {
@ -721,7 +735,10 @@ mod tests {
fn tk(name: &'source str) -> TestToken<'source> {
let token = match name {
"???." => return TestToken::new(TokenDesc::PathDot, "."),
"." => TopToken::Dot,
"--" => TopToken::DashDash,
"-" => TopToken::Dash,
"$" => TopToken::Dollar,
"|" => TopToken::Pipe,
"{" => TopToken::OpenBrace,

View File

@ -52,10 +52,11 @@ WholeExpression: Expression = {
PathHead: Expression = {
<WholeExpression>,
<BarePath> => Expression::Leaf(Leaf::Bare(<>)),
<Flag> => Expression::Flag(<>),
}
PathExpression: Expression = {
<head:WholeExpression> <tail: ( "." <Member> )+> => Expression::Path(Box::new(Path::new(head, tail)))
<head:WholeExpression> <tail: ( "???." <Member> )+> => Expression::Path(Box::new(Path::new(head, tail)))
}
Expr: Expression = {
@ -92,7 +93,7 @@ String: String = {
}
BarePath: BarePath = {
<head: "bare"> <tail: ( "." <"member"> )*> => BarePath::from_tokens(head, tail)
<head: "bare"> <tail: ( "???." <"member"> )*> => BarePath::from_tokens(head, tail)
}
Int: i64 = {
@ -119,6 +120,7 @@ extern {
"-" => SpannedToken { token: Token::Dash, .. },
"--" => SpannedToken { token: Token::DashDash, .. },
"$" => SpannedToken { token: Token::Dollar, .. },
"???." => SpannedToken { token: Token::PathDot, .. },
"num" => SpannedToken { token: Token::Num, .. },
"member" => SpannedToken { token: Token::Member, .. },
"variable" => SpannedToken { token: Token::Variable, .. },

File diff suppressed because it is too large Load Diff

View File

@ -1,15 +1,17 @@
use crate::shell::completer::NuCompleter;
use crate::parser::lexer::SpannedToken;
use crate::prelude::*;
use ansi_term::Color;
use log::debug;
use rustyline::completion::{self, Completer, FilenameCompleter};
use rustyline::error::ReadlineError;
use rustyline::highlight::{Highlighter, MatchingBracketHighlighter};
use rustyline::highlight::Highlighter;
use rustyline::hint::{Hinter, HistoryHinter};
use std::borrow::Cow::{self, Owned};
crate struct Helper {
completer: NuCompleter,
highlighter: MatchingBracketHighlighter,
hinter: HistoryHinter,
}
@ -20,7 +22,6 @@ impl Helper {
file_completer: FilenameCompleter::new(),
commands,
},
highlighter: MatchingBracketHighlighter::new(),
hinter: HistoryHinter {},
}
}
@ -54,12 +55,78 @@ impl Highlighter for Helper {
Owned("\x1b[1m".to_owned() + hint + "\x1b[m")
}
fn highlight<'l>(&self, line: &'l str, pos: usize) -> Cow<'l, str> {
self.highlighter.highlight(line, pos)
fn highlight<'l>(&self, line: &'l str, _pos: usize) -> Cow<'l, str> {
let tokens = crate::parser::lexer::Lexer::new(line, true);
let tokens: Result<Vec<(usize, SpannedToken, usize)>, _> = tokens.collect();
match tokens {
Err(_) => Cow::Borrowed(line),
Ok(v) => {
let mut out = String::new();
let mut iter = v.iter();
let mut state = State::Command;
loop {
match iter.next() {
None => return Cow::Owned(out),
Some((start, token, end)) => {
let (style, new_state) = token_style(&token, state);
debug!("token={:?}", token);
debug!("style={:?}", style);
debug!("new_state={:?}", new_state);
state = new_state;
let slice = &line[*start..*end];
let styled = style.paint(slice);
out.push_str(&styled.to_string());
}
}
}
}
}
}
fn highlight_char(&self, line: &str, pos: usize) -> bool {
self.highlighter.highlight_char(line, pos)
fn highlight_char(&self, _line: &str, _pos: usize) -> bool {
true
}
}
/// Highlighter state machine: records what the previous token implies
/// about the next one, so each token can be styled in context.
#[derive(Debug)]
enum State {
    // At a command position (start of input or just after a `|` pipe).
    Command,
    // Just saw `-`/`--`; the following bare word is a flag name.
    Flag,
    // Just saw `$`; the following token is expected to be a variable.
    Var,
    // Inside a bare word / path (e.g. `input2.json`-style member access).
    Bare,
    // No contextual expectation carried from the previous token.
    None,
}
/// Maps a lexed token, together with the current highlighter [`State`],
/// to the ANSI style used to paint its source slice, and returns the
/// state to carry into the next token.
///
/// Arm order matters: the state-specific arms at the top take precedence
/// over the wildcard-state arms below them.
fn token_style(
    token: &crate::parser::lexer::SpannedToken,
    state: State,
) -> (ansi_term::Style, State) {
    use crate::parser::lexer::Token::*;
    match (state, &token.token) {
        // First bare word of a command is the command name itself.
        (State::Command, Bare) => (Color::Cyan.bold(), State::None),
        // Whitespace before the command name keeps us in command position.
        (State::Command, Whitespace) => (Color::White.normal(), State::Command),
        // Bare word immediately after a dash is the flag's name.
        (State::Flag, Bare) => (Color::Black.bold(), State::None),
        // Variable name following `$`.
        (State::Var, Variable) => (Color::Yellow.bold(), State::None),
        // Path continuation: keep styling `.member` segments as bare/green.
        // NOTE(review): this arm matches Token::Dot, not Token::PathDot —
        // confirm which variant the whitespace-preserving lexer actually
        // emits after a bare word, since this commit remaps Dot elsewhere.
        (State::Bare, Dot) => (Color::Green.normal(), State::Bare),
        (State::Bare, Member) => (Color::Green.normal(), State::Bare),
        // `-` / `--` introduce a flag regardless of prior state.
        (_, Dash) | (_, DashDash) => (Color::Black.bold(), State::Flag),
        // `$` introduces a variable reference.
        (_, Dollar) => (Color::Yellow.bold(), State::Var),
        (_, Bare) => (Color::Green.normal(), State::Bare),
        (_, Member) => (Color::Cyan.normal(), State::None),
        (_, Num) => (Color::Purple.bold(), State::None),
        (_, DQString) | (_, SQString) => (Color::Green.normal(), State::None),
        // A pipe returns us to command position for the next stage.
        (_, Pipe) => (Color::White.normal(), State::Command),
        // Everything else: plain text, no contextual state carried forward.
        _ => (Color::White.normal(), State::None),
    }
}