Finish nom upgrade

Yehuda Katz 2019-06-22 10:08:53 -04:00
parent e981129f1f
commit 3b35dcb619
9 changed files with 11 additions and 98 deletions


@@ -1 +0,0 @@
-paths = ["C:\\Users\\wycat\\Code\\nom_locate"]


@@ -1,7 +1,7 @@
 use crate::object::base::Block;
 use crate::parser::{
     hir::{self, Expression, RawExpression},
-    CommandRegistry, Span, Spanned, Text,
+    CommandRegistry, Spanned, Text,
 };
 use crate::prelude::*;
 use derive_new::new;


@@ -9,9 +9,7 @@ use derive_new::new;
 use getset::Getters;
 
 crate use baseline_parse::baseline_parse_single_token;
-crate use baseline_parse_tokens::{
-    baseline_parse_next_expr, baseline_parse_tokens, ExpressionKindHint,
-};
+crate use baseline_parse_tokens::{baseline_parse_next_expr, ExpressionKindHint};
 crate use binary::Binary;
 crate use named::NamedArguments;
 crate use path::Path;


@@ -1,20 +1,4 @@
-use crate::errors::ShellError;
-use crate::parser::{hir, CommandRegistry, RawToken, Token, TokenNode};
-
-// pub fn baseline_parse_token(
-//     token_node: TokenNode,
-//     _registry: &dyn CommandRegistry,
-// ) -> Result<hir::Expression, ShellError> {
-//     match token_node {
-//         TokenNode::Token(token) => Ok(baseline_parse_single_token(token)),
-//         TokenNode::Call(_call) => Err(ShellError::unimplemented("baseline_parse Call")),
-//         TokenNode::Delimited(_delimited) => {
-//             Err(ShellError::unimplemented("baseline_parse Delimited"))
-//         }
-//         TokenNode::Pipeline(_pipeline) => Err(ShellError::unimplemented("baseline_parse Pipeline")),
-//         TokenNode::Path(_path) => Err(ShellError::unimplemented("baseline_parse Path")),
-//     }
-// }
+use crate::parser::{hir, RawToken, Token};
 
 pub fn baseline_parse_single_token(token: &Token, source: &str) -> hir::Expression {
     match *token.item() {


@@ -23,6 +23,7 @@ pub fn baseline_parse_tokens(
     Ok(exprs)
 }
 
+#[allow(unused)]
 #[derive(Debug)]
 pub enum ExpressionKindHint {
     Literal,
@@ -134,15 +135,15 @@ pub fn baseline_parse_semantic_token(
 ) -> Result<hir::Expression, ShellError> {
     match token {
         TokenNode::Token(token) => Ok(baseline_parse_single_token(token, source)),
-        TokenNode::Call(call) => unimplemented!(),
-        TokenNode::Delimited(delimited) => unimplemented!(),
-        TokenNode::Pipeline(pipeline) => unimplemented!(),
+        TokenNode::Call(_call) => unimplemented!(),
+        TokenNode::Delimited(_delimited) => unimplemented!(),
+        TokenNode::Pipeline(_pipeline) => unimplemented!(),
         TokenNode::Operator(_op) => unreachable!(),
-        TokenNode::Flag(flag) => unimplemented!(),
+        TokenNode::Flag(_flag) => unimplemented!(),
         TokenNode::Identifier(_span) => unreachable!(),
         TokenNode::Whitespace(_span) => unreachable!(),
         TokenNode::Error(error) => Err(*error.item.clone()),
-        TokenNode::Path(path) => unimplemented!(),
+        TokenNode::Path(_path) => unimplemented!(),
     }
 }
@@ -154,24 +155,3 @@ fn next_token(nodes: &mut impl Iterator<Item = &'a TokenNode>) -> Option<&'a Tok
         }
     }
 }
-
-fn baseline_parse_token(
-    token_node: &TokenNode,
-    _registry: &dyn CommandRegistry,
-    source: &str,
-) -> Result<hir::Expression, ShellError> {
-    match token_node {
-        TokenNode::Token(token) => Ok(hir::baseline_parse_single_token(token, source)),
-        TokenNode::Call(_call) => Err(ShellError::unimplemented("baseline_parse Call")),
-        TokenNode::Delimited(_delimited) => {
-            Err(ShellError::unimplemented("baseline_parse Delimited"))
-        }
-        TokenNode::Pipeline(_pipeline) => Err(ShellError::unimplemented("baseline_parse Pipeline")),
-        TokenNode::Path(_path) => Err(ShellError::unimplemented("baseline_parse Path")),
-        TokenNode::Operator(_op) => Err(ShellError::unimplemented("baseline_parse Operator")),
-        TokenNode::Flag(_op) => Err(ShellError::unimplemented("baseline_parse Flag")),
-        TokenNode::Identifier(_op) => Err(ShellError::unimplemented("baseline_parse Identifier")),
-        TokenNode::Whitespace(_op) => Err(ShellError::unimplemented("baseline_parse Whitespace")),
-        TokenNode::Error(err) => Err(*err.item.clone()),
-    }
-}


@@ -1,4 +1,4 @@
-use crate::parser::{hir::Expression, Operator, Spanned};
+use crate::parser::{hir::Expression, Spanned};
 use derive_new::new;
 use getset::Getters;


@@ -32,7 +32,7 @@ impl<T> Spanned<T> {
     }
 
     crate fn copy_span<U>(&self, output: U) -> Spanned<U> {
-        let Spanned { span, item } = self;
+        let Spanned { span, .. } = self;
 
         Spanned {
             span: *span,


@@ -44,13 +44,6 @@ impl TokenNode {
         self.span().slice(source)
     }
 
-    pub fn is_ws(&self) -> bool {
-        match self {
-            TokenNode::Whitespace(_) => true,
-            _ => false,
-        }
-    }
-
     pub fn is_bare(&self) -> bool {
         match self {
             TokenNode::Token(Spanned {
@@ -61,20 +54,6 @@
         }
     }
 
-    crate fn as_string(&self, source: &str) -> Option<Spanned<String>> {
-        match self {
-            TokenNode::Token(Spanned {
-                item: RawToken::Bare,
-                span,
-            }) => Some(Spanned::from_item(span.slice(source).to_string(), span)),
-            TokenNode::Token(Spanned {
-                item: RawToken::String(inner),
-                span,
-            }) => Some(Spanned::from_item(inner.slice(source).to_string(), span)),
-            _ => None,
-        }
-    }
-
     crate fn as_flag(&self, value: &str, source: &str) -> Option<Spanned<Flag>> {
         match self {
             TokenNode::Flag(


@@ -1,5 +1,3 @@
-use crate::parser::parse2::flag::*;
-use crate::parser::parse2::operator::*;
 use crate::parser::parse2::span::*;
 use crate::parser::parse2::unit::*;
@@ -13,28 +11,3 @@ pub enum RawToken {
 }
 
 pub type Token = Spanned<RawToken>;
-
-impl Token {
-    pub fn to_semantic_token(&self) -> Option<SemanticToken> {
-        let semantic_token = match self.item {
-            RawToken::Integer(int) => RawSemanticToken::Integer(int),
-            RawToken::Size(int, unit) => RawSemanticToken::Size(int, unit),
-            RawToken::String(span) => RawSemanticToken::String(span),
-            RawToken::Variable(span) => RawSemanticToken::Variable(span),
-            RawToken::Bare => RawSemanticToken::Bare,
-        };
-
-        Some(Spanned::from_item(semantic_token, self.span))
-    }
-}
-
-#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
-pub enum RawSemanticToken {
-    Integer(i64),
-    Size(i64, Unit),
-    String(Span),
-    Variable(Span),
-    Bare,
-}
-
-pub type SemanticToken = Spanned<RawSemanticToken>;