Mirror of https://github.com/nushell/nushell.git, synced 2025-08-13 12:27:42 +02:00
Clean up old parser code

src/parser/parse/call_node.rs (new file, 26 lines)
@@ -0,0 +1,26 @@
use crate::parser::TokenNode;
use getset::Getters;

#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters)]
pub struct CallNode {
    #[get = "crate"]
    head: Box<TokenNode>,
    #[get = "crate"]
    children: Option<Vec<TokenNode>>,
}

impl CallNode {
    pub fn new(head: Box<TokenNode>, children: Vec<TokenNode>) -> CallNode {
        if children.len() == 0 {
            CallNode {
                head,
                children: None,
            }
        } else {
            CallNode {
                head,
                children: Some(children),
            }
        }
    }
}

src/parser/parse/files.rs (new file, 77 lines)
@@ -0,0 +1,77 @@
use crate::parser::parse::span::Span;
use derive_new::new;
use language_reporting::{FileName, Location};

#[derive(new, Debug, Clone)]
pub struct Files {
    snippet: String,
}

impl language_reporting::ReportingFiles for Files {
    type Span = Span;
    type FileId = usize;

    fn byte_span(
        &self,
        _file: Self::FileId,
        from_index: usize,
        to_index: usize,
    ) -> Option<Self::Span> {
        Some(Span::from((from_index, to_index)))
    }
    fn file_id(&self, _span: Self::Span) -> Self::FileId {
        0
    }
    fn file_name(&self, _file: Self::FileId) -> FileName {
        FileName::Verbatim(format!("<eval>"))
    }
    fn byte_index(&self, _file: Self::FileId, _line: usize, _column: usize) -> Option<usize> {
        unimplemented!("byte_index")
    }
    fn location(&self, _file: Self::FileId, byte_index: usize) -> Option<Location> {
        let source = &self.snippet;
        let mut seen_lines = 0;
        let mut seen_bytes = 0;

        for (pos, _) in source.match_indices('\n') {
            if pos > byte_index {
                return Some(language_reporting::Location::new(
                    seen_lines,
                    byte_index - seen_bytes,
                ));
            } else {
                seen_lines += 1;
                seen_bytes = pos;
            }
        }

        if seen_lines == 0 {
            Some(language_reporting::Location::new(0, byte_index))
        } else {
            None
        }
    }
    fn line_span(&self, _file: Self::FileId, lineno: usize) -> Option<Self::Span> {
        let source = &self.snippet;
        let mut seen_lines = 0;
        let mut seen_bytes = 0;

        for (pos, _) in source.match_indices('\n') {
            if seen_lines == lineno {
                return Some(Span::from((seen_bytes, pos)));
            } else {
                seen_lines += 1;
                seen_bytes = pos + 1;
            }
        }

        if seen_lines == 0 {
            Some(Span::from((0, self.snippet.len() - 1)))
        } else {
            None
        }
    }
    fn source(&self, span: Self::Span) -> Option<String> {
        Some(self.snippet[span.start..span.end].to_string())
    }
}

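For intuition, here is a hedged, standalone sketch of the line/column walk that `location` performs above, written against a plain `&str`. The helper name `line_and_column` is ours, not part of the commit, and it deliberately also resolves indexes on the final line of multi-line input, which the committed version leaves as `None`.

// Hypothetical helper mirroring Files::location above: walk the newline
// positions and report (line, column) for a byte index, both zero-based.
fn line_and_column(source: &str, byte_index: usize) -> Option<(usize, usize)> {
    let mut seen_lines = 0;
    let mut line_start = 0;

    for (pos, _) in source.match_indices('\n') {
        if pos >= byte_index {
            return Some((seen_lines, byte_index - line_start));
        }
        seen_lines += 1;
        line_start = pos + 1;
    }

    // byte_index falls on the last (or only) line.
    Some((seen_lines, byte_index - line_start))
}

fn main() {
    assert_eq!(line_and_column("ls | sort-by size", 5), Some((0, 5)));
    assert_eq!(line_and_column("ls\nps", 4), Some((1, 1)));
}
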
src/parser/parse/flag.rs (new file, 17 lines)
@@ -0,0 +1,17 @@
use crate::parser::Span;
use derive_new::new;
use getset::Getters;
use serde_derive::{Deserialize, Serialize};

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub enum FlagKind {
    Shorthand,
    Longhand,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Getters, new)]
#[get = "crate"]
pub struct Flag {
    kind: FlagKind,
    name: Span,
}

src/parser/parse/operator.rs (new file, 51 lines)
@@ -0,0 +1,51 @@
use serde_derive::{Deserialize, Serialize};
use std::str::FromStr;

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub enum Operator {
    Equal,
    NotEqual,
    LessThan,
    GreaterThan,
    LessThanOrEqual,
    GreaterThanOrEqual,
}

impl Operator {
    #[allow(unused)]
    pub fn print(&self) -> String {
        self.as_str().to_string()
    }

    pub fn as_str(&self) -> &str {
        match *self {
            Operator::Equal => "==",
            Operator::NotEqual => "!=",
            Operator::LessThan => "<",
            Operator::GreaterThan => ">",
            Operator::LessThanOrEqual => "<=",
            Operator::GreaterThanOrEqual => ">=",
        }
    }
}

impl From<&str> for Operator {
    fn from(input: &str) -> Operator {
        Operator::from_str(input).unwrap()
    }
}

impl FromStr for Operator {
    type Err = ();
    fn from_str(input: &str) -> Result<Self, <Self as std::str::FromStr>::Err> {
        match input {
            "==" => Ok(Operator::Equal),
            "!=" => Ok(Operator::NotEqual),
            "<" => Ok(Operator::LessThan),
            ">" => Ok(Operator::GreaterThan),
            "<=" => Ok(Operator::LessThanOrEqual),
            ">=" => Ok(Operator::GreaterThanOrEqual),
            _ => Err(()),
        }
    }
}

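A small usage sketch of the conversions above, not part of the commit, assuming the `Operator` enum is in scope. The `From<&str>` impl panics on unknown input; `FromStr` is the fallible route.

use std::str::FromStr;

fn main() {
    // Infallible conversion; panics if the string is not a known operator.
    let op = Operator::from("<=");
    assert_eq!(op.as_str(), "<=");

    // Fallible conversion for untrusted input; unknown operators yield Err(()).
    assert_eq!(Operator::from_str(">"), Ok(Operator::GreaterThan));
    assert!(Operator::from_str("~=").is_err());
}
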
src/parser/parse/parser.rs (new file, 1043 lines)
File diff suppressed because it is too large.

src/parser/parse/pipeline.rs (new file, 18 lines)
@@ -0,0 +1,18 @@
use crate::parser::{CallNode, Span, Spanned};
use derive_new::new;
use getset::Getters;

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, new)]
pub struct Pipeline {
    crate parts: Vec<PipelineElement>,
    crate post_ws: Option<Span>,
}

#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
pub struct PipelineElement {
    pub pre_ws: Option<Span>,
    #[get = "crate"]
    call: Spanned<CallNode>,
    pub post_ws: Option<Span>,
    pub post_pipe: Option<Span>,
}

src/parser/parse/span.rs (new file, 129 lines)
@@ -0,0 +1,129 @@
use crate::Text;
use derive_new::new;
use getset::Getters;
use serde_derive::{Deserialize, Serialize};

#[derive(
    new, Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters,
)]
#[get = "crate"]
pub struct Spanned<T> {
    crate span: Span,
    crate item: T,
}

impl<T> std::ops::Deref for Spanned<T> {
    type Target = T;

    fn deref(&self) -> &T {
        &self.item
    }
}

impl<T> Spanned<T> {
    crate fn from_item(item: T, span: impl Into<Span>) -> Spanned<T> {
        Spanned {
            span: span.into(),
            item,
        }
    }

    crate fn map<U>(self, input: impl FnOnce(T) -> U) -> Spanned<U> {
        let Spanned { span, item } = self;

        let mapped = input(item);
        Spanned { span, item: mapped }
    }

    crate fn copy_span<U>(&self, output: U) -> Spanned<U> {
        let Spanned { span, .. } = self;

        Spanned {
            span: *span,
            item: output,
        }
    }

    pub fn source(&self, source: &Text) -> Text {
        Text::from(self.span().slice(source))
    }
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub struct Span {
    crate start: usize,
    crate end: usize,
    // source: &'source str,
}

impl From<&Span> for Span {
    fn from(input: &Span) -> Span {
        *input
    }
}

impl From<nom_locate::LocatedSpan<&str>> for Span {
    fn from(input: nom_locate::LocatedSpan<&str>) -> Span {
        Span {
            start: input.offset,
            end: input.offset + input.fragment.len(),
        }
    }
}

impl<T> From<(nom_locate::LocatedSpan<T>, nom_locate::LocatedSpan<T>)> for Span {
    fn from(input: (nom_locate::LocatedSpan<T>, nom_locate::LocatedSpan<T>)) -> Span {
        Span {
            start: input.0.offset,
            end: input.1.offset,
        }
    }
}

impl From<(usize, usize)> for Span {
    fn from(input: (usize, usize)) -> Span {
        Span {
            start: input.0,
            end: input.1,
        }
    }
}

impl From<&std::ops::Range<usize>> for Span {
    fn from(input: &std::ops::Range<usize>) -> Span {
        Span {
            start: input.start,
            end: input.end,
        }
    }
}

impl Span {
    pub fn slice(&self, source: &'a str) -> &'a str {
        &source[self.start..self.end]
    }
}

impl language_reporting::ReportingSpan for Span {
    fn with_start(&self, start: usize) -> Self {
        Span {
            start,
            end: self.end,
        }
    }

    fn with_end(&self, end: usize) -> Self {
        Span {
            start: self.start,
            end,
        }
    }

    fn start(&self) -> usize {
        self.start
    }

    fn end(&self) -> usize {
        self.end
    }
}

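A small usage sketch of the span machinery above, not part of the commit. It assumes crate-internal visibility, since `from_item` and `map` are `crate`-visible, and the byte offsets are purely illustrative.

fn main() {
    let source = "ls | sort-by size";

    // Spans are plain byte ranges into the original source text.
    let span = Span::from((5, 12));
    assert_eq!(span.slice(source), "sort-by");

    // Spanned<T> carries an item plus the span it came from; map keeps the span.
    let word = Spanned::from_item("sort-by".to_string(), (5, 12));
    let len = word.map(|s| s.len());
    assert_eq!(*len, 7);
}
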
src/parser/parse/text.rs (new file, 204 lines)
@@ -0,0 +1,204 @@
use std::cmp::Ordering;
use std::hash::Hash;
use std::hash::Hasher;
use std::ops::Range;
use std::sync::Arc;

/// A "Text" is like a string except that it can be cheaply cloned.
/// You can also "extract" subtexts quite cheaply. You can also deref
/// an `&Text` into a `&str` for interoperability.
///
/// Used to represent the value of an input file.
#[derive(Clone)]
pub struct Text {
    text: Arc<String>,
    start: usize,
    end: usize,
}

impl Text {
    /// Modifies this restrict to a subset of its current range.
    pub fn select(&mut self, range: Range<usize>) {
        let len = range.end - range.start;
        let new_start = self.start + range.start;
        let new_end = new_start + len;
        assert!(new_end <= self.end);

        self.start = new_start;
        self.end = new_end;
    }

    /// Extract a new `Text` that is a subset of an old `Text`
    /// -- `text.extract(1..3)` is similar to `&foo[1..3]` except that
    /// it gives back an owned value instead of a borrowed value.
    pub fn slice(&self, range: Range<usize>) -> Self {
        let mut result = self.clone();
        result.select(range);
        result
    }
}

impl From<Arc<String>> for Text {
    fn from(text: Arc<String>) -> Self {
        let end = text.len();
        Self {
            text,
            start: 0,
            end,
        }
    }
}

impl AsRef<str> for Text {
    fn as_ref(&self) -> &str {
        &*self
    }
}

impl From<String> for Text {
    fn from(text: String) -> Self {
        Text::from(Arc::new(text))
    }
}

impl From<&String> for Text {
    fn from(text: &String) -> Self {
        Text::from(text.to_string())
    }
}

impl From<&str> for Text {
    fn from(text: &str) -> Self {
        Text::from(text.to_string())
    }
}

impl std::borrow::Borrow<str> for Text {
    fn borrow(&self) -> &str {
        &*self
    }
}

impl std::ops::Deref for Text {
    type Target = str;

    fn deref(&self) -> &str {
        &self.text[self.start..self.end]
    }
}

impl std::fmt::Display for Text {
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        <str as std::fmt::Display>::fmt(self, fmt)
    }
}

impl std::fmt::Debug for Text {
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        <str as std::fmt::Debug>::fmt(self, fmt)
    }
}

impl PartialEq<Text> for Text {
    fn eq(&self, other: &Text) -> bool {
        let this: &str = self;
        let other: &str = other;
        this == other
    }
}

impl Eq for Text {}

impl PartialEq<str> for Text {
    fn eq(&self, other: &str) -> bool {
        let this: &str = self;
        this == other
    }
}

impl PartialEq<String> for Text {
    fn eq(&self, other: &String) -> bool {
        let this: &str = self;
        let other: &str = other;
        this == other
    }
}

impl PartialEq<Text> for str {
    fn eq(&self, other: &Text) -> bool {
        other == self
    }
}

impl PartialEq<Text> for String {
    fn eq(&self, other: &Text) -> bool {
        other == self
    }
}

impl<T: ?Sized> PartialEq<&T> for Text
where
    Text: PartialEq<T>,
{
    fn eq(&self, other: &&T) -> bool {
        self == *other
    }
}

impl Hash for Text {
    fn hash<H: Hasher>(&self, state: &mut H) {
        <str as Hash>::hash(self, state)
    }
}

impl PartialOrd<Text> for Text {
    fn partial_cmp(&self, other: &Text) -> Option<Ordering> {
        let this: &str = self;
        let other: &str = other;
        this.partial_cmp(other)
    }
}

impl Ord for Text {
    fn cmp(&self, other: &Text) -> Ordering {
        let this: &str = self;
        let other: &str = other;
        this.cmp(other)
    }
}

impl PartialOrd<str> for Text {
    fn partial_cmp(&self, other: &str) -> Option<Ordering> {
        let this: &str = self;
        this.partial_cmp(other)
    }
}

impl PartialOrd<String> for Text {
    fn partial_cmp(&self, other: &String) -> Option<Ordering> {
        let this: &str = self;
        let other: &str = other;
        this.partial_cmp(other)
    }
}

impl PartialOrd<Text> for str {
    fn partial_cmp(&self, other: &Text) -> Option<Ordering> {
        other.partial_cmp(self)
    }
}

impl PartialOrd<Text> for String {
    fn partial_cmp(&self, other: &Text) -> Option<Ordering> {
        other.partial_cmp(self)
    }
}

impl<T: ?Sized> PartialOrd<&T> for Text
where
    Text: PartialOrd<T>,
{
    fn partial_cmp(&self, other: &&T) -> Option<Ordering> {
        self.partial_cmp(*other)
    }
}

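A usage sketch of the cheap-clone behavior the doc comment describes, not part of the commit, assuming the `Text` type above is in scope.

fn main() {
    // Cloning a Text only bumps the Arc; slicing narrows start/end without copying.
    let whole = Text::from("ls | sort-by size");
    let word = whole.slice(5..12);

    assert_eq!(&*word, "sort-by");   // Deref gives a &str view
    assert_eq!(word, "sort-by");     // PartialEq<&str> via the impls above
    assert!(whole < word);           // ordering delegates to the underlying &str
}
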
src/parser/parse/token_tree.rs (new file, 93 lines)
@@ -0,0 +1,93 @@
use crate::errors::ShellError;
use crate::parser::parse::{call_node::*, flag::*, operator::*, pipeline::*, span::*, tokens::*};
use crate::Text;
use derive_new::new;
use enum_utils::FromStr;

#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub enum TokenNode {
    Token(Token),
    #[allow(unused)]
    Call(Spanned<CallNode>),
    Delimited(Spanned<DelimitedNode>),
    Pipeline(Spanned<Pipeline>),
    Operator(Spanned<Operator>),
    Flag(Spanned<Flag>),
    Identifier(Span),
    Whitespace(Span),
    #[allow(unused)]
    Error(Spanned<Box<ShellError>>),
    Path(Spanned<PathNode>),
}

impl TokenNode {
    pub fn span(&self) -> Span {
        match self {
            TokenNode::Token(t) => t.span,
            TokenNode::Call(s) => s.span,
            TokenNode::Delimited(s) => s.span,
            TokenNode::Pipeline(s) => s.span,
            TokenNode::Operator(s) => s.span,
            TokenNode::Flag(s) => s.span,
            TokenNode::Identifier(s) => *s,
            TokenNode::Whitespace(s) => *s,
            TokenNode::Error(s) => s.span,
            TokenNode::Path(s) => s.span,
        }
    }

    pub fn as_external_arg(&self, source: &Text) -> String {
        self.span().slice(source).to_string()
    }

    pub fn source(&self, source: &'a Text) -> &'a str {
        self.span().slice(source)
    }

    pub fn is_bare(&self) -> bool {
        match self {
            TokenNode::Token(Spanned {
                item: RawToken::Bare,
                ..
            }) => true,
            _ => false,
        }
    }

    crate fn as_flag(&self, value: &str, source: &Text) -> Option<Spanned<Flag>> {
        match self {
            TokenNode::Flag(
                flag @ Spanned {
                    item: Flag { .. }, ..
                },
            ) if value == flag.name().slice(source) => Some(*flag),
            _ => None,
        }
    }

    pub fn as_pipeline(&self) -> Result<Pipeline, ShellError> {
        match self {
            TokenNode::Pipeline(Spanned { item, .. }) => Ok(item.clone()),
            _ => Err(ShellError::string("unimplemented")),
        }
    }
}

#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, new)]
pub struct DelimitedNode {
    delimiter: Delimiter,
    children: Vec<TokenNode>,
}

#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, FromStr)]
pub enum Delimiter {
    Paren,
    Brace,
    Square,
}

#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, new)]
pub struct PathNode {
    head: Box<TokenNode>,
    tail: Vec<TokenNode>,
}

src/parser/parse/token_tree_builder.rs (new file, 402 lines)
@@ -0,0 +1,402 @@
#[allow(unused)]
use crate::prelude::*;

use crate::parser::parse::flag::{Flag, FlagKind};
use crate::parser::parse::operator::Operator;
use crate::parser::parse::pipeline::{Pipeline, PipelineElement};
use crate::parser::parse::span::{Span, Spanned};
use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, PathNode, TokenNode};
use crate::parser::parse::tokens::{RawToken, Token};
use crate::parser::parse::unit::Unit;
use crate::parser::CallNode;
use derive_new::new;

#[derive(new)]
pub struct TokenTreeBuilder {
    #[new(default)]
    pos: usize,
}

#[allow(unused)]
pub type CurriedNode<T> = Box<dyn FnOnce(&mut TokenTreeBuilder) -> T + 'static>;
pub type CurriedToken = Box<dyn FnOnce(&mut TokenTreeBuilder) -> TokenNode + 'static>;
pub type CurriedCall = Box<dyn FnOnce(&mut TokenTreeBuilder) -> Spanned<CallNode> + 'static>;

#[allow(unused)]
impl TokenTreeBuilder {
    pub fn build(block: impl FnOnce(&mut Self) -> TokenNode) -> TokenNode {
        let mut builder = TokenTreeBuilder::new();
        block(&mut builder)
    }

    pub fn pipeline(input: Vec<(Option<&str>, CurriedCall, Option<&str>)>) -> CurriedToken {
        let input: Vec<(Option<String>, CurriedCall, Option<String>)> = input
            .into_iter()
            .map(|(pre, call, post)| {
                (
                    pre.map(|s| s.to_string()),
                    call,
                    post.map(|s| s.to_string()),
                )
            })
            .collect();

        Box::new(move |b| {
            let start = b.pos;

            let mut out: Vec<PipelineElement> = vec![];

            let mut input = input.into_iter().peekable();
            let (pre, call, post) = input
                .next()
                .expect("A pipeline must contain at least one element");

            let pre_span = pre.map(|pre| b.consume(&pre));
            let call = call(b);
            let post_span = post.map(|post| b.consume(&post));
            let pipe = input.peek().map(|_| Span::from(b.consume("|")));
            out.push(PipelineElement::new(
                pre_span.map(Span::from),
                call,
                post_span.map(Span::from),
                pipe,
            ));

            loop {
                match input.next() {
                    None => break,
                    Some((pre, call, post)) => {
                        let pre_span = pre.map(|pre| b.consume(&pre));
                        let call = call(b);
                        let post_span = post.map(|post| b.consume(&post));

                        let pipe = input.peek().map(|_| Span::from(b.consume("|")));

                        out.push(PipelineElement::new(
                            pre_span.map(Span::from),
                            call,
                            post_span.map(Span::from),
                            pipe,
                        ));
                    }
                }
            }

            let end = b.pos;

            TokenTreeBuilder::spanned_pipeline((out, None), (start, end))
        })
    }

    pub fn spanned_pipeline(
        input: (Vec<PipelineElement>, Option<Span>),
        span: impl Into<Span>,
    ) -> TokenNode {
        TokenNode::Pipeline(Spanned::from_item(
            Pipeline::new(input.0, input.1.into()),
            span,
        ))
    }

    pub fn op(input: impl Into<Operator>) -> CurriedToken {
        let input = input.into();

        Box::new(move |b| {
            let (start, end) = b.consume(input.as_str());

            b.pos = end;

            TokenTreeBuilder::spanned_op(input, (start, end))
        })
    }

    pub fn spanned_op(input: impl Into<Operator>, span: impl Into<Span>) -> TokenNode {
        TokenNode::Operator(Spanned::from_item(input.into(), span.into()))
    }

    pub fn string(input: impl Into<String>) -> CurriedToken {
        let input = input.into();

        Box::new(move |b| {
            let (start, _) = b.consume("\"");
            let (inner_start, inner_end) = b.consume(&input);
            let (_, end) = b.consume("\"");
            b.pos = end;

            TokenTreeBuilder::spanned_string((inner_start, inner_end), (start, end))
        })
    }

    pub fn spanned_string(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
        TokenNode::Token(Spanned::from_item(
            RawToken::String(input.into()),
            span.into(),
        ))
    }

    pub fn bare(input: impl Into<String>) -> CurriedToken {
        let input = input.into();

        Box::new(move |b| {
            let (start, end) = b.consume(&input);
            b.pos = end;

            TokenTreeBuilder::spanned_bare((start, end))
        })
    }

    pub fn spanned_bare(input: impl Into<Span>) -> TokenNode {
        TokenNode::Token(Spanned::from_item(RawToken::Bare, input.into()))
    }

    pub fn int(input: impl Into<i64>) -> CurriedToken {
        let int = input.into();

        Box::new(move |b| {
            let (start, end) = b.consume(&int.to_string());
            b.pos = end;

            TokenTreeBuilder::spanned_int(int, (start, end))
        })
    }

    pub fn spanned_int(input: impl Into<i64>, span: impl Into<Span>) -> TokenNode {
        TokenNode::Token(Token::from_item(RawToken::Integer(input.into()), span))
    }

    pub fn size(int: impl Into<i64>, unit: impl Into<Unit>) -> CurriedToken {
        let int = int.into();
        let unit = unit.into();

        Box::new(move |b| {
            let (start, _) = b.consume(&int.to_string());
            let (_, end) = b.consume(unit.as_str());
            b.pos = end;

            TokenTreeBuilder::spanned_size((int, unit), (start, end))
        })
    }

    pub fn spanned_size(
        input: (impl Into<i64>, impl Into<Unit>),
        span: impl Into<Span>,
    ) -> TokenNode {
        let (int, unit) = (input.0.into(), input.1.into());

        TokenNode::Token(Spanned::from_item(RawToken::Size(int, unit), span))
    }

    pub fn path(head: CurriedToken, tail: Vec<CurriedToken>) -> CurriedToken {
        Box::new(move |b| {
            let start = b.pos;
            let head = head(b);

            let mut output = vec![];

            for item in tail {
                b.consume(".");

                output.push(item(b));
            }

            let end = b.pos;

            TokenTreeBuilder::spanned_path((head, output), (start, end))
        })
    }

    pub fn spanned_path(input: (TokenNode, Vec<TokenNode>), span: impl Into<Span>) -> TokenNode {
        TokenNode::Path(Spanned::from_item(
            PathNode::new(Box::new(input.0), input.1),
            span,
        ))
    }

    pub fn var(input: impl Into<String>) -> CurriedToken {
        let input = input.into();

        Box::new(move |b| {
            let (start, _) = b.consume("$");
            let (inner_start, end) = b.consume(&input);

            TokenTreeBuilder::spanned_var((inner_start, end), (start, end))
        })
    }

    pub fn spanned_var(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
        TokenNode::Token(Spanned::from_item(
            RawToken::Variable(input.into()),
            span.into(),
        ))
    }

    pub fn flag(input: impl Into<String>) -> CurriedToken {
        let input = input.into();

        Box::new(move |b| {
            let (start, _) = b.consume("--");
            let (inner_start, end) = b.consume(&input);

            TokenTreeBuilder::spanned_flag((inner_start, end), (start, end))
        })
    }

    pub fn spanned_flag(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
        TokenNode::Flag(Spanned::from_item(
            Flag::new(FlagKind::Longhand, input.into()),
            span.into(),
        ))
    }

    pub fn shorthand(input: impl Into<String>) -> CurriedToken {
        let input = input.into();

        Box::new(move |b| {
            let (start, _) = b.consume("-");
            let (inner_start, end) = b.consume(&input);

            TokenTreeBuilder::spanned_shorthand((inner_start, end), (start, end))
        })
    }

    pub fn spanned_shorthand(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
        TokenNode::Flag(Spanned::from_item(
            Flag::new(FlagKind::Shorthand, input.into()),
            span.into(),
        ))
    }

    pub fn ident(input: impl Into<String>) -> CurriedToken {
        let input = input.into();

        Box::new(move |b| {
            let (start, end) = b.consume(&input);
            TokenTreeBuilder::spanned_ident((start, end))
        })
    }

    pub fn spanned_ident(span: impl Into<Span>) -> TokenNode {
        TokenNode::Identifier(span.into())
    }

    pub fn call(head: CurriedToken, input: Vec<CurriedToken>) -> CurriedCall {
        Box::new(move |b| {
            let start = b.pos;

            let head_node = head(b);

            let mut nodes = vec![head_node];
            for item in input {
                nodes.push(item(b));
            }

            let end = b.pos;

            TokenTreeBuilder::spanned_call(nodes, (start, end))
        })
    }

    pub fn spanned_call(input: Vec<TokenNode>, span: impl Into<Span>) -> Spanned<CallNode> {
        if input.len() == 0 {
            panic!("BUG: spanned call (TODO)")
        }

        let mut input = input.into_iter();

        let head = input.next().unwrap();
        let tail = input.collect();

        Spanned::from_item(CallNode::new(Box::new(head), tail), span)
    }

    pub fn parens(input: Vec<CurriedToken>) -> CurriedToken {
        Box::new(move |b| {
            let (start, _) = b.consume("(");
            let mut output = vec![];
            for item in input {
                output.push(item(b));
            }

            let (_, end) = b.consume(")");

            TokenTreeBuilder::spanned_parens(output, (start, end))
        })
    }

    pub fn spanned_parens(input: impl Into<Vec<TokenNode>>, span: impl Into<Span>) -> TokenNode {
        TokenNode::Delimited(Spanned::from_item(
            DelimitedNode::new(Delimiter::Paren, input.into()),
            span,
        ))
    }

    pub fn square(input: Vec<CurriedToken>) -> CurriedToken {
        Box::new(move |b| {
            let (start, _) = b.consume("[");
            let mut output = vec![];
            for item in input {
                output.push(item(b));
            }

            let (_, end) = b.consume("]");

            TokenTreeBuilder::spanned_square(output, (start, end))
        })
    }

    pub fn spanned_square(input: impl Into<Vec<TokenNode>>, span: impl Into<Span>) -> TokenNode {
        TokenNode::Delimited(Spanned::from_item(
            DelimitedNode::new(Delimiter::Square, input.into()),
            span,
        ))
    }

    pub fn braced(input: Vec<CurriedToken>) -> CurriedToken {
        Box::new(move |b| {
            let (start, _) = b.consume("{ ");
            let mut output = vec![];
            for item in input {
                output.push(item(b));
            }

            let (_, end) = b.consume(" }");

            TokenTreeBuilder::spanned_brace(output, (start, end))
        })
    }

    pub fn spanned_brace(input: impl Into<Vec<TokenNode>>, span: impl Into<Span>) -> TokenNode {
        TokenNode::Delimited(Spanned::from_item(
            DelimitedNode::new(Delimiter::Brace, input.into()),
            span,
        ))
    }

    pub fn sp() -> CurriedToken {
        Box::new(|b| {
            let (start, end) = b.consume(" ");
            TokenNode::Whitespace(Span::from((start, end)))
        })
    }

    pub fn ws(input: impl Into<String>) -> CurriedToken {
        let input = input.into();

        Box::new(move |b| {
            let (start, end) = b.consume(&input);
            TokenTreeBuilder::spanned_ws((start, end))
        })
    }

    pub fn spanned_ws(span: impl Into<Span>) -> TokenNode {
        let span = span.into();

        TokenNode::Whitespace(span.into())
    }

    fn consume(&mut self, input: &str) -> (usize, usize) {
        let start = self.pos;
        self.pos += input.len();
        (start, self.pos)
    }
}

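A hedged sketch of how the curried builder might assemble `ls | sort-by size` into a token tree. This exact call site is not part of the commit; the argument shapes follow the signatures above, and the `b` alias is ours.

use crate::parser::parse::token_tree_builder::TokenTreeBuilder as b;

fn main() {
    // Each helper returns a closure; TokenTreeBuilder::build threads one
    // builder through all of them so the spans line up with the flat text.
    let pipeline = b::build(|builder| {
        b::pipeline(vec![
            (None, b::call(b::bare("ls"), vec![]), Some(" ")),
            (
                Some(" "),
                b::call(b::bare("sort-by"), vec![b::sp(), b::bare("size")]),
                None,
            ),
        ])(builder)
    });

    assert!(pipeline.as_pipeline().is_ok());
}
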
src/parser/parse/tokens.rs (new file, 13 lines)
@@ -0,0 +1,13 @@
use crate::parser::parse::span::*;
use crate::parser::parse::unit::*;

#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum RawToken {
    Integer(i64),
    Size(i64, Unit),
    String(Span),
    Variable(Span),
    Bare,
}

pub type Token = Spanned<RawToken>;

src/parser/parse/unit.rs (new file, 58 lines)
@@ -0,0 +1,58 @@
use serde_derive::{Deserialize, Serialize};
use std::str::FromStr;
use crate::object::base::Value;

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub enum Unit {
    B,
    KB,
    MB,
    GB,
    TB,
    PB,
}

impl Unit {
    pub fn as_str(&self) -> &str {
        match *self {
            Unit::B => "B",
            Unit::KB => "KB",
            Unit::MB => "MB",
            Unit::GB => "GB",
            Unit::TB => "TB",
            Unit::PB => "PB",
        }
    }

    crate fn compute(&self, size: i64) -> Value {
        Value::int(match self {
            Unit::B => size,
            Unit::KB => size * 1024,
            Unit::MB => size * 1024 * 1024,
            Unit::GB => size * 1024 * 1024 * 1024,
            Unit::TB => size * 1024 * 1024 * 1024 * 1024,
            Unit::PB => size * 1024 * 1024 * 1024 * 1024 * 1024,
        })
    }
}

impl From<&str> for Unit {
    fn from(input: &str) -> Unit {
        Unit::from_str(input).unwrap()
    }
}

impl FromStr for Unit {
    type Err = ();
    fn from_str(input: &str) -> Result<Self, <Self as std::str::FromStr>::Err> {
        match input {
            "B" | "b" => Ok(Unit::B),
            "KB" | "kb" | "Kb" => Ok(Unit::KB),
            "MB" | "mb" | "Mb" => Ok(Unit::MB),
            "GB" | "gb" | "Gb" => Ok(Unit::GB),
            "TB" | "tb" | "Tb" => Ok(Unit::TB),
            "PB" | "pb" | "Pb" => Ok(Unit::PB),
            _ => Err(()),
        }
    }
}

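A small usage sketch of the parsing and scaling above, not part of the commit. It assumes the `Unit` enum and the crate-internal `Value` type are in scope; `compute` is `crate`-visible, so this only works inside the crate.

use std::str::FromStr;

fn main() {
    // "KB", "kb", and "Kb" all parse to the same unit.
    let unit = Unit::from_str("kb").unwrap();
    assert_eq!(unit.as_str(), "KB");

    // compute() scales by powers of 1024, so 2 KB becomes 2048 bytes wrapped in a Value.
    let _bytes: Value = unit.compute(2);
}
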
src/parser/parse/util.rs (new file, 1 line)
@@ -0,0 +1 @@