mirror of
https://github.com/nushell/nushell.git
synced 2024-12-22 15:13:01 +01:00
Overhaul the expansion system
The main thrust of this (very large) commit is an overhaul of the expansion system. The parsing pipeline is: - Lightly parse the source file for atoms, basic delimiters and pipeline structure into a token tree - Expand the token tree into a HIR (high-level intermediate representation) based upon the baseline syntax rules for expressions and the syntactic shape of commands. Somewhat non-traditionally, nu doesn't have an AST at all. It goes directly from the token tree, which doesn't represent many important distinctions (like the difference between `hello` and `5KB`) directly into a high-level representation that doesn't have a direct correspondence to the source code. At a high level, nu commands work like macros, in the sense that the syntactic shape of the invocation of a command depends on the definition of a command. However, commands do not have the ability to perform unrestricted expansions of the token tree. Instead, they describe their arguments in terms of syntactic shapes, and the expander expands the token tree into HIR based upon that definition. For example, the `where` command says that it takes a block as its first required argument, and the description of the block syntactic shape expands the syntax `cpu > 10` into HIR that represents `{ $it.cpu > 10 }`. This commit overhauls that system so that the syntactic shapes are described in terms of a few new traits (`ExpandSyntax` and `ExpandExpression` are the primary ones) that are more composable than the previous system. The first big win of this new system is the addition of the `ColumnPath` shape, which looks like `cpu."max ghz"` or `package.version`. Previously, while a variable path could look like `$it.cpu."max ghz"`, the tail of a variable path could not be easily reused in other contexts. Now, that tail is its own syntactic shape, and it can be used as part of a command's signature. 
This cleans up commands like `inc`, `add` and `edit` as well as shorthand blocks, which can now look like `| where cpu."max ghz" > 10`
This commit is contained in:
parent
e18892000a
commit
1ad9d6f199
22
Cargo.lock
generated
22
Cargo.lock
generated
@ -1491,6 +1491,25 @@ dependencies = [
|
|||||||
"version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
"version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
]
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nom-tracable"
|
||||||
|
version = "0.4.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
dependencies = [
|
||||||
|
"nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"nom-tracable-macros 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
]
|
||||||
|
|
||||||
|
[[package]]
|
||||||
|
name = "nom-tracable-macros"
|
||||||
|
version = "0.4.0"
|
||||||
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
|
dependencies = [
|
||||||
|
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "nom_locate"
|
name = "nom_locate"
|
||||||
version = "1.0.0"
|
version = "1.0.0"
|
||||||
@ -1550,6 +1569,7 @@ dependencies = [
|
|||||||
"natural 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
"natural 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"neso 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
"neso 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
"nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
|
"nom-tracable 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
"nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"num-bigint 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
"num-bigint 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
"num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
"num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||||
@ -3140,6 +3160,8 @@ dependencies = [
|
|||||||
"checksum nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9667ddcc6cc8a43afc9b7917599d7216aa09c463919ea32c59ed6cac8bc945"
|
"checksum nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9667ddcc6cc8a43afc9b7917599d7216aa09c463919ea32c59ed6cac8bc945"
|
||||||
"checksum nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2ad2a91a8e869eeb30b9cb3119ae87773a8f4ae617f41b1eb9c154b2905f7bd6"
|
"checksum nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2ad2a91a8e869eeb30b9cb3119ae87773a8f4ae617f41b1eb9c154b2905f7bd6"
|
||||||
"checksum nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e9761d859320e381010a4f7f8ed425f2c924de33ad121ace447367c713ad561b"
|
"checksum nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e9761d859320e381010a4f7f8ed425f2c924de33ad121ace447367c713ad561b"
|
||||||
|
"checksum nom-tracable 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "edaa64ad2837d831d4a17966c9a83aa5101cc320730f5b724811c8f7442a2528"
|
||||||
|
"checksum nom-tracable-macros 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fd25f70877a9fe68bd406b3dd3ff99e94ce9de776cf2a96e0d99de90b53d4765"
|
||||||
"checksum nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f932834fd8e391fc7710e2ba17e8f9f8645d846b55aa63207e17e110a1e1ce35"
|
"checksum nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f932834fd8e391fc7710e2ba17e8f9f8645d846b55aa63207e17e110a1e1ce35"
|
||||||
"checksum ntapi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f26e041cd983acbc087e30fcba770380cfa352d0e392e175b2344ebaf7ea0602"
|
"checksum ntapi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f26e041cd983acbc087e30fcba770380cfa352d0e392e175b2344ebaf7ea0602"
|
||||||
"checksum num-bigint 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f9c3f34cdd24f334cb265d9bf8bfa8a241920d026916785747a92f0e55541a1a"
|
"checksum num-bigint 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f9c3f34cdd24f334cb265d9bf8bfa8a241920d026916785747a92f0e55541a1a"
|
||||||
|
@ -55,6 +55,7 @@ surf = "1.0.2"
|
|||||||
url = "2.1.0"
|
url = "2.1.0"
|
||||||
roxmltree = "0.7.0"
|
roxmltree = "0.7.0"
|
||||||
nom_locate = "1.0.0"
|
nom_locate = "1.0.0"
|
||||||
|
nom-tracable = "0.4.0"
|
||||||
enum-utils = "0.1.1"
|
enum-utils = "0.1.1"
|
||||||
unicode-xid = "0.2.0"
|
unicode-xid = "0.2.0"
|
||||||
serde_ini = "0.2.0"
|
serde_ini = "0.2.0"
|
||||||
@ -95,6 +96,8 @@ textview = ["syntect", "onig_sys", "crossterm"]
|
|||||||
binaryview = ["image", "crossterm"]
|
binaryview = ["image", "crossterm"]
|
||||||
sys = ["heim", "battery"]
|
sys = ["heim", "battery"]
|
||||||
ps = ["heim"]
|
ps = ["heim"]
|
||||||
|
trace = ["nom-tracable/trace"]
|
||||||
|
all = ["raw-key", "textview", "binaryview", "sys", "ps", "clipboard", "ptree"]
|
||||||
|
|
||||||
[dependencies.rusqlite]
|
[dependencies.rusqlite]
|
||||||
version = "0.20.0"
|
version = "0.20.0"
|
||||||
|
188
src/cli.rs
188
src/cli.rs
@ -1,4 +1,3 @@
|
|||||||
use crate::commands::autoview;
|
|
||||||
use crate::commands::classified::{
|
use crate::commands::classified::{
|
||||||
ClassifiedCommand, ClassifiedInputStream, ClassifiedPipeline, ExternalCommand, InternalCommand,
|
ClassifiedCommand, ClassifiedInputStream, ClassifiedPipeline, ExternalCommand, InternalCommand,
|
||||||
StreamNext,
|
StreamNext,
|
||||||
@ -13,7 +12,12 @@ pub(crate) use crate::errors::ShellError;
|
|||||||
use crate::fuzzysearch::{interactive_fuzzy_search, SelectionResult};
|
use crate::fuzzysearch::{interactive_fuzzy_search, SelectionResult};
|
||||||
use crate::git::current_branch;
|
use crate::git::current_branch;
|
||||||
use crate::parser::registry::Signature;
|
use crate::parser::registry::Signature;
|
||||||
use crate::parser::{hir, CallNode, Pipeline, PipelineElement, TokenNode};
|
use crate::parser::{
|
||||||
|
hir,
|
||||||
|
hir::syntax_shape::{CommandHeadShape, CommandSignature, ExpandSyntax},
|
||||||
|
hir::{expand_external_tokens::expand_external_tokens, tokens_iterator::TokensIterator},
|
||||||
|
parse_command_tail, Pipeline, PipelineElement, TokenNode,
|
||||||
|
};
|
||||||
use crate::prelude::*;
|
use crate::prelude::*;
|
||||||
|
|
||||||
use log::{debug, trace};
|
use log::{debug, trace};
|
||||||
@ -25,6 +29,7 @@ use std::io::{BufRead, BufReader, Write};
|
|||||||
use std::iter::Iterator;
|
use std::iter::Iterator;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
use std::sync::atomic::{AtomicBool, Ordering};
|
use std::sync::atomic::{AtomicBool, Ordering};
|
||||||
|
use std::sync::Arc;
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub enum MaybeOwned<'a, T> {
|
pub enum MaybeOwned<'a, T> {
|
||||||
@ -75,7 +80,7 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel
|
|||||||
let name = params.name.clone();
|
let name = params.name.clone();
|
||||||
let fname = fname.to_string();
|
let fname = fname.to_string();
|
||||||
|
|
||||||
if context.has_command(&name) {
|
if let Some(_) = context.get_command(&name) {
|
||||||
trace!("plugin {:?} already loaded.", &name);
|
trace!("plugin {:?} already loaded.", &name);
|
||||||
} else {
|
} else {
|
||||||
if params.is_filter {
|
if params.is_filter {
|
||||||
@ -428,21 +433,11 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
LineResult::Error(mut line, err) => {
|
LineResult::Error(line, err) => {
|
||||||
rl.add_history_entry(line.clone());
|
rl.add_history_entry(line.clone());
|
||||||
let diag = err.to_diagnostic();
|
|
||||||
context.with_host(|host| {
|
context.with_host(|host| {
|
||||||
let writer = host.err_termcolor();
|
print_err(err, host, &Text::from(line));
|
||||||
line.push_str(" ");
|
|
||||||
let files = crate::parser::Files::new(line);
|
|
||||||
let _ = std::panic::catch_unwind(move || {
|
|
||||||
let _ = language_reporting::emit(
|
|
||||||
&mut writer.lock(),
|
|
||||||
&files,
|
|
||||||
&diag,
|
|
||||||
&language_reporting::DefaultConfig,
|
|
||||||
);
|
|
||||||
});
|
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -459,6 +454,14 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn chomp_newline(s: &str) -> &str {
|
||||||
|
if s.ends_with('\n') {
|
||||||
|
&s[..s.len() - 1]
|
||||||
|
} else {
|
||||||
|
s
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
enum LineResult {
|
enum LineResult {
|
||||||
Success(String),
|
Success(String),
|
||||||
Error(String, ShellError),
|
Error(String, ShellError),
|
||||||
@ -471,9 +474,11 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
|||||||
Ok(line) if line.trim() == "" => LineResult::Success(line.clone()),
|
Ok(line) if line.trim() == "" => LineResult::Success(line.clone()),
|
||||||
|
|
||||||
Ok(line) => {
|
Ok(line) => {
|
||||||
let result = match crate::parser::parse(&line, uuid::Uuid::nil()) {
|
let line = chomp_newline(line);
|
||||||
|
|
||||||
|
let result = match crate::parser::parse(&line, uuid::Uuid::new_v4()) {
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
return LineResult::Error(line.clone(), err);
|
return LineResult::Error(line.to_string(), err);
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(val) => val,
|
Ok(val) => val,
|
||||||
@ -484,7 +489,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
|||||||
|
|
||||||
let mut pipeline = match classify_pipeline(&result, ctx, &Text::from(line)) {
|
let mut pipeline = match classify_pipeline(&result, ctx, &Text::from(line)) {
|
||||||
Ok(pipeline) => pipeline,
|
Ok(pipeline) => pipeline,
|
||||||
Err(err) => return LineResult::Error(line.clone(), err),
|
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||||
};
|
};
|
||||||
|
|
||||||
match pipeline.commands.last() {
|
match pipeline.commands.last() {
|
||||||
@ -492,7 +497,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
|||||||
_ => pipeline
|
_ => pipeline
|
||||||
.commands
|
.commands
|
||||||
.push(ClassifiedCommand::Internal(InternalCommand {
|
.push(ClassifiedCommand::Internal(InternalCommand {
|
||||||
command: whole_stream_command(autoview::Autoview),
|
name: "autoview".to_string(),
|
||||||
name_tag: Tag::unknown(),
|
name_tag: Tag::unknown(),
|
||||||
args: hir::Call::new(
|
args: hir::Call::new(
|
||||||
Box::new(hir::Expression::synthetic_string("autoview")),
|
Box::new(hir::Expression::synthetic_string("autoview")),
|
||||||
@ -514,16 +519,24 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
|||||||
input = match (item, next) {
|
input = match (item, next) {
|
||||||
(None, _) => break,
|
(None, _) => break,
|
||||||
|
|
||||||
|
(Some(ClassifiedCommand::Dynamic(_)), _)
|
||||||
|
| (_, Some(ClassifiedCommand::Dynamic(_))) => {
|
||||||
|
return LineResult::Error(
|
||||||
|
line.to_string(),
|
||||||
|
ShellError::unimplemented("Dynamic commands"),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
(Some(ClassifiedCommand::Expr(_)), _) => {
|
(Some(ClassifiedCommand::Expr(_)), _) => {
|
||||||
return LineResult::Error(
|
return LineResult::Error(
|
||||||
line.clone(),
|
line.to_string(),
|
||||||
ShellError::unimplemented("Expression-only commands"),
|
ShellError::unimplemented("Expression-only commands"),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
(_, Some(ClassifiedCommand::Expr(_))) => {
|
(_, Some(ClassifiedCommand::Expr(_))) => {
|
||||||
return LineResult::Error(
|
return LineResult::Error(
|
||||||
line.clone(),
|
line.to_string(),
|
||||||
ShellError::unimplemented("Expression-only commands"),
|
ShellError::unimplemented("Expression-only commands"),
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
@ -536,7 +549,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
|||||||
.await
|
.await
|
||||||
{
|
{
|
||||||
Ok(val) => ClassifiedInputStream::from_input_stream(val),
|
Ok(val) => ClassifiedInputStream::from_input_stream(val),
|
||||||
Err(err) => return LineResult::Error(line.clone(), err),
|
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||||
},
|
},
|
||||||
|
|
||||||
(Some(ClassifiedCommand::Internal(left)), Some(_)) => {
|
(Some(ClassifiedCommand::Internal(left)), Some(_)) => {
|
||||||
@ -545,7 +558,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
|||||||
.await
|
.await
|
||||||
{
|
{
|
||||||
Ok(val) => ClassifiedInputStream::from_input_stream(val),
|
Ok(val) => ClassifiedInputStream::from_input_stream(val),
|
||||||
Err(err) => return LineResult::Error(line.clone(), err),
|
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -555,7 +568,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
|||||||
.await
|
.await
|
||||||
{
|
{
|
||||||
Ok(val) => ClassifiedInputStream::from_input_stream(val),
|
Ok(val) => ClassifiedInputStream::from_input_stream(val),
|
||||||
Err(err) => return LineResult::Error(line.clone(), err),
|
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -564,20 +577,20 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
|||||||
Some(ClassifiedCommand::External(_)),
|
Some(ClassifiedCommand::External(_)),
|
||||||
) => match left.run(ctx, input, StreamNext::External).await {
|
) => match left.run(ctx, input, StreamNext::External).await {
|
||||||
Ok(val) => val,
|
Ok(val) => val,
|
||||||
Err(err) => return LineResult::Error(line.clone(), err),
|
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||||
},
|
},
|
||||||
|
|
||||||
(Some(ClassifiedCommand::External(left)), Some(_)) => {
|
(Some(ClassifiedCommand::External(left)), Some(_)) => {
|
||||||
match left.run(ctx, input, StreamNext::Internal).await {
|
match left.run(ctx, input, StreamNext::Internal).await {
|
||||||
Ok(val) => val,
|
Ok(val) => val,
|
||||||
Err(err) => return LineResult::Error(line.clone(), err),
|
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
(Some(ClassifiedCommand::External(left)), None) => {
|
(Some(ClassifiedCommand::External(left)), None) => {
|
||||||
match left.run(ctx, input, StreamNext::Last).await {
|
match left.run(ctx, input, StreamNext::Last).await {
|
||||||
Ok(val) => val,
|
Ok(val) => val,
|
||||||
Err(err) => return LineResult::Error(line.clone(), err),
|
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
@ -585,7 +598,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
|||||||
is_first_command = false;
|
is_first_command = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
LineResult::Success(line.clone())
|
LineResult::Success(line.to_string())
|
||||||
}
|
}
|
||||||
Err(ReadlineError::Interrupted) => LineResult::CtrlC,
|
Err(ReadlineError::Interrupted) => LineResult::CtrlC,
|
||||||
Err(ReadlineError::Eof) => LineResult::Break,
|
Err(ReadlineError::Eof) => LineResult::Break,
|
||||||
@ -616,80 +629,91 @@ fn classify_pipeline(
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn classify_command(
|
fn classify_command(
|
||||||
command: &PipelineElement,
|
command: &Tagged<PipelineElement>,
|
||||||
context: &Context,
|
context: &Context,
|
||||||
source: &Text,
|
source: &Text,
|
||||||
) -> Result<ClassifiedCommand, ShellError> {
|
) -> Result<ClassifiedCommand, ShellError> {
|
||||||
let call = command.call();
|
let mut iterator = TokensIterator::new(&command.tokens.item, command.tag, true);
|
||||||
|
|
||||||
|
let head = CommandHeadShape
|
||||||
|
.expand_syntax(&mut iterator, &context.expand_context(source, command.tag))?;
|
||||||
|
|
||||||
|
match &head {
|
||||||
|
CommandSignature::Expression(_) => Err(ShellError::syntax_error(
|
||||||
|
"Unexpected expression in command position".tagged(command.tag),
|
||||||
|
)),
|
||||||
|
|
||||||
match call {
|
|
||||||
// If the command starts with `^`, treat it as an external command no matter what
|
// If the command starts with `^`, treat it as an external command no matter what
|
||||||
call if call.head().is_external() => {
|
CommandSignature::External(name) => {
|
||||||
let name_tag = call.head().expect_external();
|
let name_str = name.slice(source);
|
||||||
let name = name_tag.slice(source);
|
|
||||||
|
|
||||||
Ok(external_command(call, source, name.tagged(name_tag)))
|
external_command(&mut iterator, source, name_str.tagged(name))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Otherwise, if the command is a bare word, we'll need to triage it
|
CommandSignature::LiteralExternal { outer, inner } => {
|
||||||
call if call.head().is_bare() => {
|
let name_str = inner.slice(source);
|
||||||
let head = call.head();
|
|
||||||
let name = head.source(source);
|
|
||||||
|
|
||||||
match context.has_command(name) {
|
external_command(&mut iterator, source, name_str.tagged(outer))
|
||||||
// if the command is in the registry, it's an internal command
|
|
||||||
true => {
|
|
||||||
let command = context.get_command(name);
|
|
||||||
let config = command.signature();
|
|
||||||
|
|
||||||
trace!(target: "nu::build_pipeline", "classifying {:?}", config);
|
|
||||||
|
|
||||||
let args: hir::Call = config.parse_args(call, &context, source)?;
|
|
||||||
|
|
||||||
trace!(target: "nu::build_pipeline", "args :: {}", args.debug(source));
|
|
||||||
|
|
||||||
Ok(ClassifiedCommand::Internal(InternalCommand {
|
|
||||||
command,
|
|
||||||
name_tag: head.tag(),
|
|
||||||
args,
|
|
||||||
}))
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// otherwise, it's an external command
|
CommandSignature::Internal(command) => {
|
||||||
false => Ok(external_command(call, source, name.tagged(head.tag()))),
|
let tail = parse_command_tail(
|
||||||
}
|
&command.signature(),
|
||||||
}
|
&context.expand_context(source, command.tag),
|
||||||
|
&mut iterator,
|
||||||
|
command.tag,
|
||||||
|
)?;
|
||||||
|
|
||||||
// If the command is something else (like a number or a variable), that is currently unsupported.
|
let (positional, named) = match tail {
|
||||||
// We might support `$somevar` as a curried command in the future.
|
None => (None, None),
|
||||||
call => Err(ShellError::invalid_command(call.head().tag())),
|
Some((positional, named)) => (positional, named),
|
||||||
|
};
|
||||||
|
|
||||||
|
let call = hir::Call {
|
||||||
|
head: Box::new(head.to_expression()),
|
||||||
|
positional,
|
||||||
|
named,
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(ClassifiedCommand::Internal(InternalCommand::new(
|
||||||
|
command.name().to_string(),
|
||||||
|
command.tag,
|
||||||
|
call,
|
||||||
|
)))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Classify this command as an external command, which doesn't give special meaning
|
// Classify this command as an external command, which doesn't give special meaning
|
||||||
// to nu syntactic constructs, and passes all arguments to the external command as
|
// to nu syntactic constructs, and passes all arguments to the external command as
|
||||||
// strings.
|
// strings.
|
||||||
fn external_command(
|
pub(crate) fn external_command(
|
||||||
call: &Tagged<CallNode>,
|
tokens: &mut TokensIterator,
|
||||||
source: &Text,
|
source: &Text,
|
||||||
name: Tagged<&str>,
|
name: Tagged<&str>,
|
||||||
) -> ClassifiedCommand {
|
) -> Result<ClassifiedCommand, ShellError> {
|
||||||
let arg_list_strings: Vec<Tagged<String>> = match call.children() {
|
let arg_list_strings = expand_external_tokens(tokens, source)?;
|
||||||
Some(args) => args
|
|
||||||
.iter()
|
|
||||||
.filter_map(|i| match i {
|
|
||||||
TokenNode::Whitespace(_) => None,
|
|
||||||
other => Some(other.as_external_arg(source).tagged(other.tag())),
|
|
||||||
})
|
|
||||||
.collect(),
|
|
||||||
None => vec![],
|
|
||||||
};
|
|
||||||
|
|
||||||
let (name, tag) = name.into_parts();
|
Ok(ClassifiedCommand::External(ExternalCommand {
|
||||||
|
|
||||||
ClassifiedCommand::External(ExternalCommand {
|
|
||||||
name: name.to_string(),
|
name: name.to_string(),
|
||||||
name_tag: tag,
|
name_tag: name.tag(),
|
||||||
args: arg_list_strings,
|
args: arg_list_strings,
|
||||||
})
|
}))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn print_err(err: ShellError, host: &dyn Host, source: &Text) {
|
||||||
|
let diag = err.to_diagnostic();
|
||||||
|
|
||||||
|
let writer = host.err_termcolor();
|
||||||
|
let mut source = source.to_string();
|
||||||
|
source.push_str(" ");
|
||||||
|
let files = crate::parser::Files::new(source);
|
||||||
|
let _ = std::panic::catch_unwind(move || {
|
||||||
|
let _ = language_reporting::emit(
|
||||||
|
&mut writer.lock(),
|
||||||
|
&files,
|
||||||
|
&diag,
|
||||||
|
&language_reporting::DefaultConfig,
|
||||||
|
);
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
@ -75,6 +75,7 @@ pub(crate) use command::{
|
|||||||
UnevaluatedCallInfo, WholeStreamCommand,
|
UnevaluatedCallInfo, WholeStreamCommand,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
pub(crate) use classified::ClassifiedCommand;
|
||||||
pub(crate) use config::Config;
|
pub(crate) use config::Config;
|
||||||
pub(crate) use cp::Cpy;
|
pub(crate) use cp::Cpy;
|
||||||
pub(crate) use date::Date;
|
pub(crate) use date::Date;
|
||||||
|
@ -58,21 +58,21 @@ pub fn autoview(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
} else if is_single_anchored_text_value(&input) {
|
// } else if is_single_origined_text_value(&input) {
|
||||||
let text = context.get_command("textview");
|
// let text = context.get_command("textview");
|
||||||
if let Some(text) = text {
|
// if let Some(text) = text {
|
||||||
let result = text.run(raw.with_input(input), &context.commands, false);
|
// let result = text.run(raw.with_input(input), &context.commands);
|
||||||
result.collect::<Vec<_>>().await;
|
// result.collect::<Vec<_>>().await;
|
||||||
} else {
|
// } else {
|
||||||
for i in input {
|
// for i in input {
|
||||||
match i.item {
|
// match i.item {
|
||||||
Value::Primitive(Primitive::String(s)) => {
|
// Value::Primitive(Primitive::String(s)) => {
|
||||||
println!("{}", s);
|
// println!("{}", s);
|
||||||
}
|
// }
|
||||||
_ => {}
|
// _ => {}
|
||||||
}
|
// }
|
||||||
}
|
// }
|
||||||
}
|
// }
|
||||||
} else if is_single_text_value(&input) {
|
} else if is_single_text_value(&input) {
|
||||||
for i in input {
|
for i in input {
|
||||||
match i.item {
|
match i.item {
|
||||||
@ -111,7 +111,8 @@ fn is_single_text_value(input: &Vec<Tagged<Value>>) -> bool {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_single_anchored_text_value(input: &Vec<Tagged<Value>>) -> bool {
|
#[allow(unused)]
|
||||||
|
fn is_single_origined_text_value(input: &Vec<Tagged<Value>>) -> bool {
|
||||||
if input.len() != 1 {
|
if input.len() != 1 {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
@ -1,12 +1,11 @@
|
|||||||
use crate::commands::Command;
|
|
||||||
use crate::parser::{hir, TokenNode};
|
use crate::parser::{hir, TokenNode};
|
||||||
use crate::prelude::*;
|
use crate::prelude::*;
|
||||||
use bytes::{BufMut, BytesMut};
|
use bytes::{BufMut, BytesMut};
|
||||||
|
use derive_new::new;
|
||||||
use futures::stream::StreamExt;
|
use futures::stream::StreamExt;
|
||||||
use futures_codec::{Decoder, Encoder, Framed};
|
use futures_codec::{Decoder, Encoder, Framed};
|
||||||
use log::{log_enabled, trace};
|
use log::{log_enabled, trace};
|
||||||
use std::io::{Error, ErrorKind};
|
use std::io::{Error, ErrorKind};
|
||||||
use std::sync::Arc;
|
|
||||||
use subprocess::Exec;
|
use subprocess::Exec;
|
||||||
|
|
||||||
/// A simple `Codec` implementation that splits up data into lines.
|
/// A simple `Codec` implementation that splits up data into lines.
|
||||||
@ -77,19 +76,28 @@ pub(crate) struct ClassifiedPipeline {
|
|||||||
pub(crate) commands: Vec<ClassifiedCommand>,
|
pub(crate) commands: Vec<ClassifiedCommand>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Eq, PartialEq)]
|
||||||
pub(crate) enum ClassifiedCommand {
|
pub(crate) enum ClassifiedCommand {
|
||||||
#[allow(unused)]
|
#[allow(unused)]
|
||||||
Expr(TokenNode),
|
Expr(TokenNode),
|
||||||
Internal(InternalCommand),
|
Internal(InternalCommand),
|
||||||
|
#[allow(unused)]
|
||||||
|
Dynamic(hir::Call),
|
||||||
External(ExternalCommand),
|
External(ExternalCommand),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(new, Debug, Eq, PartialEq)]
|
||||||
pub(crate) struct InternalCommand {
|
pub(crate) struct InternalCommand {
|
||||||
pub(crate) command: Arc<Command>,
|
pub(crate) name: String,
|
||||||
pub(crate) name_tag: Tag,
|
pub(crate) name_tag: Tag,
|
||||||
pub(crate) args: hir::Call,
|
pub(crate) args: hir::Call,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(new, Debug, Eq, PartialEq)]
|
||||||
|
pub(crate) struct DynamicCommand {
|
||||||
|
pub(crate) args: hir::Call,
|
||||||
|
}
|
||||||
|
|
||||||
impl InternalCommand {
|
impl InternalCommand {
|
||||||
pub(crate) async fn run(
|
pub(crate) async fn run(
|
||||||
self,
|
self,
|
||||||
@ -100,15 +108,17 @@ impl InternalCommand {
|
|||||||
) -> Result<InputStream, ShellError> {
|
) -> Result<InputStream, ShellError> {
|
||||||
if log_enabled!(log::Level::Trace) {
|
if log_enabled!(log::Level::Trace) {
|
||||||
trace!(target: "nu::run::internal", "->");
|
trace!(target: "nu::run::internal", "->");
|
||||||
trace!(target: "nu::run::internal", "{}", self.command.name());
|
trace!(target: "nu::run::internal", "{}", self.name);
|
||||||
trace!(target: "nu::run::internal", "{}", self.args.debug(&source));
|
trace!(target: "nu::run::internal", "{}", self.args.debug(&source));
|
||||||
}
|
}
|
||||||
|
|
||||||
let objects: InputStream =
|
let objects: InputStream =
|
||||||
trace_stream!(target: "nu::trace_stream::internal", "input" = input.objects);
|
trace_stream!(target: "nu::trace_stream::internal", "input" = input.objects);
|
||||||
|
|
||||||
|
let command = context.expect_command(&self.name);
|
||||||
|
|
||||||
let result = context.run_command(
|
let result = context.run_command(
|
||||||
self.command,
|
command,
|
||||||
self.name_tag.clone(),
|
self.name_tag.clone(),
|
||||||
context.source_map.clone(),
|
context.source_map.clone(),
|
||||||
self.args,
|
self.args,
|
||||||
@ -185,6 +195,7 @@ impl InternalCommand {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Eq, PartialEq)]
|
||||||
pub(crate) struct ExternalCommand {
|
pub(crate) struct ExternalCommand {
|
||||||
pub(crate) name: String,
|
pub(crate) name: String,
|
||||||
|
|
||||||
@ -192,6 +203,7 @@ pub(crate) struct ExternalCommand {
|
|||||||
pub(crate) args: Vec<Tagged<String>>,
|
pub(crate) args: Vec<Tagged<String>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
pub(crate) enum StreamNext {
|
pub(crate) enum StreamNext {
|
||||||
Last,
|
Last,
|
||||||
External,
|
External,
|
||||||
@ -221,6 +233,8 @@ impl ExternalCommand {
|
|||||||
|
|
||||||
process = Exec::cmd(&self.name);
|
process = Exec::cmd(&self.name);
|
||||||
|
|
||||||
|
trace!(target: "nu::run::external", "command = {:?}", process);
|
||||||
|
|
||||||
if arg_string.contains("$it") {
|
if arg_string.contains("$it") {
|
||||||
let mut first = true;
|
let mut first = true;
|
||||||
|
|
||||||
@ -275,6 +289,8 @@ impl ExternalCommand {
|
|||||||
|
|
||||||
process = process.cwd(context.shell_manager.path());
|
process = process.cwd(context.shell_manager.path());
|
||||||
|
|
||||||
|
trace!(target: "nu::run::external", "cwd = {:?}", context.shell_manager.path());
|
||||||
|
|
||||||
let mut process = match stream_next {
|
let mut process = match stream_next {
|
||||||
StreamNext::Last => process,
|
StreamNext::Last => process,
|
||||||
StreamNext::External | StreamNext::Internal => {
|
StreamNext::External | StreamNext::Internal => {
|
||||||
@ -282,11 +298,18 @@ impl ExternalCommand {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
trace!(target: "nu::run::external", "set up stdout pipe");
|
||||||
|
|
||||||
if let Some(stdin) = stdin {
|
if let Some(stdin) = stdin {
|
||||||
process = process.stdin(stdin);
|
process = process.stdin(stdin);
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut popen = process.popen()?;
|
trace!(target: "nu::run::external", "set up stdin pipe");
|
||||||
|
trace!(target: "nu::run::external", "built process {:?}", process);
|
||||||
|
|
||||||
|
let mut popen = process.popen().unwrap();
|
||||||
|
|
||||||
|
trace!(target: "nu::run::external", "next = {:?}", stream_next);
|
||||||
|
|
||||||
match stream_next {
|
match stream_next {
|
||||||
StreamNext::Last => {
|
StreamNext::Last => {
|
||||||
|
@ -507,6 +507,15 @@ pub enum Command {
|
|||||||
PerItem(Arc<dyn PerItemCommand>),
|
PerItem(Arc<dyn PerItemCommand>),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl std::fmt::Debug for Command {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
match self {
|
||||||
|
Command::WholeStream(command) => write!(f, "WholeStream({})", command.name()),
|
||||||
|
Command::PerItem(command) => write!(f, "PerItem({})", command.name()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl Command {
|
impl Command {
|
||||||
pub fn name(&self) -> &str {
|
pub fn name(&self) -> &str {
|
||||||
match self {
|
match self {
|
||||||
|
@ -54,11 +54,10 @@ fn run(
|
|||||||
output.push_str(&s);
|
output.push_str(&s);
|
||||||
}
|
}
|
||||||
_ => {
|
_ => {
|
||||||
return Err(ShellError::labeled_error(
|
return Err(ShellError::type_error(
|
||||||
"Expect a string from pipeline",
|
"a string-compatible value",
|
||||||
"not a string-compatible value",
|
i.tagged_type_name(),
|
||||||
i.tag(),
|
))
|
||||||
));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -15,7 +15,7 @@ impl PerItemCommand for Enter {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn signature(&self) -> registry::Signature {
|
fn signature(&self) -> registry::Signature {
|
||||||
Signature::build("enter").required("location", SyntaxShape::Block)
|
Signature::build("enter").required("location", SyntaxShape::Path)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
fn usage(&self) -> &str {
|
||||||
@ -33,14 +33,14 @@ impl PerItemCommand for Enter {
|
|||||||
let raw_args = raw_args.clone();
|
let raw_args = raw_args.clone();
|
||||||
match call_info.args.expect_nth(0)? {
|
match call_info.args.expect_nth(0)? {
|
||||||
Tagged {
|
Tagged {
|
||||||
item: Value::Primitive(Primitive::String(location)),
|
item: Value::Primitive(Primitive::Path(location)),
|
||||||
..
|
..
|
||||||
} => {
|
} => {
|
||||||
let location = location.to_string();
|
let location_string = location.display().to_string();
|
||||||
let location_clone = location.to_string();
|
let location_clone = location_string.clone();
|
||||||
|
|
||||||
if location.starts_with("help") {
|
if location.starts_with("help") {
|
||||||
let spec = location.split(":").collect::<Vec<&str>>();
|
let spec = location_string.split(":").collect::<Vec<&str>>();
|
||||||
|
|
||||||
let (_, command) = (spec[0], spec[1]);
|
let (_, command) = (spec[0], spec[1]);
|
||||||
|
|
||||||
|
@ -53,7 +53,7 @@ fn run(
|
|||||||
};
|
};
|
||||||
let path_buf = path.as_path()?;
|
let path_buf = path.as_path()?;
|
||||||
let path_str = path_buf.display().to_string();
|
let path_str = path_buf.display().to_string();
|
||||||
let path_span = path.span();
|
let path_span = path.tag.span;
|
||||||
let has_raw = call_info.args.has("raw");
|
let has_raw = call_info.args.has("raw");
|
||||||
let registry = registry.clone();
|
let registry = registry.clone();
|
||||||
let raw_args = raw_args.clone();
|
let raw_args = raw_args.clone();
|
||||||
|
@ -16,7 +16,7 @@ impl WholeStreamCommand for First {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn signature(&self) -> Signature {
|
fn signature(&self) -> Signature {
|
||||||
Signature::build("first").required("amount", SyntaxShape::Literal)
|
Signature::build("first").required("amount", SyntaxShape::Int)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
fn usage(&self) -> &str {
|
||||||
|
@ -1,14 +1,16 @@
|
|||||||
use crate::commands::WholeStreamCommand;
|
use crate::commands::WholeStreamCommand;
|
||||||
|
use crate::data::meta::tag_for_tagged_list;
|
||||||
use crate::data::Value;
|
use crate::data::Value;
|
||||||
use crate::errors::ShellError;
|
use crate::errors::ShellError;
|
||||||
use crate::prelude::*;
|
use crate::prelude::*;
|
||||||
|
use log::trace;
|
||||||
|
|
||||||
pub struct Get;
|
pub struct Get;
|
||||||
|
|
||||||
#[derive(Deserialize)]
|
#[derive(Deserialize)]
|
||||||
pub struct GetArgs {
|
pub struct GetArgs {
|
||||||
member: Tagged<String>,
|
member: ColumnPath,
|
||||||
rest: Vec<Tagged<String>>,
|
rest: Vec<ColumnPath>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl WholeStreamCommand for Get {
|
impl WholeStreamCommand for Get {
|
||||||
@ -18,8 +20,8 @@ impl WholeStreamCommand for Get {
|
|||||||
|
|
||||||
fn signature(&self) -> Signature {
|
fn signature(&self) -> Signature {
|
||||||
Signature::build("get")
|
Signature::build("get")
|
||||||
.required("member", SyntaxShape::Member)
|
.required("member", SyntaxShape::ColumnPath)
|
||||||
.rest(SyntaxShape::Member)
|
.rest(SyntaxShape::ColumnPath)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn usage(&self) -> &str {
|
fn usage(&self) -> &str {
|
||||||
@ -35,40 +37,35 @@ impl WholeStreamCommand for Get {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_member(path: &Tagged<String>, obj: &Tagged<Value>) -> Result<Tagged<Value>, ShellError> {
|
pub type ColumnPath = Vec<Tagged<String>>;
|
||||||
|
|
||||||
|
pub fn get_column_path(
|
||||||
|
path: &ColumnPath,
|
||||||
|
obj: &Tagged<Value>,
|
||||||
|
) -> Result<Tagged<Value>, ShellError> {
|
||||||
let mut current = Some(obj);
|
let mut current = Some(obj);
|
||||||
for p in path.split(".") {
|
for p in path.iter() {
|
||||||
if let Some(obj) = current {
|
if let Some(obj) = current {
|
||||||
current = match obj.get_data_by_key(p) {
|
current = match obj.get_data_by_key(&p) {
|
||||||
Some(v) => Some(v),
|
Some(v) => Some(v),
|
||||||
None =>
|
None =>
|
||||||
// Before we give up, see if they gave us a path that matches a field name by itself
|
// Before we give up, see if they gave us a path that matches a field name by itself
|
||||||
{
|
{
|
||||||
match obj.get_data_by_key(&path.item) {
|
|
||||||
Some(v) => return Ok(v.clone()),
|
|
||||||
None => {
|
|
||||||
let possibilities = obj.data_descriptors();
|
let possibilities = obj.data_descriptors();
|
||||||
|
|
||||||
let mut possible_matches: Vec<_> = possibilities
|
let mut possible_matches: Vec<_> = possibilities
|
||||||
.iter()
|
.iter()
|
||||||
.map(|x| {
|
.map(|x| (natural::distance::levenshtein_distance(x, &p), x))
|
||||||
(natural::distance::levenshtein_distance(x, &path.item), x)
|
|
||||||
})
|
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
possible_matches.sort();
|
possible_matches.sort();
|
||||||
|
|
||||||
if possible_matches.len() > 0 {
|
|
||||||
return Err(ShellError::labeled_error(
|
return Err(ShellError::labeled_error(
|
||||||
"Unknown column",
|
"Unknown column",
|
||||||
format!("did you mean '{}'?", possible_matches[0].1),
|
format!("did you mean '{}'?", possible_matches[0].1),
|
||||||
path.tag(),
|
tag_for_tagged_list(path.iter().map(|p| p.tag())),
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
None
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -97,6 +94,8 @@ pub fn get(
|
|||||||
}: GetArgs,
|
}: GetArgs,
|
||||||
RunnableContext { input, .. }: RunnableContext,
|
RunnableContext { input, .. }: RunnableContext,
|
||||||
) -> Result<OutputStream, ShellError> {
|
) -> Result<OutputStream, ShellError> {
|
||||||
|
trace!("get {:?} {:?}", member, fields);
|
||||||
|
|
||||||
let stream = input
|
let stream = input
|
||||||
.values
|
.values
|
||||||
.map(move |item| {
|
.map(move |item| {
|
||||||
@ -107,10 +106,10 @@ pub fn get(
|
|||||||
let fields = vec![&member, &fields]
|
let fields = vec![&member, &fields]
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.flatten()
|
.flatten()
|
||||||
.collect::<Vec<&Tagged<String>>>();
|
.collect::<Vec<&ColumnPath>>();
|
||||||
|
|
||||||
for field in &fields {
|
for column_path in &fields {
|
||||||
match get_member(field, &item) {
|
match get_column_path(column_path, &item) {
|
||||||
Ok(Tagged {
|
Ok(Tagged {
|
||||||
item: Value::Table(l),
|
item: Value::Table(l),
|
||||||
..
|
..
|
||||||
|
@ -54,7 +54,7 @@ fn run(
|
|||||||
};
|
};
|
||||||
let path_buf = path.as_path()?;
|
let path_buf = path.as_path()?;
|
||||||
let path_str = path_buf.display().to_string();
|
let path_str = path_buf.display().to_string();
|
||||||
let path_span = path.span();
|
let path_span = path.tag.span;
|
||||||
let has_raw = call_info.args.has("raw");
|
let has_raw = call_info.args.has("raw");
|
||||||
let registry = registry.clone();
|
let registry = registry.clone();
|
||||||
let raw_args = raw_args.clone();
|
let raw_args = raw_args.clone();
|
||||||
|
@ -143,15 +143,16 @@ fn save(
|
|||||||
}
|
}
|
||||||
_ => {
|
_ => {
|
||||||
yield Err(ShellError::labeled_error(
|
yield Err(ShellError::labeled_error(
|
||||||
"Save requires a filepath",
|
"Save requires a filepath (1)",
|
||||||
"needs path",
|
"needs path",
|
||||||
name_tag,
|
name_tag,
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
None => {
|
None => {
|
||||||
|
eprintln!("{:?} {:?}", anchor, source_map);
|
||||||
yield Err(ShellError::labeled_error(
|
yield Err(ShellError::labeled_error(
|
||||||
"Save requires a filepath",
|
"Save requires a filepath (2)",
|
||||||
"needs path",
|
"needs path",
|
||||||
name_tag,
|
name_tag,
|
||||||
));
|
));
|
||||||
@ -159,7 +160,7 @@ fn save(
|
|||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
yield Err(ShellError::labeled_error(
|
yield Err(ShellError::labeled_error(
|
||||||
"Save requires a filepath",
|
"Save requires a filepath (3)",
|
||||||
"needs path",
|
"needs path",
|
||||||
name_tag,
|
name_tag,
|
||||||
));
|
));
|
||||||
|
@ -1,6 +1,7 @@
|
|||||||
use crate::commands::WholeStreamCommand;
|
use crate::commands::WholeStreamCommand;
|
||||||
use crate::errors::ShellError;
|
use crate::errors::ShellError;
|
||||||
use crate::prelude::*;
|
use crate::prelude::*;
|
||||||
|
use log::trace;
|
||||||
|
|
||||||
pub struct SkipWhile;
|
pub struct SkipWhile;
|
||||||
|
|
||||||
@ -38,7 +39,9 @@ pub fn skip_while(
|
|||||||
RunnableContext { input, .. }: RunnableContext,
|
RunnableContext { input, .. }: RunnableContext,
|
||||||
) -> Result<OutputStream, ShellError> {
|
) -> Result<OutputStream, ShellError> {
|
||||||
let objects = input.values.skip_while(move |item| {
|
let objects = input.values.skip_while(move |item| {
|
||||||
|
trace!("ITEM = {:?}", item);
|
||||||
let result = condition.invoke(&item);
|
let result = condition.invoke(&item);
|
||||||
|
trace!("RESULT = {:?}", result);
|
||||||
|
|
||||||
let return_value = match result {
|
let return_value = match result {
|
||||||
Ok(ref v) if v.is_true() => true,
|
Ok(ref v) if v.is_true() => true,
|
||||||
|
@ -38,8 +38,8 @@ fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream,
|
|||||||
let anchor = v.anchor();
|
let anchor = v.anchor();
|
||||||
let span = v.tag().span;
|
let span = v.tag().span;
|
||||||
let mut dict = TaggedDictBuilder::new(v.tag());
|
let mut dict = TaggedDictBuilder::new(v.tag());
|
||||||
dict.insert("start", Value::int(span.start as i64));
|
dict.insert("start", Value::int(span.start() as i64));
|
||||||
dict.insert("end", Value::int(span.end as i64));
|
dict.insert("end", Value::int(span.end() as i64));
|
||||||
tags.insert_tagged("span", dict.into_tagged_value());
|
tags.insert_tagged("span", dict.into_tagged_value());
|
||||||
|
|
||||||
match source_map.get(&anchor) {
|
match source_map.get(&anchor) {
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
use crate::commands::{Command, UnevaluatedCallInfo};
|
use crate::commands::{Command, UnevaluatedCallInfo};
|
||||||
use crate::parser::hir;
|
use crate::parser::{hir, hir::syntax_shape::ExpandContext};
|
||||||
use crate::prelude::*;
|
use crate::prelude::*;
|
||||||
|
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
@ -53,13 +53,17 @@ impl CommandRegistry {
|
|||||||
registry.get(name).map(|c| c.clone())
|
registry.get(name).map(|c| c.clone())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(crate) fn expect_command(&self, name: &str) -> Arc<Command> {
|
||||||
|
self.get_command(name).unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
pub(crate) fn has(&self, name: &str) -> bool {
|
pub(crate) fn has(&self, name: &str) -> bool {
|
||||||
let registry = self.registry.lock().unwrap();
|
let registry = self.registry.lock().unwrap();
|
||||||
|
|
||||||
registry.contains_key(name)
|
registry.contains_key(name)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn insert(&mut self, name: impl Into<String>, command: Arc<Command>) {
|
pub(crate) fn insert(&mut self, name: impl Into<String>, command: Arc<Command>) {
|
||||||
let mut registry = self.registry.lock().unwrap();
|
let mut registry = self.registry.lock().unwrap();
|
||||||
registry.insert(name.into(), command);
|
registry.insert(name.into(), command);
|
||||||
}
|
}
|
||||||
@ -83,6 +87,14 @@ impl Context {
|
|||||||
&self.registry
|
&self.registry
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(crate) fn expand_context<'context>(
|
||||||
|
&'context self,
|
||||||
|
source: &'context Text,
|
||||||
|
tag: Tag,
|
||||||
|
) -> ExpandContext<'context> {
|
||||||
|
ExpandContext::new(&self.registry, tag, source, self.shell_manager.homedir())
|
||||||
|
}
|
||||||
|
|
||||||
pub(crate) fn basic() -> Result<Context, Box<dyn Error>> {
|
pub(crate) fn basic() -> Result<Context, Box<dyn Error>> {
|
||||||
let registry = CommandRegistry::new();
|
let registry = CommandRegistry::new();
|
||||||
Ok(Context {
|
Ok(Context {
|
||||||
@ -109,12 +121,12 @@ impl Context {
|
|||||||
self.source_map.insert(uuid, anchor_location);
|
self.source_map.insert(uuid, anchor_location);
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn has_command(&self, name: &str) -> bool {
|
pub(crate) fn get_command(&self, name: &str) -> Option<Arc<Command>> {
|
||||||
self.registry.has(name)
|
self.registry.get_command(name)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn get_command(&self, name: &str) -> Arc<Command> {
|
pub(crate) fn expect_command(&self, name: &str) -> Arc<Command> {
|
||||||
self.registry.get_command(name).unwrap()
|
self.registry.expect_command(name)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(crate) fn run_command<'a>(
|
pub(crate) fn run_command<'a>(
|
||||||
|
133
src/data/base.rs
133
src/data/base.rs
@ -8,6 +8,7 @@ use crate::Text;
|
|||||||
use chrono::{DateTime, Utc};
|
use chrono::{DateTime, Utc};
|
||||||
use chrono_humanize::Humanize;
|
use chrono_humanize::Humanize;
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
|
use log::trace;
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
@ -217,6 +218,14 @@ impl Block {
|
|||||||
|
|
||||||
let mut last = None;
|
let mut last = None;
|
||||||
|
|
||||||
|
trace!(
|
||||||
|
"EXPRS = {:?}",
|
||||||
|
self.expressions
|
||||||
|
.iter()
|
||||||
|
.map(|e| format!("{}", e))
|
||||||
|
.collect::<Vec<_>>()
|
||||||
|
);
|
||||||
|
|
||||||
for expr in self.expressions.iter() {
|
for expr in self.expressions.iter() {
|
||||||
last = Some(evaluate_baseline_expr(
|
last = Some(evaluate_baseline_expr(
|
||||||
&expr,
|
&expr,
|
||||||
@ -394,13 +403,34 @@ impl Tagged<Value> {
|
|||||||
pub(crate) fn debug(&self) -> ValueDebug<'_> {
|
pub(crate) fn debug(&self) -> ValueDebug<'_> {
|
||||||
ValueDebug { value: self }
|
ValueDebug { value: self }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn as_column_path(&self) -> Result<Tagged<Vec<Tagged<String>>>, ShellError> {
|
||||||
|
let mut out: Vec<Tagged<String>> = vec![];
|
||||||
|
|
||||||
|
match &self.item {
|
||||||
|
Value::Table(table) => {
|
||||||
|
for item in table {
|
||||||
|
out.push(item.as_string()?.tagged(item.tag));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
other => {
|
||||||
|
return Err(ShellError::type_error(
|
||||||
|
"column name",
|
||||||
|
other.type_name().tagged(self.tag),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(out.tagged(self.tag))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Value {
|
impl Value {
|
||||||
pub(crate) fn type_name(&self) -> String {
|
pub(crate) fn type_name(&self) -> String {
|
||||||
match self {
|
match self {
|
||||||
Value::Primitive(p) => p.type_name(),
|
Value::Primitive(p) => p.type_name(),
|
||||||
Value::Row(_) => format!("object"),
|
Value::Row(_) => format!("row"),
|
||||||
Value::Table(_) => format!("list"),
|
Value::Table(_) => format!("list"),
|
||||||
Value::Block(_) => format!("block"),
|
Value::Block(_) => format!("block"),
|
||||||
}
|
}
|
||||||
@ -443,6 +473,22 @@ impl Value {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn get_data_by_column_path(
|
||||||
|
&self,
|
||||||
|
tag: Tag,
|
||||||
|
path: &Vec<Tagged<String>>,
|
||||||
|
) -> Option<Tagged<&Value>> {
|
||||||
|
let mut current = self;
|
||||||
|
for p in path {
|
||||||
|
match current.get_data_by_key(p) {
|
||||||
|
Some(v) => current = v,
|
||||||
|
None => return None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Some(Tagged::from_item(current, tag))
|
||||||
|
}
|
||||||
|
|
||||||
pub fn get_data_by_path(&self, tag: Tag, path: &str) -> Option<Tagged<&Value>> {
|
pub fn get_data_by_path(&self, tag: Tag, path: &str) -> Option<Tagged<&Value>> {
|
||||||
let mut current = self;
|
let mut current = self;
|
||||||
for p in path.split(".") {
|
for p in path.split(".") {
|
||||||
@ -508,6 +554,58 @@ impl Value {
|
|||||||
None
|
None
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn insert_data_at_column_path(
|
||||||
|
&self,
|
||||||
|
tag: Tag,
|
||||||
|
split_path: &Vec<Tagged<String>>,
|
||||||
|
new_value: Value,
|
||||||
|
) -> Option<Tagged<Value>> {
|
||||||
|
let mut new_obj = self.clone();
|
||||||
|
|
||||||
|
if let Value::Row(ref mut o) = new_obj {
|
||||||
|
let mut current = o;
|
||||||
|
|
||||||
|
if split_path.len() == 1 {
|
||||||
|
// Special case for inserting at the top level
|
||||||
|
current.entries.insert(
|
||||||
|
split_path[0].item.clone(),
|
||||||
|
Tagged::from_item(new_value, tag),
|
||||||
|
);
|
||||||
|
return Some(Tagged::from_item(new_obj, tag));
|
||||||
|
}
|
||||||
|
|
||||||
|
for idx in 0..split_path.len() {
|
||||||
|
match current.entries.get_mut(&split_path[idx].item) {
|
||||||
|
Some(next) => {
|
||||||
|
if idx == (split_path.len() - 2) {
|
||||||
|
match &mut next.item {
|
||||||
|
Value::Row(o) => {
|
||||||
|
o.entries.insert(
|
||||||
|
split_path[idx + 1].to_string(),
|
||||||
|
Tagged::from_item(new_value, tag),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
return Some(Tagged::from_item(new_obj, tag));
|
||||||
|
} else {
|
||||||
|
match next.item {
|
||||||
|
Value::Row(ref mut o) => {
|
||||||
|
current = o;
|
||||||
|
}
|
||||||
|
_ => return None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => return None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
pub fn replace_data_at_path(
|
pub fn replace_data_at_path(
|
||||||
&self,
|
&self,
|
||||||
tag: Tag,
|
tag: Tag,
|
||||||
@ -543,6 +641,39 @@ impl Value {
|
|||||||
None
|
None
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn replace_data_at_column_path(
|
||||||
|
&self,
|
||||||
|
tag: Tag,
|
||||||
|
split_path: &Vec<Tagged<String>>,
|
||||||
|
replaced_value: Value,
|
||||||
|
) -> Option<Tagged<Value>> {
|
||||||
|
let mut new_obj = self.clone();
|
||||||
|
|
||||||
|
if let Value::Row(ref mut o) = new_obj {
|
||||||
|
let mut current = o;
|
||||||
|
for idx in 0..split_path.len() {
|
||||||
|
match current.entries.get_mut(&split_path[idx].item) {
|
||||||
|
Some(next) => {
|
||||||
|
if idx == (split_path.len() - 1) {
|
||||||
|
*next = Tagged::from_item(replaced_value, tag);
|
||||||
|
return Some(Tagged::from_item(new_obj, tag));
|
||||||
|
} else {
|
||||||
|
match next.item {
|
||||||
|
Value::Row(ref mut o) => {
|
||||||
|
current = o;
|
||||||
|
}
|
||||||
|
_ => return None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => return None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
pub fn get_data(&self, desc: &String) -> MaybeOwned<'_, Value> {
|
pub fn get_data(&self, desc: &String) -> MaybeOwned<'_, Value> {
|
||||||
match self {
|
match self {
|
||||||
p @ Value::Primitive(_) => MaybeOwned::Borrowed(p),
|
p @ Value::Primitive(_) => MaybeOwned::Borrowed(p),
|
||||||
|
123
src/data/meta.rs
123
src/data/meta.rs
@ -1,4 +1,5 @@
|
|||||||
use crate::context::{AnchorLocation, SourceMap};
|
use crate::context::{AnchorLocation, SourceMap};
|
||||||
|
use crate::parser::parse::parser::TracableContext;
|
||||||
use crate::prelude::*;
|
use crate::prelude::*;
|
||||||
use crate::Text;
|
use crate::Text;
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
@ -119,10 +120,7 @@ impl From<&Tag> for Tag {
|
|||||||
|
|
||||||
impl From<nom_locate::LocatedSpanEx<&str, Uuid>> for Span {
|
impl From<nom_locate::LocatedSpanEx<&str, Uuid>> for Span {
|
||||||
fn from(input: nom_locate::LocatedSpanEx<&str, Uuid>) -> Span {
|
fn from(input: nom_locate::LocatedSpanEx<&str, Uuid>) -> Span {
|
||||||
Span {
|
Span::new(input.offset, input.offset + input.fragment.len())
|
||||||
start: input.offset,
|
|
||||||
end: input.offset + input.fragment.len(),
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -147,10 +145,7 @@ impl<T>
|
|||||||
|
|
||||||
impl From<(usize, usize)> for Span {
|
impl From<(usize, usize)> for Span {
|
||||||
fn from(input: (usize, usize)) -> Span {
|
fn from(input: (usize, usize)) -> Span {
|
||||||
Span {
|
Span::new(input.0, input.1)
|
||||||
start: input.0,
|
|
||||||
end: input.1,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -164,7 +159,7 @@ impl From<&std::ops::Range<usize>> for Span {
|
|||||||
}
|
}
|
||||||
|
|
||||||
#[derive(
|
#[derive(
|
||||||
Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters,
|
Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters, new,
|
||||||
)]
|
)]
|
||||||
pub struct Tag {
|
pub struct Tag {
|
||||||
pub anchor: Uuid,
|
pub anchor: Uuid,
|
||||||
@ -189,11 +184,20 @@ impl From<&Span> for Tag {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl From<(usize, usize, TracableContext)> for Tag {
|
||||||
|
fn from((start, end, context): (usize, usize, TracableContext)) -> Self {
|
||||||
|
Tag {
|
||||||
|
anchor: context.origin,
|
||||||
|
span: Span::new(start, end),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl From<(usize, usize, Uuid)> for Tag {
|
impl From<(usize, usize, Uuid)> for Tag {
|
||||||
fn from((start, end, anchor): (usize, usize, Uuid)) -> Self {
|
fn from((start, end, anchor): (usize, usize, Uuid)) -> Self {
|
||||||
Tag {
|
Tag {
|
||||||
anchor,
|
anchor,
|
||||||
span: Span { start, end },
|
span: Span::new(start, end),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -201,24 +205,17 @@ impl From<(usize, usize, Uuid)> for Tag {
|
|||||||
impl From<(usize, usize, Option<Uuid>)> for Tag {
|
impl From<(usize, usize, Option<Uuid>)> for Tag {
|
||||||
fn from((start, end, anchor): (usize, usize, Option<Uuid>)) -> Self {
|
fn from((start, end, anchor): (usize, usize, Option<Uuid>)) -> Self {
|
||||||
Tag {
|
Tag {
|
||||||
anchor: if let Some(uuid) = anchor {
|
anchor: anchor.unwrap_or(uuid::Uuid::nil()),
|
||||||
uuid
|
span: Span::new(start, end),
|
||||||
} else {
|
|
||||||
uuid::Uuid::nil()
|
|
||||||
},
|
|
||||||
span: Span { start, end },
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<nom_locate::LocatedSpanEx<&str, Uuid>> for Tag {
|
impl From<nom_locate::LocatedSpanEx<&str, TracableContext>> for Tag {
|
||||||
fn from(input: nom_locate::LocatedSpanEx<&str, Uuid>) -> Tag {
|
fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Tag {
|
||||||
Tag {
|
Tag {
|
||||||
anchor: input.extra,
|
anchor: input.extra.origin,
|
||||||
span: Span {
|
span: Span::new(input.offset, input.offset + input.fragment.len()),
|
||||||
start: input.offset,
|
|
||||||
end: input.offset + input.fragment.len(),
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -265,10 +262,7 @@ impl Tag {
|
|||||||
);
|
);
|
||||||
|
|
||||||
Tag {
|
Tag {
|
||||||
span: Span {
|
span: Span::new(self.span.start, other.span.end),
|
||||||
start: self.span.start,
|
|
||||||
end: other.span.end,
|
|
||||||
},
|
|
||||||
anchor: self.anchor,
|
anchor: self.anchor,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -276,18 +270,46 @@ impl Tag {
|
|||||||
pub fn slice<'a>(&self, source: &'a str) -> &'a str {
|
pub fn slice<'a>(&self, source: &'a str) -> &'a str {
|
||||||
self.span.slice(source)
|
self.span.slice(source)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn string<'a>(&self, source: &'a str) -> String {
|
||||||
|
self.span.slice(source).to_string()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn tagged_slice<'a>(&self, source: &'a str) -> Tagged<&'a str> {
|
||||||
|
self.span.slice(source).tagged(self)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn tagged_string<'a>(&self, source: &'a str) -> Tagged<String> {
|
||||||
|
self.span.slice(source).to_string().tagged(self)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn tag_for_tagged_list(mut iter: impl Iterator<Item = Tag>) -> Tag {
|
||||||
|
let first = iter.next();
|
||||||
|
|
||||||
|
let first = match first {
|
||||||
|
None => return Tag::unknown(),
|
||||||
|
Some(first) => first,
|
||||||
|
};
|
||||||
|
|
||||||
|
let last = iter.last();
|
||||||
|
|
||||||
|
match last {
|
||||||
|
None => first,
|
||||||
|
Some(last) => first.until(last),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
|
||||||
pub struct Span {
|
pub struct Span {
|
||||||
pub(crate) start: usize,
|
start: usize,
|
||||||
pub(crate) end: usize,
|
end: usize,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl From<Option<Span>> for Span {
|
impl From<Option<Span>> for Span {
|
||||||
fn from(input: Option<Span>) -> Span {
|
fn from(input: Option<Span>) -> Span {
|
||||||
match input {
|
match input {
|
||||||
None => Span { start: 0, end: 0 },
|
None => Span::new(0, 0),
|
||||||
Some(span) => span,
|
Some(span) => span,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -295,7 +317,18 @@ impl From<Option<Span>> for Span {
|
|||||||
|
|
||||||
impl Span {
|
impl Span {
|
||||||
pub fn unknown() -> Span {
|
pub fn unknown() -> Span {
|
||||||
Span { start: 0, end: 0 }
|
Span::new(0, 0)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn new(start: usize, end: usize) -> Span {
|
||||||
|
assert!(
|
||||||
|
end >= start,
|
||||||
|
"Can't create a Span whose end < start, start={}, end={}",
|
||||||
|
start,
|
||||||
|
end
|
||||||
|
);
|
||||||
|
|
||||||
|
Span { start, end }
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
@ -308,6 +341,14 @@ impl Span {
|
|||||||
}
|
}
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
pub fn start(&self) -> usize {
|
||||||
|
self.start
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn end(&self) -> usize {
|
||||||
|
self.end
|
||||||
|
}
|
||||||
|
|
||||||
pub fn is_unknown(&self) -> bool {
|
pub fn is_unknown(&self) -> bool {
|
||||||
self.start == 0 && self.end == 0
|
self.start == 0 && self.end == 0
|
||||||
}
|
}
|
||||||
@ -319,17 +360,11 @@ impl Span {
|
|||||||
|
|
||||||
impl language_reporting::ReportingSpan for Span {
|
impl language_reporting::ReportingSpan for Span {
|
||||||
fn with_start(&self, start: usize) -> Self {
|
fn with_start(&self, start: usize) -> Self {
|
||||||
Span {
|
Span::new(start, self.end)
|
||||||
start,
|
|
||||||
end: self.end,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn with_end(&self, end: usize) -> Self {
|
fn with_end(&self, end: usize) -> Self {
|
||||||
Span {
|
Span::new(self.start, end)
|
||||||
start: self.start,
|
|
||||||
end,
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn start(&self) -> usize {
|
fn start(&self) -> usize {
|
||||||
@ -344,20 +379,14 @@ impl language_reporting::ReportingSpan for Span {
|
|||||||
impl language_reporting::ReportingSpan for Tag {
|
impl language_reporting::ReportingSpan for Tag {
|
||||||
fn with_start(&self, start: usize) -> Self {
|
fn with_start(&self, start: usize) -> Self {
|
||||||
Tag {
|
Tag {
|
||||||
span: Span {
|
span: Span::new(start, self.span.end),
|
||||||
start,
|
|
||||||
end: self.span.end,
|
|
||||||
},
|
|
||||||
anchor: self.anchor,
|
anchor: self.anchor,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn with_end(&self, end: usize) -> Self {
|
fn with_end(&self, end: usize) -> Self {
|
||||||
Tag {
|
Tag {
|
||||||
span: Span {
|
span: Span::new(self.span.start, end),
|
||||||
start: self.span.start,
|
|
||||||
end,
|
|
||||||
},
|
|
||||||
anchor: self.anchor,
|
anchor: self.anchor,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
use crate::prelude::*;
|
use crate::prelude::*;
|
||||||
|
|
||||||
|
use crate::parser::parse::parser::TracableContext;
|
||||||
use ansi_term::Color;
|
use ansi_term::Color;
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use language_reporting::{Diagnostic, Label, Severity};
|
use language_reporting::{Diagnostic, Label, Severity};
|
||||||
@ -62,6 +63,14 @@ impl ShellError {
|
|||||||
.start()
|
.start()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(crate) fn unexpected_eof(expected: impl Into<String>, tag: Tag) -> ShellError {
|
||||||
|
ProximateShellError::UnexpectedEof {
|
||||||
|
expected: expected.into(),
|
||||||
|
tag,
|
||||||
|
}
|
||||||
|
.start()
|
||||||
|
}
|
||||||
|
|
||||||
pub(crate) fn range_error(
|
pub(crate) fn range_error(
|
||||||
expected: impl Into<ExpectedRange>,
|
expected: impl Into<ExpectedRange>,
|
||||||
actual: &Tagged<impl fmt::Debug>,
|
actual: &Tagged<impl fmt::Debug>,
|
||||||
@ -82,6 +91,7 @@ impl ShellError {
|
|||||||
.start()
|
.start()
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[allow(unused)]
|
||||||
pub(crate) fn invalid_command(problem: impl Into<Tag>) -> ShellError {
|
pub(crate) fn invalid_command(problem: impl Into<Tag>) -> ShellError {
|
||||||
ProximateShellError::InvalidCommand {
|
ProximateShellError::InvalidCommand {
|
||||||
command: problem.into(),
|
command: problem.into(),
|
||||||
@ -133,7 +143,7 @@ impl ShellError {
|
|||||||
|
|
||||||
pub(crate) fn parse_error(
|
pub(crate) fn parse_error(
|
||||||
error: nom::Err<(
|
error: nom::Err<(
|
||||||
nom_locate::LocatedSpanEx<&str, uuid::Uuid>,
|
nom_locate::LocatedSpanEx<&str, TracableContext>,
|
||||||
nom::error::ErrorKind,
|
nom::error::ErrorKind,
|
||||||
)>,
|
)>,
|
||||||
) -> ShellError {
|
) -> ShellError {
|
||||||
@ -235,7 +245,6 @@ impl ShellError {
|
|||||||
Label::new_primary(tag)
|
Label::new_primary(tag)
|
||||||
.with_message(format!("Expected {}, found {}", expected, actual)),
|
.with_message(format!("Expected {}, found {}", expected, actual)),
|
||||||
),
|
),
|
||||||
|
|
||||||
ProximateShellError::TypeError {
|
ProximateShellError::TypeError {
|
||||||
expected,
|
expected,
|
||||||
actual:
|
actual:
|
||||||
@ -246,6 +255,11 @@ impl ShellError {
|
|||||||
} => Diagnostic::new(Severity::Error, "Type Error")
|
} => Diagnostic::new(Severity::Error, "Type Error")
|
||||||
.with_label(Label::new_primary(tag).with_message(expected)),
|
.with_label(Label::new_primary(tag).with_message(expected)),
|
||||||
|
|
||||||
|
ProximateShellError::UnexpectedEof {
|
||||||
|
expected, tag
|
||||||
|
} => Diagnostic::new(Severity::Error, format!("Unexpected end of input"))
|
||||||
|
.with_label(Label::new_primary(tag).with_message(format!("Expected {}", expected))),
|
||||||
|
|
||||||
ProximateShellError::RangeError {
|
ProximateShellError::RangeError {
|
||||||
kind,
|
kind,
|
||||||
operation,
|
operation,
|
||||||
@ -267,10 +281,10 @@ impl ShellError {
|
|||||||
problem:
|
problem:
|
||||||
Tagged {
|
Tagged {
|
||||||
tag,
|
tag,
|
||||||
..
|
item
|
||||||
},
|
},
|
||||||
} => Diagnostic::new(Severity::Error, "Syntax Error")
|
} => Diagnostic::new(Severity::Error, "Syntax Error")
|
||||||
.with_label(Label::new_primary(tag).with_message("Unexpected external command")),
|
.with_label(Label::new_primary(tag).with_message(item)),
|
||||||
|
|
||||||
ProximateShellError::MissingProperty { subpath, expr } => {
|
ProximateShellError::MissingProperty { subpath, expr } => {
|
||||||
let subpath = subpath.into_label();
|
let subpath = subpath.into_label();
|
||||||
@ -340,6 +354,10 @@ impl ShellError {
|
|||||||
pub(crate) fn unexpected(title: impl Into<String>) -> ShellError {
|
pub(crate) fn unexpected(title: impl Into<String>) -> ShellError {
|
||||||
ShellError::string(&format!("Unexpected: {}", title.into()))
|
ShellError::string(&format!("Unexpected: {}", title.into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(crate) fn unreachable(title: impl Into<String>) -> ShellError {
|
||||||
|
ShellError::string(&format!("BUG: Unreachable: {}", title.into()))
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)]
|
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)]
|
||||||
@ -387,6 +405,10 @@ pub enum ProximateShellError {
|
|||||||
SyntaxError {
|
SyntaxError {
|
||||||
problem: Tagged<String>,
|
problem: Tagged<String>,
|
||||||
},
|
},
|
||||||
|
UnexpectedEof {
|
||||||
|
expected: String,
|
||||||
|
tag: Tag,
|
||||||
|
},
|
||||||
InvalidCommand {
|
InvalidCommand {
|
||||||
command: Tag,
|
command: Tag,
|
||||||
},
|
},
|
||||||
@ -473,6 +495,7 @@ impl std::fmt::Display for ShellError {
|
|||||||
ProximateShellError::MissingValue { .. } => write!(f, "MissingValue"),
|
ProximateShellError::MissingValue { .. } => write!(f, "MissingValue"),
|
||||||
ProximateShellError::InvalidCommand { .. } => write!(f, "InvalidCommand"),
|
ProximateShellError::InvalidCommand { .. } => write!(f, "InvalidCommand"),
|
||||||
ProximateShellError::TypeError { .. } => write!(f, "TypeError"),
|
ProximateShellError::TypeError { .. } => write!(f, "TypeError"),
|
||||||
|
ProximateShellError::UnexpectedEof { .. } => write!(f, "UnexpectedEof"),
|
||||||
ProximateShellError::RangeError { .. } => write!(f, "RangeError"),
|
ProximateShellError::RangeError { .. } => write!(f, "RangeError"),
|
||||||
ProximateShellError::SyntaxError { .. } => write!(f, "SyntaxError"),
|
ProximateShellError::SyntaxError { .. } => write!(f, "SyntaxError"),
|
||||||
ProximateShellError::MissingProperty { .. } => write!(f, "MissingProperty"),
|
ProximateShellError::MissingProperty { .. } => write!(f, "MissingProperty"),
|
||||||
|
@ -7,6 +7,8 @@ use crate::parser::{
|
|||||||
use crate::prelude::*;
|
use crate::prelude::*;
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use indexmap::IndexMap;
|
use indexmap::IndexMap;
|
||||||
|
use log::trace;
|
||||||
|
use std::fmt;
|
||||||
|
|
||||||
#[derive(new)]
|
#[derive(new)]
|
||||||
pub struct Scope {
|
pub struct Scope {
|
||||||
@ -15,6 +17,15 @@ pub struct Scope {
|
|||||||
vars: IndexMap<String, Tagged<Value>>,
|
vars: IndexMap<String, Tagged<Value>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for Scope {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
f.debug_map()
|
||||||
|
.entry(&"$it", &format!("{:?}", self.it.item))
|
||||||
|
.entries(self.vars.iter().map(|(k, v)| (k, &v.item)))
|
||||||
|
.finish()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl Scope {
|
impl Scope {
|
||||||
pub(crate) fn empty() -> Scope {
|
pub(crate) fn empty() -> Scope {
|
||||||
Scope {
|
Scope {
|
||||||
@ -48,12 +59,15 @@ pub(crate) fn evaluate_baseline_expr(
|
|||||||
RawExpression::Synthetic(hir::Synthetic::String(s)) => {
|
RawExpression::Synthetic(hir::Synthetic::String(s)) => {
|
||||||
Ok(Value::string(s).tagged_unknown())
|
Ok(Value::string(s).tagged_unknown())
|
||||||
}
|
}
|
||||||
RawExpression::Variable(var) => evaluate_reference(var, scope, source),
|
RawExpression::Variable(var) => evaluate_reference(var, scope, source, expr.tag()),
|
||||||
|
RawExpression::Command(_) => evaluate_command(expr.tag(), scope, source),
|
||||||
RawExpression::ExternalCommand(external) => evaluate_external(external, scope, source),
|
RawExpression::ExternalCommand(external) => evaluate_external(external, scope, source),
|
||||||
RawExpression::Binary(binary) => {
|
RawExpression::Binary(binary) => {
|
||||||
let left = evaluate_baseline_expr(binary.left(), registry, scope, source)?;
|
let left = evaluate_baseline_expr(binary.left(), registry, scope, source)?;
|
||||||
let right = evaluate_baseline_expr(binary.right(), registry, scope, source)?;
|
let right = evaluate_baseline_expr(binary.right(), registry, scope, source)?;
|
||||||
|
|
||||||
|
trace!("left={:?} right={:?}", left.item, right.item);
|
||||||
|
|
||||||
match left.compare(binary.op(), &*right) {
|
match left.compare(binary.op(), &*right) {
|
||||||
Ok(result) => Ok(Value::boolean(result).tagged(expr.tag())),
|
Ok(result) => Ok(Value::boolean(result).tagged(expr.tag())),
|
||||||
Err((left_type, right_type)) => Err(ShellError::coerce_error(
|
Err((left_type, right_type)) => Err(ShellError::coerce_error(
|
||||||
@ -130,14 +144,16 @@ fn evaluate_reference(
|
|||||||
name: &hir::Variable,
|
name: &hir::Variable,
|
||||||
scope: &Scope,
|
scope: &Scope,
|
||||||
source: &Text,
|
source: &Text,
|
||||||
|
tag: Tag,
|
||||||
) -> Result<Tagged<Value>, ShellError> {
|
) -> Result<Tagged<Value>, ShellError> {
|
||||||
|
trace!("Evaluating {} with Scope {}", name, scope);
|
||||||
match name {
|
match name {
|
||||||
hir::Variable::It(tag) => Ok(scope.it.item.clone().tagged(*tag)),
|
hir::Variable::It(_) => Ok(scope.it.item.clone().tagged(tag)),
|
||||||
hir::Variable::Other(tag) => Ok(scope
|
hir::Variable::Other(inner) => Ok(scope
|
||||||
.vars
|
.vars
|
||||||
.get(tag.slice(source))
|
.get(inner.slice(source))
|
||||||
.map(|v| v.clone())
|
.map(|v| v.clone())
|
||||||
.unwrap_or_else(|| Value::nothing().tagged(*tag))),
|
.unwrap_or_else(|| Value::nothing().tagged(tag))),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -150,3 +166,7 @@ fn evaluate_external(
|
|||||||
"Unexpected external command".tagged(*external.name()),
|
"Unexpected external command".tagged(*external.name()),
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn evaluate_command(tag: Tag, _scope: &Scope, _source: &Text) -> Result<Tagged<Value>, ShellError> {
|
||||||
|
Err(ShellError::syntax_error("Unexpected command".tagged(tag)))
|
||||||
|
}
|
||||||
|
@ -31,7 +31,7 @@ pub use cli::cli;
|
|||||||
pub use data::base::{Primitive, Value};
|
pub use data::base::{Primitive, Value};
|
||||||
pub use data::config::{config_path, APP_INFO};
|
pub use data::config::{config_path, APP_INFO};
|
||||||
pub use data::dict::{Dictionary, TaggedDictBuilder};
|
pub use data::dict::{Dictionary, TaggedDictBuilder};
|
||||||
pub use data::meta::{Tag, Tagged, TaggedItem};
|
pub use data::meta::{Span, Tag, Tagged, TaggedItem};
|
||||||
pub use errors::{CoerceInto, ShellError};
|
pub use errors::{CoerceInto, ShellError};
|
||||||
pub use num_traits::cast::ToPrimitive;
|
pub use num_traits::cast::ToPrimitive;
|
||||||
pub use parser::parse::text::Text;
|
pub use parser::parse::text::Text;
|
||||||
|
@ -7,7 +7,7 @@ pub(crate) mod registry;
|
|||||||
use crate::errors::ShellError;
|
use crate::errors::ShellError;
|
||||||
|
|
||||||
pub(crate) use deserializer::ConfigDeserializer;
|
pub(crate) use deserializer::ConfigDeserializer;
|
||||||
pub(crate) use hir::baseline_parse_tokens::baseline_parse_tokens;
|
pub(crate) use hir::TokensIterator;
|
||||||
pub(crate) use parse::call_node::CallNode;
|
pub(crate) use parse::call_node::CallNode;
|
||||||
pub(crate) use parse::files::Files;
|
pub(crate) use parse::files::Files;
|
||||||
pub(crate) use parse::flag::Flag;
|
pub(crate) use parse::flag::Flag;
|
||||||
@ -15,10 +15,10 @@ pub(crate) use parse::operator::Operator;
|
|||||||
pub(crate) use parse::parser::{nom_input, pipeline};
|
pub(crate) use parse::parser::{nom_input, pipeline};
|
||||||
pub(crate) use parse::pipeline::{Pipeline, PipelineElement};
|
pub(crate) use parse::pipeline::{Pipeline, PipelineElement};
|
||||||
pub(crate) use parse::text::Text;
|
pub(crate) use parse::text::Text;
|
||||||
pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, PathNode, TokenNode};
|
pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
|
||||||
pub(crate) use parse::tokens::{RawToken, Token};
|
pub(crate) use parse::tokens::{RawToken, Token};
|
||||||
pub(crate) use parse::unit::Unit;
|
pub(crate) use parse::unit::Unit;
|
||||||
pub(crate) use parse_command::parse_command;
|
pub(crate) use parse_command::parse_command_tail;
|
||||||
pub(crate) use registry::CommandRegistry;
|
pub(crate) use registry::CommandRegistry;
|
||||||
|
|
||||||
pub fn parse(input: &str, anchor: uuid::Uuid) -> Result<TokenNode, ShellError> {
|
pub fn parse(input: &str, anchor: uuid::Uuid) -> Result<TokenNode, ShellError> {
|
||||||
|
@ -310,9 +310,10 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> {
|
|||||||
return Ok(r);
|
return Ok(r);
|
||||||
}
|
}
|
||||||
trace!(
|
trace!(
|
||||||
"deserializing struct {:?} {:?} (stack={:?})",
|
"deserializing struct {:?} {:?} (saw_root={} stack={:?})",
|
||||||
name,
|
name,
|
||||||
fields,
|
fields,
|
||||||
|
self.saw_root,
|
||||||
self.stack
|
self.stack
|
||||||
);
|
);
|
||||||
|
|
||||||
@ -326,6 +327,12 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> {
|
|||||||
let type_name = std::any::type_name::<V::Value>();
|
let type_name = std::any::type_name::<V::Value>();
|
||||||
let tagged_val_name = std::any::type_name::<Tagged<Value>>();
|
let tagged_val_name = std::any::type_name::<Tagged<Value>>();
|
||||||
|
|
||||||
|
trace!(
|
||||||
|
"type_name={} tagged_val_name={}",
|
||||||
|
type_name,
|
||||||
|
tagged_val_name
|
||||||
|
);
|
||||||
|
|
||||||
if type_name == tagged_val_name {
|
if type_name == tagged_val_name {
|
||||||
return visit::<Tagged<Value>, _>(value.val, name, fields, visitor);
|
return visit::<Tagged<Value>, _>(value.val, name, fields, visitor);
|
||||||
}
|
}
|
||||||
|
@ -1,11 +1,13 @@
|
|||||||
pub(crate) mod baseline_parse;
|
pub(crate) mod baseline_parse;
|
||||||
pub(crate) mod baseline_parse_tokens;
|
|
||||||
pub(crate) mod binary;
|
pub(crate) mod binary;
|
||||||
|
pub(crate) mod expand_external_tokens;
|
||||||
pub(crate) mod external_command;
|
pub(crate) mod external_command;
|
||||||
pub(crate) mod named;
|
pub(crate) mod named;
|
||||||
pub(crate) mod path;
|
pub(crate) mod path;
|
||||||
|
pub(crate) mod syntax_shape;
|
||||||
|
pub(crate) mod tokens_iterator;
|
||||||
|
|
||||||
use crate::parser::{registry, Unit};
|
use crate::parser::{registry, Operator, Unit};
|
||||||
use crate::prelude::*;
|
use crate::prelude::*;
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use getset::Getters;
|
use getset::Getters;
|
||||||
@ -14,27 +16,18 @@ use std::fmt;
|
|||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
||||||
use crate::evaluate::Scope;
|
use crate::evaluate::Scope;
|
||||||
|
use crate::parser::parse::tokens::RawNumber;
|
||||||
|
use crate::traits::ToDebug;
|
||||||
|
|
||||||
pub(crate) use self::baseline_parse::{
|
|
||||||
baseline_parse_single_token, baseline_parse_token_as_number, baseline_parse_token_as_path,
|
|
||||||
baseline_parse_token_as_pattern, baseline_parse_token_as_string,
|
|
||||||
};
|
|
||||||
pub(crate) use self::baseline_parse_tokens::{baseline_parse_next_expr, TokensIterator};
|
|
||||||
pub(crate) use self::binary::Binary;
|
pub(crate) use self::binary::Binary;
|
||||||
pub(crate) use self::external_command::ExternalCommand;
|
pub(crate) use self::external_command::ExternalCommand;
|
||||||
pub(crate) use self::named::NamedArguments;
|
pub(crate) use self::named::NamedArguments;
|
||||||
pub(crate) use self::path::Path;
|
pub(crate) use self::path::Path;
|
||||||
|
pub(crate) use self::syntax_shape::ExpandContext;
|
||||||
|
pub(crate) use self::tokens_iterator::debug::debug_tokens;
|
||||||
|
pub(crate) use self::tokens_iterator::TokensIterator;
|
||||||
|
|
||||||
pub use self::baseline_parse_tokens::SyntaxShape;
|
pub use self::syntax_shape::SyntaxShape;
|
||||||
|
|
||||||
pub fn path(head: impl Into<Expression>, tail: Vec<Tagged<impl Into<String>>>) -> Path {
|
|
||||||
Path::new(
|
|
||||||
head.into(),
|
|
||||||
tail.into_iter()
|
|
||||||
.map(|item| item.map(|string| string.into()))
|
|
||||||
.collect(),
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)]
|
#[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)]
|
||||||
pub struct Call {
|
pub struct Call {
|
||||||
@ -93,6 +86,7 @@ pub enum RawExpression {
|
|||||||
|
|
||||||
FilePath(PathBuf),
|
FilePath(PathBuf),
|
||||||
ExternalCommand(ExternalCommand),
|
ExternalCommand(ExternalCommand),
|
||||||
|
Command(Tag),
|
||||||
|
|
||||||
Boolean(bool),
|
Boolean(bool),
|
||||||
}
|
}
|
||||||
@ -115,13 +109,14 @@ impl RawExpression {
|
|||||||
match self {
|
match self {
|
||||||
RawExpression::Literal(literal) => literal.type_name(),
|
RawExpression::Literal(literal) => literal.type_name(),
|
||||||
RawExpression::Synthetic(synthetic) => synthetic.type_name(),
|
RawExpression::Synthetic(synthetic) => synthetic.type_name(),
|
||||||
|
RawExpression::Command(..) => "command",
|
||||||
RawExpression::ExternalWord => "external word",
|
RawExpression::ExternalWord => "external word",
|
||||||
RawExpression::FilePath(..) => "file path",
|
RawExpression::FilePath(..) => "file path",
|
||||||
RawExpression::Variable(..) => "variable",
|
RawExpression::Variable(..) => "variable",
|
||||||
RawExpression::List(..) => "list",
|
RawExpression::List(..) => "list",
|
||||||
RawExpression::Binary(..) => "binary",
|
RawExpression::Binary(..) => "binary",
|
||||||
RawExpression::Block(..) => "block",
|
RawExpression::Block(..) => "block",
|
||||||
RawExpression::Path(..) => "path",
|
RawExpression::Path(..) => "variable path",
|
||||||
RawExpression::Boolean(..) => "boolean",
|
RawExpression::Boolean(..) => "boolean",
|
||||||
RawExpression::ExternalCommand(..) => "external",
|
RawExpression::ExternalCommand(..) => "external",
|
||||||
}
|
}
|
||||||
@ -130,6 +125,39 @@ impl RawExpression {
|
|||||||
|
|
||||||
pub type Expression = Tagged<RawExpression>;
|
pub type Expression = Tagged<RawExpression>;
|
||||||
|
|
||||||
|
impl std::fmt::Display for Expression {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
let span = self.tag.span;
|
||||||
|
|
||||||
|
match &self.item {
|
||||||
|
RawExpression::Literal(literal) => write!(f, "{}", literal.tagged(self.tag)),
|
||||||
|
RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{}", s),
|
||||||
|
RawExpression::Command(_) => write!(f, "Command{{ {}..{} }}", span.start(), span.end()),
|
||||||
|
RawExpression::ExternalWord => {
|
||||||
|
write!(f, "ExternalWord{{ {}..{} }}", span.start(), span.end())
|
||||||
|
}
|
||||||
|
RawExpression::FilePath(file) => write!(f, "Path{{ {} }}", file.display()),
|
||||||
|
RawExpression::Variable(variable) => write!(f, "{}", variable),
|
||||||
|
RawExpression::List(list) => f
|
||||||
|
.debug_list()
|
||||||
|
.entries(list.iter().map(|e| format!("{}", e)))
|
||||||
|
.finish(),
|
||||||
|
RawExpression::Binary(binary) => write!(f, "{}", binary),
|
||||||
|
RawExpression::Block(items) => {
|
||||||
|
write!(f, "Block")?;
|
||||||
|
f.debug_set()
|
||||||
|
.entries(items.iter().map(|i| format!("{}", i)))
|
||||||
|
.finish()
|
||||||
|
}
|
||||||
|
RawExpression::Path(path) => write!(f, "{}", path),
|
||||||
|
RawExpression::Boolean(b) => write!(f, "${}", b),
|
||||||
|
RawExpression::ExternalCommand(..) => {
|
||||||
|
write!(f, "ExternalComment{{ {}..{} }}", span.start(), span.end())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl Expression {
|
impl Expression {
|
||||||
pub(crate) fn number(i: impl Into<Number>, tag: impl Into<Tag>) -> Expression {
|
pub(crate) fn number(i: impl Into<Number>, tag: impl Into<Tag>) -> Expression {
|
||||||
RawExpression::Literal(Literal::Number(i.into())).tagged(tag.into())
|
RawExpression::Literal(Literal::Number(i.into())).tagged(tag.into())
|
||||||
@ -151,10 +179,50 @@ impl Expression {
|
|||||||
RawExpression::Literal(Literal::String(inner.into())).tagged(outer.into())
|
RawExpression::Literal(Literal::String(inner.into())).tagged(outer.into())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(crate) fn path(
|
||||||
|
head: Expression,
|
||||||
|
tail: Vec<Tagged<impl Into<String>>>,
|
||||||
|
tag: impl Into<Tag>,
|
||||||
|
) -> Expression {
|
||||||
|
let tail = tail.into_iter().map(|t| t.map(|s| s.into())).collect();
|
||||||
|
RawExpression::Path(Box::new(Path::new(head, tail))).tagged(tag.into())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn dot_member(head: Expression, next: Tagged<impl Into<String>>) -> Expression {
|
||||||
|
let Tagged { item, tag } = head;
|
||||||
|
let new_tag = head.tag.until(next.tag);
|
||||||
|
|
||||||
|
match item {
|
||||||
|
RawExpression::Path(path) => {
|
||||||
|
let (head, mut tail) = path.parts();
|
||||||
|
|
||||||
|
tail.push(next.map(|i| i.into()));
|
||||||
|
Expression::path(head, tail, new_tag)
|
||||||
|
}
|
||||||
|
|
||||||
|
other => Expression::path(other.tagged(tag), vec![next], new_tag),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn infix(
|
||||||
|
left: Expression,
|
||||||
|
op: Tagged<impl Into<Operator>>,
|
||||||
|
right: Expression,
|
||||||
|
) -> Expression {
|
||||||
|
let new_tag = left.tag.until(right.tag);
|
||||||
|
|
||||||
|
RawExpression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right)))
|
||||||
|
.tagged(new_tag)
|
||||||
|
}
|
||||||
|
|
||||||
pub(crate) fn file_path(path: impl Into<PathBuf>, outer: impl Into<Tag>) -> Expression {
|
pub(crate) fn file_path(path: impl Into<PathBuf>, outer: impl Into<Tag>) -> Expression {
|
||||||
RawExpression::FilePath(path.into()).tagged(outer)
|
RawExpression::FilePath(path.into()).tagged(outer)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub(crate) fn list(list: Vec<Expression>, tag: impl Into<Tag>) -> Expression {
|
||||||
|
RawExpression::List(list).tagged(tag)
|
||||||
|
}
|
||||||
|
|
||||||
pub(crate) fn bare(tag: impl Into<Tag>) -> Expression {
|
pub(crate) fn bare(tag: impl Into<Tag>) -> Expression {
|
||||||
RawExpression::Literal(Literal::Bare).tagged(tag)
|
RawExpression::Literal(Literal::Bare).tagged(tag)
|
||||||
}
|
}
|
||||||
@ -182,6 +250,7 @@ impl ToDebug for Expression {
|
|||||||
RawExpression::Literal(l) => l.tagged(self.tag()).fmt_debug(f, source),
|
RawExpression::Literal(l) => l.tagged(self.tag()).fmt_debug(f, source),
|
||||||
RawExpression::FilePath(p) => write!(f, "{}", p.display()),
|
RawExpression::FilePath(p) => write!(f, "{}", p.display()),
|
||||||
RawExpression::ExternalWord => write!(f, "{}", self.tag().slice(source)),
|
RawExpression::ExternalWord => write!(f, "{}", self.tag().slice(source)),
|
||||||
|
RawExpression::Command(tag) => write!(f, "{}", tag.slice(source)),
|
||||||
RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{:?}", s),
|
RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{:?}", s),
|
||||||
RawExpression::Variable(Variable::It(_)) => write!(f, "$it"),
|
RawExpression::Variable(Variable::It(_)) => write!(f, "$it"),
|
||||||
RawExpression::Variable(Variable::Other(s)) => write!(f, "${}", s.slice(source)),
|
RawExpression::Variable(Variable::Other(s)) => write!(f, "${}", s.slice(source)),
|
||||||
@ -232,6 +301,26 @@ pub enum Literal {
|
|||||||
Bare,
|
Bare,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl std::fmt::Display for Tagged<Literal> {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
write!(f, "{}", Tagged::new(self.tag, &self.item))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::fmt::Display for Tagged<&Literal> {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
let span = self.tag.span;
|
||||||
|
|
||||||
|
match &self.item {
|
||||||
|
Literal::Number(number) => write!(f, "{}", number),
|
||||||
|
Literal::Size(number, unit) => write!(f, "{}{}", number, unit.as_str()),
|
||||||
|
Literal::String(_) => write!(f, "String{{ {}..{} }}", span.start(), span.end()),
|
||||||
|
Literal::GlobPattern => write!(f, "Glob{{ {}..{} }}", span.start(), span.end()),
|
||||||
|
Literal::Bare => write!(f, "Bare{{ {}..{} }}", span.start(), span.end()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl ToDebug for Tagged<&Literal> {
|
impl ToDebug for Tagged<&Literal> {
|
||||||
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
||||||
match self.item() {
|
match self.item() {
|
||||||
@ -261,3 +350,12 @@ pub enum Variable {
|
|||||||
It(Tag),
|
It(Tag),
|
||||||
Other(Tag),
|
Other(Tag),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl std::fmt::Display for Variable {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
match self {
|
||||||
|
Variable::It(_) => write!(f, "$it"),
|
||||||
|
Variable::Other(tag) => write!(f, "${{ {}..{} }}", tag.span.start(), tag.span.end()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
@ -1,140 +1,2 @@
|
|||||||
use crate::context::Context;
|
#[cfg(test)]
|
||||||
use crate::errors::ShellError;
|
mod tests;
|
||||||
use crate::parser::{hir, RawToken, Token};
|
|
||||||
use crate::TaggedItem;
|
|
||||||
use crate::Text;
|
|
||||||
use std::path::PathBuf;
|
|
||||||
|
|
||||||
pub fn baseline_parse_single_token(
|
|
||||||
token: &Token,
|
|
||||||
source: &Text,
|
|
||||||
) -> Result<hir::Expression, ShellError> {
|
|
||||||
Ok(match *token.item() {
|
|
||||||
RawToken::Number(number) => hir::Expression::number(number.to_number(source), token.tag()),
|
|
||||||
RawToken::Size(int, unit) => {
|
|
||||||
hir::Expression::size(int.to_number(source), unit, token.tag())
|
|
||||||
}
|
|
||||||
RawToken::String(tag) => hir::Expression::string(tag, token.tag()),
|
|
||||||
RawToken::Variable(tag) if tag.slice(source) == "it" => {
|
|
||||||
hir::Expression::it_variable(tag, token.tag())
|
|
||||||
}
|
|
||||||
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
|
|
||||||
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()),
|
|
||||||
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
|
|
||||||
RawToken::GlobPattern => hir::Expression::pattern(token.tag()),
|
|
||||||
RawToken::Bare => hir::Expression::bare(token.tag()),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn baseline_parse_token_as_number(
|
|
||||||
token: &Token,
|
|
||||||
source: &Text,
|
|
||||||
) -> Result<hir::Expression, ShellError> {
|
|
||||||
Ok(match *token.item() {
|
|
||||||
RawToken::Variable(tag) if tag.slice(source) == "it" => {
|
|
||||||
hir::Expression::it_variable(tag, token.tag())
|
|
||||||
}
|
|
||||||
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()),
|
|
||||||
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
|
|
||||||
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
|
|
||||||
RawToken::Number(number) => hir::Expression::number(number.to_number(source), token.tag()),
|
|
||||||
RawToken::Size(number, unit) => {
|
|
||||||
hir::Expression::size(number.to_number(source), unit, token.tag())
|
|
||||||
}
|
|
||||||
RawToken::Bare => hir::Expression::bare(token.tag()),
|
|
||||||
RawToken::GlobPattern => {
|
|
||||||
return Err(ShellError::type_error(
|
|
||||||
"Number",
|
|
||||||
"glob pattern".to_string().tagged(token.tag()),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
RawToken::String(tag) => hir::Expression::string(tag, token.tag()),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn baseline_parse_token_as_string(
|
|
||||||
token: &Token,
|
|
||||||
source: &Text,
|
|
||||||
) -> Result<hir::Expression, ShellError> {
|
|
||||||
Ok(match *token.item() {
|
|
||||||
RawToken::Variable(tag) if tag.slice(source) == "it" => {
|
|
||||||
hir::Expression::it_variable(tag, token.tag())
|
|
||||||
}
|
|
||||||
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()),
|
|
||||||
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
|
|
||||||
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
|
|
||||||
RawToken::Number(_) => hir::Expression::bare(token.tag()),
|
|
||||||
RawToken::Size(_, _) => hir::Expression::bare(token.tag()),
|
|
||||||
RawToken::Bare => hir::Expression::bare(token.tag()),
|
|
||||||
RawToken::GlobPattern => {
|
|
||||||
return Err(ShellError::type_error(
|
|
||||||
"String",
|
|
||||||
"glob pattern".tagged(token.tag()),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
RawToken::String(tag) => hir::Expression::string(tag, token.tag()),
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn baseline_parse_token_as_path(
|
|
||||||
token: &Token,
|
|
||||||
context: &Context,
|
|
||||||
source: &Text,
|
|
||||||
) -> Result<hir::Expression, ShellError> {
|
|
||||||
Ok(match *token.item() {
|
|
||||||
RawToken::Variable(tag) if tag.slice(source) == "it" => {
|
|
||||||
hir::Expression::it_variable(tag, token.tag())
|
|
||||||
}
|
|
||||||
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()),
|
|
||||||
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
|
|
||||||
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
|
|
||||||
RawToken::Number(_) => hir::Expression::bare(token.tag()),
|
|
||||||
RawToken::Size(_, _) => hir::Expression::bare(token.tag()),
|
|
||||||
RawToken::Bare => {
|
|
||||||
hir::Expression::file_path(expand_path(token.tag().slice(source), context), token.tag())
|
|
||||||
}
|
|
||||||
RawToken::GlobPattern => {
|
|
||||||
return Err(ShellError::type_error(
|
|
||||||
"Path",
|
|
||||||
"glob pattern".tagged(token.tag()),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
RawToken::String(tag) => {
|
|
||||||
hir::Expression::file_path(expand_path(tag.slice(source), context), token.tag())
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn baseline_parse_token_as_pattern(
|
|
||||||
token: &Token,
|
|
||||||
context: &Context,
|
|
||||||
source: &Text,
|
|
||||||
) -> Result<hir::Expression, ShellError> {
|
|
||||||
Ok(match *token.item() {
|
|
||||||
RawToken::Variable(tag) if tag.slice(source) == "it" => {
|
|
||||||
hir::Expression::it_variable(tag, token.tag())
|
|
||||||
}
|
|
||||||
RawToken::ExternalCommand(_) => {
|
|
||||||
return Err(ShellError::syntax_error(
|
|
||||||
"Invalid external command".to_string().tagged(token.tag()),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
|
|
||||||
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
|
|
||||||
RawToken::Number(_) => hir::Expression::bare(token.tag()),
|
|
||||||
RawToken::Size(_, _) => hir::Expression::bare(token.tag()),
|
|
||||||
RawToken::GlobPattern => hir::Expression::pattern(token.tag()),
|
|
||||||
RawToken::Bare => {
|
|
||||||
hir::Expression::file_path(expand_path(token.tag().slice(source), context), token.tag())
|
|
||||||
}
|
|
||||||
RawToken::String(tag) => {
|
|
||||||
hir::Expression::file_path(expand_path(tag.slice(source), context), token.tag())
|
|
||||||
}
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn expand_path(string: &str, context: &Context) -> PathBuf {
|
|
||||||
let expanded = shellexpand::tilde_with_context(string, || context.shell_manager.homedir());
|
|
||||||
|
|
||||||
PathBuf::from(expanded.as_ref())
|
|
||||||
}
|
|
||||||
|
144
src/parser/hir/baseline_parse/tests.rs
Normal file
144
src/parser/hir/baseline_parse/tests.rs
Normal file
@ -0,0 +1,144 @@
|
|||||||
|
use crate::commands::classified::InternalCommand;
|
||||||
|
use crate::commands::ClassifiedCommand;
|
||||||
|
use crate::env::host::BasicHost;
|
||||||
|
use crate::parser::hir;
|
||||||
|
use crate::parser::hir::syntax_shape::*;
|
||||||
|
use crate::parser::hir::TokensIterator;
|
||||||
|
use crate::parser::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b};
|
||||||
|
use crate::parser::TokenNode;
|
||||||
|
use crate::{Span, Tag, Tagged, TaggedItem, Text};
|
||||||
|
use pretty_assertions::assert_eq;
|
||||||
|
use std::fmt::Debug;
|
||||||
|
use uuid::Uuid;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_string() {
|
||||||
|
parse_tokens(StringShape, vec![b::string("hello")], |tokens| {
|
||||||
|
hir::Expression::string(inner_string_tag(tokens[0].tag()), tokens[0].tag())
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_path() {
|
||||||
|
parse_tokens(
|
||||||
|
VariablePathShape,
|
||||||
|
vec![b::var("it"), b::op("."), b::bare("cpu")],
|
||||||
|
|tokens| {
|
||||||
|
let (outer_var, inner_var) = tokens[0].expect_var();
|
||||||
|
let bare = tokens[2].expect_bare();
|
||||||
|
hir::Expression::path(
|
||||||
|
hir::Expression::it_variable(inner_var, outer_var),
|
||||||
|
vec!["cpu".tagged(bare)],
|
||||||
|
outer_var.until(bare),
|
||||||
|
)
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
parse_tokens(
|
||||||
|
VariablePathShape,
|
||||||
|
vec![
|
||||||
|
b::var("cpu"),
|
||||||
|
b::op("."),
|
||||||
|
b::bare("amount"),
|
||||||
|
b::op("."),
|
||||||
|
b::string("max ghz"),
|
||||||
|
],
|
||||||
|
|tokens| {
|
||||||
|
let (outer_var, inner_var) = tokens[0].expect_var();
|
||||||
|
let amount = tokens[2].expect_bare();
|
||||||
|
let (outer_max_ghz, _) = tokens[4].expect_string();
|
||||||
|
|
||||||
|
hir::Expression::path(
|
||||||
|
hir::Expression::variable(inner_var, outer_var),
|
||||||
|
vec!["amount".tagged(amount), "max ghz".tagged(outer_max_ghz)],
|
||||||
|
outer_var.until(outer_max_ghz),
|
||||||
|
)
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn test_parse_command() {
|
||||||
|
parse_tokens(
|
||||||
|
ClassifiedCommandShape,
|
||||||
|
vec![b::bare("ls"), b::sp(), b::pattern("*.txt")],
|
||||||
|
|tokens| {
|
||||||
|
let bare = tokens[0].expect_bare();
|
||||||
|
let pat = tokens[2].tag();
|
||||||
|
|
||||||
|
ClassifiedCommand::Internal(InternalCommand::new(
|
||||||
|
"ls".to_string(),
|
||||||
|
bare,
|
||||||
|
hir::Call {
|
||||||
|
head: Box::new(hir::RawExpression::Command(bare).tagged(bare)),
|
||||||
|
positional: Some(vec![hir::Expression::pattern(pat)]),
|
||||||
|
named: None,
|
||||||
|
},
|
||||||
|
))
|
||||||
|
// hir::Expression::path(
|
||||||
|
// hir::Expression::variable(inner_var, outer_var),
|
||||||
|
// vec!["cpu".tagged(bare)],
|
||||||
|
// outer_var.until(bare),
|
||||||
|
// )
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
parse_tokens(
|
||||||
|
VariablePathShape,
|
||||||
|
vec![
|
||||||
|
b::var("cpu"),
|
||||||
|
b::op("."),
|
||||||
|
b::bare("amount"),
|
||||||
|
b::op("."),
|
||||||
|
b::string("max ghz"),
|
||||||
|
],
|
||||||
|
|tokens| {
|
||||||
|
let (outer_var, inner_var) = tokens[0].expect_var();
|
||||||
|
let amount = tokens[2].expect_bare();
|
||||||
|
let (outer_max_ghz, _) = tokens[4].expect_string();
|
||||||
|
|
||||||
|
hir::Expression::path(
|
||||||
|
hir::Expression::variable(inner_var, outer_var),
|
||||||
|
vec!["amount".tagged(amount), "max ghz".tagged(outer_max_ghz)],
|
||||||
|
outer_var.until(outer_max_ghz),
|
||||||
|
)
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_tokens<T: Eq + Debug>(
|
||||||
|
shape: impl ExpandSyntax<Output = T>,
|
||||||
|
tokens: Vec<CurriedToken>,
|
||||||
|
expected: impl FnOnce(Tagged<&[TokenNode]>) -> T,
|
||||||
|
) {
|
||||||
|
let tokens = b::token_list(tokens);
|
||||||
|
let (tokens, source) = b::build(test_origin(), tokens);
|
||||||
|
|
||||||
|
ExpandContext::with_empty(&Text::from(source), |context| {
|
||||||
|
let tokens = tokens.expect_list();
|
||||||
|
let mut iterator = TokensIterator::all(tokens.item, *context.tag());
|
||||||
|
|
||||||
|
let expr = expand_syntax(&shape, &mut iterator, &context);
|
||||||
|
|
||||||
|
let expr = match expr {
|
||||||
|
Ok(expr) => expr,
|
||||||
|
Err(err) => {
|
||||||
|
crate::cli::print_err(err, &BasicHost, context.source().clone());
|
||||||
|
panic!("Parse failed");
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
assert_eq!(expr, expected(tokens));
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn test_origin() -> Uuid {
|
||||||
|
Uuid::nil()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn inner_string_tag(tag: Tag) -> Tag {
|
||||||
|
Tag {
|
||||||
|
span: Span::new(tag.span.start() + 1, tag.span.end() - 1),
|
||||||
|
anchor: tag.anchor,
|
||||||
|
}
|
||||||
|
}
|
@ -1,459 +0,0 @@
|
|||||||
use crate::context::Context;
|
|
||||||
use crate::errors::ShellError;
|
|
||||||
use crate::parser::{
|
|
||||||
hir,
|
|
||||||
hir::{
|
|
||||||
baseline_parse_single_token, baseline_parse_token_as_number, baseline_parse_token_as_path,
|
|
||||||
baseline_parse_token_as_pattern, baseline_parse_token_as_string,
|
|
||||||
},
|
|
||||||
DelimitedNode, Delimiter, PathNode, RawToken, TokenNode,
|
|
||||||
};
|
|
||||||
use crate::{Tag, Tagged, TaggedItem, Text};
|
|
||||||
use derive_new::new;
|
|
||||||
use log::trace;
|
|
||||||
use serde::{Deserialize, Serialize};
|
|
||||||
|
|
||||||
pub fn baseline_parse_tokens(
|
|
||||||
token_nodes: &mut TokensIterator<'_>,
|
|
||||||
context: &Context,
|
|
||||||
source: &Text,
|
|
||||||
syntax_type: SyntaxShape,
|
|
||||||
) -> Result<Vec<hir::Expression>, ShellError> {
|
|
||||||
let mut exprs: Vec<hir::Expression> = vec![];
|
|
||||||
|
|
||||||
loop {
|
|
||||||
if token_nodes.at_end() {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
let expr = baseline_parse_next_expr(token_nodes, context, source, syntax_type)?;
|
|
||||||
exprs.push(expr);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(exprs)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
|
|
||||||
pub enum SyntaxShape {
|
|
||||||
Any,
|
|
||||||
List,
|
|
||||||
Literal,
|
|
||||||
String,
|
|
||||||
Member,
|
|
||||||
Variable,
|
|
||||||
Number,
|
|
||||||
Path,
|
|
||||||
Pattern,
|
|
||||||
Binary,
|
|
||||||
Block,
|
|
||||||
Boolean,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl std::fmt::Display for SyntaxShape {
|
|
||||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
|
||||||
match self {
|
|
||||||
SyntaxShape::Any => write!(f, "Any"),
|
|
||||||
SyntaxShape::List => write!(f, "List"),
|
|
||||||
SyntaxShape::Literal => write!(f, "Literal"),
|
|
||||||
SyntaxShape::String => write!(f, "String"),
|
|
||||||
SyntaxShape::Member => write!(f, "Member"),
|
|
||||||
SyntaxShape::Variable => write!(f, "Variable"),
|
|
||||||
SyntaxShape::Number => write!(f, "Number"),
|
|
||||||
SyntaxShape::Path => write!(f, "Path"),
|
|
||||||
SyntaxShape::Pattern => write!(f, "Pattern"),
|
|
||||||
SyntaxShape::Binary => write!(f, "Binary"),
|
|
||||||
SyntaxShape::Block => write!(f, "Block"),
|
|
||||||
SyntaxShape::Boolean => write!(f, "Boolean"),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn baseline_parse_next_expr(
|
|
||||||
tokens: &mut TokensIterator,
|
|
||||||
context: &Context,
|
|
||||||
source: &Text,
|
|
||||||
syntax_type: SyntaxShape,
|
|
||||||
) -> Result<hir::Expression, ShellError> {
|
|
||||||
let next = tokens
|
|
||||||
.next()
|
|
||||||
.ok_or_else(|| ShellError::string("Expected token, found none"))?;
|
|
||||||
|
|
||||||
trace!(target: "nu::parser::parse_one_expr", "syntax_type={:?}, token={:?}", syntax_type, next);
|
|
||||||
|
|
||||||
match (syntax_type, next) {
|
|
||||||
(SyntaxShape::Path, TokenNode::Token(token)) => {
|
|
||||||
return baseline_parse_token_as_path(token, context, source)
|
|
||||||
}
|
|
||||||
|
|
||||||
(SyntaxShape::Path, token) => {
|
|
||||||
return Err(ShellError::type_error(
|
|
||||||
"Path",
|
|
||||||
token.type_name().tagged(token.tag()),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
(SyntaxShape::Pattern, TokenNode::Token(token)) => {
|
|
||||||
return baseline_parse_token_as_pattern(token, context, source)
|
|
||||||
}
|
|
||||||
|
|
||||||
(SyntaxShape::Pattern, token) => {
|
|
||||||
return Err(ShellError::type_error(
|
|
||||||
"Path",
|
|
||||||
token.type_name().tagged(token.tag()),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
(SyntaxShape::String, TokenNode::Token(token)) => {
|
|
||||||
return baseline_parse_token_as_string(token, source);
|
|
||||||
}
|
|
||||||
|
|
||||||
(SyntaxShape::String, token) => {
|
|
||||||
return Err(ShellError::type_error(
|
|
||||||
"String",
|
|
||||||
token.type_name().tagged(token.tag()),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
(SyntaxShape::Number, TokenNode::Token(token)) => {
|
|
||||||
return Ok(baseline_parse_token_as_number(token, source)?);
|
|
||||||
}
|
|
||||||
|
|
||||||
(SyntaxShape::Number, token) => {
|
|
||||||
return Err(ShellError::type_error(
|
|
||||||
"Numeric",
|
|
||||||
token.type_name().tagged(token.tag()),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: More legit member processing
|
|
||||||
(SyntaxShape::Member, TokenNode::Token(token)) => {
|
|
||||||
return baseline_parse_token_as_string(token, source);
|
|
||||||
}
|
|
||||||
|
|
||||||
(SyntaxShape::Member, token) => {
|
|
||||||
return Err(ShellError::type_error(
|
|
||||||
"member",
|
|
||||||
token.type_name().tagged(token.tag()),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
(SyntaxShape::Any, _) => {}
|
|
||||||
(SyntaxShape::List, _) => {}
|
|
||||||
(SyntaxShape::Literal, _) => {}
|
|
||||||
(SyntaxShape::Variable, _) => {}
|
|
||||||
(SyntaxShape::Binary, _) => {}
|
|
||||||
(SyntaxShape::Block, _) => {}
|
|
||||||
(SyntaxShape::Boolean, _) => {}
|
|
||||||
};
|
|
||||||
|
|
||||||
let first = baseline_parse_semantic_token(next, context, source)?;
|
|
||||||
|
|
||||||
let possible_op = tokens.peek();
|
|
||||||
|
|
||||||
let op = match possible_op {
|
|
||||||
Some(TokenNode::Operator(op)) => op.clone(),
|
|
||||||
_ => return Ok(first),
|
|
||||||
};
|
|
||||||
|
|
||||||
tokens.next();
|
|
||||||
|
|
||||||
let second = match tokens.next() {
|
|
||||||
None => {
|
|
||||||
return Err(ShellError::labeled_error(
|
|
||||||
"Expected something after an operator",
|
|
||||||
"operator",
|
|
||||||
op.tag(),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
Some(token) => baseline_parse_semantic_token(token, context, source)?,
|
|
||||||
};
|
|
||||||
|
|
||||||
// We definitely have a binary expression here -- let's see if we should coerce it into a block
|
|
||||||
|
|
||||||
match syntax_type {
|
|
||||||
SyntaxShape::Any => {
|
|
||||||
let tag = first.tag().until(second.tag());
|
|
||||||
let binary = hir::Binary::new(first, op, second);
|
|
||||||
let binary = hir::RawExpression::Binary(Box::new(binary));
|
|
||||||
let binary = binary.tagged(tag);
|
|
||||||
|
|
||||||
Ok(binary)
|
|
||||||
}
|
|
||||||
|
|
||||||
SyntaxShape::Block => {
|
|
||||||
let tag = first.tag().until(second.tag());
|
|
||||||
|
|
||||||
let path: Tagged<hir::RawExpression> = match first {
|
|
||||||
Tagged {
|
|
||||||
item: hir::RawExpression::Literal(hir::Literal::Bare),
|
|
||||||
tag,
|
|
||||||
} => {
|
|
||||||
let string = tag.slice(source).to_string().tagged(tag);
|
|
||||||
let path = hir::Path::new(
|
|
||||||
// TODO: Deal with synthetic nodes that have no representation at all in source
|
|
||||||
hir::RawExpression::Variable(hir::Variable::It(Tag::unknown()))
|
|
||||||
.tagged(Tag::unknown()),
|
|
||||||
vec![string],
|
|
||||||
);
|
|
||||||
let path = hir::RawExpression::Path(Box::new(path));
|
|
||||||
path.tagged(first.tag())
|
|
||||||
}
|
|
||||||
Tagged {
|
|
||||||
item: hir::RawExpression::Literal(hir::Literal::String(inner)),
|
|
||||||
tag,
|
|
||||||
} => {
|
|
||||||
let string = inner.slice(source).to_string().tagged(tag);
|
|
||||||
let path = hir::Path::new(
|
|
||||||
// TODO: Deal with synthetic nodes that have no representation at all in source
|
|
||||||
hir::RawExpression::Variable(hir::Variable::It(Tag::unknown()))
|
|
||||||
.tagged_unknown(),
|
|
||||||
vec![string],
|
|
||||||
);
|
|
||||||
let path = hir::RawExpression::Path(Box::new(path));
|
|
||||||
path.tagged(first.tag())
|
|
||||||
}
|
|
||||||
Tagged {
|
|
||||||
item: hir::RawExpression::Variable(..),
|
|
||||||
..
|
|
||||||
} => first,
|
|
||||||
Tagged { tag, item } => {
|
|
||||||
return Err(ShellError::labeled_error(
|
|
||||||
"The first part of an un-braced block must be a column name",
|
|
||||||
item.type_name(),
|
|
||||||
tag,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
let binary = hir::Binary::new(path, op, second);
|
|
||||||
let binary = hir::RawExpression::Binary(Box::new(binary));
|
|
||||||
let binary = binary.tagged(tag);
|
|
||||||
|
|
||||||
let block = hir::RawExpression::Block(vec![binary]);
|
|
||||||
let block = block.tagged(tag);
|
|
||||||
|
|
||||||
Ok(block)
|
|
||||||
}
|
|
||||||
|
|
||||||
other => Err(ShellError::unimplemented(format!(
|
|
||||||
"coerce hint {:?}",
|
|
||||||
other
|
|
||||||
))),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn baseline_parse_semantic_token(
|
|
||||||
token: &TokenNode,
|
|
||||||
context: &Context,
|
|
||||||
source: &Text,
|
|
||||||
) -> Result<hir::Expression, ShellError> {
|
|
||||||
match token {
|
|
||||||
TokenNode::Token(token) => baseline_parse_single_token(token, source),
|
|
||||||
TokenNode::Call(_call) => unimplemented!(),
|
|
||||||
TokenNode::Delimited(delimited) => baseline_parse_delimited(delimited, context, source),
|
|
||||||
TokenNode::Pipeline(_pipeline) => unimplemented!(),
|
|
||||||
TokenNode::Operator(op) => Err(ShellError::syntax_error(
|
|
||||||
"Unexpected operator".tagged(op.tag),
|
|
||||||
)),
|
|
||||||
TokenNode::Flag(flag) => Err(ShellError::syntax_error("Unexpected flag".tagged(flag.tag))),
|
|
||||||
TokenNode::Member(tag) => Err(ShellError::syntax_error(
|
|
||||||
"BUG: Top-level member".tagged(*tag),
|
|
||||||
)),
|
|
||||||
TokenNode::Whitespace(tag) => Err(ShellError::syntax_error(
|
|
||||||
"BUG: Whitespace found during parse".tagged(*tag),
|
|
||||||
)),
|
|
||||||
TokenNode::Error(error) => Err(*error.item.clone()),
|
|
||||||
TokenNode::Path(path) => baseline_parse_path(path, context, source),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn baseline_parse_delimited(
|
|
||||||
token: &Tagged<DelimitedNode>,
|
|
||||||
context: &Context,
|
|
||||||
source: &Text,
|
|
||||||
) -> Result<hir::Expression, ShellError> {
|
|
||||||
match token.delimiter() {
|
|
||||||
Delimiter::Brace => {
|
|
||||||
let children = token.children();
|
|
||||||
let exprs = baseline_parse_tokens(
|
|
||||||
&mut TokensIterator::new(children),
|
|
||||||
context,
|
|
||||||
source,
|
|
||||||
SyntaxShape::Any,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
let expr = hir::RawExpression::Block(exprs);
|
|
||||||
Ok(expr.tagged(token.tag()))
|
|
||||||
}
|
|
||||||
Delimiter::Paren => unimplemented!(),
|
|
||||||
Delimiter::Square => {
|
|
||||||
let children = token.children();
|
|
||||||
let exprs = baseline_parse_tokens(
|
|
||||||
&mut TokensIterator::new(children),
|
|
||||||
context,
|
|
||||||
source,
|
|
||||||
SyntaxShape::Any,
|
|
||||||
)?;
|
|
||||||
|
|
||||||
let expr = hir::RawExpression::List(exprs);
|
|
||||||
Ok(expr.tagged(token.tag()))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn baseline_parse_path(
|
|
||||||
token: &Tagged<PathNode>,
|
|
||||||
context: &Context,
|
|
||||||
source: &Text,
|
|
||||||
) -> Result<hir::Expression, ShellError> {
|
|
||||||
let head = baseline_parse_semantic_token(token.head(), context, source)?;
|
|
||||||
|
|
||||||
let mut tail = vec![];
|
|
||||||
|
|
||||||
for part in token.tail() {
|
|
||||||
let string = match part {
|
|
||||||
TokenNode::Token(token) => match token.item() {
|
|
||||||
RawToken::Bare => token.tag().slice(source),
|
|
||||||
RawToken::String(tag) => tag.slice(source),
|
|
||||||
RawToken::Number(_)
|
|
||||||
| RawToken::Size(..)
|
|
||||||
| RawToken::Variable(_)
|
|
||||||
| RawToken::ExternalCommand(_)
|
|
||||||
| RawToken::GlobPattern
|
|
||||||
| RawToken::ExternalWord => {
|
|
||||||
return Err(ShellError::type_error(
|
|
||||||
"String",
|
|
||||||
token.type_name().tagged(part.tag()),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
TokenNode::Member(tag) => tag.slice(source),
|
|
||||||
|
|
||||||
// TODO: Make this impossible
|
|
||||||
other => {
|
|
||||||
return Err(ShellError::syntax_error(
|
|
||||||
format!("{} in path", other.type_name()).tagged(other.tag()),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
.to_string();
|
|
||||||
|
|
||||||
tail.push(string.tagged(part.tag()));
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(hir::path(head, tail).tagged(token.tag()).into())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, new)]
|
|
||||||
pub struct TokensIterator<'a> {
|
|
||||||
tokens: &'a [TokenNode],
|
|
||||||
#[new(default)]
|
|
||||||
index: usize,
|
|
||||||
#[new(default)]
|
|
||||||
seen: indexmap::IndexSet<usize>,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl TokensIterator<'_> {
|
|
||||||
pub fn remove(&mut self, position: usize) {
|
|
||||||
self.seen.insert(position);
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn len(&self) -> usize {
|
|
||||||
self.tokens.len()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn at_end(&self) -> bool {
|
|
||||||
for index in self.index..self.tokens.len() {
|
|
||||||
if !self.seen.contains(&index) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
true
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn advance(&mut self) {
|
|
||||||
self.seen.insert(self.index);
|
|
||||||
self.index += 1;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn extract<T>(&mut self, f: impl Fn(&TokenNode) -> Option<T>) -> Option<(usize, T)> {
|
|
||||||
for (i, item) in self.tokens.iter().enumerate() {
|
|
||||||
if self.seen.contains(&i) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
match f(item) {
|
|
||||||
None => {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
Some(value) => {
|
|
||||||
self.seen.insert(i);
|
|
||||||
return Some((i, value));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
None
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn move_to(&mut self, pos: usize) {
|
|
||||||
self.index = pos;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn restart(&mut self) {
|
|
||||||
self.index = 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn clone(&self) -> TokensIterator {
|
|
||||||
TokensIterator {
|
|
||||||
tokens: self.tokens,
|
|
||||||
index: self.index,
|
|
||||||
seen: self.seen.clone(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn peek(&self) -> Option<&TokenNode> {
|
|
||||||
let mut tokens = self.clone();
|
|
||||||
|
|
||||||
tokens.next()
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn debug_remaining(&self) -> Vec<TokenNode> {
|
|
||||||
let mut tokens = self.clone();
|
|
||||||
tokens.restart();
|
|
||||||
tokens.cloned().collect()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl<'a> Iterator for TokensIterator<'a> {
|
|
||||||
type Item = &'a TokenNode;
|
|
||||||
|
|
||||||
fn next(&mut self) -> Option<&'a TokenNode> {
|
|
||||||
loop {
|
|
||||||
if self.index >= self.tokens.len() {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
if self.seen.contains(&self.index) {
|
|
||||||
self.advance();
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if self.index >= self.tokens.len() {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
match &self.tokens[self.index] {
|
|
||||||
TokenNode::Whitespace(_) => {
|
|
||||||
self.advance();
|
|
||||||
}
|
|
||||||
other => {
|
|
||||||
self.advance();
|
|
||||||
return Some(other);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -16,6 +16,12 @@ pub struct Binary {
|
|||||||
right: Expression,
|
right: Expression,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for Binary {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
write!(f, "({} {} {})", self.op.as_str(), self.left, self.right)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl ToDebug for Binary {
|
impl ToDebug for Binary {
|
||||||
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
||||||
write!(f, "{}", self.left.debug(source))?;
|
write!(f, "{}", self.left.debug(source))?;
|
||||||
|
87
src/parser/hir/expand_external_tokens.rs
Normal file
87
src/parser/hir/expand_external_tokens.rs
Normal file
@ -0,0 +1,87 @@
|
|||||||
|
use crate::errors::ShellError;
|
||||||
|
use crate::parser::{TokenNode, TokensIterator};
|
||||||
|
use crate::{Tag, Tagged, Text};
|
||||||
|
|
||||||
|
pub fn expand_external_tokens(
|
||||||
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
|
source: &Text,
|
||||||
|
) -> Result<Vec<Tagged<String>>, ShellError> {
|
||||||
|
let mut out: Vec<Tagged<String>> = vec![];
|
||||||
|
|
||||||
|
loop {
|
||||||
|
if let Some(tag) = expand_next_expression(token_nodes)? {
|
||||||
|
out.push(tag.tagged_string(source));
|
||||||
|
} else {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(out)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn expand_next_expression(
|
||||||
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
|
) -> Result<Option<Tag>, ShellError> {
|
||||||
|
let first = token_nodes.next_non_ws();
|
||||||
|
|
||||||
|
let first = match first {
|
||||||
|
None => return Ok(None),
|
||||||
|
Some(v) => v,
|
||||||
|
};
|
||||||
|
|
||||||
|
let first = triage_external_head(first)?;
|
||||||
|
let mut last = first;
|
||||||
|
|
||||||
|
loop {
|
||||||
|
let continuation = triage_continuation(token_nodes)?;
|
||||||
|
|
||||||
|
if let Some(continuation) = continuation {
|
||||||
|
last = continuation;
|
||||||
|
} else {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(Some(first.until(last)))
|
||||||
|
}
|
||||||
|
|
||||||
|
fn triage_external_head(node: &TokenNode) -> Result<Tag, ShellError> {
|
||||||
|
Ok(match node {
|
||||||
|
TokenNode::Token(token) => token.tag(),
|
||||||
|
TokenNode::Call(_call) => unimplemented!(),
|
||||||
|
TokenNode::Nodes(_nodes) => unimplemented!(),
|
||||||
|
TokenNode::Delimited(_delimited) => unimplemented!(),
|
||||||
|
TokenNode::Pipeline(_pipeline) => unimplemented!(),
|
||||||
|
TokenNode::Flag(flag) => flag.tag(),
|
||||||
|
TokenNode::Member(member) => *member,
|
||||||
|
TokenNode::Whitespace(_whitespace) => {
|
||||||
|
unreachable!("This function should be called after next_non_ws()")
|
||||||
|
}
|
||||||
|
TokenNode::Error(_error) => unimplemented!(),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn triage_continuation<'a, 'b>(
|
||||||
|
nodes: &'a mut TokensIterator<'b>,
|
||||||
|
) -> Result<Option<Tag>, ShellError> {
|
||||||
|
let mut peeked = nodes.peek_any();
|
||||||
|
|
||||||
|
let node = match peeked.node {
|
||||||
|
None => return Ok(None),
|
||||||
|
Some(node) => node,
|
||||||
|
};
|
||||||
|
|
||||||
|
match &node {
|
||||||
|
node if node.is_whitespace() => return Ok(None),
|
||||||
|
TokenNode::Token(..) | TokenNode::Flag(..) | TokenNode::Member(..) => {}
|
||||||
|
TokenNode::Call(..) => unimplemented!("call"),
|
||||||
|
TokenNode::Nodes(..) => unimplemented!("nodes"),
|
||||||
|
TokenNode::Delimited(..) => unimplemented!("delimited"),
|
||||||
|
TokenNode::Pipeline(..) => unimplemented!("pipeline"),
|
||||||
|
TokenNode::Whitespace(..) => unimplemented!("whitespace"),
|
||||||
|
TokenNode::Error(..) => unimplemented!("error"),
|
||||||
|
}
|
||||||
|
|
||||||
|
peeked.commit();
|
||||||
|
Ok(Some(node.tag()))
|
||||||
|
}
|
@ -9,7 +9,7 @@ use std::fmt;
|
|||||||
)]
|
)]
|
||||||
#[get = "pub(crate)"]
|
#[get = "pub(crate)"]
|
||||||
pub struct ExternalCommand {
|
pub struct ExternalCommand {
|
||||||
name: Tag,
|
pub(crate) name: Tag,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ToDebug for ExternalCommand {
|
impl ToDebug for ExternalCommand {
|
||||||
|
@ -2,19 +2,49 @@ use crate::parser::hir::Expression;
|
|||||||
use crate::prelude::*;
|
use crate::prelude::*;
|
||||||
use crate::Tagged;
|
use crate::Tagged;
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use getset::Getters;
|
use getset::{Getters, MutGetters};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
|
|
||||||
#[derive(
|
#[derive(
|
||||||
Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new,
|
Debug,
|
||||||
|
Clone,
|
||||||
|
Eq,
|
||||||
|
PartialEq,
|
||||||
|
Ord,
|
||||||
|
PartialOrd,
|
||||||
|
Hash,
|
||||||
|
Getters,
|
||||||
|
MutGetters,
|
||||||
|
Serialize,
|
||||||
|
Deserialize,
|
||||||
|
new,
|
||||||
)]
|
)]
|
||||||
#[get = "pub(crate)"]
|
#[get = "pub(crate)"]
|
||||||
pub struct Path {
|
pub struct Path {
|
||||||
head: Expression,
|
head: Expression,
|
||||||
|
#[get_mut = "pub(crate)"]
|
||||||
tail: Vec<Tagged<String>>,
|
tail: Vec<Tagged<String>>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl fmt::Display for Path {
|
||||||
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
|
write!(f, "{}", self.head)?;
|
||||||
|
|
||||||
|
for entry in &self.tail {
|
||||||
|
write!(f, ".{}", entry.item)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Path {
|
||||||
|
pub(crate) fn parts(self) -> (Expression, Vec<Tagged<String>>) {
|
||||||
|
(self.head, self.tail)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
impl ToDebug for Path {
|
impl ToDebug for Path {
|
||||||
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
||||||
write!(f, "{}", self.head.debug(source))?;
|
write!(f, "{}", self.head.debug(source))?;
|
||||||
|
662
src/parser/hir/syntax_shape.rs
Normal file
662
src/parser/hir/syntax_shape.rs
Normal file
@ -0,0 +1,662 @@
|
|||||||
|
mod block;
|
||||||
|
mod expression;
|
||||||
|
|
||||||
|
use crate::cli::external_command;
|
||||||
|
use crate::commands::{classified::InternalCommand, ClassifiedCommand, Command};
|
||||||
|
use crate::parser::hir::syntax_shape::block::AnyBlockShape;
|
||||||
|
use crate::parser::hir::tokens_iterator::Peeked;
|
||||||
|
use crate::parser::parse_command::parse_command_tail;
|
||||||
|
use crate::parser::{
|
||||||
|
hir,
|
||||||
|
hir::{debug_tokens, TokensIterator},
|
||||||
|
Operator, RawToken, TokenNode,
|
||||||
|
};
|
||||||
|
use crate::prelude::*;
|
||||||
|
use derive_new::new;
|
||||||
|
use getset::Getters;
|
||||||
|
use log::trace;
|
||||||
|
use serde::{Deserialize, Serialize};
|
||||||
|
use std::path::{Path, PathBuf};
|
||||||
|
|
||||||
|
pub(crate) use self::expression::file_path::FilePathShape;
|
||||||
|
pub(crate) use self::expression::list::ExpressionListShape;
|
||||||
|
pub(crate) use self::expression::number::{IntShape, NumberShape};
|
||||||
|
pub(crate) use self::expression::pattern::PatternShape;
|
||||||
|
pub(crate) use self::expression::string::StringShape;
|
||||||
|
pub(crate) use self::expression::unit::UnitShape;
|
||||||
|
pub(crate) use self::expression::variable_path::{
|
||||||
|
ColumnPathShape, DotShape, ExpressionContinuation, ExpressionContinuationShape, MemberShape,
|
||||||
|
PathTailShape, VariablePathShape,
|
||||||
|
};
|
||||||
|
pub(crate) use self::expression::{continue_expression, AnyExpressionShape};
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
|
||||||
|
pub enum SyntaxShape {
|
||||||
|
Any,
|
||||||
|
List,
|
||||||
|
String,
|
||||||
|
Member,
|
||||||
|
ColumnPath,
|
||||||
|
Number,
|
||||||
|
Int,
|
||||||
|
Path,
|
||||||
|
Pattern,
|
||||||
|
Binary,
|
||||||
|
Block,
|
||||||
|
Boolean,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ExpandExpression for SyntaxShape {
|
||||||
|
fn expand_expr<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
match self {
|
||||||
|
SyntaxShape::Any => expand_expr(&AnyExpressionShape, token_nodes, context),
|
||||||
|
SyntaxShape::List => Err(ShellError::unimplemented("SyntaxShape:List")),
|
||||||
|
SyntaxShape::Int => expand_expr(&IntShape, token_nodes, context),
|
||||||
|
SyntaxShape::String => expand_expr(&StringShape, token_nodes, context),
|
||||||
|
SyntaxShape::Member => {
|
||||||
|
let syntax = expand_syntax(&MemberShape, token_nodes, context)?;
|
||||||
|
Ok(syntax.to_expr())
|
||||||
|
}
|
||||||
|
SyntaxShape::ColumnPath => {
|
||||||
|
let Tagged { item: members, tag } =
|
||||||
|
expand_syntax(&ColumnPathShape, token_nodes, context)?;
|
||||||
|
|
||||||
|
Ok(hir::Expression::list(
|
||||||
|
members.into_iter().map(|s| s.to_expr()).collect(),
|
||||||
|
tag,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
SyntaxShape::Number => expand_expr(&NumberShape, token_nodes, context),
|
||||||
|
SyntaxShape::Path => expand_expr(&FilePathShape, token_nodes, context),
|
||||||
|
SyntaxShape::Pattern => expand_expr(&PatternShape, token_nodes, context),
|
||||||
|
SyntaxShape::Binary => Err(ShellError::unimplemented("SyntaxShape:Binary")),
|
||||||
|
SyntaxShape::Block => expand_expr(&AnyBlockShape, token_nodes, context),
|
||||||
|
SyntaxShape::Boolean => Err(ShellError::unimplemented("SyntaxShape:Boolean")),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl std::fmt::Display for SyntaxShape {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||||
|
match self {
|
||||||
|
SyntaxShape::Any => write!(f, "Any"),
|
||||||
|
SyntaxShape::List => write!(f, "List"),
|
||||||
|
SyntaxShape::String => write!(f, "String"),
|
||||||
|
SyntaxShape::Int => write!(f, "Integer"),
|
||||||
|
SyntaxShape::Member => write!(f, "Member"),
|
||||||
|
SyntaxShape::ColumnPath => write!(f, "ColumnPath"),
|
||||||
|
SyntaxShape::Number => write!(f, "Number"),
|
||||||
|
SyntaxShape::Path => write!(f, "Path"),
|
||||||
|
SyntaxShape::Pattern => write!(f, "Pattern"),
|
||||||
|
SyntaxShape::Binary => write!(f, "Binary"),
|
||||||
|
SyntaxShape::Block => write!(f, "Block"),
|
||||||
|
SyntaxShape::Boolean => write!(f, "Boolean"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Getters, new)]
|
||||||
|
pub struct ExpandContext<'context> {
|
||||||
|
#[get = "pub(crate)"]
|
||||||
|
registry: &'context CommandRegistry,
|
||||||
|
#[get = "pub(crate)"]
|
||||||
|
tag: Tag,
|
||||||
|
#[get = "pub(crate)"]
|
||||||
|
source: &'context Text,
|
||||||
|
homedir: Option<PathBuf>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'context> ExpandContext<'context> {
|
||||||
|
pub(crate) fn homedir(&self) -> Option<&Path> {
|
||||||
|
self.homedir.as_ref().map(|h| h.as_path())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
pub fn with_empty(source: &Text, callback: impl FnOnce(ExpandContext)) {
|
||||||
|
let mut registry = CommandRegistry::new();
|
||||||
|
registry.insert(
|
||||||
|
"ls",
|
||||||
|
crate::commands::whole_stream_command(crate::commands::LS),
|
||||||
|
);
|
||||||
|
|
||||||
|
callback(ExpandContext {
|
||||||
|
registry: ®istry,
|
||||||
|
tag: Tag::unknown(),
|
||||||
|
source,
|
||||||
|
homedir: None,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub trait TestSyntax: std::fmt::Debug + Copy {
|
||||||
|
fn test<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Option<Peeked<'a, 'b>>;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub trait ExpandExpression: std::fmt::Debug + Copy {
|
||||||
|
fn expand_expr<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError>;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) trait ExpandSyntax: std::fmt::Debug + Copy {
|
||||||
|
type Output: std::fmt::Debug;
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Self::Output, ShellError>;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn expand_syntax<'a, 'b, T: ExpandSyntax>(
|
||||||
|
shape: &T,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<T::Output, ShellError> {
|
||||||
|
trace!(target: "nu::expand_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source));
|
||||||
|
|
||||||
|
let result = shape.expand_syntax(token_nodes, context);
|
||||||
|
|
||||||
|
match result {
|
||||||
|
Err(err) => {
|
||||||
|
trace!(target: "nu::expand_syntax", "error :: {} :: {:?}", err, debug_tokens(token_nodes, context.source));
|
||||||
|
Err(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(result) => {
|
||||||
|
trace!(target: "nu::expand_syntax", "ok :: {:?} :: {:?}", result, debug_tokens(token_nodes, context.source));
|
||||||
|
Ok(result)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn expand_expr<'a, 'b, T: ExpandExpression>(
|
||||||
|
shape: &T,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
trace!(target: "nu::expand_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source));
|
||||||
|
|
||||||
|
let result = shape.expand_syntax(token_nodes, context);
|
||||||
|
|
||||||
|
match result {
|
||||||
|
Err(err) => {
|
||||||
|
trace!(target: "nu::expand_syntax", "error :: {} :: {:?}", err, debug_tokens(token_nodes, context.source));
|
||||||
|
Err(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(result) => {
|
||||||
|
trace!(target: "nu::expand_syntax", "ok :: {:?} :: {:?}", result, debug_tokens(token_nodes, context.source));
|
||||||
|
Ok(result)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: ExpandExpression> ExpandSyntax for T {
|
||||||
|
type Output = hir::Expression;
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Self::Output, ShellError> {
|
||||||
|
ExpandExpression::expand_expr(self, token_nodes, context)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub trait SkipSyntax: std::fmt::Debug + Copy {
|
||||||
|
fn skip<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<(), ShellError>;
|
||||||
|
}
|
||||||
|
|
||||||
|
enum BarePathState {
|
||||||
|
Initial,
|
||||||
|
Seen(Tag, Tag),
|
||||||
|
Error(ShellError),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl BarePathState {
|
||||||
|
pub fn seen(self, tag: Tag) -> BarePathState {
|
||||||
|
match self {
|
||||||
|
BarePathState::Initial => BarePathState::Seen(tag, tag),
|
||||||
|
BarePathState::Seen(start, _) => BarePathState::Seen(start, tag),
|
||||||
|
BarePathState::Error(err) => BarePathState::Error(err),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn end(self, peeked: Peeked, reason: impl Into<String>) -> BarePathState {
|
||||||
|
match self {
|
||||||
|
BarePathState::Initial => BarePathState::Error(peeked.type_error(reason)),
|
||||||
|
BarePathState::Seen(start, end) => BarePathState::Seen(start, end),
|
||||||
|
BarePathState::Error(err) => BarePathState::Error(err),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn into_bare(self) -> Result<Tag, ShellError> {
|
||||||
|
match self {
|
||||||
|
BarePathState::Initial => unreachable!("into_bare in initial state"),
|
||||||
|
BarePathState::Seen(start, end) => Ok(start.until(end)),
|
||||||
|
BarePathState::Error(err) => Err(err),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn expand_bare<'a, 'b>(
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
_context: &ExpandContext,
|
||||||
|
predicate: impl Fn(&TokenNode) -> bool,
|
||||||
|
) -> Result<Tag, ShellError> {
|
||||||
|
let mut state = BarePathState::Initial;
|
||||||
|
|
||||||
|
loop {
|
||||||
|
// Whitespace ends a word
|
||||||
|
let mut peeked = token_nodes.peek_any();
|
||||||
|
|
||||||
|
match peeked.node {
|
||||||
|
None => {
|
||||||
|
state = state.end(peeked, "word");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
Some(node) => {
|
||||||
|
if predicate(node) {
|
||||||
|
state = state.seen(node.tag());
|
||||||
|
peeked.commit();
|
||||||
|
} else {
|
||||||
|
state = state.end(peeked, "word");
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
state.into_bare()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct BarePathShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for BarePathShape {
|
||||||
|
type Output = Tag;
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Tag, ShellError> {
|
||||||
|
expand_bare(token_nodes, context, |token| match token {
|
||||||
|
TokenNode::Token(Tagged {
|
||||||
|
item: RawToken::Bare,
|
||||||
|
..
|
||||||
|
})
|
||||||
|
| TokenNode::Token(Tagged {
|
||||||
|
item: RawToken::Operator(Operator::Dot),
|
||||||
|
..
|
||||||
|
}) => true,
|
||||||
|
|
||||||
|
_ => false,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct BareShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for BareShape {
|
||||||
|
type Output = Tagged<String>;
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Self::Output, ShellError> {
|
||||||
|
let peeked = token_nodes.peek_any().not_eof("word")?;
|
||||||
|
|
||||||
|
match peeked.node {
|
||||||
|
TokenNode::Token(Tagged {
|
||||||
|
item: RawToken::Bare,
|
||||||
|
tag,
|
||||||
|
}) => {
|
||||||
|
peeked.commit();
|
||||||
|
Ok(tag.tagged_string(context.source))
|
||||||
|
}
|
||||||
|
|
||||||
|
other => Err(ShellError::type_error("word", other.tagged_type_name())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TestSyntax for BareShape {
|
||||||
|
fn test<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
_context: &ExpandContext,
|
||||||
|
) -> Option<Peeked<'a, 'b>> {
|
||||||
|
let peeked = token_nodes.peek_any();
|
||||||
|
|
||||||
|
match peeked.node {
|
||||||
|
Some(TokenNode::Token(token)) => match token.item {
|
||||||
|
RawToken::Bare => Some(peeked),
|
||||||
|
_ => None,
|
||||||
|
},
|
||||||
|
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub enum CommandSignature {
|
||||||
|
Internal(Tagged<Arc<Command>>),
|
||||||
|
LiteralExternal { outer: Tag, inner: Tag },
|
||||||
|
External(Tag),
|
||||||
|
Expression(hir::Expression),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl CommandSignature {
|
||||||
|
pub fn to_expression(&self) -> hir::Expression {
|
||||||
|
match self {
|
||||||
|
CommandSignature::Internal(command) => {
|
||||||
|
let tag = command.tag;
|
||||||
|
hir::RawExpression::Command(tag).tagged(tag)
|
||||||
|
}
|
||||||
|
CommandSignature::LiteralExternal { outer, inner } => {
|
||||||
|
hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*inner)).tagged(outer)
|
||||||
|
}
|
||||||
|
CommandSignature::External(tag) => {
|
||||||
|
hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*tag)).tagged(tag)
|
||||||
|
}
|
||||||
|
CommandSignature::Expression(expr) => expr.clone(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct CommandHeadShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for CommandHeadShape {
|
||||||
|
type Output = CommandSignature;
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<CommandSignature, ShellError> {
|
||||||
|
let node =
|
||||||
|
parse_single_node_skipping_ws(token_nodes, "command head1", |token, token_tag| {
|
||||||
|
Ok(match token {
|
||||||
|
RawToken::ExternalCommand(tag) => CommandSignature::LiteralExternal {
|
||||||
|
outer: token_tag,
|
||||||
|
inner: tag,
|
||||||
|
},
|
||||||
|
RawToken::Bare => {
|
||||||
|
let name = token_tag.slice(context.source);
|
||||||
|
if context.registry.has(name) {
|
||||||
|
let command = context.registry.expect_command(name);
|
||||||
|
CommandSignature::Internal(command.tagged(token_tag))
|
||||||
|
} else {
|
||||||
|
CommandSignature::External(token_tag)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
return Err(ShellError::type_error(
|
||||||
|
"command head2",
|
||||||
|
token.type_name().tagged(token_tag),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
match node {
|
||||||
|
Ok(expr) => return Ok(expr),
|
||||||
|
Err(_) => match expand_expr(&AnyExpressionShape, token_nodes, context) {
|
||||||
|
Ok(expr) => return Ok(CommandSignature::Expression(expr)),
|
||||||
|
Err(_) => Err(token_nodes.peek_non_ws().type_error("command head3")),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct ClassifiedCommandShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for ClassifiedCommandShape {
|
||||||
|
type Output = ClassifiedCommand;
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
iterator: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Self::Output, ShellError> {
|
||||||
|
let head = expand_syntax(&CommandHeadShape, iterator, context)?;
|
||||||
|
|
||||||
|
match &head {
|
||||||
|
CommandSignature::Expression(expr) => Err(ShellError::syntax_error(
|
||||||
|
"Unexpected expression in command position".tagged(expr.tag),
|
||||||
|
)),
|
||||||
|
|
||||||
|
// If the command starts with `^`, treat it as an external command no matter what
|
||||||
|
CommandSignature::External(name) => {
|
||||||
|
let name_str = name.slice(&context.source);
|
||||||
|
|
||||||
|
external_command(iterator, &context.source, name_str.tagged(name))
|
||||||
|
}
|
||||||
|
|
||||||
|
CommandSignature::LiteralExternal { outer, inner } => {
|
||||||
|
let name_str = inner.slice(&context.source);
|
||||||
|
|
||||||
|
external_command(iterator, &context.source, name_str.tagged(outer))
|
||||||
|
}
|
||||||
|
|
||||||
|
CommandSignature::Internal(command) => {
|
||||||
|
let tail =
|
||||||
|
parse_command_tail(&command.signature(), &context, iterator, command.tag)?;
|
||||||
|
|
||||||
|
let (positional, named) = match tail {
|
||||||
|
None => (None, None),
|
||||||
|
Some((positional, named)) => (positional, named),
|
||||||
|
};
|
||||||
|
|
||||||
|
let call = hir::Call {
|
||||||
|
head: Box::new(head.to_expression()),
|
||||||
|
positional,
|
||||||
|
named,
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(ClassifiedCommand::Internal(InternalCommand::new(
|
||||||
|
command.item.name().to_string(),
|
||||||
|
command.tag,
|
||||||
|
call,
|
||||||
|
)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct InternalCommandHeadShape;
|
||||||
|
|
||||||
|
impl ExpandExpression for InternalCommandHeadShape {
|
||||||
|
fn expand_expr(
|
||||||
|
&self,
|
||||||
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
|
_context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
let peeked_head = token_nodes.peek_non_ws().not_eof("command head4")?;
|
||||||
|
|
||||||
|
let expr = match peeked_head.node {
|
||||||
|
TokenNode::Token(
|
||||||
|
spanned @ Tagged {
|
||||||
|
item: RawToken::Bare,
|
||||||
|
..
|
||||||
|
},
|
||||||
|
) => spanned.map(|_| hir::RawExpression::Literal(hir::Literal::Bare)),
|
||||||
|
|
||||||
|
TokenNode::Token(Tagged {
|
||||||
|
item: RawToken::String(inner_tag),
|
||||||
|
tag,
|
||||||
|
}) => hir::RawExpression::Literal(hir::Literal::String(*inner_tag)).tagged(*tag),
|
||||||
|
|
||||||
|
node => {
|
||||||
|
return Err(ShellError::type_error(
|
||||||
|
"command head5",
|
||||||
|
node.tagged_type_name(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
peeked_head.commit();
|
||||||
|
|
||||||
|
Ok(expr)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_single_node<'a, 'b, T>(
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
expected: &'static str,
|
||||||
|
callback: impl FnOnce(RawToken, Tag) -> Result<T, ShellError>,
|
||||||
|
) -> Result<T, ShellError> {
|
||||||
|
let peeked = token_nodes.peek_any().not_eof(expected)?;
|
||||||
|
|
||||||
|
let expr = match peeked.node {
|
||||||
|
TokenNode::Token(token) => callback(token.item, token.tag())?,
|
||||||
|
|
||||||
|
other => return Err(ShellError::type_error(expected, other.tagged_type_name())),
|
||||||
|
};
|
||||||
|
|
||||||
|
peeked.commit();
|
||||||
|
|
||||||
|
Ok(expr)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse_single_node_skipping_ws<'a, 'b, T>(
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
expected: &'static str,
|
||||||
|
callback: impl FnOnce(RawToken, Tag) -> Result<T, ShellError>,
|
||||||
|
) -> Result<T, ShellError> {
|
||||||
|
let peeked = token_nodes.peek_non_ws().not_eof(expected)?;
|
||||||
|
|
||||||
|
let expr = match peeked.node {
|
||||||
|
TokenNode::Token(token) => callback(token.item, token.tag())?,
|
||||||
|
|
||||||
|
other => return Err(ShellError::type_error(expected, other.tagged_type_name())),
|
||||||
|
};
|
||||||
|
|
||||||
|
peeked.commit();
|
||||||
|
|
||||||
|
Ok(expr)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct WhitespaceShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for WhitespaceShape {
|
||||||
|
type Output = Tag;
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
_context: &ExpandContext,
|
||||||
|
) -> Result<Self::Output, ShellError> {
|
||||||
|
let peeked = token_nodes.peek_any().not_eof("whitespace")?;
|
||||||
|
|
||||||
|
let tag = match peeked.node {
|
||||||
|
TokenNode::Whitespace(tag) => *tag,
|
||||||
|
|
||||||
|
other => {
|
||||||
|
return Err(ShellError::type_error(
|
||||||
|
"whitespace",
|
||||||
|
other.tagged_type_name(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
peeked.commit();
|
||||||
|
|
||||||
|
Ok(tag)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct SpacedExpression<T: ExpandExpression> {
|
||||||
|
inner: T,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: ExpandExpression> ExpandExpression for SpacedExpression<T> {
|
||||||
|
fn expand_expr<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
// TODO: Make the name part of the trait
|
||||||
|
let peeked = token_nodes.peek_any().not_eof("whitespace")?;
|
||||||
|
|
||||||
|
match peeked.node {
|
||||||
|
TokenNode::Whitespace(_) => {
|
||||||
|
peeked.commit();
|
||||||
|
expand_expr(&self.inner, token_nodes, context)
|
||||||
|
}
|
||||||
|
|
||||||
|
other => Err(ShellError::type_error(
|
||||||
|
"whitespace",
|
||||||
|
other.tagged_type_name(),
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn maybe_spaced<T: ExpandExpression>(inner: T) -> MaybeSpacedExpression<T> {
|
||||||
|
MaybeSpacedExpression { inner }
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct MaybeSpacedExpression<T: ExpandExpression> {
|
||||||
|
inner: T,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<T: ExpandExpression> ExpandExpression for MaybeSpacedExpression<T> {
|
||||||
|
fn expand_expr<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
// TODO: Make the name part of the trait
|
||||||
|
let peeked = token_nodes.peek_any().not_eof("whitespace")?;
|
||||||
|
|
||||||
|
match peeked.node {
|
||||||
|
TokenNode::Whitespace(_) => {
|
||||||
|
peeked.commit();
|
||||||
|
expand_expr(&self.inner, token_nodes, context)
|
||||||
|
}
|
||||||
|
|
||||||
|
_ => {
|
||||||
|
peeked.rollback();
|
||||||
|
expand_expr(&self.inner, token_nodes, context)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn spaced<T: ExpandExpression>(inner: T) -> SpacedExpression<T> {
|
||||||
|
SpacedExpression { inner }
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Build a variable expression from a variable token, treating the special
/// `$it` variable distinctly from ordinary variables.
fn expand_variable(tag: Tag, token_tag: Tag, source: &Text) -> hir::Expression {
    match tag.slice(source) {
        "it" => hir::Expression::it_variable(tag, token_tag),
        _ => hir::Expression::variable(tag, token_tag),
    }
}
|
168
src/parser/hir/syntax_shape/block.rs
Normal file
168
src/parser/hir/syntax_shape/block.rs
Normal file
@ -0,0 +1,168 @@
|
|||||||
|
use crate::errors::ShellError;
|
||||||
|
use crate::parser::{
|
||||||
|
hir,
|
||||||
|
hir::syntax_shape::{
|
||||||
|
continue_expression, expand_expr, expand_syntax, ExpandContext, ExpandExpression,
|
||||||
|
ExpressionListShape, PathTailShape, VariablePathShape,
|
||||||
|
},
|
||||||
|
hir::tokens_iterator::TokensIterator,
|
||||||
|
RawToken, TokenNode,
|
||||||
|
};
|
||||||
|
use crate::{Tag, Tagged, TaggedItem};
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct AnyBlockShape;
|
||||||
|
|
||||||
|
impl ExpandExpression for AnyBlockShape {
|
||||||
|
fn expand_expr<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
let block = token_nodes.peek_non_ws().not_eof("block")?;
|
||||||
|
|
||||||
|
// is it just a block?
|
||||||
|
let block = block.node.as_block();
|
||||||
|
|
||||||
|
match block {
|
||||||
|
Some(block) => {
|
||||||
|
let mut iterator = TokensIterator::new(&block.item, context.tag, false);
|
||||||
|
|
||||||
|
let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?;
|
||||||
|
|
||||||
|
return Ok(hir::RawExpression::Block(exprs).tagged(block.tag));
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
expand_syntax(&ShorthandBlock, token_nodes, context)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct ShorthandBlock;
|
||||||
|
|
||||||
|
impl ExpandExpression for ShorthandBlock {
|
||||||
|
fn expand_expr<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
let path = expand_expr(&ShorthandPath, token_nodes, context)?;
|
||||||
|
let start = path.tag;
|
||||||
|
let expr = continue_expression(path, token_nodes, context)?;
|
||||||
|
let end = expr.tag;
|
||||||
|
let block = hir::RawExpression::Block(vec![expr]).tagged(start.until(end));
|
||||||
|
|
||||||
|
Ok(block)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct ShorthandPath;
|
||||||
|
|
||||||
|
impl ExpandExpression for ShorthandPath {
|
||||||
|
fn expand_expr<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
// if it's a variable path, that's the head part
|
||||||
|
let path = expand_expr(&VariablePathShape, token_nodes, context);
|
||||||
|
|
||||||
|
match path {
|
||||||
|
Ok(path) => return Ok(path),
|
||||||
|
Err(_) => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Synthesize the head of the shorthand path (`<member>` -> `$it.<member>`)
|
||||||
|
let mut head = expand_expr(&ShorthandHeadShape, token_nodes, context)?;
|
||||||
|
|
||||||
|
// Now that we've synthesized the head, of the path, proceed to expand the tail of the path
|
||||||
|
// like any other path.
|
||||||
|
let tail = expand_syntax(&PathTailShape, token_nodes, context);
|
||||||
|
|
||||||
|
match tail {
|
||||||
|
Err(_) => return Ok(head),
|
||||||
|
Ok((tail, _)) => {
|
||||||
|
// For each member that `PathTailShape` expanded, join it onto the existing expression
|
||||||
|
// to form a new path
|
||||||
|
for member in tail {
|
||||||
|
head = hir::Expression::dot_member(head, member);
|
||||||
|
}
|
||||||
|
|
||||||
|
println!("{:?}", head);
|
||||||
|
|
||||||
|
Ok(head)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct ShorthandHeadShape;
|
||||||
|
|
||||||
|
impl ExpandExpression for ShorthandHeadShape {
|
||||||
|
fn expand_expr<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
// A shorthand path must not be at EOF
|
||||||
|
let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?;
|
||||||
|
|
||||||
|
match peeked.node {
|
||||||
|
// If the head of a shorthand path is a bare token, it expands to `$it.bare`
|
||||||
|
TokenNode::Token(Tagged {
|
||||||
|
item: RawToken::Bare,
|
||||||
|
tag,
|
||||||
|
}) => {
|
||||||
|
// Commit the peeked token
|
||||||
|
peeked.commit();
|
||||||
|
|
||||||
|
// Synthesize an `$it` expression
|
||||||
|
let it = synthetic_it(token_nodes.anchor());
|
||||||
|
|
||||||
|
// Make a path out of `$it` and the bare token as a member
|
||||||
|
Ok(hir::Expression::path(
|
||||||
|
it,
|
||||||
|
vec![tag.tagged_string(context.source)],
|
||||||
|
tag,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
// If the head of a shorthand path is a string, it expands to `$it."some string"`
|
||||||
|
TokenNode::Token(Tagged {
|
||||||
|
item: RawToken::String(inner),
|
||||||
|
tag: outer,
|
||||||
|
}) => {
|
||||||
|
// Commit the peeked token
|
||||||
|
peeked.commit();
|
||||||
|
|
||||||
|
// Synthesize an `$it` expression
|
||||||
|
let it = synthetic_it(token_nodes.anchor());
|
||||||
|
|
||||||
|
// Make a path out of `$it` and the bare token as a member
|
||||||
|
Ok(hir::Expression::path(
|
||||||
|
it,
|
||||||
|
vec![inner.string(context.source).tagged(outer)],
|
||||||
|
outer,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Any other token is not a valid bare head
|
||||||
|
other => {
|
||||||
|
return Err(ShellError::type_error(
|
||||||
|
"shorthand path",
|
||||||
|
other.tagged_type_name(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn synthetic_it(origin: uuid::Uuid) -> hir::Expression {
|
||||||
|
hir::Expression::it_variable(Tag::unknown_span(origin), Tag::unknown_span(origin))
|
||||||
|
}
|
188
src/parser/hir/syntax_shape/expression.rs
Normal file
188
src/parser/hir/syntax_shape/expression.rs
Normal file
@ -0,0 +1,188 @@
|
|||||||
|
pub(crate) mod delimited;
|
||||||
|
pub(crate) mod file_path;
|
||||||
|
pub(crate) mod list;
|
||||||
|
pub(crate) mod number;
|
||||||
|
pub(crate) mod pattern;
|
||||||
|
pub(crate) mod string;
|
||||||
|
pub(crate) mod unit;
|
||||||
|
pub(crate) mod variable_path;
|
||||||
|
|
||||||
|
use crate::parser::hir::syntax_shape::{
|
||||||
|
expand_expr, expand_syntax, expand_variable, expression::delimited::expand_delimited_expr,
|
||||||
|
BareShape, DotShape, ExpandContext, ExpandExpression, ExpandSyntax, ExpressionContinuation,
|
||||||
|
ExpressionContinuationShape, UnitShape,
|
||||||
|
};
|
||||||
|
use crate::parser::{
|
||||||
|
hir,
|
||||||
|
hir::{Expression, Operator, TokensIterator},
|
||||||
|
RawToken, Token, TokenNode,
|
||||||
|
};
|
||||||
|
use crate::prelude::*;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct AnyExpressionShape;
|
||||||
|
|
||||||
|
impl ExpandExpression for AnyExpressionShape {
|
||||||
|
fn expand_expr<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
// Look for an expression at the cursor
|
||||||
|
let head = expand_expr(&AnyExpressionStartShape, token_nodes, context)?;
|
||||||
|
|
||||||
|
continue_expression(head, token_nodes, context)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn continue_expression(
|
||||||
|
mut head: hir::Expression,
|
||||||
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
loop {
|
||||||
|
// Check to see whether there's any continuation after the head expression
|
||||||
|
let continuation = expand_syntax(&ExpressionContinuationShape, token_nodes, context);
|
||||||
|
|
||||||
|
match continuation {
|
||||||
|
// If there's no continuation, return the head
|
||||||
|
Err(_) => return Ok(head),
|
||||||
|
// Otherwise, form a new expression by combining the head with the continuation
|
||||||
|
Ok(continuation) => match continuation {
|
||||||
|
// If the continuation is a `.member`, form a path with the new member
|
||||||
|
ExpressionContinuation::DotSuffix(_dot, member) => {
|
||||||
|
head = Expression::dot_member(head, member);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Otherwise, if the continuation is an infix suffix, form an infix expression
|
||||||
|
ExpressionContinuation::InfixSuffix(op, expr) => {
|
||||||
|
head = Expression::infix(head, op, expr);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct AnyExpressionStartShape;
|
||||||
|
|
||||||
|
impl ExpandExpression for AnyExpressionStartShape {
|
||||||
|
fn expand_expr<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
let size = expand_expr(&UnitShape, token_nodes, context);
|
||||||
|
|
||||||
|
match size {
|
||||||
|
Ok(expr) => return Ok(expr),
|
||||||
|
Err(_) => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
let peek_next = token_nodes.peek_any().not_eof("expression")?;
|
||||||
|
|
||||||
|
let head = match peek_next.node {
|
||||||
|
TokenNode::Token(token) => match token.item {
|
||||||
|
RawToken::Bare | RawToken::Operator(Operator::Dot) => {
|
||||||
|
let start = token.tag;
|
||||||
|
peek_next.commit();
|
||||||
|
|
||||||
|
let end = expand_syntax(&BareTailShape, token_nodes, context)?;
|
||||||
|
|
||||||
|
match end {
|
||||||
|
Some(end) => return Ok(hir::Expression::bare(start.until(end))),
|
||||||
|
None => return Ok(hir::Expression::bare(start)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
peek_next.commit();
|
||||||
|
expand_one_context_free_token(*token, context)
|
||||||
|
}
|
||||||
|
},
|
||||||
|
node @ TokenNode::Call(_)
|
||||||
|
| node @ TokenNode::Nodes(_)
|
||||||
|
| node @ TokenNode::Pipeline(_)
|
||||||
|
| node @ TokenNode::Flag(_)
|
||||||
|
| node @ TokenNode::Member(_)
|
||||||
|
| node @ TokenNode::Whitespace(_) => {
|
||||||
|
return Err(ShellError::type_error(
|
||||||
|
"expression",
|
||||||
|
node.tagged_type_name(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
TokenNode::Delimited(delimited) => {
|
||||||
|
peek_next.commit();
|
||||||
|
expand_delimited_expr(delimited, context)
|
||||||
|
}
|
||||||
|
|
||||||
|
TokenNode::Error(error) => return Err(*error.item.clone()),
|
||||||
|
}?;
|
||||||
|
|
||||||
|
Ok(head)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct BareTailShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for BareTailShape {
|
||||||
|
type Output = Option<Tag>;
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Option<Tag>, ShellError> {
|
||||||
|
let mut end: Option<Tag> = None;
|
||||||
|
|
||||||
|
loop {
|
||||||
|
match expand_syntax(&BareShape, token_nodes, context) {
|
||||||
|
Ok(bare) => {
|
||||||
|
end = Some(bare.tag);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
Err(_) => match expand_syntax(&DotShape, token_nodes, context) {
|
||||||
|
Ok(dot) => {
|
||||||
|
end = Some(dot);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
Err(_) => break,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(end)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn expand_one_context_free_token<'a, 'b>(
|
||||||
|
token: Token,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
Ok(match token.item {
|
||||||
|
RawToken::Number(number) => {
|
||||||
|
hir::Expression::number(number.to_number(context.source), token.tag)
|
||||||
|
}
|
||||||
|
RawToken::Operator(..) => {
|
||||||
|
return Err(ShellError::syntax_error(
|
||||||
|
"unexpected operator, expected an expression".tagged(token.tag),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
RawToken::Size(..) => unimplemented!("size"),
|
||||||
|
RawToken::String(tag) => hir::Expression::string(tag, token.tag),
|
||||||
|
RawToken::Variable(tag) => expand_variable(tag, token.tag, &context.source),
|
||||||
|
RawToken::ExternalCommand(_) => unimplemented!(),
|
||||||
|
RawToken::ExternalWord => unimplemented!(),
|
||||||
|
RawToken::GlobPattern => hir::Expression::pattern(token.tag),
|
||||||
|
RawToken::Bare => hir::Expression::string(token.tag, token.tag),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn expand_file_path(string: &str, context: &ExpandContext) -> PathBuf {
|
||||||
|
let expanded = shellexpand::tilde_with_context(string, || context.homedir());
|
||||||
|
|
||||||
|
PathBuf::from(expanded.as_ref())
|
||||||
|
}
|
38
src/parser/hir/syntax_shape/expression/delimited.rs
Normal file
38
src/parser/hir/syntax_shape/expression/delimited.rs
Normal file
@ -0,0 +1,38 @@
|
|||||||
|
use crate::parser::hir::syntax_shape::{expand_syntax, ExpandContext, ExpressionListShape};
|
||||||
|
use crate::parser::{hir, hir::TokensIterator};
|
||||||
|
use crate::parser::{DelimitedNode, Delimiter};
|
||||||
|
use crate::prelude::*;
|
||||||
|
|
||||||
|
pub fn expand_delimited_expr(
|
||||||
|
delimited: &Tagged<DelimitedNode>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
match &delimited.item {
|
||||||
|
DelimitedNode {
|
||||||
|
delimiter: Delimiter::Square,
|
||||||
|
children,
|
||||||
|
} => {
|
||||||
|
let mut tokens = TokensIterator::new(&children, delimited.tag, false);
|
||||||
|
|
||||||
|
let list = expand_syntax(&ExpressionListShape, &mut tokens, context);
|
||||||
|
|
||||||
|
Ok(hir::Expression::list(list?, delimited.tag))
|
||||||
|
}
|
||||||
|
|
||||||
|
DelimitedNode {
|
||||||
|
delimiter: Delimiter::Paren,
|
||||||
|
..
|
||||||
|
} => Err(ShellError::type_error(
|
||||||
|
"expression",
|
||||||
|
"unimplemented call expression".tagged(delimited.tag),
|
||||||
|
)),
|
||||||
|
|
||||||
|
DelimitedNode {
|
||||||
|
delimiter: Delimiter::Brace,
|
||||||
|
..
|
||||||
|
} => Err(ShellError::type_error(
|
||||||
|
"expression",
|
||||||
|
"unimplemented block expression".tagged(delimited.tag),
|
||||||
|
)),
|
||||||
|
}
|
||||||
|
}
|
59
src/parser/hir/syntax_shape/expression/file_path.rs
Normal file
59
src/parser/hir/syntax_shape/expression/file_path.rs
Normal file
@ -0,0 +1,59 @@
|
|||||||
|
use crate::parser::hir::syntax_shape::{
|
||||||
|
expand_syntax, expression::expand_file_path, parse_single_node, BarePathShape, ExpandContext,
|
||||||
|
ExpandExpression,
|
||||||
|
};
|
||||||
|
use crate::parser::{hir, hir::TokensIterator, RawToken};
|
||||||
|
use crate::prelude::*;
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct FilePathShape;
|
||||||
|
|
||||||
|
impl ExpandExpression for FilePathShape {
|
||||||
|
fn expand_expr<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
let bare = expand_syntax(&BarePathShape, token_nodes, context);
|
||||||
|
|
||||||
|
match bare {
|
||||||
|
Ok(tag) => {
|
||||||
|
let string = tag.slice(context.source);
|
||||||
|
let path = expand_file_path(string, context);
|
||||||
|
return Ok(hir::Expression::file_path(path, tag));
|
||||||
|
}
|
||||||
|
Err(_) => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
parse_single_node(token_nodes, "Path", |token, token_tag| {
|
||||||
|
Ok(match token {
|
||||||
|
RawToken::GlobPattern => {
|
||||||
|
return Err(ShellError::type_error(
|
||||||
|
"Path",
|
||||||
|
"glob pattern".tagged(token_tag),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
RawToken::Operator(..) => {
|
||||||
|
return Err(ShellError::type_error("Path", "operator".tagged(token_tag)))
|
||||||
|
}
|
||||||
|
RawToken::Variable(tag) if tag.slice(context.source) == "it" => {
|
||||||
|
hir::Expression::it_variable(tag, token_tag)
|
||||||
|
}
|
||||||
|
RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag),
|
||||||
|
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag),
|
||||||
|
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)),
|
||||||
|
RawToken::Number(_) => hir::Expression::bare(token_tag),
|
||||||
|
RawToken::Size(_, _) => hir::Expression::bare(token_tag),
|
||||||
|
RawToken::Bare => hir::Expression::file_path(
|
||||||
|
expand_file_path(token_tag.slice(context.source), context),
|
||||||
|
token_tag,
|
||||||
|
),
|
||||||
|
|
||||||
|
RawToken::String(tag) => hir::Expression::file_path(
|
||||||
|
expand_file_path(tag.slice(context.source), context),
|
||||||
|
token_tag,
|
||||||
|
),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
43
src/parser/hir/syntax_shape/expression/list.rs
Normal file
43
src/parser/hir/syntax_shape/expression/list.rs
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
use crate::errors::ShellError;
|
||||||
|
use crate::parser::{
|
||||||
|
hir,
|
||||||
|
hir::syntax_shape::{
|
||||||
|
expand_expr, maybe_spaced, spaced, AnyExpressionShape, ExpandContext, ExpandSyntax,
|
||||||
|
},
|
||||||
|
hir::{debug_tokens, TokensIterator},
|
||||||
|
};
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct ExpressionListShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for ExpressionListShape {
|
||||||
|
type Output = Vec<hir::Expression>;
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Vec<hir::Expression>, ShellError> {
|
||||||
|
let mut exprs = vec![];
|
||||||
|
|
||||||
|
if token_nodes.at_end_possible_ws() {
|
||||||
|
return Ok(exprs);
|
||||||
|
}
|
||||||
|
|
||||||
|
let expr = expand_expr(&maybe_spaced(AnyExpressionShape), token_nodes, context)?;
|
||||||
|
|
||||||
|
exprs.push(expr);
|
||||||
|
|
||||||
|
println!("{:?}", debug_tokens(token_nodes, context.source));
|
||||||
|
|
||||||
|
loop {
|
||||||
|
if token_nodes.at_end_possible_ws() {
|
||||||
|
return Ok(exprs);
|
||||||
|
}
|
||||||
|
|
||||||
|
let expr = expand_expr(&spaced(AnyExpressionShape), token_nodes, context)?;
|
||||||
|
|
||||||
|
exprs.push(expr);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
97
src/parser/hir/syntax_shape/expression/number.rs
Normal file
97
src/parser/hir/syntax_shape/expression/number.rs
Normal file
@ -0,0 +1,97 @@
|
|||||||
|
use crate::parser::hir::syntax_shape::{parse_single_node, ExpandContext, ExpandExpression};
|
||||||
|
use crate::parser::{
|
||||||
|
hir,
|
||||||
|
hir::{RawNumber, TokensIterator},
|
||||||
|
RawToken,
|
||||||
|
};
|
||||||
|
use crate::prelude::*;
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct NumberShape;
|
||||||
|
|
||||||
|
impl ExpandExpression for NumberShape {
|
||||||
|
fn expand_expr<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
parse_single_node(token_nodes, "Number", |token, token_tag| {
|
||||||
|
Ok(match token {
|
||||||
|
RawToken::GlobPattern => {
|
||||||
|
return Err(ShellError::type_error(
|
||||||
|
"Number",
|
||||||
|
"glob pattern".to_string().tagged(token_tag),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
RawToken::Operator(..) => {
|
||||||
|
return Err(ShellError::type_error(
|
||||||
|
"Number",
|
||||||
|
"operator".to_string().tagged(token_tag),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
RawToken::Variable(tag) if tag.slice(context.source) == "it" => {
|
||||||
|
hir::Expression::it_variable(tag, token_tag)
|
||||||
|
}
|
||||||
|
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag),
|
||||||
|
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)),
|
||||||
|
RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag),
|
||||||
|
RawToken::Number(number) => {
|
||||||
|
hir::Expression::number(number.to_number(context.source), token_tag)
|
||||||
|
}
|
||||||
|
RawToken::Size(number, unit) => {
|
||||||
|
hir::Expression::size(number.to_number(context.source), unit, token_tag)
|
||||||
|
}
|
||||||
|
RawToken::Bare => hir::Expression::bare(token_tag),
|
||||||
|
RawToken::String(tag) => hir::Expression::string(tag, token_tag),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct IntShape;
|
||||||
|
|
||||||
|
impl ExpandExpression for IntShape {
|
||||||
|
fn expand_expr<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
parse_single_node(token_nodes, "Integer", |token, token_tag| {
|
||||||
|
Ok(match token {
|
||||||
|
RawToken::GlobPattern => {
|
||||||
|
return Err(ShellError::type_error(
|
||||||
|
"Integer",
|
||||||
|
"glob pattern".to_string().tagged(token_tag),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
RawToken::Operator(..) => {
|
||||||
|
return Err(ShellError::type_error(
|
||||||
|
"Integer",
|
||||||
|
"operator".to_string().tagged(token_tag),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
RawToken::Variable(tag) if tag.slice(context.source) == "it" => {
|
||||||
|
hir::Expression::it_variable(tag, token_tag)
|
||||||
|
}
|
||||||
|
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag),
|
||||||
|
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)),
|
||||||
|
RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag),
|
||||||
|
RawToken::Number(number @ RawNumber::Int(_)) => {
|
||||||
|
hir::Expression::number(number.to_number(context.source), token_tag)
|
||||||
|
}
|
||||||
|
token @ RawToken::Number(_) => {
|
||||||
|
return Err(ShellError::type_error(
|
||||||
|
"Integer",
|
||||||
|
token.type_name().tagged(token_tag),
|
||||||
|
));
|
||||||
|
}
|
||||||
|
RawToken::Size(number, unit) => {
|
||||||
|
hir::Expression::size(number.to_number(context.source), unit, token_tag)
|
||||||
|
}
|
||||||
|
RawToken::Bare => hir::Expression::bare(token_tag),
|
||||||
|
RawToken::String(tag) => hir::Expression::string(tag, token_tag),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
86
src/parser/hir/syntax_shape/expression/pattern.rs
Normal file
86
src/parser/hir/syntax_shape/expression/pattern.rs
Normal file
@ -0,0 +1,86 @@
|
|||||||
|
use crate::parser::hir::syntax_shape::{
|
||||||
|
expand_bare, expand_syntax, expression::expand_file_path, parse_single_node, ExpandContext,
|
||||||
|
ExpandExpression, ExpandSyntax,
|
||||||
|
};
|
||||||
|
use crate::parser::{hir, hir::TokensIterator, Operator, RawToken, TokenNode};
|
||||||
|
use crate::prelude::*;
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct PatternShape;
|
||||||
|
|
||||||
|
impl ExpandExpression for PatternShape {
|
||||||
|
fn expand_expr<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
let pattern = expand_syntax(&BarePatternShape, token_nodes, context);
|
||||||
|
|
||||||
|
match pattern {
|
||||||
|
Ok(tag) => {
|
||||||
|
return Ok(hir::Expression::pattern(tag));
|
||||||
|
}
|
||||||
|
Err(_) => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
parse_single_node(token_nodes, "Pattern", |token, token_tag| {
|
||||||
|
Ok(match token {
|
||||||
|
RawToken::GlobPattern => {
|
||||||
|
return Err(ShellError::unreachable(
|
||||||
|
"glob pattern after glob already returned",
|
||||||
|
))
|
||||||
|
}
|
||||||
|
RawToken::Operator(..) => {
|
||||||
|
return Err(ShellError::unreachable("dot after glob already returned"))
|
||||||
|
}
|
||||||
|
RawToken::Bare => {
|
||||||
|
return Err(ShellError::unreachable("bare after glob already returned"))
|
||||||
|
}
|
||||||
|
|
||||||
|
RawToken::Variable(tag) if tag.slice(context.source) == "it" => {
|
||||||
|
hir::Expression::it_variable(tag, token_tag)
|
||||||
|
}
|
||||||
|
RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag),
|
||||||
|
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag),
|
||||||
|
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)),
|
||||||
|
RawToken::Number(_) => hir::Expression::bare(token_tag),
|
||||||
|
RawToken::Size(_, _) => hir::Expression::bare(token_tag),
|
||||||
|
|
||||||
|
RawToken::String(tag) => hir::Expression::file_path(
|
||||||
|
expand_file_path(tag.slice(context.source), context),
|
||||||
|
token_tag,
|
||||||
|
),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct BarePatternShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for BarePatternShape {
|
||||||
|
type Output = Tag;
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Tag, ShellError> {
|
||||||
|
expand_bare(token_nodes, context, |token| match token {
|
||||||
|
TokenNode::Token(Tagged {
|
||||||
|
item: RawToken::Bare,
|
||||||
|
..
|
||||||
|
})
|
||||||
|
| TokenNode::Token(Tagged {
|
||||||
|
item: RawToken::Operator(Operator::Dot),
|
||||||
|
..
|
||||||
|
})
|
||||||
|
| TokenNode::Token(Tagged {
|
||||||
|
item: RawToken::GlobPattern,
|
||||||
|
..
|
||||||
|
}) => true,
|
||||||
|
|
||||||
|
_ => false,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
60
src/parser/hir/syntax_shape/expression/string.rs
Normal file
60
src/parser/hir/syntax_shape/expression/string.rs
Normal file
@ -0,0 +1,60 @@
|
|||||||
|
use crate::parser::hir::syntax_shape::{
|
||||||
|
expand_variable, parse_single_node, ExpandContext, ExpandExpression, TestSyntax,
|
||||||
|
};
|
||||||
|
use crate::parser::hir::tokens_iterator::Peeked;
|
||||||
|
use crate::parser::{hir, hir::TokensIterator, RawToken, TokenNode};
|
||||||
|
use crate::prelude::*;
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct StringShape;
|
||||||
|
|
||||||
|
impl ExpandExpression for StringShape {
|
||||||
|
fn expand_expr<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
parse_single_node(token_nodes, "String", |token, token_tag| {
|
||||||
|
Ok(match token {
|
||||||
|
RawToken::GlobPattern => {
|
||||||
|
return Err(ShellError::type_error(
|
||||||
|
"String",
|
||||||
|
"glob pattern".tagged(token_tag),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
RawToken::Operator(..) => {
|
||||||
|
return Err(ShellError::type_error(
|
||||||
|
"String",
|
||||||
|
"operator".tagged(token_tag),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
RawToken::Variable(tag) => expand_variable(tag, token_tag, &context.source),
|
||||||
|
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag),
|
||||||
|
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)),
|
||||||
|
RawToken::Number(_) => hir::Expression::bare(token_tag),
|
||||||
|
RawToken::Size(_, _) => hir::Expression::bare(token_tag),
|
||||||
|
RawToken::Bare => hir::Expression::bare(token_tag),
|
||||||
|
RawToken::String(tag) => hir::Expression::string(tag, token_tag),
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl TestSyntax for StringShape {
|
||||||
|
fn test<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
_context: &ExpandContext,
|
||||||
|
) -> Option<Peeked<'a, 'b>> {
|
||||||
|
let peeked = token_nodes.peek_any();
|
||||||
|
|
||||||
|
match peeked.node {
|
||||||
|
Some(TokenNode::Token(token)) => match token.item {
|
||||||
|
RawToken::String(_) => Some(peeked),
|
||||||
|
_ => None,
|
||||||
|
},
|
||||||
|
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
89
src/parser/hir/syntax_shape/expression/unit.rs
Normal file
89
src/parser/hir/syntax_shape/expression/unit.rs
Normal file
@ -0,0 +1,89 @@
|
|||||||
|
use crate::parser::hir::syntax_shape::{ExpandContext, ExpandExpression};
|
||||||
|
use crate::parser::parse::tokens::RawNumber;
|
||||||
|
use crate::parser::parse::unit::Unit;
|
||||||
|
use crate::parser::{hir, hir::TokensIterator, RawToken, TokenNode};
|
||||||
|
use crate::prelude::*;
|
||||||
|
use nom::branch::alt;
|
||||||
|
use nom::bytes::complete::tag;
|
||||||
|
use nom::character::complete::digit1;
|
||||||
|
use nom::combinator::{all_consuming, opt, value};
|
||||||
|
use nom::IResult;
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct UnitShape;
|
||||||
|
|
||||||
|
impl ExpandExpression for UnitShape {
|
||||||
|
fn expand_expr<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
let peeked = token_nodes.peek_any().not_eof("unit")?;
|
||||||
|
|
||||||
|
let tag = match peeked.node {
|
||||||
|
TokenNode::Token(Tagged {
|
||||||
|
item: RawToken::Bare,
|
||||||
|
tag,
|
||||||
|
}) => tag,
|
||||||
|
_ => return Err(peeked.type_error("unit")),
|
||||||
|
};
|
||||||
|
|
||||||
|
let unit = unit_size(tag.slice(context.source), *tag);
|
||||||
|
|
||||||
|
let (_, (number, unit)) = match unit {
|
||||||
|
Err(_) => {
|
||||||
|
return Err(ShellError::type_error(
|
||||||
|
"unit",
|
||||||
|
"word".tagged(Tag::unknown()),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
Ok((number, unit)) => (number, unit),
|
||||||
|
};
|
||||||
|
|
||||||
|
Ok(hir::Expression::size(
|
||||||
|
number.to_number(context.source),
|
||||||
|
unit,
|
||||||
|
tag,
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn unit_size(input: &str, bare_tag: Tag) -> IResult<&str, (Tagged<RawNumber>, Unit)> {
|
||||||
|
let (input, digits) = digit1(input)?;
|
||||||
|
|
||||||
|
let (input, dot) = opt(tag("."))(input)?;
|
||||||
|
|
||||||
|
let (input, number) = match dot {
|
||||||
|
Some(dot) => {
|
||||||
|
let (input, rest) = digit1(input)?;
|
||||||
|
(
|
||||||
|
input,
|
||||||
|
RawNumber::decimal((
|
||||||
|
bare_tag.span.start(),
|
||||||
|
bare_tag.span.start() + digits.len() + dot.len() + rest.len(),
|
||||||
|
bare_tag.anchor,
|
||||||
|
)),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
None => (
|
||||||
|
input,
|
||||||
|
RawNumber::int((
|
||||||
|
bare_tag.span.start(),
|
||||||
|
bare_tag.span.start() + digits.len(),
|
||||||
|
bare_tag.anchor,
|
||||||
|
)),
|
||||||
|
),
|
||||||
|
};
|
||||||
|
|
||||||
|
let (input, unit) = all_consuming(alt((
|
||||||
|
value(Unit::B, alt((tag("B"), tag("b")))),
|
||||||
|
value(Unit::KB, alt((tag("KB"), tag("kb"), tag("Kb")))),
|
||||||
|
value(Unit::MB, alt((tag("MB"), tag("mb"), tag("Mb")))),
|
||||||
|
value(Unit::MB, alt((tag("GB"), tag("gb"), tag("Gb")))),
|
||||||
|
value(Unit::MB, alt((tag("TB"), tag("tb"), tag("Tb")))),
|
||||||
|
value(Unit::MB, alt((tag("PB"), tag("pb"), tag("Pb")))),
|
||||||
|
)))(input)?;
|
||||||
|
|
||||||
|
Ok((input, (number, unit)))
|
||||||
|
}
|
396
src/parser/hir/syntax_shape/expression/variable_path.rs
Normal file
396
src/parser/hir/syntax_shape/expression/variable_path.rs
Normal file
@ -0,0 +1,396 @@
|
|||||||
|
use crate::parser::hir::syntax_shape::{
|
||||||
|
expand_expr, expand_syntax, parse_single_node, AnyExpressionShape, BareShape, ExpandContext,
|
||||||
|
ExpandExpression, ExpandSyntax, Peeked, SkipSyntax, StringShape, TestSyntax, WhitespaceShape,
|
||||||
|
};
|
||||||
|
use crate::parser::{hir, hir::Expression, hir::TokensIterator, Operator, RawToken};
|
||||||
|
use crate::prelude::*;
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct VariablePathShape;
|
||||||
|
|
||||||
|
impl ExpandExpression for VariablePathShape {
|
||||||
|
fn expand_expr<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
// 1. let the head be the first token, expecting a variable
|
||||||
|
// 2. let the tail be an empty list of members
|
||||||
|
// 2. while the next token (excluding ws) is a dot:
|
||||||
|
// 1. consume the dot
|
||||||
|
// 2. consume the next token as a member and push it onto tail
|
||||||
|
|
||||||
|
let head = expand_expr(&VariableShape, token_nodes, context)?;
|
||||||
|
let start = head.tag();
|
||||||
|
let mut end = start;
|
||||||
|
let mut tail: Vec<Tagged<String>> = vec![];
|
||||||
|
|
||||||
|
loop {
|
||||||
|
match DotShape.skip(token_nodes, context) {
|
||||||
|
Err(_) => break,
|
||||||
|
Ok(_) => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
let syntax = expand_syntax(&MemberShape, token_nodes, context)?;
|
||||||
|
let member = syntax.to_tagged_string(context.source);
|
||||||
|
|
||||||
|
end = member.tag();
|
||||||
|
tail.push(member);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(hir::Expression::path(head, tail, start.until(end)))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct PathTailShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for PathTailShape {
|
||||||
|
type Output = (Vec<Tagged<String>>, Tag);
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Self::Output, ShellError> {
|
||||||
|
let mut end: Option<Tag> = None;
|
||||||
|
let mut tail = vec![];
|
||||||
|
|
||||||
|
loop {
|
||||||
|
match DotShape.skip(token_nodes, context) {
|
||||||
|
Err(_) => break,
|
||||||
|
Ok(_) => {}
|
||||||
|
}
|
||||||
|
|
||||||
|
let syntax = expand_syntax(&MemberShape, token_nodes, context)?;
|
||||||
|
let member = syntax.to_tagged_string(context.source);
|
||||||
|
end = Some(member.tag());
|
||||||
|
tail.push(member);
|
||||||
|
}
|
||||||
|
|
||||||
|
match end {
|
||||||
|
None => {
|
||||||
|
return Err(ShellError::type_error(
|
||||||
|
"path tail",
|
||||||
|
token_nodes.typed_tag_at_cursor(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
Some(end) => Ok((tail, end)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub enum ExpressionContinuation {
|
||||||
|
DotSuffix(Tag, Tagged<String>),
|
||||||
|
InfixSuffix(Tagged<Operator>, Expression),
|
||||||
|
}
|
||||||
|
|
||||||
|
/// An expression continuation
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct ExpressionContinuationShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for ExpressionContinuationShape {
|
||||||
|
type Output = ExpressionContinuation;
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<ExpressionContinuation, ShellError> {
|
||||||
|
// Try to expand a `.`
|
||||||
|
let dot = expand_syntax(&DotShape, token_nodes, context);
|
||||||
|
|
||||||
|
match dot {
|
||||||
|
// If a `.` was matched, it's a `Path`, and we expect a `Member` next
|
||||||
|
Ok(dot) => {
|
||||||
|
let syntax = expand_syntax(&MemberShape, token_nodes, context)?;
|
||||||
|
let member = syntax.to_tagged_string(context.source);
|
||||||
|
|
||||||
|
Ok(ExpressionContinuation::DotSuffix(dot, member))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Otherwise, we expect an infix operator and an expression next
|
||||||
|
Err(_) => {
|
||||||
|
let (_, op, _) = expand_syntax(&InfixShape, token_nodes, context)?;
|
||||||
|
let next = expand_expr(&AnyExpressionShape, token_nodes, context)?;
|
||||||
|
|
||||||
|
Ok(ExpressionContinuation::InfixSuffix(op, next))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct VariableShape;
|
||||||
|
|
||||||
|
impl ExpandExpression for VariableShape {
|
||||||
|
fn expand_expr<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<hir::Expression, ShellError> {
|
||||||
|
parse_single_node(token_nodes, "variable", |token, token_tag| {
|
||||||
|
Ok(match token {
|
||||||
|
RawToken::Variable(tag) => {
|
||||||
|
if tag.slice(context.source) == "it" {
|
||||||
|
hir::Expression::it_variable(tag, token_tag)
|
||||||
|
} else {
|
||||||
|
hir::Expression::variable(tag, token_tag)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
return Err(ShellError::type_error(
|
||||||
|
"variable",
|
||||||
|
token.type_name().tagged(token_tag),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy)]
|
||||||
|
pub enum Member {
|
||||||
|
String(/* outer */ Tag, /* inner */ Tag),
|
||||||
|
Bare(Tag),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Member {
|
||||||
|
pub(crate) fn to_expr(&self) -> hir::Expression {
|
||||||
|
match self {
|
||||||
|
Member::String(outer, inner) => hir::Expression::string(inner, outer),
|
||||||
|
Member::Bare(tag) => hir::Expression::string(tag, tag),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn tag(&self) -> Tag {
|
||||||
|
match self {
|
||||||
|
Member::String(outer, _inner) => *outer,
|
||||||
|
Member::Bare(tag) => *tag,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn to_tagged_string(&self, source: &str) -> Tagged<String> {
|
||||||
|
match self {
|
||||||
|
Member::String(outer, inner) => inner.string(source).tagged(outer),
|
||||||
|
Member::Bare(tag) => tag.tagged_string(source),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn tagged_type_name(&self) -> Tagged<&'static str> {
|
||||||
|
match self {
|
||||||
|
Member::String(outer, _inner) => "string".tagged(outer),
|
||||||
|
Member::Bare(tag) => "word".tagged(tag),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
enum ColumnPathState {
|
||||||
|
Initial,
|
||||||
|
LeadingDot(Tag),
|
||||||
|
Dot(Tag, Vec<Member>, Tag),
|
||||||
|
Member(Tag, Vec<Member>),
|
||||||
|
Error(ShellError),
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ColumnPathState {
|
||||||
|
pub fn dot(self, dot: Tag) -> ColumnPathState {
|
||||||
|
match self {
|
||||||
|
ColumnPathState::Initial => ColumnPathState::LeadingDot(dot),
|
||||||
|
ColumnPathState::LeadingDot(_) => {
|
||||||
|
ColumnPathState::Error(ShellError::type_error("column", "dot".tagged(dot)))
|
||||||
|
}
|
||||||
|
ColumnPathState::Dot(..) => {
|
||||||
|
ColumnPathState::Error(ShellError::type_error("column", "dot".tagged(dot)))
|
||||||
|
}
|
||||||
|
ColumnPathState::Member(tag, members) => ColumnPathState::Dot(tag, members, dot),
|
||||||
|
ColumnPathState::Error(err) => ColumnPathState::Error(err),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn member(self, member: Member) -> ColumnPathState {
|
||||||
|
match self {
|
||||||
|
ColumnPathState::Initial => ColumnPathState::Member(member.tag(), vec![member]),
|
||||||
|
ColumnPathState::LeadingDot(tag) => {
|
||||||
|
ColumnPathState::Member(tag.until(member.tag()), vec![member])
|
||||||
|
}
|
||||||
|
|
||||||
|
ColumnPathState::Dot(tag, mut tags, _) => {
|
||||||
|
ColumnPathState::Member(tag.until(member.tag()), {
|
||||||
|
tags.push(member);
|
||||||
|
tags
|
||||||
|
})
|
||||||
|
}
|
||||||
|
ColumnPathState::Member(..) => {
|
||||||
|
ColumnPathState::Error(ShellError::type_error("column", member.tagged_type_name()))
|
||||||
|
}
|
||||||
|
ColumnPathState::Error(err) => ColumnPathState::Error(err),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn into_path(self, next: Peeked) -> Result<Tagged<Vec<Member>>, ShellError> {
|
||||||
|
match self {
|
||||||
|
ColumnPathState::Initial => Err(next.type_error("column path")),
|
||||||
|
ColumnPathState::LeadingDot(dot) => {
|
||||||
|
Err(ShellError::type_error("column", "dot".tagged(dot)))
|
||||||
|
}
|
||||||
|
ColumnPathState::Dot(_tag, _members, dot) => {
|
||||||
|
Err(ShellError::type_error("column", "dot".tagged(dot)))
|
||||||
|
}
|
||||||
|
ColumnPathState::Member(tag, tags) => Ok(tags.tagged(tag)),
|
||||||
|
ColumnPathState::Error(err) => Err(err),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn expand_column_path<'a, 'b>(
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Tagged<Vec<Member>>, ShellError> {
|
||||||
|
let mut state = ColumnPathState::Initial;
|
||||||
|
|
||||||
|
loop {
|
||||||
|
let member = MemberShape.expand_syntax(token_nodes, context);
|
||||||
|
|
||||||
|
match member {
|
||||||
|
Err(_) => break,
|
||||||
|
Ok(member) => state = state.member(member),
|
||||||
|
}
|
||||||
|
|
||||||
|
let dot = DotShape.expand_syntax(token_nodes, context);
|
||||||
|
|
||||||
|
match dot {
|
||||||
|
Err(_) => break,
|
||||||
|
Ok(dot) => state = state.dot(dot),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
state.into_path(token_nodes.peek_non_ws())
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct ColumnPathShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for ColumnPathShape {
|
||||||
|
type Output = Tagged<Vec<Member>>;
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Self::Output, ShellError> {
|
||||||
|
expand_column_path(token_nodes, context)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct MemberShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for MemberShape {
|
||||||
|
type Output = Member;
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Member, ShellError> {
|
||||||
|
let bare = BareShape.test(token_nodes, context);
|
||||||
|
if let Some(peeked) = bare {
|
||||||
|
let node = peeked.not_eof("column")?.commit();
|
||||||
|
return Ok(Member::Bare(node.tag()));
|
||||||
|
}
|
||||||
|
|
||||||
|
let string = StringShape.test(token_nodes, context);
|
||||||
|
|
||||||
|
if let Some(peeked) = string {
|
||||||
|
let node = peeked.not_eof("column")?.commit();
|
||||||
|
let (outer, inner) = node.expect_string();
|
||||||
|
|
||||||
|
return Ok(Member::String(outer, inner));
|
||||||
|
}
|
||||||
|
|
||||||
|
Err(token_nodes.peek_any().type_error("column"))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct DotShape;
|
||||||
|
|
||||||
|
impl SkipSyntax for DotShape {
|
||||||
|
fn skip<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &mut TokensIterator<'_>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<(), ShellError> {
|
||||||
|
expand_syntax(self, token_nodes, context)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ExpandSyntax for DotShape {
|
||||||
|
type Output = Tag;
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
_context: &ExpandContext,
|
||||||
|
) -> Result<Self::Output, ShellError> {
|
||||||
|
parse_single_node(token_nodes, "dot", |token, token_tag| {
|
||||||
|
Ok(match token {
|
||||||
|
RawToken::Operator(Operator::Dot) => token_tag,
|
||||||
|
_ => {
|
||||||
|
return Err(ShellError::type_error(
|
||||||
|
"dot",
|
||||||
|
token.type_name().tagged(token_tag),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Copy, Clone)]
|
||||||
|
pub struct InfixShape;
|
||||||
|
|
||||||
|
impl ExpandSyntax for InfixShape {
|
||||||
|
type Output = (Tag, Tagged<Operator>, Tag);
|
||||||
|
|
||||||
|
fn expand_syntax<'a, 'b>(
|
||||||
|
&self,
|
||||||
|
token_nodes: &'b mut TokensIterator<'a>,
|
||||||
|
context: &ExpandContext,
|
||||||
|
) -> Result<Self::Output, ShellError> {
|
||||||
|
let checkpoint = token_nodes.checkpoint();
|
||||||
|
|
||||||
|
// An infix operator must be prefixed by whitespace
|
||||||
|
let start = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?;
|
||||||
|
|
||||||
|
// Parse the next TokenNode after the whitespace
|
||||||
|
let operator =
|
||||||
|
parse_single_node(checkpoint.iterator, "infix operator", |token, token_tag| {
|
||||||
|
Ok(match token {
|
||||||
|
// If it's an operator (and not `.`), it's a match
|
||||||
|
RawToken::Operator(operator) if operator != Operator::Dot => {
|
||||||
|
operator.tagged(token_tag)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Otherwise, it's not a match
|
||||||
|
_ => {
|
||||||
|
return Err(ShellError::type_error(
|
||||||
|
"infix operator",
|
||||||
|
token.type_name().tagged(token_tag),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
})
|
||||||
|
})?;
|
||||||
|
|
||||||
|
// An infix operator must be followed by whitespace
|
||||||
|
let end = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?;
|
||||||
|
|
||||||
|
checkpoint.commit();
|
||||||
|
|
||||||
|
Ok((start, operator, end))
|
||||||
|
}
|
||||||
|
}
|
365
src/parser/hir/tokens_iterator.rs
Normal file
365
src/parser/hir/tokens_iterator.rs
Normal file
@ -0,0 +1,365 @@
|
|||||||
|
pub(crate) mod debug;
|
||||||
|
|
||||||
|
use crate::errors::ShellError;
|
||||||
|
use crate::parser::TokenNode;
|
||||||
|
use crate::{Tag, Tagged, TaggedItem};
|
||||||
|
use derive_new::new;
|
||||||
|
|
||||||
|
#[derive(Debug, new)]
|
||||||
|
pub struct TokensIterator<'a> {
|
||||||
|
tokens: &'a [TokenNode],
|
||||||
|
tag: Tag,
|
||||||
|
skip_ws: bool,
|
||||||
|
#[new(default)]
|
||||||
|
index: usize,
|
||||||
|
#[new(default)]
|
||||||
|
seen: indexmap::IndexSet<usize>,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct Checkpoint<'content, 'me> {
|
||||||
|
pub(crate) iterator: &'me mut TokensIterator<'content>,
|
||||||
|
index: usize,
|
||||||
|
seen: indexmap::IndexSet<usize>,
|
||||||
|
committed: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'content, 'me> Checkpoint<'content, 'me> {
|
||||||
|
pub(crate) fn commit(mut self) {
|
||||||
|
self.committed = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'content, 'me> std::ops::Drop for Checkpoint<'content, 'me> {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
if !self.committed {
|
||||||
|
self.iterator.index = self.index;
|
||||||
|
self.iterator.seen = self.seen.clone();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct Peeked<'content, 'me> {
|
||||||
|
pub(crate) node: Option<&'content TokenNode>,
|
||||||
|
iterator: &'me mut TokensIterator<'content>,
|
||||||
|
from: usize,
|
||||||
|
to: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'content, 'me> Peeked<'content, 'me> {
|
||||||
|
pub fn commit(&mut self) -> Option<&'content TokenNode> {
|
||||||
|
let Peeked {
|
||||||
|
node,
|
||||||
|
iterator,
|
||||||
|
from,
|
||||||
|
to,
|
||||||
|
} = self;
|
||||||
|
|
||||||
|
let node = (*node)?;
|
||||||
|
iterator.commit(*from, *to);
|
||||||
|
Some(node)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn not_eof(
|
||||||
|
self,
|
||||||
|
expected: impl Into<String>,
|
||||||
|
) -> Result<PeekedNode<'content, 'me>, ShellError> {
|
||||||
|
match self.node {
|
||||||
|
None => Err(ShellError::unexpected_eof(
|
||||||
|
expected,
|
||||||
|
self.iterator.eof_tag(),
|
||||||
|
)),
|
||||||
|
Some(node) => Ok(PeekedNode {
|
||||||
|
node,
|
||||||
|
iterator: self.iterator,
|
||||||
|
from: self.from,
|
||||||
|
to: self.to,
|
||||||
|
}),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn type_error(&self, expected: impl Into<String>) -> ShellError {
|
||||||
|
peek_error(&self.node, self.iterator.eof_tag(), expected)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub struct PeekedNode<'content, 'me> {
|
||||||
|
pub(crate) node: &'content TokenNode,
|
||||||
|
iterator: &'me mut TokensIterator<'content>,
|
||||||
|
from: usize,
|
||||||
|
to: usize,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'content, 'me> PeekedNode<'content, 'me> {
|
||||||
|
pub fn commit(self) -> &'content TokenNode {
|
||||||
|
let PeekedNode {
|
||||||
|
node,
|
||||||
|
iterator,
|
||||||
|
from,
|
||||||
|
to,
|
||||||
|
} = self;
|
||||||
|
|
||||||
|
iterator.commit(from, to);
|
||||||
|
node
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn rollback(self) {}
|
||||||
|
|
||||||
|
pub fn type_error(&self, expected: impl Into<String>) -> ShellError {
|
||||||
|
peek_error(&Some(self.node), self.iterator.eof_tag(), expected)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn peek_error(
|
||||||
|
node: &Option<&TokenNode>,
|
||||||
|
eof_tag: Tag,
|
||||||
|
expected: impl Into<String>,
|
||||||
|
) -> ShellError {
|
||||||
|
match node {
|
||||||
|
None => ShellError::unexpected_eof(expected, eof_tag),
|
||||||
|
Some(node) => ShellError::type_error(expected, node.tagged_type_name()),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'content> TokensIterator<'content> {
|
||||||
|
#[cfg(test)]
|
||||||
|
pub fn all(tokens: &'content [TokenNode], tag: Tag) -> TokensIterator<'content> {
|
||||||
|
TokensIterator::new(tokens, tag, false)
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
|
||||||
|
/// that you'll succeed.
|
||||||
|
pub fn checkpoint<'me>(&'me mut self) -> Checkpoint<'content, 'me> {
|
||||||
|
let index = self.index;
|
||||||
|
let seen = self.seen.clone();
|
||||||
|
|
||||||
|
Checkpoint {
|
||||||
|
iterator: self,
|
||||||
|
index,
|
||||||
|
seen,
|
||||||
|
committed: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn anchor(&self) -> uuid::Uuid {
|
||||||
|
self.tag.anchor
|
||||||
|
}
|
||||||
|
|
||||||
|
fn eof_tag(&self) -> Tag {
|
||||||
|
Tag::from((self.tag.span.end(), self.tag.span.end(), self.tag.anchor))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn typed_tag_at_cursor(&mut self) -> Tagged<&'static str> {
|
||||||
|
let next = self.peek_any();
|
||||||
|
|
||||||
|
match next.node {
|
||||||
|
None => "end".tagged(self.eof_tag()),
|
||||||
|
Some(node) => node.tagged_type_name(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn remove(&mut self, position: usize) {
|
||||||
|
self.seen.insert(position);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn at_end(&self) -> bool {
|
||||||
|
peek(self, self.skip_ws).is_none()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn at_end_possible_ws(&self) -> bool {
|
||||||
|
peek(self, true).is_none()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn advance(&mut self) {
|
||||||
|
self.seen.insert(self.index);
|
||||||
|
self.index += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn extract<T>(&mut self, f: impl Fn(&TokenNode) -> Option<T>) -> Option<(usize, T)> {
|
||||||
|
for (i, item) in self.tokens.iter().enumerate() {
|
||||||
|
if self.seen.contains(&i) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
match f(item) {
|
||||||
|
None => {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
Some(value) => {
|
||||||
|
self.seen.insert(i);
|
||||||
|
return Some((i, value));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn move_to(&mut self, pos: usize) {
|
||||||
|
self.index = pos;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn restart(&mut self) {
|
||||||
|
self.index = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn clone(&self) -> TokensIterator<'content> {
|
||||||
|
TokensIterator {
|
||||||
|
tokens: self.tokens,
|
||||||
|
tag: self.tag,
|
||||||
|
index: self.index,
|
||||||
|
seen: self.seen.clone(),
|
||||||
|
skip_ws: self.skip_ws,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get the next token, not including whitespace
|
||||||
|
pub fn next_non_ws(&mut self) -> Option<&TokenNode> {
|
||||||
|
let mut peeked = start_next(self, true);
|
||||||
|
peeked.commit()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Peek the next token, not including whitespace
|
||||||
|
pub fn peek_non_ws<'me>(&'me mut self) -> Peeked<'content, 'me> {
|
||||||
|
start_next(self, true)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Peek the next token, including whitespace
|
||||||
|
pub fn peek_any<'me>(&'me mut self) -> Peeked<'content, 'me> {
|
||||||
|
start_next(self, false)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn commit(&mut self, from: usize, to: usize) {
|
||||||
|
for index in from..to {
|
||||||
|
self.seen.insert(index);
|
||||||
|
}
|
||||||
|
|
||||||
|
self.index = to;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn debug_remaining(&self) -> Vec<TokenNode> {
|
||||||
|
let mut tokens = self.clone();
|
||||||
|
tokens.restart();
|
||||||
|
tokens.cloned().collect()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<'a> Iterator for TokensIterator<'a> {
|
||||||
|
type Item = &'a TokenNode;
|
||||||
|
|
||||||
|
fn next(&mut self) -> Option<&'a TokenNode> {
|
||||||
|
next(self, self.skip_ws)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn peek<'content, 'me>(
|
||||||
|
iterator: &TokensIterator<'content>,
|
||||||
|
skip_ws: bool,
|
||||||
|
) -> Option<&'content TokenNode> {
|
||||||
|
let mut to = iterator.index;
|
||||||
|
|
||||||
|
loop {
|
||||||
|
if to >= iterator.tokens.len() {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
if iterator.seen.contains(&to) {
|
||||||
|
to += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if to >= iterator.tokens.len() {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
let node = &iterator.tokens[to];
|
||||||
|
|
||||||
|
match node {
|
||||||
|
TokenNode::Whitespace(_) if skip_ws => {
|
||||||
|
to += 1;
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
return Some(node);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn start_next<'content, 'me>(
|
||||||
|
iterator: &'me mut TokensIterator<'content>,
|
||||||
|
skip_ws: bool,
|
||||||
|
) -> Peeked<'content, 'me> {
|
||||||
|
let from = iterator.index;
|
||||||
|
let mut to = iterator.index;
|
||||||
|
|
||||||
|
loop {
|
||||||
|
if to >= iterator.tokens.len() {
|
||||||
|
return Peeked {
|
||||||
|
node: None,
|
||||||
|
iterator,
|
||||||
|
from,
|
||||||
|
to,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if iterator.seen.contains(&to) {
|
||||||
|
to += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if to >= iterator.tokens.len() {
|
||||||
|
return Peeked {
|
||||||
|
node: None,
|
||||||
|
iterator,
|
||||||
|
from,
|
||||||
|
to,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
let node = &iterator.tokens[to];
|
||||||
|
|
||||||
|
match node {
|
||||||
|
TokenNode::Whitespace(_) if skip_ws => {
|
||||||
|
to += 1;
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
to += 1;
|
||||||
|
return Peeked {
|
||||||
|
node: Some(node),
|
||||||
|
iterator,
|
||||||
|
from,
|
||||||
|
to,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn next<'a>(iterator: &mut TokensIterator<'a>, skip_ws: bool) -> Option<&'a TokenNode> {
|
||||||
|
loop {
|
||||||
|
if iterator.index >= iterator.tokens.len() {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
if iterator.seen.contains(&iterator.index) {
|
||||||
|
iterator.advance();
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if iterator.index >= iterator.tokens.len() {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
match &iterator.tokens[iterator.index] {
|
||||||
|
TokenNode::Whitespace(_) if skip_ws => {
|
||||||
|
iterator.advance();
|
||||||
|
}
|
||||||
|
other => {
|
||||||
|
iterator.advance();
|
||||||
|
return Some(other);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
30
src/parser/hir/tokens_iterator/debug.rs
Normal file
30
src/parser/hir/tokens_iterator/debug.rs
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
use crate::parser::hir::tokens_iterator::TokensIterator;
|
||||||
|
use crate::traits::ToDebug;
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
pub(crate) enum DebugIteratorToken {
|
||||||
|
Seen(String),
|
||||||
|
Unseen(String),
|
||||||
|
Cursor,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn debug_tokens(iterator: &TokensIterator, source: &str) -> Vec<DebugIteratorToken> {
|
||||||
|
let mut out = vec![];
|
||||||
|
|
||||||
|
for (i, token) in iterator.tokens.iter().enumerate() {
|
||||||
|
if iterator.index == i {
|
||||||
|
out.push(DebugIteratorToken::Cursor);
|
||||||
|
}
|
||||||
|
|
||||||
|
if iterator.seen.contains(&i) {
|
||||||
|
out.push(DebugIteratorToken::Seen(format!("{}", token.debug(source))));
|
||||||
|
} else {
|
||||||
|
out.push(DebugIteratorToken::Unseen(format!(
|
||||||
|
"{}",
|
||||||
|
token.debug(source)
|
||||||
|
)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
out
|
||||||
|
}
|
@ -1,6 +1,7 @@
|
|||||||
use crate::Tag;
|
use crate::Tag;
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use language_reporting::{FileName, Location};
|
use language_reporting::{FileName, Location};
|
||||||
|
use log::trace;
|
||||||
use uuid::Uuid;
|
use uuid::Uuid;
|
||||||
|
|
||||||
#[derive(new, Debug, Clone)]
|
#[derive(new, Debug, Clone)]
|
||||||
@ -18,7 +19,7 @@ impl language_reporting::ReportingFiles for Files {
|
|||||||
from_index: usize,
|
from_index: usize,
|
||||||
to_index: usize,
|
to_index: usize,
|
||||||
) -> Option<Self::Span> {
|
) -> Option<Self::Span> {
|
||||||
Some(Tag::from((from_index, to_index, file)))
|
Some(Tag::new(file, (from_index, to_index).into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
fn file_id(&self, tag: Self::Span) -> Self::FileId {
|
fn file_id(&self, tag: Self::Span) -> Self::FileId {
|
||||||
@ -38,8 +39,18 @@ impl language_reporting::ReportingFiles for Files {
|
|||||||
let mut seen_lines = 0;
|
let mut seen_lines = 0;
|
||||||
let mut seen_bytes = 0;
|
let mut seen_bytes = 0;
|
||||||
|
|
||||||
for (pos, _) in source.match_indices('\n') {
|
for (pos, slice) in source.match_indices('\n') {
|
||||||
if pos > byte_index {
|
trace!(
|
||||||
|
"SEARCH={} SEEN={} POS={} SLICE={:?} LEN={} ALL={:?}",
|
||||||
|
byte_index,
|
||||||
|
seen_bytes,
|
||||||
|
pos,
|
||||||
|
slice,
|
||||||
|
source.len(),
|
||||||
|
source
|
||||||
|
);
|
||||||
|
|
||||||
|
if pos >= byte_index {
|
||||||
return Some(language_reporting::Location::new(
|
return Some(language_reporting::Location::new(
|
||||||
seen_lines,
|
seen_lines,
|
||||||
byte_index - seen_bytes,
|
byte_index - seen_bytes,
|
||||||
@ -53,7 +64,7 @@ impl language_reporting::ReportingFiles for Files {
|
|||||||
if seen_lines == 0 {
|
if seen_lines == 0 {
|
||||||
Some(language_reporting::Location::new(0, byte_index))
|
Some(language_reporting::Location::new(0, byte_index))
|
||||||
} else {
|
} else {
|
||||||
None
|
panic!("byte index {} wasn't valid", byte_index);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -64,7 +75,7 @@ impl language_reporting::ReportingFiles for Files {
|
|||||||
|
|
||||||
for (pos, _) in source.match_indices('\n') {
|
for (pos, _) in source.match_indices('\n') {
|
||||||
if seen_lines == lineno {
|
if seen_lines == lineno {
|
||||||
return Some(Tag::from((seen_bytes, pos, file)));
|
return Some(Tag::new(file, (seen_bytes, pos + 1).into()));
|
||||||
} else {
|
} else {
|
||||||
seen_lines += 1;
|
seen_lines += 1;
|
||||||
seen_bytes = pos + 1;
|
seen_bytes = pos + 1;
|
||||||
@ -72,16 +83,18 @@ impl language_reporting::ReportingFiles for Files {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if seen_lines == 0 {
|
if seen_lines == 0 {
|
||||||
Some(Tag::from((0, self.snippet.len() - 1, file)))
|
Some(Tag::new(file, (0, self.snippet.len() - 1).into()))
|
||||||
} else {
|
} else {
|
||||||
None
|
None
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn source(&self, tag: Self::Span) -> Option<String> {
|
fn source(&self, tag: Self::Span) -> Option<String> {
|
||||||
if tag.span.start > tag.span.end {
|
trace!("source(tag={:?}) snippet={:?}", tag, self.snippet);
|
||||||
|
|
||||||
|
if tag.span.start() > tag.span.end() {
|
||||||
return None;
|
return None;
|
||||||
} else if tag.span.end >= self.snippet.len() {
|
} else if tag.span.end() > self.snippet.len() {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
Some(tag.slice(&self.snippet).to_string())
|
Some(tag.slice(&self.snippet).to_string())
|
||||||
|
@ -11,6 +11,7 @@ pub enum Operator {
|
|||||||
GreaterThan,
|
GreaterThan,
|
||||||
LessThanOrEqual,
|
LessThanOrEqual,
|
||||||
GreaterThanOrEqual,
|
GreaterThanOrEqual,
|
||||||
|
Dot,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ToDebug for Operator {
|
impl ToDebug for Operator {
|
||||||
@ -32,6 +33,7 @@ impl Operator {
|
|||||||
Operator::GreaterThan => ">",
|
Operator::GreaterThan => ">",
|
||||||
Operator::LessThanOrEqual => "<=",
|
Operator::LessThanOrEqual => "<=",
|
||||||
Operator::GreaterThanOrEqual => ">=",
|
Operator::GreaterThanOrEqual => ">=",
|
||||||
|
Operator::Dot => ".",
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -52,6 +54,7 @@ impl FromStr for Operator {
|
|||||||
">" => Ok(Operator::GreaterThan),
|
">" => Ok(Operator::GreaterThan),
|
||||||
"<=" => Ok(Operator::LessThanOrEqual),
|
"<=" => Ok(Operator::LessThanOrEqual),
|
||||||
">=" => Ok(Operator::GreaterThanOrEqual),
|
">=" => Ok(Operator::GreaterThanOrEqual),
|
||||||
|
"." => Ok(Operator::Dot),
|
||||||
_ => Err(()),
|
_ => Err(()),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
File diff suppressed because it is too large
Load Diff
@ -1,4 +1,4 @@
|
|||||||
use crate::parser::CallNode;
|
use crate::parser::TokenNode;
|
||||||
use crate::traits::ToDebug;
|
use crate::traits::ToDebug;
|
||||||
use crate::{Tag, Tagged};
|
use crate::{Tag, Tagged};
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
@ -7,20 +7,16 @@ use std::fmt;
|
|||||||
|
|
||||||
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, new)]
|
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, new)]
|
||||||
pub struct Pipeline {
|
pub struct Pipeline {
|
||||||
pub(crate) parts: Vec<PipelineElement>,
|
pub(crate) parts: Vec<Tagged<PipelineElement>>,
|
||||||
pub(crate) post_ws: Option<Tag>,
|
// pub(crate) post_ws: Option<Tag>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ToDebug for Pipeline {
|
impl ToDebug for Pipeline {
|
||||||
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
||||||
for part in &self.parts {
|
for part in self.parts.iter() {
|
||||||
write!(f, "{}", part.debug(source))?;
|
write!(f, "{}", part.debug(source))?;
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(post_ws) = self.post_ws {
|
|
||||||
write!(f, "{}", post_ws.slice(source))?
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -28,10 +24,7 @@ impl ToDebug for Pipeline {
|
|||||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
|
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
|
||||||
pub struct PipelineElement {
|
pub struct PipelineElement {
|
||||||
pub pipe: Option<Tag>,
|
pub pipe: Option<Tag>,
|
||||||
pub pre_ws: Option<Tag>,
|
pub tokens: Tagged<Vec<TokenNode>>,
|
||||||
#[get = "pub(crate)"]
|
|
||||||
call: Tagged<CallNode>,
|
|
||||||
pub post_ws: Option<Tag>,
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ToDebug for PipelineElement {
|
impl ToDebug for PipelineElement {
|
||||||
@ -40,14 +33,8 @@ impl ToDebug for PipelineElement {
|
|||||||
write!(f, "{}", pipe.slice(source))?;
|
write!(f, "{}", pipe.slice(source))?;
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(pre_ws) = self.pre_ws {
|
for token in &self.tokens.item {
|
||||||
write!(f, "{}", pre_ws.slice(source))?;
|
write!(f, "{}", token.debug(source))?;
|
||||||
}
|
|
||||||
|
|
||||||
write!(f, "{}", self.call.debug(source))?;
|
|
||||||
|
|
||||||
if let Some(post_ws) = self.post_ws {
|
|
||||||
write!(f, "{}", post_ws.slice(source))?;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
|
@ -1,5 +1,6 @@
|
|||||||
use crate::errors::ShellError;
|
use crate::errors::ShellError;
|
||||||
use crate::parser::parse::{call_node::*, flag::*, operator::*, pipeline::*, tokens::*};
|
use crate::parser::parse::{call_node::*, flag::*, pipeline::*, tokens::*};
|
||||||
|
use crate::prelude::*;
|
||||||
use crate::traits::ToDebug;
|
use crate::traits::ToDebug;
|
||||||
use crate::{Tag, Tagged, Text};
|
use crate::{Tag, Tagged, Text};
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
@ -12,15 +13,14 @@ pub enum TokenNode {
|
|||||||
Token(Token),
|
Token(Token),
|
||||||
|
|
||||||
Call(Tagged<CallNode>),
|
Call(Tagged<CallNode>),
|
||||||
|
Nodes(Tagged<Vec<TokenNode>>),
|
||||||
Delimited(Tagged<DelimitedNode>),
|
Delimited(Tagged<DelimitedNode>),
|
||||||
Pipeline(Tagged<Pipeline>),
|
Pipeline(Tagged<Pipeline>),
|
||||||
Operator(Tagged<Operator>),
|
|
||||||
Flag(Tagged<Flag>),
|
Flag(Tagged<Flag>),
|
||||||
Member(Tag),
|
Member(Tag),
|
||||||
Whitespace(Tag),
|
Whitespace(Tag),
|
||||||
|
|
||||||
Error(Tagged<Box<ShellError>>),
|
Error(Tagged<Box<ShellError>>),
|
||||||
Path(Tagged<PathNode>),
|
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ToDebug for TokenNode {
|
impl ToDebug for TokenNode {
|
||||||
@ -94,32 +94,33 @@ impl TokenNode {
|
|||||||
pub fn tag(&self) -> Tag {
|
pub fn tag(&self) -> Tag {
|
||||||
match self {
|
match self {
|
||||||
TokenNode::Token(t) => t.tag(),
|
TokenNode::Token(t) => t.tag(),
|
||||||
|
TokenNode::Nodes(t) => t.tag(),
|
||||||
TokenNode::Call(s) => s.tag(),
|
TokenNode::Call(s) => s.tag(),
|
||||||
TokenNode::Delimited(s) => s.tag(),
|
TokenNode::Delimited(s) => s.tag(),
|
||||||
TokenNode::Pipeline(s) => s.tag(),
|
TokenNode::Pipeline(s) => s.tag(),
|
||||||
TokenNode::Operator(s) => s.tag(),
|
|
||||||
TokenNode::Flag(s) => s.tag(),
|
TokenNode::Flag(s) => s.tag(),
|
||||||
TokenNode::Member(s) => *s,
|
TokenNode::Member(s) => *s,
|
||||||
TokenNode::Whitespace(s) => *s,
|
TokenNode::Whitespace(s) => *s,
|
||||||
TokenNode::Error(s) => s.tag(),
|
TokenNode::Error(s) => s.tag(),
|
||||||
TokenNode::Path(s) => s.tag(),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn type_name(&self) -> String {
|
pub fn type_name(&self) -> &'static str {
|
||||||
match self {
|
match self {
|
||||||
TokenNode::Token(t) => t.type_name(),
|
TokenNode::Token(t) => t.type_name(),
|
||||||
|
TokenNode::Nodes(_) => "nodes",
|
||||||
TokenNode::Call(_) => "command",
|
TokenNode::Call(_) => "command",
|
||||||
TokenNode::Delimited(d) => d.type_name(),
|
TokenNode::Delimited(d) => d.type_name(),
|
||||||
TokenNode::Pipeline(_) => "pipeline",
|
TokenNode::Pipeline(_) => "pipeline",
|
||||||
TokenNode::Operator(_) => "operator",
|
|
||||||
TokenNode::Flag(_) => "flag",
|
TokenNode::Flag(_) => "flag",
|
||||||
TokenNode::Member(_) => "member",
|
TokenNode::Member(_) => "member",
|
||||||
TokenNode::Whitespace(_) => "whitespace",
|
TokenNode::Whitespace(_) => "whitespace",
|
||||||
TokenNode::Error(_) => "error",
|
TokenNode::Error(_) => "error",
|
||||||
TokenNode::Path(_) => "path",
|
|
||||||
}
|
}
|
||||||
.to_string()
|
}
|
||||||
|
|
||||||
|
pub fn tagged_type_name(&self) -> Tagged<&'static str> {
|
||||||
|
self.type_name().tagged(self.tag())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn old_debug<'a>(&'a self, source: &'a Text) -> DebugTokenNode<'a> {
|
pub fn old_debug<'a>(&'a self, source: &'a Text) -> DebugTokenNode<'a> {
|
||||||
@ -134,6 +135,16 @@ impl TokenNode {
|
|||||||
self.tag().slice(source)
|
self.tag().slice(source)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn get_variable(&self) -> Result<(Tag, Tag), ShellError> {
|
||||||
|
match self {
|
||||||
|
TokenNode::Token(Tagged {
|
||||||
|
item: RawToken::Variable(inner_tag),
|
||||||
|
tag: outer_tag,
|
||||||
|
}) => Ok((*outer_tag, *inner_tag)),
|
||||||
|
_ => Err(ShellError::type_error("variable", self.tagged_type_name())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn is_bare(&self) -> bool {
|
pub fn is_bare(&self) -> bool {
|
||||||
match self {
|
match self {
|
||||||
TokenNode::Token(Tagged {
|
TokenNode::Token(Tagged {
|
||||||
@ -144,6 +155,20 @@ impl TokenNode {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn as_block(&self) -> Option<Tagged<&[TokenNode]>> {
|
||||||
|
match self {
|
||||||
|
TokenNode::Delimited(Tagged {
|
||||||
|
item:
|
||||||
|
DelimitedNode {
|
||||||
|
delimiter,
|
||||||
|
children,
|
||||||
|
},
|
||||||
|
tag,
|
||||||
|
}) if *delimiter == Delimiter::Brace => Some((&children[..]).tagged(tag)),
|
||||||
|
_ => None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn is_external(&self) -> bool {
|
pub fn is_external(&self) -> bool {
|
||||||
match self {
|
match self {
|
||||||
TokenNode::Token(Tagged {
|
TokenNode::Token(Tagged {
|
||||||
@ -181,13 +206,60 @@ impl TokenNode {
|
|||||||
_ => Err(ShellError::string("unimplemented")),
|
_ => Err(ShellError::string("unimplemented")),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn is_whitespace(&self) -> bool {
|
||||||
|
match self {
|
||||||
|
TokenNode::Whitespace(_) => true,
|
||||||
|
_ => false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn expect_string(&self) -> (Tag, Tag) {
|
||||||
|
match self {
|
||||||
|
TokenNode::Token(Tagged {
|
||||||
|
item: RawToken::String(inner_tag),
|
||||||
|
tag: outer_tag,
|
||||||
|
}) => (*outer_tag, *inner_tag),
|
||||||
|
other => panic!("Expected string, found {:?}", other),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
impl TokenNode {
|
||||||
|
pub fn expect_list(&self) -> Tagged<&[TokenNode]> {
|
||||||
|
match self {
|
||||||
|
TokenNode::Nodes(Tagged { item, tag }) => (&item[..]).tagged(tag),
|
||||||
|
other => panic!("Expected list, found {:?}", other),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn expect_var(&self) -> (Tag, Tag) {
|
||||||
|
match self {
|
||||||
|
TokenNode::Token(Tagged {
|
||||||
|
item: RawToken::Variable(inner_tag),
|
||||||
|
tag: outer_tag,
|
||||||
|
}) => (*outer_tag, *inner_tag),
|
||||||
|
other => panic!("Expected var, found {:?}", other),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn expect_bare(&self) -> Tag {
|
||||||
|
match self {
|
||||||
|
TokenNode::Token(Tagged {
|
||||||
|
item: RawToken::Bare,
|
||||||
|
tag,
|
||||||
|
}) => *tag,
|
||||||
|
other => panic!("Expected var, found {:?}", other),
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
|
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
|
||||||
#[get = "pub(crate)"]
|
#[get = "pub(crate)"]
|
||||||
pub struct DelimitedNode {
|
pub struct DelimitedNode {
|
||||||
delimiter: Delimiter,
|
pub(crate) delimiter: Delimiter,
|
||||||
children: Vec<TokenNode>,
|
pub(crate) children: Vec<TokenNode>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl DelimitedNode {
|
impl DelimitedNode {
|
||||||
@ -207,6 +279,24 @@ pub enum Delimiter {
|
|||||||
Square,
|
Square,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
impl Delimiter {
|
||||||
|
pub(crate) fn open(&self) -> char {
|
||||||
|
match self {
|
||||||
|
Delimiter::Paren => '(',
|
||||||
|
Delimiter::Brace => '{',
|
||||||
|
Delimiter::Square => '[',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn close(&self) -> char {
|
||||||
|
match self {
|
||||||
|
Delimiter::Paren => ')',
|
||||||
|
Delimiter::Brace => '}',
|
||||||
|
Delimiter::Square => ']',
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
|
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
|
||||||
#[get = "pub(crate)"]
|
#[get = "pub(crate)"]
|
||||||
pub struct PathNode {
|
pub struct PathNode {
|
||||||
|
@ -3,7 +3,7 @@ use crate::prelude::*;
|
|||||||
use crate::parser::parse::flag::{Flag, FlagKind};
|
use crate::parser::parse::flag::{Flag, FlagKind};
|
||||||
use crate::parser::parse::operator::Operator;
|
use crate::parser::parse::operator::Operator;
|
||||||
use crate::parser::parse::pipeline::{Pipeline, PipelineElement};
|
use crate::parser::parse::pipeline::{Pipeline, PipelineElement};
|
||||||
use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, PathNode, TokenNode};
|
use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
|
||||||
use crate::parser::parse::tokens::{RawNumber, RawToken};
|
use crate::parser::parse::tokens::{RawNumber, RawToken};
|
||||||
use crate::parser::parse::unit::Unit;
|
use crate::parser::parse::unit::Unit;
|
||||||
use crate::parser::CallNode;
|
use crate::parser::CallNode;
|
||||||
@ -31,60 +31,68 @@ impl TokenTreeBuilder {
|
|||||||
(node, builder.output)
|
(node, builder.output)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn pipeline(input: Vec<(Option<&str>, CurriedCall, Option<&str>)>) -> CurriedToken {
|
fn build_tagged<T>(&mut self, callback: impl FnOnce(&mut TokenTreeBuilder) -> T) -> Tagged<T> {
|
||||||
let input: Vec<(Option<String>, CurriedCall, Option<String>)> = input
|
let start = self.pos;
|
||||||
.into_iter()
|
let ret = callback(self);
|
||||||
.map(|(pre, call, post)| {
|
let end = self.pos;
|
||||||
(
|
|
||||||
pre.map(|s| s.to_string()),
|
|
||||||
call,
|
|
||||||
post.map(|s| s.to_string()),
|
|
||||||
)
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
|
ret.tagged((start, end, self.anchor))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn pipeline(input: Vec<Vec<CurriedToken>>) -> CurriedToken {
|
||||||
Box::new(move |b| {
|
Box::new(move |b| {
|
||||||
let start = b.pos;
|
let start = b.pos;
|
||||||
|
|
||||||
let mut out: Vec<PipelineElement> = vec![];
|
let mut out: Vec<Tagged<PipelineElement>> = vec![];
|
||||||
|
|
||||||
let mut input = input.into_iter().peekable();
|
let mut input = input.into_iter().peekable();
|
||||||
let (pre, call, post) = input
|
let head = input
|
||||||
.next()
|
.next()
|
||||||
.expect("A pipeline must contain at least one element");
|
.expect("A pipeline must contain at least one element");
|
||||||
|
|
||||||
let pipe = None;
|
let pipe = None;
|
||||||
let pre_tag = pre.map(|pre| b.consume_tag(&pre));
|
let head = b.build_tagged(|b| head.into_iter().map(|node| node(b)).collect());
|
||||||
let call = call(b);
|
|
||||||
let post_tag = post.map(|post| b.consume_tag(&post));
|
|
||||||
|
|
||||||
out.push(PipelineElement::new(pipe, pre_tag, call, post_tag));
|
let head_tag: Tag = head.tag;
|
||||||
|
out.push(PipelineElement::new(pipe, head).tagged(head_tag));
|
||||||
|
|
||||||
loop {
|
loop {
|
||||||
match input.next() {
|
match input.next() {
|
||||||
None => break,
|
None => break,
|
||||||
Some((pre, call, post)) => {
|
Some(node) => {
|
||||||
|
let start = b.pos;
|
||||||
let pipe = Some(b.consume_tag("|"));
|
let pipe = Some(b.consume_tag("|"));
|
||||||
let pre_span = pre.map(|pre| b.consume_tag(&pre));
|
let node =
|
||||||
let call = call(b);
|
b.build_tagged(|b| node.into_iter().map(|node| node(b)).collect());
|
||||||
let post_span = post.map(|post| b.consume_tag(&post));
|
let end = b.pos;
|
||||||
|
|
||||||
out.push(PipelineElement::new(pipe, pre_span, call, post_span));
|
out.push(PipelineElement::new(pipe, node).tagged((start, end, b.anchor)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let end = b.pos;
|
let end = b.pos;
|
||||||
|
|
||||||
TokenTreeBuilder::tagged_pipeline((out, None), (start, end, b.anchor))
|
TokenTreeBuilder::tagged_pipeline(out, (start, end, b.anchor))
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn tagged_pipeline(
|
pub fn tagged_pipeline(input: Vec<Tagged<PipelineElement>>, tag: impl Into<Tag>) -> TokenNode {
|
||||||
input: (Vec<PipelineElement>, Option<Tag>),
|
TokenNode::Pipeline(Pipeline::new(input).tagged(tag.into()))
|
||||||
tag: impl Into<Tag>,
|
}
|
||||||
) -> TokenNode {
|
|
||||||
TokenNode::Pipeline(Pipeline::new(input.0, input.1.into()).tagged(tag.into()))
|
pub fn token_list(input: Vec<CurriedToken>) -> CurriedToken {
|
||||||
|
Box::new(move |b| {
|
||||||
|
let start = b.pos;
|
||||||
|
let tokens = input.into_iter().map(|i| i(b)).collect();
|
||||||
|
let end = b.pos;
|
||||||
|
|
||||||
|
TokenTreeBuilder::tagged_token_list(tokens, (start, end, b.anchor))
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn tagged_token_list(input: Vec<TokenNode>, tag: impl Into<Tag>) -> TokenNode {
|
||||||
|
TokenNode::Nodes(input.tagged(tag))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn op(input: impl Into<Operator>) -> CurriedToken {
|
pub fn op(input: impl Into<Operator>) -> CurriedToken {
|
||||||
@ -100,7 +108,7 @@ impl TokenTreeBuilder {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn tagged_op(input: impl Into<Operator>, tag: impl Into<Tag>) -> TokenNode {
|
pub fn tagged_op(input: impl Into<Operator>, tag: impl Into<Tag>) -> TokenNode {
|
||||||
TokenNode::Operator(input.into().tagged(tag.into()))
|
TokenNode::Token(RawToken::Operator(input.into()).tagged(tag.into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn string(input: impl Into<String>) -> CurriedToken {
|
pub fn string(input: impl Into<String>) -> CurriedToken {
|
||||||
@ -168,8 +176,23 @@ impl TokenTreeBuilder {
|
|||||||
TokenNode::Token(RawToken::ExternalWord.tagged(input.into()))
|
TokenNode::Token(RawToken::ExternalWord.tagged(input.into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn tagged_external(input: impl Into<Tag>, tag: impl Into<Tag>) -> TokenNode {
|
pub fn external_command(input: impl Into<String>) -> CurriedToken {
|
||||||
TokenNode::Token(RawToken::ExternalCommand(input.into()).tagged(tag.into()))
|
let input = input.into();
|
||||||
|
|
||||||
|
Box::new(move |b| {
|
||||||
|
let (outer_start, _) = b.consume("^");
|
||||||
|
let (inner_start, end) = b.consume(&input);
|
||||||
|
b.pos = end;
|
||||||
|
|
||||||
|
TokenTreeBuilder::tagged_external_command(
|
||||||
|
(inner_start, end, b.anchor),
|
||||||
|
(outer_start, end, b.anchor),
|
||||||
|
)
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn tagged_external_command(inner: impl Into<Tag>, outer: impl Into<Tag>) -> TokenNode {
|
||||||
|
TokenNode::Token(RawToken::ExternalCommand(inner.into()).tagged(outer.into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn int(input: impl Into<BigInt>) -> CurriedToken {
|
pub fn int(input: impl Into<BigInt>) -> CurriedToken {
|
||||||
@ -229,29 +252,6 @@ impl TokenTreeBuilder {
|
|||||||
TokenNode::Token(RawToken::Size(int, unit).tagged(tag.into()))
|
TokenNode::Token(RawToken::Size(int, unit).tagged(tag.into()))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn path(head: CurriedToken, tail: Vec<CurriedToken>) -> CurriedToken {
|
|
||||||
Box::new(move |b| {
|
|
||||||
let start = b.pos;
|
|
||||||
let head = head(b);
|
|
||||||
|
|
||||||
let mut output = vec![];
|
|
||||||
|
|
||||||
for item in tail {
|
|
||||||
b.consume(".");
|
|
||||||
|
|
||||||
output.push(item(b));
|
|
||||||
}
|
|
||||||
|
|
||||||
let end = b.pos;
|
|
||||||
|
|
||||||
TokenTreeBuilder::tagged_path((head, output), (start, end, b.anchor))
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn tagged_path(input: (TokenNode, Vec<TokenNode>), tag: impl Into<Tag>) -> TokenNode {
|
|
||||||
TokenNode::Path(PathNode::new(Box::new(input.0), input.1).tagged(tag.into()))
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn var(input: impl Into<String>) -> CurriedToken {
|
pub fn var(input: impl Into<String>) -> CurriedToken {
|
||||||
let input = input.into();
|
let input = input.into();
|
||||||
|
|
||||||
|
@ -1,4 +1,5 @@
|
|||||||
use crate::parser::parse::unit::*;
|
use crate::parser::parse::unit::*;
|
||||||
|
use crate::parser::Operator;
|
||||||
use crate::prelude::*;
|
use crate::prelude::*;
|
||||||
use crate::{Tagged, Text};
|
use crate::{Tagged, Text};
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
@ -7,6 +8,7 @@ use std::str::FromStr;
|
|||||||
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
|
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
|
||||||
pub enum RawToken {
|
pub enum RawToken {
|
||||||
Number(RawNumber),
|
Number(RawNumber),
|
||||||
|
Operator(Operator),
|
||||||
Size(RawNumber, Unit),
|
Size(RawNumber, Unit),
|
||||||
String(Tag),
|
String(Tag),
|
||||||
Variable(Tag),
|
Variable(Tag),
|
||||||
@ -49,12 +51,13 @@ impl RawToken {
|
|||||||
pub fn type_name(&self) -> &'static str {
|
pub fn type_name(&self) -> &'static str {
|
||||||
match self {
|
match self {
|
||||||
RawToken::Number(_) => "Number",
|
RawToken::Number(_) => "Number",
|
||||||
|
RawToken::Operator(..) => "operator",
|
||||||
RawToken::Size(..) => "Size",
|
RawToken::Size(..) => "Size",
|
||||||
RawToken::String(_) => "String",
|
RawToken::String(_) => "String",
|
||||||
RawToken::Variable(_) => "Variable",
|
RawToken::Variable(_) => "variable",
|
||||||
RawToken::ExternalCommand(_) => "ExternalCommand",
|
RawToken::ExternalCommand(_) => "external command",
|
||||||
RawToken::ExternalWord => "ExternalWord",
|
RawToken::ExternalWord => "external word",
|
||||||
RawToken::GlobPattern => "GlobPattern",
|
RawToken::GlobPattern => "glob pattern",
|
||||||
RawToken::Bare => "String",
|
RawToken::Bare => "String",
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -1,92 +1,35 @@
|
|||||||
use crate::context::Context;
|
|
||||||
use crate::errors::{ArgumentError, ShellError};
|
use crate::errors::{ArgumentError, ShellError};
|
||||||
|
use crate::parser::hir::syntax_shape::{expand_expr, spaced};
|
||||||
use crate::parser::registry::{NamedType, PositionalType, Signature};
|
use crate::parser::registry::{NamedType, PositionalType, Signature};
|
||||||
use crate::parser::{baseline_parse_tokens, CallNode};
|
use crate::parser::TokensIterator;
|
||||||
use crate::parser::{
|
use crate::parser::{
|
||||||
hir::{self, NamedArguments},
|
hir::{self, ExpandContext, NamedArguments},
|
||||||
Flag, RawToken, TokenNode,
|
Flag,
|
||||||
};
|
};
|
||||||
use crate::traits::ToDebug;
|
use crate::traits::ToDebug;
|
||||||
use crate::{Tag, Tagged, TaggedItem, Text};
|
use crate::{Tag, Tagged, Text};
|
||||||
use log::trace;
|
use log::trace;
|
||||||
|
|
||||||
pub fn parse_command(
|
pub fn parse_command_tail(
|
||||||
config: &Signature,
|
config: &Signature,
|
||||||
context: &Context,
|
context: &ExpandContext,
|
||||||
call: &Tagged<CallNode>,
|
tail: &mut TokensIterator,
|
||||||
source: &Text,
|
|
||||||
) -> Result<hir::Call, ShellError> {
|
|
||||||
let Tagged { item: raw_call, .. } = call;
|
|
||||||
|
|
||||||
trace!("Processing {:?}", config);
|
|
||||||
|
|
||||||
let head = parse_command_head(call.head())?;
|
|
||||||
|
|
||||||
let children: Option<Vec<TokenNode>> = raw_call.children().as_ref().map(|nodes| {
|
|
||||||
nodes
|
|
||||||
.iter()
|
|
||||||
.cloned()
|
|
||||||
.filter(|node| match node {
|
|
||||||
TokenNode::Whitespace(_) => false,
|
|
||||||
_ => true,
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
});
|
|
||||||
|
|
||||||
match parse_command_tail(&config, context, children, source, call.tag())? {
|
|
||||||
None => Ok(hir::Call::new(Box::new(head), None, None)),
|
|
||||||
Some((positional, named)) => Ok(hir::Call::new(Box::new(head), positional, named)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn parse_command_head(head: &TokenNode) -> Result<hir::Expression, ShellError> {
|
|
||||||
match head {
|
|
||||||
TokenNode::Token(
|
|
||||||
spanned @ Tagged {
|
|
||||||
item: RawToken::Bare,
|
|
||||||
..
|
|
||||||
},
|
|
||||||
) => Ok(spanned.map(|_| hir::RawExpression::Literal(hir::Literal::Bare))),
|
|
||||||
|
|
||||||
TokenNode::Token(Tagged {
|
|
||||||
item: RawToken::String(inner_tag),
|
|
||||||
tag,
|
|
||||||
}) => Ok(hir::RawExpression::Literal(hir::Literal::String(*inner_tag)).tagged(*tag)),
|
|
||||||
|
|
||||||
other => Err(ShellError::unexpected(&format!(
|
|
||||||
"command head -> {:?}",
|
|
||||||
other
|
|
||||||
))),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn parse_command_tail(
|
|
||||||
config: &Signature,
|
|
||||||
context: &Context,
|
|
||||||
tail: Option<Vec<TokenNode>>,
|
|
||||||
source: &Text,
|
|
||||||
command_tag: Tag,
|
command_tag: Tag,
|
||||||
) -> Result<Option<(Option<Vec<hir::Expression>>, Option<NamedArguments>)>, ShellError> {
|
) -> Result<Option<(Option<Vec<hir::Expression>>, Option<NamedArguments>)>, ShellError> {
|
||||||
let tail = &mut match &tail {
|
|
||||||
None => hir::TokensIterator::new(&[]),
|
|
||||||
Some(tail) => hir::TokensIterator::new(tail),
|
|
||||||
};
|
|
||||||
|
|
||||||
let mut named = NamedArguments::new();
|
let mut named = NamedArguments::new();
|
||||||
|
trace_remaining("nodes", tail.clone(), context.source());
|
||||||
trace_remaining("nodes", tail.clone(), source);
|
|
||||||
|
|
||||||
for (name, kind) in &config.named {
|
for (name, kind) in &config.named {
|
||||||
trace!(target: "nu::parse", "looking for {} : {:?}", name, kind);
|
trace!(target: "nu::parse", "looking for {} : {:?}", name, kind);
|
||||||
|
|
||||||
match kind {
|
match kind {
|
||||||
NamedType::Switch => {
|
NamedType::Switch => {
|
||||||
let flag = extract_switch(name, tail, source);
|
let flag = extract_switch(name, tail, context.source());
|
||||||
|
|
||||||
named.insert_switch(name, flag);
|
named.insert_switch(name, flag);
|
||||||
}
|
}
|
||||||
NamedType::Mandatory(syntax_type) => {
|
NamedType::Mandatory(syntax_type) => {
|
||||||
match extract_mandatory(config, name, tail, source, command_tag) {
|
match extract_mandatory(config, name, tail, context.source(), command_tag) {
|
||||||
Err(err) => return Err(err), // produce a correct diagnostic
|
Err(err) => return Err(err), // produce a correct diagnostic
|
||||||
Ok((pos, flag)) => {
|
Ok((pos, flag)) => {
|
||||||
tail.move_to(pos);
|
tail.move_to(pos);
|
||||||
@ -99,15 +42,15 @@ fn parse_command_tail(
|
|||||||
));
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
let expr =
|
let expr = expand_expr(&spaced(*syntax_type), tail, context)?;
|
||||||
hir::baseline_parse_next_expr(tail, context, source, *syntax_type)?;
|
|
||||||
|
|
||||||
tail.restart();
|
tail.restart();
|
||||||
named.insert_mandatory(name, expr);
|
named.insert_mandatory(name, expr);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
NamedType::Optional(syntax_type) => match extract_optional(name, tail, source) {
|
NamedType::Optional(syntax_type) => {
|
||||||
|
match extract_optional(name, tail, context.source()) {
|
||||||
Err(err) => return Err(err), // produce a correct diagnostic
|
Err(err) => return Err(err), // produce a correct diagnostic
|
||||||
Ok(Some((pos, flag))) => {
|
Ok(Some((pos, flag))) => {
|
||||||
tail.move_to(pos);
|
tail.move_to(pos);
|
||||||
@ -120,21 +63,26 @@ fn parse_command_tail(
|
|||||||
));
|
));
|
||||||
}
|
}
|
||||||
|
|
||||||
let expr = hir::baseline_parse_next_expr(tail, context, source, *syntax_type)?;
|
let expr = expand_expr(&spaced(*syntax_type), tail, context);
|
||||||
|
|
||||||
|
match expr {
|
||||||
|
Err(_) => named.insert_optional(name, None),
|
||||||
|
Ok(expr) => named.insert_optional(name, Some(expr)),
|
||||||
|
}
|
||||||
|
|
||||||
tail.restart();
|
tail.restart();
|
||||||
named.insert_optional(name, Some(expr));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(None) => {
|
Ok(None) => {
|
||||||
tail.restart();
|
tail.restart();
|
||||||
named.insert_optional(name, None);
|
named.insert_optional(name, None);
|
||||||
}
|
}
|
||||||
},
|
}
|
||||||
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
trace_remaining("after named", tail.clone(), source);
|
trace_remaining("after named", tail.clone(), context.source());
|
||||||
|
|
||||||
let mut positional = vec![];
|
let mut positional = vec![];
|
||||||
|
|
||||||
@ -143,7 +91,7 @@ fn parse_command_tail(
|
|||||||
|
|
||||||
match arg {
|
match arg {
|
||||||
PositionalType::Mandatory(..) => {
|
PositionalType::Mandatory(..) => {
|
||||||
if tail.len() == 0 {
|
if tail.at_end() {
|
||||||
return Err(ShellError::argument_error(
|
return Err(ShellError::argument_error(
|
||||||
config.name.clone(),
|
config.name.clone(),
|
||||||
ArgumentError::MissingMandatoryPositional(arg.name().to_string()),
|
ArgumentError::MissingMandatoryPositional(arg.name().to_string()),
|
||||||
@ -153,25 +101,36 @@ fn parse_command_tail(
|
|||||||
}
|
}
|
||||||
|
|
||||||
PositionalType::Optional(..) => {
|
PositionalType::Optional(..) => {
|
||||||
if tail.len() == 0 {
|
if tail.at_end() {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
let result = hir::baseline_parse_next_expr(tail, context, source, arg.syntax_type())?;
|
let result = expand_expr(&spaced(arg.syntax_type()), tail, context)?;
|
||||||
|
|
||||||
positional.push(result);
|
positional.push(result);
|
||||||
}
|
}
|
||||||
|
|
||||||
trace_remaining("after positional", tail.clone(), source);
|
trace_remaining("after positional", tail.clone(), context.source());
|
||||||
|
|
||||||
if let Some(syntax_type) = config.rest_positional {
|
if let Some(syntax_type) = config.rest_positional {
|
||||||
let remainder = baseline_parse_tokens(tail, context, source, syntax_type)?;
|
let mut out = vec![];
|
||||||
positional.extend(remainder);
|
|
||||||
|
loop {
|
||||||
|
if tail.at_end_possible_ws() {
|
||||||
|
break;
|
||||||
}
|
}
|
||||||
|
|
||||||
trace_remaining("after rest", tail.clone(), source);
|
let next = expand_expr(&spaced(syntax_type), tail, context)?;
|
||||||
|
|
||||||
|
out.push(next);
|
||||||
|
}
|
||||||
|
|
||||||
|
positional.extend(out);
|
||||||
|
}
|
||||||
|
|
||||||
|
trace_remaining("after rest", tail.clone(), context.source());
|
||||||
|
|
||||||
trace!("Constructed positional={:?} named={:?}", positional, named);
|
trace!("Constructed positional={:?} named={:?}", positional, named);
|
||||||
|
|
||||||
|
@ -1,11 +1,11 @@
|
|||||||
// TODO: Temporary redirect
|
// TODO: Temporary redirect
|
||||||
pub(crate) use crate::context::CommandRegistry;
|
pub(crate) use crate::context::CommandRegistry;
|
||||||
use crate::evaluate::{evaluate_baseline_expr, Scope};
|
use crate::evaluate::{evaluate_baseline_expr, Scope};
|
||||||
use crate::parser::{hir, hir::SyntaxShape, parse_command, CallNode};
|
use crate::parser::{hir, hir::SyntaxShape};
|
||||||
use crate::prelude::*;
|
use crate::prelude::*;
|
||||||
use derive_new::new;
|
use derive_new::new;
|
||||||
use indexmap::IndexMap;
|
use indexmap::IndexMap;
|
||||||
use log::trace;
|
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use std::fmt;
|
use std::fmt;
|
||||||
|
|
||||||
@ -271,21 +271,6 @@ impl<'a> Iterator for PositionalIter<'a> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Signature {
|
|
||||||
pub(crate) fn parse_args(
|
|
||||||
&self,
|
|
||||||
call: &Tagged<CallNode>,
|
|
||||||
context: &Context,
|
|
||||||
source: &Text,
|
|
||||||
) -> Result<hir::Call, ShellError> {
|
|
||||||
let args = parse_command(self, context, call, source)?;
|
|
||||||
|
|
||||||
trace!("parsed args: {:?}", args);
|
|
||||||
|
|
||||||
Ok(args)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub(crate) fn evaluate_args(
|
pub(crate) fn evaluate_args(
|
||||||
call: &hir::Call,
|
call: &hir::Call,
|
||||||
registry: &CommandRegistry,
|
registry: &CommandRegistry,
|
||||||
|
@ -1,10 +1,13 @@
|
|||||||
|
use itertools::Itertools;
|
||||||
use nu::{
|
use nu::{
|
||||||
serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature,
|
serve_plugin, CallInfo, Plugin, ReturnSuccess, ReturnValue, ShellError, Signature, SyntaxShape,
|
||||||
SyntaxShape, Tagged, Value,
|
Tagged, Value,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
pub type ColumnPath = Vec<Tagged<String>>;
|
||||||
|
|
||||||
struct Add {
|
struct Add {
|
||||||
field: Option<String>,
|
field: Option<ColumnPath>,
|
||||||
value: Option<Value>,
|
value: Option<Value>,
|
||||||
}
|
}
|
||||||
impl Add {
|
impl Add {
|
||||||
@ -19,12 +22,13 @@ impl Add {
|
|||||||
let value_tag = value.tag();
|
let value_tag = value.tag();
|
||||||
match (value.item, self.value.clone()) {
|
match (value.item, self.value.clone()) {
|
||||||
(obj @ Value::Row(_), Some(v)) => match &self.field {
|
(obj @ Value::Row(_), Some(v)) => match &self.field {
|
||||||
Some(f) => match obj.insert_data_at_path(value_tag, &f, v) {
|
Some(f) => match obj.insert_data_at_column_path(value_tag, &f, v) {
|
||||||
Some(v) => return Ok(v),
|
Some(v) => return Ok(v),
|
||||||
None => {
|
None => {
|
||||||
return Err(ShellError::string(format!(
|
return Err(ShellError::string(format!(
|
||||||
"add could not find place to insert field {:?} {}",
|
"add could not find place to insert field {:?} {}",
|
||||||
obj, f
|
obj,
|
||||||
|
f.iter().map(|i| &i.item).join(".")
|
||||||
)))
|
)))
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@ -44,7 +48,7 @@ impl Plugin for Add {
|
|||||||
fn config(&mut self) -> Result<Signature, ShellError> {
|
fn config(&mut self) -> Result<Signature, ShellError> {
|
||||||
Ok(Signature::build("add")
|
Ok(Signature::build("add")
|
||||||
.desc("Add a new field to the table.")
|
.desc("Add a new field to the table.")
|
||||||
.required("Field", SyntaxShape::String)
|
.required("Field", SyntaxShape::ColumnPath)
|
||||||
.required("Value", SyntaxShape::String)
|
.required("Value", SyntaxShape::String)
|
||||||
.rest(SyntaxShape::String)
|
.rest(SyntaxShape::String)
|
||||||
.filter())
|
.filter())
|
||||||
@ -53,12 +57,13 @@ impl Plugin for Add {
|
|||||||
fn begin_filter(&mut self, call_info: CallInfo) -> Result<Vec<ReturnValue>, ShellError> {
|
fn begin_filter(&mut self, call_info: CallInfo) -> Result<Vec<ReturnValue>, ShellError> {
|
||||||
if let Some(args) = call_info.args.positional {
|
if let Some(args) = call_info.args.positional {
|
||||||
match &args[0] {
|
match &args[0] {
|
||||||
Tagged {
|
table @ Tagged {
|
||||||
item: Value::Primitive(Primitive::String(s)),
|
item: Value::Table(_),
|
||||||
..
|
..
|
||||||
} => {
|
} => {
|
||||||
self.field = Some(s.clone());
|
self.field = Some(table.as_column_path()?.item);
|
||||||
}
|
}
|
||||||
|
|
||||||
_ => {
|
_ => {
|
||||||
return Err(ShellError::string(format!(
|
return Err(ShellError::string(format!(
|
||||||
"Unrecognized type in params: {:?}",
|
"Unrecognized type in params: {:?}",
|
||||||
|
@ -1,10 +1,12 @@
|
|||||||
use nu::{
|
use nu::{
|
||||||
serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature,
|
serve_plugin, CallInfo, Plugin, ReturnSuccess, ReturnValue, ShellError, Signature, SyntaxShape,
|
||||||
SyntaxShape, Tagged, Value,
|
Tagged, Value,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
pub type ColumnPath = Vec<Tagged<String>>;
|
||||||
|
|
||||||
struct Edit {
|
struct Edit {
|
||||||
field: Option<String>,
|
field: Option<ColumnPath>,
|
||||||
value: Option<Value>,
|
value: Option<Value>,
|
||||||
}
|
}
|
||||||
impl Edit {
|
impl Edit {
|
||||||
@ -19,7 +21,7 @@ impl Edit {
|
|||||||
let value_tag = value.tag();
|
let value_tag = value.tag();
|
||||||
match (value.item, self.value.clone()) {
|
match (value.item, self.value.clone()) {
|
||||||
(obj @ Value::Row(_), Some(v)) => match &self.field {
|
(obj @ Value::Row(_), Some(v)) => match &self.field {
|
||||||
Some(f) => match obj.replace_data_at_path(value_tag, &f, v) {
|
Some(f) => match obj.replace_data_at_column_path(value_tag, &f, v) {
|
||||||
Some(v) => return Ok(v),
|
Some(v) => return Ok(v),
|
||||||
None => {
|
None => {
|
||||||
return Err(ShellError::string(
|
return Err(ShellError::string(
|
||||||
@ -43,7 +45,7 @@ impl Plugin for Edit {
|
|||||||
fn config(&mut self) -> Result<Signature, ShellError> {
|
fn config(&mut self) -> Result<Signature, ShellError> {
|
||||||
Ok(Signature::build("edit")
|
Ok(Signature::build("edit")
|
||||||
.desc("Edit an existing column to have a new value.")
|
.desc("Edit an existing column to have a new value.")
|
||||||
.required("Field", SyntaxShape::String)
|
.required("Field", SyntaxShape::ColumnPath)
|
||||||
.required("Value", SyntaxShape::String)
|
.required("Value", SyntaxShape::String)
|
||||||
.filter())
|
.filter())
|
||||||
}
|
}
|
||||||
@ -51,11 +53,11 @@ impl Plugin for Edit {
|
|||||||
fn begin_filter(&mut self, call_info: CallInfo) -> Result<Vec<ReturnValue>, ShellError> {
|
fn begin_filter(&mut self, call_info: CallInfo) -> Result<Vec<ReturnValue>, ShellError> {
|
||||||
if let Some(args) = call_info.args.positional {
|
if let Some(args) = call_info.args.positional {
|
||||||
match &args[0] {
|
match &args[0] {
|
||||||
Tagged {
|
table @ Tagged {
|
||||||
item: Value::Primitive(Primitive::String(s)),
|
item: Value::Table(_),
|
||||||
..
|
..
|
||||||
} => {
|
} => {
|
||||||
self.field = Some(s.clone());
|
self.field = Some(table.as_column_path()?.item);
|
||||||
}
|
}
|
||||||
_ => {
|
_ => {
|
||||||
return Err(ShellError::string(format!(
|
return Err(ShellError::string(format!(
|
||||||
|
@ -14,8 +14,10 @@ pub enum SemVerAction {
|
|||||||
Patch,
|
Patch,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub type ColumnPath = Vec<Tagged<String>>;
|
||||||
|
|
||||||
struct Inc {
|
struct Inc {
|
||||||
field: Option<String>,
|
field: Option<ColumnPath>,
|
||||||
error: Option<String>,
|
error: Option<String>,
|
||||||
action: Option<Action>,
|
action: Option<Action>,
|
||||||
}
|
}
|
||||||
@ -85,16 +87,17 @@ impl Inc {
|
|||||||
}
|
}
|
||||||
Value::Row(_) => match self.field {
|
Value::Row(_) => match self.field {
|
||||||
Some(ref f) => {
|
Some(ref f) => {
|
||||||
let replacement = match value.item.get_data_by_path(value.tag(), f) {
|
let replacement = match value.item.get_data_by_column_path(value.tag(), f) {
|
||||||
Some(result) => self.inc(result.map(|x| x.clone()))?,
|
Some(result) => self.inc(result.map(|x| x.clone()))?,
|
||||||
None => {
|
None => {
|
||||||
return Err(ShellError::string("inc could not find field to replace"))
|
return Err(ShellError::string("inc could not find field to replace"))
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
match value
|
match value.item.replace_data_at_column_path(
|
||||||
.item
|
value.tag(),
|
||||||
.replace_data_at_path(value.tag(), f, replacement.item.clone())
|
f,
|
||||||
{
|
replacement.item.clone(),
|
||||||
|
) {
|
||||||
Some(v) => return Ok(v),
|
Some(v) => return Ok(v),
|
||||||
None => {
|
None => {
|
||||||
return Err(ShellError::string("inc could not find field to replace"))
|
return Err(ShellError::string("inc could not find field to replace"))
|
||||||
@ -120,7 +123,7 @@ impl Plugin for Inc {
|
|||||||
.switch("major")
|
.switch("major")
|
||||||
.switch("minor")
|
.switch("minor")
|
||||||
.switch("patch")
|
.switch("patch")
|
||||||
.rest(SyntaxShape::String)
|
.rest(SyntaxShape::ColumnPath)
|
||||||
.filter())
|
.filter())
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -138,11 +141,11 @@ impl Plugin for Inc {
|
|||||||
if let Some(args) = call_info.args.positional {
|
if let Some(args) = call_info.args.positional {
|
||||||
for arg in args {
|
for arg in args {
|
||||||
match arg {
|
match arg {
|
||||||
Tagged {
|
table @ Tagged {
|
||||||
item: Value::Primitive(Primitive::String(s)),
|
item: Value::Table(_),
|
||||||
..
|
..
|
||||||
} => {
|
} => {
|
||||||
self.field = Some(s);
|
self.field = Some(table.as_column_path()?.item);
|
||||||
}
|
}
|
||||||
_ => {
|
_ => {
|
||||||
return Err(ShellError::string(format!(
|
return Err(ShellError::string(format!(
|
||||||
@ -209,8 +212,13 @@ mod tests {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn with_parameter(&mut self, name: &str) -> &mut Self {
|
fn with_parameter(&mut self, name: &str) -> &mut Self {
|
||||||
|
let fields: Vec<Tagged<Value>> = name
|
||||||
|
.split(".")
|
||||||
|
.map(|s| Value::string(s.to_string()).tagged(Tag::unknown_span(self.anchor)))
|
||||||
|
.collect();
|
||||||
|
|
||||||
self.positionals
|
self.positionals
|
||||||
.push(Value::string(name.to_string()).tagged(Tag::unknown_span(self.anchor)));
|
.push(Value::Table(fields).tagged(Tag::unknown_span(self.anchor)));
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -297,7 +305,12 @@ mod tests {
|
|||||||
)
|
)
|
||||||
.is_ok());
|
.is_ok());
|
||||||
|
|
||||||
assert_eq!(plugin.field, Some("package.version".to_string()));
|
assert_eq!(
|
||||||
|
plugin
|
||||||
|
.field
|
||||||
|
.map(|f| f.into_iter().map(|f| f.item).collect()),
|
||||||
|
Some(vec!["package".to_string(), "version".to_string()])
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
use nu::{
|
use nu::{
|
||||||
serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature,
|
serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature,
|
||||||
SyntaxShape, Tagged, Value,
|
SyntaxShape, Tagged, TaggedItem, Value,
|
||||||
};
|
};
|
||||||
|
|
||||||
#[derive(Debug, Eq, PartialEq)]
|
#[derive(Debug, Eq, PartialEq)]
|
||||||
@ -10,8 +10,10 @@ enum Action {
|
|||||||
ToInteger,
|
ToInteger,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub type ColumnPath = Vec<Tagged<String>>;
|
||||||
|
|
||||||
struct Str {
|
struct Str {
|
||||||
field: Option<String>,
|
field: Option<ColumnPath>,
|
||||||
params: Option<Vec<String>>,
|
params: Option<Vec<String>>,
|
||||||
error: Option<String>,
|
error: Option<String>,
|
||||||
action: Option<Action>,
|
action: Option<Action>,
|
||||||
@ -43,8 +45,8 @@ impl Str {
|
|||||||
Ok(applied)
|
Ok(applied)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn for_field(&mut self, field: &str) {
|
fn for_field(&mut self, column_path: ColumnPath) {
|
||||||
self.field = Some(String::from(field));
|
self.field = Some(column_path);
|
||||||
}
|
}
|
||||||
|
|
||||||
fn permit(&mut self) -> bool {
|
fn permit(&mut self) -> bool {
|
||||||
@ -92,14 +94,15 @@ impl Str {
|
|||||||
}
|
}
|
||||||
Value::Row(_) => match self.field {
|
Value::Row(_) => match self.field {
|
||||||
Some(ref f) => {
|
Some(ref f) => {
|
||||||
let replacement = match value.item.get_data_by_path(value.tag(), f) {
|
let replacement = match value.item.get_data_by_column_path(value.tag(), f) {
|
||||||
Some(result) => self.strutils(result.map(|x| x.clone()))?,
|
Some(result) => self.strutils(result.map(|x| x.clone()))?,
|
||||||
None => return Ok(Tagged::from_item(Value::nothing(), value.tag)),
|
None => return Ok(Tagged::from_item(Value::nothing(), value.tag)),
|
||||||
};
|
};
|
||||||
match value
|
match value.item.replace_data_at_column_path(
|
||||||
.item
|
value.tag(),
|
||||||
.replace_data_at_path(value.tag(), f, replacement.item.clone())
|
f,
|
||||||
{
|
replacement.item.clone(),
|
||||||
|
) {
|
||||||
Some(v) => return Ok(v),
|
Some(v) => return Ok(v),
|
||||||
None => {
|
None => {
|
||||||
return Err(ShellError::string("str could not find field to replace"))
|
return Err(ShellError::string("str could not find field to replace"))
|
||||||
@ -127,7 +130,7 @@ impl Plugin for Str {
|
|||||||
.switch("downcase")
|
.switch("downcase")
|
||||||
.switch("upcase")
|
.switch("upcase")
|
||||||
.switch("to-int")
|
.switch("to-int")
|
||||||
.rest(SyntaxShape::Member)
|
.rest(SyntaxShape::ColumnPath)
|
||||||
.filter())
|
.filter())
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -148,15 +151,21 @@ impl Plugin for Str {
|
|||||||
match possible_field {
|
match possible_field {
|
||||||
Tagged {
|
Tagged {
|
||||||
item: Value::Primitive(Primitive::String(s)),
|
item: Value::Primitive(Primitive::String(s)),
|
||||||
..
|
tag,
|
||||||
} => match self.action {
|
} => match self.action {
|
||||||
Some(Action::Downcase)
|
Some(Action::Downcase)
|
||||||
| Some(Action::Upcase)
|
| Some(Action::Upcase)
|
||||||
| Some(Action::ToInteger)
|
| Some(Action::ToInteger)
|
||||||
| None => {
|
| None => {
|
||||||
self.for_field(&s);
|
self.for_field(vec![s.clone().tagged(tag)]);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
table @ Tagged {
|
||||||
|
item: Value::Table(_),
|
||||||
|
..
|
||||||
|
} => {
|
||||||
|
self.field = Some(table.as_column_path()?.item);
|
||||||
|
}
|
||||||
_ => {
|
_ => {
|
||||||
return Err(ShellError::string(format!(
|
return Err(ShellError::string(format!(
|
||||||
"Unrecognized type in params: {:?}",
|
"Unrecognized type in params: {:?}",
|
||||||
@ -227,8 +236,13 @@ mod tests {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn with_parameter(&mut self, name: &str) -> &mut Self {
|
fn with_parameter(&mut self, name: &str) -> &mut Self {
|
||||||
|
let fields: Vec<Tagged<Value>> = name
|
||||||
|
.split(".")
|
||||||
|
.map(|s| Value::string(s.to_string()).tagged(Tag::unknown_span(self.anchor)))
|
||||||
|
.collect();
|
||||||
|
|
||||||
self.positionals
|
self.positionals
|
||||||
.push(Value::string(name.to_string()).tagged(Tag::unknown()));
|
.push(Value::Table(fields).tagged(Tag::unknown_span(self.anchor)));
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -303,7 +317,12 @@ mod tests {
|
|||||||
)
|
)
|
||||||
.is_ok());
|
.is_ok());
|
||||||
|
|
||||||
assert_eq!(plugin.field, Some("package.description".to_string()));
|
assert_eq!(
|
||||||
|
plugin
|
||||||
|
.field
|
||||||
|
.map(|f| f.into_iter().map(|f| f.item).collect()),
|
||||||
|
Some(vec!["package".to_string(), "description".to_string()])
|
||||||
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
use crate::parser::hir::TokensIterator;
|
||||||
use crate::parser::nom_input;
|
use crate::parser::nom_input;
|
||||||
use crate::parser::parse::token_tree::TokenNode;
|
use crate::parser::parse::token_tree::TokenNode;
|
||||||
use crate::parser::parse::tokens::RawToken;
|
use crate::parser::parse::tokens::RawToken;
|
||||||
@ -77,16 +78,12 @@ impl Highlighter for Helper {
|
|||||||
Ok(v) => v,
|
Ok(v) => v,
|
||||||
};
|
};
|
||||||
|
|
||||||
let Pipeline { parts, post_ws } = pipeline;
|
let Pipeline { parts } = pipeline;
|
||||||
let mut iter = parts.into_iter();
|
let mut iter = parts.into_iter();
|
||||||
|
|
||||||
loop {
|
loop {
|
||||||
match iter.next() {
|
match iter.next() {
|
||||||
None => {
|
None => {
|
||||||
if let Some(ws) = post_ws {
|
|
||||||
out.push_str(ws.slice(line));
|
|
||||||
}
|
|
||||||
|
|
||||||
return Cow::Owned(out);
|
return Cow::Owned(out);
|
||||||
}
|
}
|
||||||
Some(token) => {
|
Some(token) => {
|
||||||
@ -107,13 +104,12 @@ impl Highlighter for Helper {
|
|||||||
fn paint_token_node(token_node: &TokenNode, line: &str) -> String {
|
fn paint_token_node(token_node: &TokenNode, line: &str) -> String {
|
||||||
let styled = match token_node {
|
let styled = match token_node {
|
||||||
TokenNode::Call(..) => Color::Cyan.bold().paint(token_node.tag().slice(line)),
|
TokenNode::Call(..) => Color::Cyan.bold().paint(token_node.tag().slice(line)),
|
||||||
|
TokenNode::Nodes(..) => Color::Green.bold().paint(token_node.tag().slice(line)),
|
||||||
TokenNode::Whitespace(..) => Color::White.normal().paint(token_node.tag().slice(line)),
|
TokenNode::Whitespace(..) => Color::White.normal().paint(token_node.tag().slice(line)),
|
||||||
TokenNode::Flag(..) => Color::Black.bold().paint(token_node.tag().slice(line)),
|
TokenNode::Flag(..) => Color::Black.bold().paint(token_node.tag().slice(line)),
|
||||||
TokenNode::Member(..) => Color::Yellow.bold().paint(token_node.tag().slice(line)),
|
TokenNode::Member(..) => Color::Yellow.bold().paint(token_node.tag().slice(line)),
|
||||||
TokenNode::Path(..) => Color::Green.bold().paint(token_node.tag().slice(line)),
|
|
||||||
TokenNode::Error(..) => Color::Red.bold().paint(token_node.tag().slice(line)),
|
TokenNode::Error(..) => Color::Red.bold().paint(token_node.tag().slice(line)),
|
||||||
TokenNode::Delimited(..) => Color::White.paint(token_node.tag().slice(line)),
|
TokenNode::Delimited(..) => Color::White.paint(token_node.tag().slice(line)),
|
||||||
TokenNode::Operator(..) => Color::White.normal().paint(token_node.tag().slice(line)),
|
|
||||||
TokenNode::Pipeline(..) => Color::Blue.normal().paint(token_node.tag().slice(line)),
|
TokenNode::Pipeline(..) => Color::Blue.normal().paint(token_node.tag().slice(line)),
|
||||||
TokenNode::Token(Tagged {
|
TokenNode::Token(Tagged {
|
||||||
item: RawToken::Number(..),
|
item: RawToken::Number(..),
|
||||||
@ -147,6 +143,10 @@ fn paint_token_node(token_node: &TokenNode, line: &str) -> String {
|
|||||||
item: RawToken::ExternalWord,
|
item: RawToken::ExternalWord,
|
||||||
..
|
..
|
||||||
}) => Color::Black.bold().paint(token_node.tag().slice(line)),
|
}) => Color::Black.bold().paint(token_node.tag().slice(line)),
|
||||||
|
TokenNode::Token(Tagged {
|
||||||
|
item: RawToken::Operator(..),
|
||||||
|
..
|
||||||
|
}) => Color::Black.bold().paint(token_node.tag().slice(line)),
|
||||||
};
|
};
|
||||||
|
|
||||||
styled.to_string()
|
styled.to_string()
|
||||||
@ -159,25 +159,19 @@ fn paint_pipeline_element(pipeline_element: &PipelineElement, line: &str) -> Str
|
|||||||
styled.push_str(&Color::Purple.paint("|"));
|
styled.push_str(&Color::Purple.paint("|"));
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(ws) = pipeline_element.pre_ws {
|
let mut tokens =
|
||||||
styled.push_str(&Color::White.normal().paint(ws.slice(line)));
|
TokensIterator::new(&pipeline_element.tokens, pipeline_element.tokens.tag, false);
|
||||||
}
|
let head = tokens.next();
|
||||||
|
|
||||||
styled.push_str(
|
match head {
|
||||||
&Color::Cyan
|
None => return styled,
|
||||||
.bold()
|
Some(head) => {
|
||||||
.paint(pipeline_element.call().head().tag().slice(line))
|
styled.push_str(&Color::Cyan.bold().paint(head.tag().slice(line)).to_string())
|
||||||
.to_string(),
|
|
||||||
);
|
|
||||||
|
|
||||||
if let Some(children) = pipeline_element.call().children() {
|
|
||||||
for child in children {
|
|
||||||
styled.push_str(&paint_token_node(child, line));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if let Some(ws) = pipeline_element.post_ws {
|
for token in tokens {
|
||||||
styled.push_str(&Color::White.normal().paint(ws.slice(line)));
|
styled.push_str(&paint_token_node(token, line));
|
||||||
}
|
}
|
||||||
|
|
||||||
styled.to_string()
|
styled.to_string()
|
||||||
|
@ -212,7 +212,7 @@ fn open_can_parse_ini() {
|
|||||||
fn open_can_parse_utf16_ini() {
|
fn open_can_parse_utf16_ini() {
|
||||||
let actual = nu!(
|
let actual = nu!(
|
||||||
cwd: "tests/fixtures/formats",
|
cwd: "tests/fixtures/formats",
|
||||||
"open utf16.ini | get .ShellClassInfo | get IconIndex | echo $it"
|
"open utf16.ini | get '.ShellClassInfo' | get IconIndex | echo $it"
|
||||||
);
|
);
|
||||||
|
|
||||||
assert_eq!(actual, "-236")
|
assert_eq!(actual, "-236")
|
||||||
|
@ -93,6 +93,7 @@ macro_rules! nu {
|
|||||||
.write_all(commands.as_bytes())
|
.write_all(commands.as_bytes())
|
||||||
.expect("couldn't write to stdin");
|
.expect("couldn't write to stdin");
|
||||||
|
|
||||||
|
|
||||||
let output = process
|
let output = process
|
||||||
.wait_with_output()
|
.wait_with_output()
|
||||||
.expect("couldn't read from stdout");
|
.expect("couldn't read from stdout");
|
||||||
|
Loading…
Reference in New Issue
Block a user