mirror of
https://github.com/nushell/nushell.git
synced 2024-12-28 09:59:36 +01:00
Merge pull request #728 from nushell/better-pseudo-blocks
[DON'T MERGE] Overhaul the expansion system
This commit is contained in:
commit
3317b137e5
22
Cargo.lock
generated
22
Cargo.lock
generated
@ -1491,6 +1491,25 @@ dependencies = [
|
||||
"version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nom-tracable"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"nom-tracable-macros 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nom-tracable-macros"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"syn 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nom_locate"
|
||||
version = "1.0.0"
|
||||
@ -1550,6 +1569,7 @@ dependencies = [
|
||||
"natural 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"neso 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"nom-tracable 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"num-bigint 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"num-traits 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
@ -3140,6 +3160,8 @@ dependencies = [
|
||||
"checksum nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9667ddcc6cc8a43afc9b7917599d7216aa09c463919ea32c59ed6cac8bc945"
|
||||
"checksum nom 4.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2ad2a91a8e869eeb30b9cb3119ae87773a8f4ae617f41b1eb9c154b2905f7bd6"
|
||||
"checksum nom 5.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e9761d859320e381010a4f7f8ed425f2c924de33ad121ace447367c713ad561b"
|
||||
"checksum nom-tracable 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "edaa64ad2837d831d4a17966c9a83aa5101cc320730f5b724811c8f7442a2528"
|
||||
"checksum nom-tracable-macros 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fd25f70877a9fe68bd406b3dd3ff99e94ce9de776cf2a96e0d99de90b53d4765"
|
||||
"checksum nom_locate 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f932834fd8e391fc7710e2ba17e8f9f8645d846b55aa63207e17e110a1e1ce35"
|
||||
"checksum ntapi 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f26e041cd983acbc087e30fcba770380cfa352d0e392e175b2344ebaf7ea0602"
|
||||
"checksum num-bigint 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f9c3f34cdd24f334cb265d9bf8bfa8a241920d026916785747a92f0e55541a1a"
|
||||
|
@ -55,6 +55,7 @@ surf = "1.0.2"
|
||||
url = "2.1.0"
|
||||
roxmltree = "0.7.0"
|
||||
nom_locate = "1.0.0"
|
||||
nom-tracable = "0.4.0"
|
||||
enum-utils = "0.1.1"
|
||||
unicode-xid = "0.2.0"
|
||||
serde_ini = "0.2.0"
|
||||
@ -95,6 +96,8 @@ textview = ["syntect", "onig_sys", "crossterm"]
|
||||
binaryview = ["image", "crossterm"]
|
||||
sys = ["heim", "battery"]
|
||||
ps = ["heim"]
|
||||
# trace = ["nom-tracable/trace"]
|
||||
all = ["raw-key", "textview", "binaryview", "sys", "ps", "clipboard", "ptree"]
|
||||
|
||||
[dependencies.rusqlite]
|
||||
version = "0.20.0"
|
||||
|
195
src/cli.rs
195
src/cli.rs
@ -1,4 +1,3 @@
|
||||
use crate::commands::autoview;
|
||||
use crate::commands::classified::{
|
||||
ClassifiedCommand, ClassifiedInputStream, ClassifiedPipeline, ExternalCommand, InternalCommand,
|
||||
StreamNext,
|
||||
@ -13,7 +12,12 @@ pub(crate) use crate::errors::ShellError;
|
||||
use crate::fuzzysearch::{interactive_fuzzy_search, SelectionResult};
|
||||
use crate::git::current_branch;
|
||||
use crate::parser::registry::Signature;
|
||||
use crate::parser::{hir, CallNode, Pipeline, PipelineElement, TokenNode};
|
||||
use crate::parser::{
|
||||
hir,
|
||||
hir::syntax_shape::{expand_syntax, PipelineShape},
|
||||
hir::{expand_external_tokens::expand_external_tokens, tokens_iterator::TokensIterator},
|
||||
TokenNode,
|
||||
};
|
||||
use crate::prelude::*;
|
||||
|
||||
use log::{debug, trace};
|
||||
@ -25,6 +29,7 @@ use std::io::{BufRead, BufReader, Write};
|
||||
use std::iter::Iterator;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum MaybeOwned<'a, T> {
|
||||
@ -75,7 +80,7 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel
|
||||
let name = params.name.clone();
|
||||
let fname = fname.to_string();
|
||||
|
||||
if context.has_command(&name) {
|
||||
if let Some(_) = context.get_command(&name) {
|
||||
trace!("plugin {:?} already loaded.", &name);
|
||||
} else {
|
||||
if params.is_filter {
|
||||
@ -94,11 +99,17 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel
|
||||
},
|
||||
Err(e) => {
|
||||
trace!("incompatible plugin {:?}", input);
|
||||
Err(ShellError::string(format!("Error: {:?}", e)))
|
||||
Err(ShellError::untagged_runtime_error(format!(
|
||||
"Error: {:?}",
|
||||
e
|
||||
)))
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => Err(ShellError::string(format!("Error: {:?}", e))),
|
||||
Err(e) => Err(ShellError::untagged_runtime_error(format!(
|
||||
"Error: {:?}",
|
||||
e
|
||||
))),
|
||||
};
|
||||
|
||||
let _ = child.wait();
|
||||
@ -314,6 +325,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
)]);
|
||||
}
|
||||
}
|
||||
|
||||
let _ = load_plugins(&mut context);
|
||||
|
||||
let config = Config::builder().color_mode(ColorMode::Forced).build();
|
||||
@ -342,9 +354,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
|
||||
let cwd = context.shell_manager.path();
|
||||
|
||||
rl.set_helper(Some(crate::shell::Helper::new(
|
||||
context.shell_manager.clone(),
|
||||
)));
|
||||
rl.set_helper(Some(crate::shell::Helper::new(context.clone())));
|
||||
|
||||
let edit_mode = config::config(Tag::unknown())?
|
||||
.get("edit_mode")
|
||||
@ -428,21 +438,11 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
}
|
||||
}
|
||||
|
||||
LineResult::Error(mut line, err) => {
|
||||
LineResult::Error(line, err) => {
|
||||
rl.add_history_entry(line.clone());
|
||||
let diag = err.to_diagnostic();
|
||||
|
||||
context.with_host(|host| {
|
||||
let writer = host.err_termcolor();
|
||||
line.push_str(" ");
|
||||
let files = crate::parser::Files::new(line);
|
||||
let _ = std::panic::catch_unwind(move || {
|
||||
let _ = language_reporting::emit(
|
||||
&mut writer.lock(),
|
||||
&files,
|
||||
&diag,
|
||||
&language_reporting::DefaultConfig,
|
||||
);
|
||||
});
|
||||
print_err(err, host, &Text::from(line));
|
||||
})
|
||||
}
|
||||
|
||||
@ -459,6 +459,14 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn chomp_newline(s: &str) -> &str {
|
||||
if s.ends_with('\n') {
|
||||
&s[..s.len() - 1]
|
||||
} else {
|
||||
s
|
||||
}
|
||||
}
|
||||
|
||||
enum LineResult {
|
||||
Success(String),
|
||||
Error(String, ShellError),
|
||||
@ -471,9 +479,11 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
Ok(line) if line.trim() == "" => LineResult::Success(line.clone()),
|
||||
|
||||
Ok(line) => {
|
||||
let line = chomp_newline(line);
|
||||
|
||||
let result = match crate::parser::parse(&line, uuid::Uuid::nil()) {
|
||||
Err(err) => {
|
||||
return LineResult::Error(line.clone(), err);
|
||||
return LineResult::Error(line.to_string(), err);
|
||||
}
|
||||
|
||||
Ok(val) => val,
|
||||
@ -484,7 +494,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
|
||||
let mut pipeline = match classify_pipeline(&result, ctx, &Text::from(line)) {
|
||||
Ok(pipeline) => pipeline,
|
||||
Err(err) => return LineResult::Error(line.clone(), err),
|
||||
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||
};
|
||||
|
||||
match pipeline.commands.last() {
|
||||
@ -492,7 +502,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
_ => pipeline
|
||||
.commands
|
||||
.push(ClassifiedCommand::Internal(InternalCommand {
|
||||
command: whole_stream_command(autoview::Autoview),
|
||||
name: "autoview".to_string(),
|
||||
name_tag: Tag::unknown(),
|
||||
args: hir::Call::new(
|
||||
Box::new(hir::Expression::synthetic_string("autoview")),
|
||||
@ -514,16 +524,24 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
input = match (item, next) {
|
||||
(None, _) => break,
|
||||
|
||||
(Some(ClassifiedCommand::Dynamic(_)), _)
|
||||
| (_, Some(ClassifiedCommand::Dynamic(_))) => {
|
||||
return LineResult::Error(
|
||||
line.to_string(),
|
||||
ShellError::unimplemented("Dynamic commands"),
|
||||
)
|
||||
}
|
||||
|
||||
(Some(ClassifiedCommand::Expr(_)), _) => {
|
||||
return LineResult::Error(
|
||||
line.clone(),
|
||||
line.to_string(),
|
||||
ShellError::unimplemented("Expression-only commands"),
|
||||
)
|
||||
}
|
||||
|
||||
(_, Some(ClassifiedCommand::Expr(_))) => {
|
||||
return LineResult::Error(
|
||||
line.clone(),
|
||||
line.to_string(),
|
||||
ShellError::unimplemented("Expression-only commands"),
|
||||
)
|
||||
}
|
||||
@ -536,7 +554,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
.await
|
||||
{
|
||||
Ok(val) => ClassifiedInputStream::from_input_stream(val),
|
||||
Err(err) => return LineResult::Error(line.clone(), err),
|
||||
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||
},
|
||||
|
||||
(Some(ClassifiedCommand::Internal(left)), Some(_)) => {
|
||||
@ -545,7 +563,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
.await
|
||||
{
|
||||
Ok(val) => ClassifiedInputStream::from_input_stream(val),
|
||||
Err(err) => return LineResult::Error(line.clone(), err),
|
||||
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||
}
|
||||
}
|
||||
|
||||
@ -555,7 +573,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
.await
|
||||
{
|
||||
Ok(val) => ClassifiedInputStream::from_input_stream(val),
|
||||
Err(err) => return LineResult::Error(line.clone(), err),
|
||||
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||
}
|
||||
}
|
||||
|
||||
@ -564,20 +582,20 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
Some(ClassifiedCommand::External(_)),
|
||||
) => match left.run(ctx, input, StreamNext::External).await {
|
||||
Ok(val) => val,
|
||||
Err(err) => return LineResult::Error(line.clone(), err),
|
||||
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||
},
|
||||
|
||||
(Some(ClassifiedCommand::External(left)), Some(_)) => {
|
||||
match left.run(ctx, input, StreamNext::Internal).await {
|
||||
Ok(val) => val,
|
||||
Err(err) => return LineResult::Error(line.clone(), err),
|
||||
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||
}
|
||||
}
|
||||
|
||||
(Some(ClassifiedCommand::External(left)), None) => {
|
||||
match left.run(ctx, input, StreamNext::Last).await {
|
||||
Ok(val) => val,
|
||||
Err(err) => return LineResult::Error(line.clone(), err),
|
||||
Err(err) => return LineResult::Error(line.to_string(), err),
|
||||
}
|
||||
}
|
||||
};
|
||||
@ -585,7 +603,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
|
||||
is_first_command = false;
|
||||
}
|
||||
|
||||
LineResult::Success(line.clone())
|
||||
LineResult::Success(line.to_string())
|
||||
}
|
||||
Err(ReadlineError::Interrupted) => LineResult::CtrlC,
|
||||
Err(ReadlineError::Eof) => LineResult::Break,
|
||||
@ -601,95 +619,46 @@ fn classify_pipeline(
|
||||
context: &Context,
|
||||
source: &Text,
|
||||
) -> Result<ClassifiedPipeline, ShellError> {
|
||||
let pipeline = pipeline.as_pipeline()?;
|
||||
let mut pipeline_list = vec![pipeline.clone()];
|
||||
let mut iterator = TokensIterator::all(&mut pipeline_list, pipeline.tag());
|
||||
|
||||
let Pipeline { parts, .. } = pipeline;
|
||||
|
||||
let commands: Result<Vec<_>, ShellError> = parts
|
||||
.iter()
|
||||
.map(|item| classify_command(&item, context, &source))
|
||||
.collect();
|
||||
|
||||
Ok(ClassifiedPipeline {
|
||||
commands: commands?,
|
||||
})
|
||||
}
|
||||
|
||||
fn classify_command(
|
||||
command: &PipelineElement,
|
||||
context: &Context,
|
||||
source: &Text,
|
||||
) -> Result<ClassifiedCommand, ShellError> {
|
||||
let call = command.call();
|
||||
|
||||
match call {
|
||||
// If the command starts with `^`, treat it as an external command no matter what
|
||||
call if call.head().is_external() => {
|
||||
let name_tag = call.head().expect_external();
|
||||
let name = name_tag.slice(source);
|
||||
|
||||
Ok(external_command(call, source, name.tagged(name_tag)))
|
||||
}
|
||||
|
||||
// Otherwise, if the command is a bare word, we'll need to triage it
|
||||
call if call.head().is_bare() => {
|
||||
let head = call.head();
|
||||
let name = head.source(source);
|
||||
|
||||
match context.has_command(name) {
|
||||
// if the command is in the registry, it's an internal command
|
||||
true => {
|
||||
let command = context.get_command(name);
|
||||
let config = command.signature();
|
||||
|
||||
trace!(target: "nu::build_pipeline", "classifying {:?}", config);
|
||||
|
||||
let args: hir::Call = config.parse_args(call, &context, source)?;
|
||||
|
||||
trace!(target: "nu::build_pipeline", "args :: {}", args.debug(source));
|
||||
|
||||
Ok(ClassifiedCommand::Internal(InternalCommand {
|
||||
command,
|
||||
name_tag: head.tag(),
|
||||
args,
|
||||
}))
|
||||
}
|
||||
|
||||
// otherwise, it's an external command
|
||||
false => Ok(external_command(call, source, name.tagged(head.tag()))),
|
||||
}
|
||||
}
|
||||
|
||||
// If the command is something else (like a number or a variable), that is currently unsupported.
|
||||
// We might support `$somevar` as a curried command in the future.
|
||||
call => Err(ShellError::invalid_command(call.head().tag())),
|
||||
}
|
||||
expand_syntax(
|
||||
&PipelineShape,
|
||||
&mut iterator,
|
||||
&context.expand_context(source, pipeline.tag()),
|
||||
)
|
||||
}
|
||||
|
||||
// Classify this command as an external command, which doesn't give special meaning
|
||||
// to nu syntactic constructs, and passes all arguments to the external command as
|
||||
// strings.
|
||||
fn external_command(
|
||||
call: &Tagged<CallNode>,
|
||||
pub(crate) fn external_command(
|
||||
tokens: &mut TokensIterator,
|
||||
source: &Text,
|
||||
name: Tagged<&str>,
|
||||
) -> ClassifiedCommand {
|
||||
let arg_list_strings: Vec<Tagged<String>> = match call.children() {
|
||||
Some(args) => args
|
||||
.iter()
|
||||
.filter_map(|i| match i {
|
||||
TokenNode::Whitespace(_) => None,
|
||||
other => Some(other.as_external_arg(source).tagged(other.tag())),
|
||||
})
|
||||
.collect(),
|
||||
None => vec![],
|
||||
};
|
||||
) -> Result<ClassifiedCommand, ShellError> {
|
||||
let arg_list_strings = expand_external_tokens(tokens, source)?;
|
||||
|
||||
let (name, tag) = name.into_parts();
|
||||
|
||||
ClassifiedCommand::External(ExternalCommand {
|
||||
Ok(ClassifiedCommand::External(ExternalCommand {
|
||||
name: name.to_string(),
|
||||
name_tag: tag,
|
||||
name_tag: name.tag(),
|
||||
args: arg_list_strings,
|
||||
})
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn print_err(err: ShellError, host: &dyn Host, source: &Text) {
|
||||
let diag = err.to_diagnostic();
|
||||
|
||||
let writer = host.err_termcolor();
|
||||
let mut source = source.to_string();
|
||||
source.push_str(" ");
|
||||
let files = crate::parser::Files::new(source);
|
||||
let _ = std::panic::catch_unwind(move || {
|
||||
let _ = language_reporting::emit(
|
||||
&mut writer.lock(),
|
||||
&files,
|
||||
&diag,
|
||||
&language_reporting::DefaultConfig,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
@ -75,6 +75,7 @@ pub(crate) use command::{
|
||||
UnevaluatedCallInfo, WholeStreamCommand,
|
||||
};
|
||||
|
||||
pub(crate) use classified::ClassifiedCommand;
|
||||
pub(crate) use config::Config;
|
||||
pub(crate) use cp::Cpy;
|
||||
pub(crate) use date::Date;
|
||||
|
@ -111,6 +111,7 @@ fn is_single_text_value(input: &Vec<Tagged<Value>>) -> bool {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
fn is_single_anchored_text_value(input: &Vec<Tagged<Value>>) -> bool {
|
||||
if input.len() != 1 {
|
||||
return false;
|
||||
|
@ -1,12 +1,11 @@
|
||||
use crate::commands::Command;
|
||||
use crate::parser::{hir, TokenNode};
|
||||
use crate::prelude::*;
|
||||
use bytes::{BufMut, BytesMut};
|
||||
use derive_new::new;
|
||||
use futures::stream::StreamExt;
|
||||
use futures_codec::{Decoder, Encoder, Framed};
|
||||
use log::{log_enabled, trace};
|
||||
use std::io::{Error, ErrorKind};
|
||||
use std::sync::Arc;
|
||||
use subprocess::Exec;
|
||||
|
||||
/// A simple `Codec` implementation that splits up data into lines.
|
||||
@ -73,23 +72,33 @@ impl ClassifiedInputStream {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct ClassifiedPipeline {
|
||||
pub(crate) commands: Vec<ClassifiedCommand>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Eq, PartialEq)]
|
||||
pub(crate) enum ClassifiedCommand {
|
||||
#[allow(unused)]
|
||||
Expr(TokenNode),
|
||||
Internal(InternalCommand),
|
||||
#[allow(unused)]
|
||||
Dynamic(hir::Call),
|
||||
External(ExternalCommand),
|
||||
}
|
||||
|
||||
#[derive(new, Debug, Eq, PartialEq)]
|
||||
pub(crate) struct InternalCommand {
|
||||
pub(crate) command: Arc<Command>,
|
||||
pub(crate) name: String,
|
||||
pub(crate) name_tag: Tag,
|
||||
pub(crate) args: hir::Call,
|
||||
}
|
||||
|
||||
#[derive(new, Debug, Eq, PartialEq)]
|
||||
pub(crate) struct DynamicCommand {
|
||||
pub(crate) args: hir::Call,
|
||||
}
|
||||
|
||||
impl InternalCommand {
|
||||
pub(crate) async fn run(
|
||||
self,
|
||||
@ -100,22 +109,28 @@ impl InternalCommand {
|
||||
) -> Result<InputStream, ShellError> {
|
||||
if log_enabled!(log::Level::Trace) {
|
||||
trace!(target: "nu::run::internal", "->");
|
||||
trace!(target: "nu::run::internal", "{}", self.command.name());
|
||||
trace!(target: "nu::run::internal", "{}", self.name);
|
||||
trace!(target: "nu::run::internal", "{}", self.args.debug(&source));
|
||||
}
|
||||
|
||||
let objects: InputStream =
|
||||
trace_stream!(target: "nu::trace_stream::internal", "input" = input.objects);
|
||||
|
||||
let result = context.run_command(
|
||||
self.command,
|
||||
self.name_tag.clone(),
|
||||
context.source_map.clone(),
|
||||
self.args,
|
||||
&source,
|
||||
objects,
|
||||
is_first_command,
|
||||
);
|
||||
let command = context.expect_command(&self.name);
|
||||
|
||||
let result = {
|
||||
let source_map = context.source_map.lock().unwrap().clone();
|
||||
|
||||
context.run_command(
|
||||
command,
|
||||
self.name_tag.clone(),
|
||||
source_map,
|
||||
self.args,
|
||||
&source,
|
||||
objects,
|
||||
is_first_command,
|
||||
)
|
||||
};
|
||||
|
||||
let result = trace_out_stream!(target: "nu::trace_stream::internal", source: &source, "output" = result);
|
||||
let mut result = result.values;
|
||||
@ -185,6 +200,7 @@ impl InternalCommand {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Eq, PartialEq)]
|
||||
pub(crate) struct ExternalCommand {
|
||||
pub(crate) name: String,
|
||||
|
||||
@ -192,6 +208,7 @@ pub(crate) struct ExternalCommand {
|
||||
pub(crate) args: Vec<Tagged<String>>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) enum StreamNext {
|
||||
Last,
|
||||
External,
|
||||
@ -221,6 +238,8 @@ impl ExternalCommand {
|
||||
|
||||
process = Exec::cmd(&self.name);
|
||||
|
||||
trace!(target: "nu::run::external", "command = {:?}", process);
|
||||
|
||||
if arg_string.contains("$it") {
|
||||
let mut first = true;
|
||||
|
||||
@ -239,7 +258,11 @@ impl ExternalCommand {
|
||||
tag,
|
||||
));
|
||||
} else {
|
||||
return Err(ShellError::string("Error: $it needs string data"));
|
||||
return Err(ShellError::labeled_error(
|
||||
"Error: $it needs string data",
|
||||
"given something else",
|
||||
name_tag,
|
||||
));
|
||||
}
|
||||
}
|
||||
if !first {
|
||||
@ -275,6 +298,8 @@ impl ExternalCommand {
|
||||
|
||||
process = process.cwd(context.shell_manager.path());
|
||||
|
||||
trace!(target: "nu::run::external", "cwd = {:?}", context.shell_manager.path());
|
||||
|
||||
let mut process = match stream_next {
|
||||
StreamNext::Last => process,
|
||||
StreamNext::External | StreamNext::Internal => {
|
||||
@ -282,11 +307,18 @@ impl ExternalCommand {
|
||||
}
|
||||
};
|
||||
|
||||
trace!(target: "nu::run::external", "set up stdout pipe");
|
||||
|
||||
if let Some(stdin) = stdin {
|
||||
process = process.stdin(stdin);
|
||||
}
|
||||
|
||||
let mut popen = process.popen()?;
|
||||
trace!(target: "nu::run::external", "set up stdin pipe");
|
||||
trace!(target: "nu::run::external", "built process {:?}", process);
|
||||
|
||||
let mut popen = process.popen().unwrap();
|
||||
|
||||
trace!(target: "nu::run::external", "next = {:?}", stream_next);
|
||||
|
||||
match stream_next {
|
||||
StreamNext::Last => {
|
||||
|
@ -507,6 +507,15 @@ pub enum Command {
|
||||
PerItem(Arc<dyn PerItemCommand>),
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for Command {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Command::WholeStream(command) => write!(f, "WholeStream({})", command.name()),
|
||||
Command::PerItem(command) => write!(f, "PerItem({})", command.name()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Command {
|
||||
pub fn name(&self) -> &str {
|
||||
match self {
|
||||
|
@ -70,9 +70,9 @@ pub fn config(
|
||||
|
||||
if let Some(v) = get {
|
||||
let key = v.to_string();
|
||||
let value = result
|
||||
.get(&key)
|
||||
.ok_or_else(|| ShellError::string(&format!("Missing key {} in config", key)))?;
|
||||
let value = result.get(&key).ok_or_else(|| {
|
||||
ShellError::labeled_error(&format!("Missing key in config"), "key", v.tag())
|
||||
})?;
|
||||
|
||||
let mut results = VecDeque::new();
|
||||
|
||||
@ -120,10 +120,11 @@ pub fn config(
|
||||
result.swap_remove(&key);
|
||||
config::write(&result, &configuration)?;
|
||||
} else {
|
||||
return Err(ShellError::string(&format!(
|
||||
return Err(ShellError::labeled_error(
|
||||
"{} does not exist in config",
|
||||
key
|
||||
)));
|
||||
"key",
|
||||
v.tag(),
|
||||
));
|
||||
}
|
||||
|
||||
let obj = VecDeque::from_iter(vec![Value::Row(result.into()).tagged(v.tag())]);
|
||||
|
@ -54,11 +54,10 @@ fn run(
|
||||
output.push_str(&s);
|
||||
}
|
||||
_ => {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Expect a string from pipeline",
|
||||
"not a string-compatible value",
|
||||
i.tag(),
|
||||
));
|
||||
return Err(ShellError::type_error(
|
||||
"a string-compatible value",
|
||||
i.tagged_type_name(),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -15,7 +15,7 @@ impl PerItemCommand for Enter {
|
||||
}
|
||||
|
||||
fn signature(&self) -> registry::Signature {
|
||||
Signature::build("enter").required("location", SyntaxShape::Block)
|
||||
Signature::build("enter").required("location", SyntaxShape::Path)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -33,14 +33,14 @@ impl PerItemCommand for Enter {
|
||||
let raw_args = raw_args.clone();
|
||||
match call_info.args.expect_nth(0)? {
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::String(location)),
|
||||
item: Value::Primitive(Primitive::Path(location)),
|
||||
..
|
||||
} => {
|
||||
let location = location.to_string();
|
||||
let location_clone = location.to_string();
|
||||
let location_string = location.display().to_string();
|
||||
let location_clone = location_string.clone();
|
||||
|
||||
if location.starts_with("help") {
|
||||
let spec = location.split(":").collect::<Vec<&str>>();
|
||||
let spec = location_string.split(":").collect::<Vec<&str>>();
|
||||
|
||||
let (_, command) = (spec[0], spec[1]);
|
||||
|
||||
|
@ -44,16 +44,18 @@ fn run(
|
||||
registry: &CommandRegistry,
|
||||
raw_args: &RawCommandArgs,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let path = match call_info
|
||||
.args
|
||||
.nth(0)
|
||||
.ok_or_else(|| ShellError::string(&format!("No file or directory specified")))?
|
||||
{
|
||||
let path = match call_info.args.nth(0).ok_or_else(|| {
|
||||
ShellError::labeled_error(
|
||||
"No file or directory specified",
|
||||
"for command",
|
||||
call_info.name_tag,
|
||||
)
|
||||
})? {
|
||||
file => file,
|
||||
};
|
||||
let path_buf = path.as_path()?;
|
||||
let path_str = path_buf.display().to_string();
|
||||
let path_span = path.span();
|
||||
let path_span = path.tag.span;
|
||||
let has_raw = call_info.args.has("raw");
|
||||
let registry = registry.clone();
|
||||
let raw_args = raw_args.clone();
|
||||
|
@ -16,7 +16,7 @@ impl WholeStreamCommand for First {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("first").required("amount", SyntaxShape::Literal)
|
||||
Signature::build("first").required("amount", SyntaxShape::Int)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
|
@ -1,14 +1,16 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::data::meta::tag_for_tagged_list;
|
||||
use crate::data::Value;
|
||||
use crate::errors::ShellError;
|
||||
use crate::prelude::*;
|
||||
use log::trace;
|
||||
|
||||
pub struct Get;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct GetArgs {
|
||||
member: Tagged<String>,
|
||||
rest: Vec<Tagged<String>>,
|
||||
member: ColumnPath,
|
||||
rest: Vec<ColumnPath>,
|
||||
}
|
||||
|
||||
impl WholeStreamCommand for Get {
|
||||
@ -18,8 +20,8 @@ impl WholeStreamCommand for Get {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("get")
|
||||
.required("member", SyntaxShape::Member)
|
||||
.rest(SyntaxShape::Member)
|
||||
.required("member", SyntaxShape::ColumnPath)
|
||||
.rest(SyntaxShape::ColumnPath)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -35,39 +37,34 @@ impl WholeStreamCommand for Get {
|
||||
}
|
||||
}
|
||||
|
||||
fn get_member(path: &Tagged<String>, obj: &Tagged<Value>) -> Result<Tagged<Value>, ShellError> {
|
||||
pub type ColumnPath = Vec<Tagged<String>>;
|
||||
|
||||
pub fn get_column_path(
|
||||
path: &ColumnPath,
|
||||
obj: &Tagged<Value>,
|
||||
) -> Result<Tagged<Value>, ShellError> {
|
||||
let mut current = Some(obj);
|
||||
for p in path.split(".") {
|
||||
for p in path.iter() {
|
||||
if let Some(obj) = current {
|
||||
current = match obj.get_data_by_key(p) {
|
||||
current = match obj.get_data_by_key(&p) {
|
||||
Some(v) => Some(v),
|
||||
None =>
|
||||
// Before we give up, see if they gave us a path that matches a field name by itself
|
||||
{
|
||||
match obj.get_data_by_key(&path.item) {
|
||||
Some(v) => return Ok(v.clone()),
|
||||
None => {
|
||||
let possibilities = obj.data_descriptors();
|
||||
let possibilities = obj.data_descriptors();
|
||||
|
||||
let mut possible_matches: Vec<_> = possibilities
|
||||
.iter()
|
||||
.map(|x| {
|
||||
(natural::distance::levenshtein_distance(x, &path.item), x)
|
||||
})
|
||||
.collect();
|
||||
let mut possible_matches: Vec<_> = possibilities
|
||||
.iter()
|
||||
.map(|x| (natural::distance::levenshtein_distance(x, &p), x))
|
||||
.collect();
|
||||
|
||||
possible_matches.sort();
|
||||
possible_matches.sort();
|
||||
|
||||
if possible_matches.len() > 0 {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Unknown column",
|
||||
format!("did you mean '{}'?", possible_matches[0].1),
|
||||
path.tag(),
|
||||
));
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
return Err(ShellError::labeled_error(
|
||||
"Unknown column",
|
||||
format!("did you mean '{}'?", possible_matches[0].1),
|
||||
tag_for_tagged_list(path.iter().map(|p| p.tag())),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -97,6 +94,8 @@ pub fn get(
|
||||
}: GetArgs,
|
||||
RunnableContext { input, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
trace!("get {:?} {:?}", member, fields);
|
||||
|
||||
let stream = input
|
||||
.values
|
||||
.map(move |item| {
|
||||
@ -107,10 +106,10 @@ pub fn get(
|
||||
let fields = vec![&member, &fields]
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect::<Vec<&Tagged<String>>>();
|
||||
.collect::<Vec<&ColumnPath>>();
|
||||
|
||||
for field in &fields {
|
||||
match get_member(field, &item) {
|
||||
for column_path in &fields {
|
||||
match get_column_path(column_path, &item) {
|
||||
Ok(Tagged {
|
||||
item: Value::Table(l),
|
||||
..
|
||||
|
@ -45,16 +45,18 @@ fn run(
|
||||
let cwd = PathBuf::from(shell_manager.path());
|
||||
let full_path = PathBuf::from(cwd);
|
||||
|
||||
let path = match call_info
|
||||
.args
|
||||
.nth(0)
|
||||
.ok_or_else(|| ShellError::string(&format!("No file or directory specified")))?
|
||||
{
|
||||
let path = match call_info.args.nth(0).ok_or_else(|| {
|
||||
ShellError::labeled_error(
|
||||
"No file or directory specified",
|
||||
"for command",
|
||||
call_info.name_tag,
|
||||
)
|
||||
})? {
|
||||
file => file,
|
||||
};
|
||||
let path_buf = path.as_path()?;
|
||||
let path_str = path_buf.display().to_string();
|
||||
let path_span = path.span();
|
||||
let path_span = path.tag.span;
|
||||
let has_raw = call_info.args.has("raw");
|
||||
let registry = registry.clone();
|
||||
let raw_args = raw_args.clone();
|
||||
|
@ -128,7 +128,7 @@ pub fn filter_plugin(
|
||||
},
|
||||
Err(e) => {
|
||||
let mut result = VecDeque::new();
|
||||
result.push_back(Err(ShellError::string(format!(
|
||||
result.push_back(Err(ShellError::untagged_runtime_error(format!(
|
||||
"Error while processing begin_filter response: {:?} {}",
|
||||
e, input
|
||||
))));
|
||||
@ -138,7 +138,7 @@ pub fn filter_plugin(
|
||||
}
|
||||
Err(e) => {
|
||||
let mut result = VecDeque::new();
|
||||
result.push_back(Err(ShellError::string(format!(
|
||||
result.push_back(Err(ShellError::untagged_runtime_error(format!(
|
||||
"Error while reading begin_filter response: {:?}",
|
||||
e
|
||||
))));
|
||||
@ -189,7 +189,7 @@ pub fn filter_plugin(
|
||||
},
|
||||
Err(e) => {
|
||||
let mut result = VecDeque::new();
|
||||
result.push_back(Err(ShellError::string(format!(
|
||||
result.push_back(Err(ShellError::untagged_runtime_error(format!(
|
||||
"Error while processing end_filter response: {:?} {}",
|
||||
e, input
|
||||
))));
|
||||
@ -199,7 +199,7 @@ pub fn filter_plugin(
|
||||
}
|
||||
Err(e) => {
|
||||
let mut result = VecDeque::new();
|
||||
result.push_back(Err(ShellError::string(format!(
|
||||
result.push_back(Err(ShellError::untagged_runtime_error(format!(
|
||||
"Error while reading end_filter: {:?}",
|
||||
e
|
||||
))));
|
||||
@ -236,7 +236,7 @@ pub fn filter_plugin(
|
||||
},
|
||||
Err(e) => {
|
||||
let mut result = VecDeque::new();
|
||||
result.push_back(Err(ShellError::string(format!(
|
||||
result.push_back(Err(ShellError::untagged_runtime_error(format!(
|
||||
"Error while processing filter response: {:?} {}",
|
||||
e, input
|
||||
))));
|
||||
@ -246,7 +246,7 @@ pub fn filter_plugin(
|
||||
}
|
||||
Err(e) => {
|
||||
let mut result = VecDeque::new();
|
||||
result.push_back(Err(ShellError::string(format!(
|
||||
result.push_back(Err(ShellError::untagged_runtime_error(format!(
|
||||
"Error while reading filter response: {:?}",
|
||||
e
|
||||
))));
|
||||
|
@ -55,18 +55,14 @@ fn run(
|
||||
raw_args: &RawCommandArgs,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let call_info = call_info.clone();
|
||||
let path = match call_info
|
||||
.args
|
||||
.nth(0)
|
||||
.ok_or_else(|| ShellError::string(&format!("No url specified")))?
|
||||
{
|
||||
let path = match call_info.args.nth(0).ok_or_else(|| {
|
||||
ShellError::labeled_error("No url specified", "for command", call_info.name_tag)
|
||||
})? {
|
||||
file => file.clone(),
|
||||
};
|
||||
let body = match call_info
|
||||
.args
|
||||
.nth(1)
|
||||
.ok_or_else(|| ShellError::string(&format!("No body specified")))?
|
||||
{
|
||||
let body = match call_info.args.nth(1).ok_or_else(|| {
|
||||
ShellError::labeled_error("No body specified", "for command", call_info.name_tag)
|
||||
})? {
|
||||
file => file.clone(),
|
||||
};
|
||||
let path_str = path.as_string()?;
|
||||
|
@ -143,7 +143,7 @@ fn save(
|
||||
}
|
||||
_ => {
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Save requires a filepath",
|
||||
"Save requires a filepath (1)",
|
||||
"needs path",
|
||||
name_tag,
|
||||
));
|
||||
@ -151,7 +151,7 @@ fn save(
|
||||
},
|
||||
None => {
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Save requires a filepath",
|
||||
"Save requires a filepath (2)",
|
||||
"needs path",
|
||||
name_tag,
|
||||
));
|
||||
@ -159,7 +159,7 @@ fn save(
|
||||
}
|
||||
} else {
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Save requires a filepath",
|
||||
"Save requires a filepath (3)",
|
||||
"needs path",
|
||||
name_tag,
|
||||
));
|
||||
@ -212,9 +212,9 @@ fn save(
|
||||
match content {
|
||||
Ok(save_data) => match std::fs::write(full_path, save_data) {
|
||||
Ok(o) => o,
|
||||
Err(e) => yield Err(ShellError::string(e.to_string())),
|
||||
Err(e) => yield Err(ShellError::labeled_error(e.to_string(), "for command", name)),
|
||||
},
|
||||
Err(e) => yield Err(ShellError::string(e.to_string())),
|
||||
Err(e) => yield Err(ShellError::labeled_error(e.to_string(), "for command", name)),
|
||||
}
|
||||
|
||||
};
|
||||
|
@ -1,6 +1,7 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::errors::ShellError;
|
||||
use crate::prelude::*;
|
||||
use log::trace;
|
||||
|
||||
pub struct SkipWhile;
|
||||
|
||||
@ -38,7 +39,9 @@ pub fn skip_while(
|
||||
RunnableContext { input, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let objects = input.values.skip_while(move |item| {
|
||||
trace!("ITEM = {:?}", item);
|
||||
let result = condition.invoke(&item);
|
||||
trace!("RESULT = {:?}", result);
|
||||
|
||||
let return_value = match result {
|
||||
Ok(ref v) if v.is_true() => true,
|
||||
|
@ -38,8 +38,8 @@ fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream,
|
||||
let anchor = v.anchor();
|
||||
let span = v.tag().span;
|
||||
let mut dict = TaggedDictBuilder::new(v.tag());
|
||||
dict.insert("start", Value::int(span.start as i64));
|
||||
dict.insert("end", Value::int(span.end as i64));
|
||||
dict.insert("start", Value::int(span.start() as i64));
|
||||
dict.insert("end", Value::int(span.end() as i64));
|
||||
tags.insert_tagged("span", dict.into_tagged_value());
|
||||
|
||||
match source_map.get(&anchor) {
|
||||
|
@ -32,8 +32,8 @@ impl WholeStreamCommand for ToCSV {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn value_to_csv_value(v: &Value) -> Value {
|
||||
match v {
|
||||
pub fn value_to_csv_value(v: &Tagged<Value>) -> Tagged<Value> {
|
||||
match &v.item {
|
||||
Value::Primitive(Primitive::String(s)) => Value::Primitive(Primitive::String(s.clone())),
|
||||
Value::Primitive(Primitive::Nothing) => Value::Primitive(Primitive::Nothing),
|
||||
Value::Primitive(Primitive::Boolean(b)) => Value::Primitive(Primitive::Boolean(b.clone())),
|
||||
@ -47,10 +47,11 @@ pub fn value_to_csv_value(v: &Value) -> Value {
|
||||
Value::Block(_) => Value::Primitive(Primitive::Nothing),
|
||||
_ => Value::Primitive(Primitive::Nothing),
|
||||
}
|
||||
.tagged(v.tag)
|
||||
}
|
||||
|
||||
fn to_string_helper(v: &Value) -> Result<String, ShellError> {
|
||||
match v {
|
||||
fn to_string_helper(v: &Tagged<Value>) -> Result<String, ShellError> {
|
||||
match &v.item {
|
||||
Value::Primitive(Primitive::Date(d)) => Ok(d.to_string()),
|
||||
Value::Primitive(Primitive::Bytes(b)) => Ok(format!("{}", b)),
|
||||
Value::Primitive(Primitive::Boolean(_)) => Ok(v.as_string()?),
|
||||
@ -60,7 +61,7 @@ fn to_string_helper(v: &Value) -> Result<String, ShellError> {
|
||||
Value::Table(_) => return Ok(String::from("[Table]")),
|
||||
Value::Row(_) => return Ok(String::from("[Row]")),
|
||||
Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()),
|
||||
_ => return Err(ShellError::string("Unexpected value")),
|
||||
_ => return Err(ShellError::labeled_error("Unexpected value", "", v.tag)),
|
||||
}
|
||||
}
|
||||
|
||||
@ -76,7 +77,9 @@ fn merge_descriptors(values: &[Tagged<Value>]) -> Vec<String> {
|
||||
ret
|
||||
}
|
||||
|
||||
pub fn to_string(v: &Value) -> Result<String, ShellError> {
|
||||
pub fn to_string(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
|
||||
let v = &tagged_value.item;
|
||||
|
||||
match v {
|
||||
Value::Row(o) => {
|
||||
let mut wtr = WriterBuilder::new().from_writer(vec![]);
|
||||
@ -92,11 +95,20 @@ pub fn to_string(v: &Value) -> Result<String, ShellError> {
|
||||
wtr.write_record(fields).expect("can not write.");
|
||||
wtr.write_record(values).expect("can not write.");
|
||||
|
||||
return Ok(String::from_utf8(
|
||||
wtr.into_inner()
|
||||
.map_err(|_| ShellError::string("Could not convert record"))?,
|
||||
)
|
||||
.map_err(|_| ShellError::string("Could not convert record"))?);
|
||||
return Ok(String::from_utf8(wtr.into_inner().map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
tagged_value.tag,
|
||||
)
|
||||
})?)
|
||||
.map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
tagged_value.tag,
|
||||
)
|
||||
})?);
|
||||
}
|
||||
Value::Table(list) => {
|
||||
let mut wtr = WriterBuilder::new().from_writer(vec![]);
|
||||
@ -120,13 +132,22 @@ pub fn to_string(v: &Value) -> Result<String, ShellError> {
|
||||
wtr.write_record(&row).expect("can not write");
|
||||
}
|
||||
|
||||
return Ok(String::from_utf8(
|
||||
wtr.into_inner()
|
||||
.map_err(|_| ShellError::string("Could not convert record"))?,
|
||||
)
|
||||
.map_err(|_| ShellError::string("Could not convert record"))?);
|
||||
return Ok(String::from_utf8(wtr.into_inner().map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
tagged_value.tag,
|
||||
)
|
||||
})?)
|
||||
.map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
tagged_value.tag,
|
||||
)
|
||||
})?);
|
||||
}
|
||||
_ => return to_string_helper(&v),
|
||||
_ => return to_string_helper(tagged_value),
|
||||
}
|
||||
}
|
||||
|
||||
@ -148,7 +169,7 @@ fn to_csv(
|
||||
};
|
||||
|
||||
for value in to_process_input {
|
||||
match to_string(&value_to_csv_value(&value.item)) {
|
||||
match to_string(&value_to_csv_value(&value)) {
|
||||
Ok(x) => {
|
||||
let converted = if headerless {
|
||||
x.lines().skip(1).collect()
|
||||
|
@ -32,7 +32,9 @@ impl WholeStreamCommand for ToTSV {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn value_to_tsv_value(v: &Value) -> Value {
|
||||
pub fn value_to_tsv_value(tagged_value: &Tagged<Value>) -> Tagged<Value> {
|
||||
let v = &tagged_value.item;
|
||||
|
||||
match v {
|
||||
Value::Primitive(Primitive::String(s)) => Value::Primitive(Primitive::String(s.clone())),
|
||||
Value::Primitive(Primitive::Nothing) => Value::Primitive(Primitive::Nothing),
|
||||
@ -47,20 +49,28 @@ pub fn value_to_tsv_value(v: &Value) -> Value {
|
||||
Value::Block(_) => Value::Primitive(Primitive::Nothing),
|
||||
_ => Value::Primitive(Primitive::Nothing),
|
||||
}
|
||||
.tagged(tagged_value.tag)
|
||||
}
|
||||
|
||||
fn to_string_helper(v: &Value) -> Result<String, ShellError> {
|
||||
fn to_string_helper(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
|
||||
let v = &tagged_value.item;
|
||||
match v {
|
||||
Value::Primitive(Primitive::Date(d)) => Ok(d.to_string()),
|
||||
Value::Primitive(Primitive::Bytes(b)) => Ok(format!("{}", b)),
|
||||
Value::Primitive(Primitive::Boolean(_)) => Ok(v.as_string()?),
|
||||
Value::Primitive(Primitive::Decimal(_)) => Ok(v.as_string()?),
|
||||
Value::Primitive(Primitive::Int(_)) => Ok(v.as_string()?),
|
||||
Value::Primitive(Primitive::Path(_)) => Ok(v.as_string()?),
|
||||
Value::Primitive(Primitive::Boolean(_)) => Ok(tagged_value.as_string()?),
|
||||
Value::Primitive(Primitive::Decimal(_)) => Ok(tagged_value.as_string()?),
|
||||
Value::Primitive(Primitive::Int(_)) => Ok(tagged_value.as_string()?),
|
||||
Value::Primitive(Primitive::Path(_)) => Ok(tagged_value.as_string()?),
|
||||
Value::Table(_) => return Ok(String::from("[table]")),
|
||||
Value::Row(_) => return Ok(String::from("[row]")),
|
||||
Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()),
|
||||
_ => return Err(ShellError::string("Unexpected value")),
|
||||
_ => {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Unexpected value",
|
||||
"original value",
|
||||
tagged_value.tag,
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -76,7 +86,9 @@ fn merge_descriptors(values: &[Tagged<Value>]) -> Vec<String> {
|
||||
ret
|
||||
}
|
||||
|
||||
pub fn to_string(v: &Value) -> Result<String, ShellError> {
|
||||
pub fn to_string(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
|
||||
let v = &tagged_value.item;
|
||||
|
||||
match v {
|
||||
Value::Row(o) => {
|
||||
let mut wtr = WriterBuilder::new().delimiter(b'\t').from_writer(vec![]);
|
||||
@ -91,11 +103,20 @@ pub fn to_string(v: &Value) -> Result<String, ShellError> {
|
||||
wtr.write_record(fields).expect("can not write.");
|
||||
wtr.write_record(values).expect("can not write.");
|
||||
|
||||
return Ok(String::from_utf8(
|
||||
wtr.into_inner()
|
||||
.map_err(|_| ShellError::string("Could not convert record"))?,
|
||||
)
|
||||
.map_err(|_| ShellError::string("Could not convert record"))?);
|
||||
return Ok(String::from_utf8(wtr.into_inner().map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
tagged_value.tag,
|
||||
)
|
||||
})?)
|
||||
.map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
tagged_value.tag,
|
||||
)
|
||||
})?);
|
||||
}
|
||||
Value::Table(list) => {
|
||||
let mut wtr = WriterBuilder::new().delimiter(b'\t').from_writer(vec![]);
|
||||
@ -119,13 +140,22 @@ pub fn to_string(v: &Value) -> Result<String, ShellError> {
|
||||
wtr.write_record(&row).expect("can not write");
|
||||
}
|
||||
|
||||
return Ok(String::from_utf8(
|
||||
wtr.into_inner()
|
||||
.map_err(|_| ShellError::string("Could not convert record"))?,
|
||||
)
|
||||
.map_err(|_| ShellError::string("Could not convert record"))?);
|
||||
return Ok(String::from_utf8(wtr.into_inner().map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
tagged_value.tag,
|
||||
)
|
||||
})?)
|
||||
.map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
tagged_value.tag,
|
||||
)
|
||||
})?);
|
||||
}
|
||||
_ => return to_string_helper(&v),
|
||||
_ => return to_string_helper(tagged_value),
|
||||
}
|
||||
}
|
||||
|
||||
@ -147,7 +177,7 @@ fn to_tsv(
|
||||
};
|
||||
|
||||
for value in to_process_input {
|
||||
match to_string(&value_to_tsv_value(&value.item)) {
|
||||
match to_string(&value_to_tsv_value(&value)) {
|
||||
Ok(x) => {
|
||||
let converted = if headerless {
|
||||
x.lines().skip(1).collect()
|
||||
|
@ -1,5 +1,5 @@
|
||||
use crate::commands::{Command, UnevaluatedCallInfo};
|
||||
use crate::parser::hir;
|
||||
use crate::parser::{hir, hir::syntax_shape::ExpandContext};
|
||||
use crate::prelude::*;
|
||||
|
||||
use derive_new::new;
|
||||
@ -7,7 +7,7 @@ use indexmap::IndexMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::error::Error;
|
||||
use std::sync::Arc;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
@ -53,13 +53,17 @@ impl CommandRegistry {
|
||||
registry.get(name).map(|c| c.clone())
|
||||
}
|
||||
|
||||
pub(crate) fn expect_command(&self, name: &str) -> Arc<Command> {
|
||||
self.get_command(name).unwrap()
|
||||
}
|
||||
|
||||
pub(crate) fn has(&self, name: &str) -> bool {
|
||||
let registry = self.registry.lock().unwrap();
|
||||
|
||||
registry.contains_key(name)
|
||||
}
|
||||
|
||||
fn insert(&mut self, name: impl Into<String>, command: Arc<Command>) {
|
||||
pub(crate) fn insert(&mut self, name: impl Into<String>, command: Arc<Command>) {
|
||||
let mut registry = self.registry.lock().unwrap();
|
||||
registry.insert(name.into(), command);
|
||||
}
|
||||
@ -73,7 +77,7 @@ impl CommandRegistry {
|
||||
#[derive(Clone)]
|
||||
pub struct Context {
|
||||
registry: CommandRegistry,
|
||||
pub(crate) source_map: SourceMap,
|
||||
pub(crate) source_map: Arc<Mutex<SourceMap>>,
|
||||
host: Arc<Mutex<dyn Host + Send>>,
|
||||
pub(crate) shell_manager: ShellManager,
|
||||
}
|
||||
@ -83,11 +87,19 @@ impl Context {
|
||||
&self.registry
|
||||
}
|
||||
|
||||
pub(crate) fn expand_context<'context>(
|
||||
&'context self,
|
||||
source: &'context Text,
|
||||
tag: Tag,
|
||||
) -> ExpandContext<'context> {
|
||||
ExpandContext::new(&self.registry, tag, source, self.shell_manager.homedir())
|
||||
}
|
||||
|
||||
pub(crate) fn basic() -> Result<Context, Box<dyn Error>> {
|
||||
let registry = CommandRegistry::new();
|
||||
Ok(Context {
|
||||
registry: registry.clone(),
|
||||
source_map: SourceMap::new(),
|
||||
source_map: Arc::new(Mutex::new(SourceMap::new())),
|
||||
host: Arc::new(Mutex::new(crate::env::host::BasicHost)),
|
||||
shell_manager: ShellManager::basic(registry)?,
|
||||
})
|
||||
@ -106,15 +118,17 @@ impl Context {
|
||||
}
|
||||
|
||||
pub fn add_anchor_location(&mut self, uuid: Uuid, anchor_location: AnchorLocation) {
|
||||
self.source_map.insert(uuid, anchor_location);
|
||||
let mut source_map = self.source_map.lock().unwrap();
|
||||
|
||||
source_map.insert(uuid, anchor_location);
|
||||
}
|
||||
|
||||
pub(crate) fn has_command(&self, name: &str) -> bool {
|
||||
self.registry.has(name)
|
||||
pub(crate) fn get_command(&self, name: &str) -> Option<Arc<Command>> {
|
||||
self.registry.get_command(name)
|
||||
}
|
||||
|
||||
pub(crate) fn get_command(&self, name: &str) -> Arc<Command> {
|
||||
self.registry.get_command(name).unwrap()
|
||||
pub(crate) fn expect_command(&self, name: &str) -> Arc<Command> {
|
||||
self.registry.expect_command(name)
|
||||
}
|
||||
|
||||
pub(crate) fn run_command<'a>(
|
||||
|
181
src/data/base.rs
181
src/data/base.rs
@ -8,6 +8,7 @@ use crate::Text;
|
||||
use chrono::{DateTime, Utc};
|
||||
use chrono_humanize::Humanize;
|
||||
use derive_new::new;
|
||||
use log::trace;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fmt;
|
||||
use std::path::PathBuf;
|
||||
@ -217,6 +218,14 @@ impl Block {
|
||||
|
||||
let mut last = None;
|
||||
|
||||
trace!(
|
||||
"EXPRS = {:?}",
|
||||
self.expressions
|
||||
.iter()
|
||||
.map(|e| format!("{}", e))
|
||||
.collect::<Vec<_>>()
|
||||
);
|
||||
|
||||
for expr in self.expressions.iter() {
|
||||
last = Some(evaluate_baseline_expr(
|
||||
&expr,
|
||||
@ -289,7 +298,7 @@ impl fmt::Debug for ValueDebug<'_> {
|
||||
}
|
||||
|
||||
impl Tagged<Value> {
|
||||
pub(crate) fn tagged_type_name(&self) -> Tagged<String> {
|
||||
pub fn tagged_type_name(&self) -> Tagged<String> {
|
||||
let name = self.type_name();
|
||||
Tagged::from_item(name, self.tag())
|
||||
}
|
||||
@ -394,13 +403,51 @@ impl Tagged<Value> {
|
||||
pub(crate) fn debug(&self) -> ValueDebug<'_> {
|
||||
ValueDebug { value: self }
|
||||
}
|
||||
|
||||
pub fn as_column_path(&self) -> Result<Tagged<Vec<Tagged<String>>>, ShellError> {
|
||||
let mut out: Vec<Tagged<String>> = vec![];
|
||||
|
||||
match &self.item {
|
||||
Value::Table(table) => {
|
||||
for item in table {
|
||||
out.push(item.as_string()?.tagged(item.tag));
|
||||
}
|
||||
}
|
||||
|
||||
other => {
|
||||
return Err(ShellError::type_error(
|
||||
"column name",
|
||||
other.type_name().tagged(self.tag),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
Ok(out.tagged(self.tag))
|
||||
}
|
||||
|
||||
pub(crate) fn as_string(&self) -> Result<String, ShellError> {
|
||||
match &self.item {
|
||||
Value::Primitive(Primitive::String(s)) => Ok(s.clone()),
|
||||
Value::Primitive(Primitive::Boolean(x)) => Ok(format!("{}", x)),
|
||||
Value::Primitive(Primitive::Decimal(x)) => Ok(format!("{}", x)),
|
||||
Value::Primitive(Primitive::Int(x)) => Ok(format!("{}", x)),
|
||||
Value::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)),
|
||||
Value::Primitive(Primitive::Path(x)) => Ok(format!("{}", x.display())),
|
||||
// TODO: this should definitely be more general with better errors
|
||||
other => Err(ShellError::labeled_error(
|
||||
"Expected string",
|
||||
other.type_name(),
|
||||
self.tag,
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Value {
|
||||
pub(crate) fn type_name(&self) -> String {
|
||||
pub fn type_name(&self) -> String {
|
||||
match self {
|
||||
Value::Primitive(p) => p.type_name(),
|
||||
Value::Row(_) => format!("object"),
|
||||
Value::Row(_) => format!("row"),
|
||||
Value::Table(_) => format!("list"),
|
||||
Value::Block(_) => format!("block"),
|
||||
}
|
||||
@ -443,6 +490,22 @@ impl Value {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_data_by_column_path(
|
||||
&self,
|
||||
tag: Tag,
|
||||
path: &Vec<Tagged<String>>,
|
||||
) -> Option<Tagged<&Value>> {
|
||||
let mut current = self;
|
||||
for p in path {
|
||||
match current.get_data_by_key(p) {
|
||||
Some(v) => current = v,
|
||||
None => return None,
|
||||
}
|
||||
}
|
||||
|
||||
Some(Tagged::from_item(current, tag))
|
||||
}
|
||||
|
||||
pub fn get_data_by_path(&self, tag: Tag, path: &str) -> Option<Tagged<&Value>> {
|
||||
let mut current = self;
|
||||
for p in path.split(".") {
|
||||
@ -508,6 +571,58 @@ impl Value {
|
||||
None
|
||||
}
|
||||
|
||||
pub fn insert_data_at_column_path(
|
||||
&self,
|
||||
tag: Tag,
|
||||
split_path: &Vec<Tagged<String>>,
|
||||
new_value: Value,
|
||||
) -> Option<Tagged<Value>> {
|
||||
let mut new_obj = self.clone();
|
||||
|
||||
if let Value::Row(ref mut o) = new_obj {
|
||||
let mut current = o;
|
||||
|
||||
if split_path.len() == 1 {
|
||||
// Special case for inserting at the top level
|
||||
current.entries.insert(
|
||||
split_path[0].item.clone(),
|
||||
Tagged::from_item(new_value, tag),
|
||||
);
|
||||
return Some(Tagged::from_item(new_obj, tag));
|
||||
}
|
||||
|
||||
for idx in 0..split_path.len() {
|
||||
match current.entries.get_mut(&split_path[idx].item) {
|
||||
Some(next) => {
|
||||
if idx == (split_path.len() - 2) {
|
||||
match &mut next.item {
|
||||
Value::Row(o) => {
|
||||
o.entries.insert(
|
||||
split_path[idx + 1].to_string(),
|
||||
Tagged::from_item(new_value, tag),
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
return Some(Tagged::from_item(new_obj, tag));
|
||||
} else {
|
||||
match next.item {
|
||||
Value::Row(ref mut o) => {
|
||||
current = o;
|
||||
}
|
||||
_ => return None,
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => return None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn replace_data_at_path(
|
||||
&self,
|
||||
tag: Tag,
|
||||
@ -543,6 +658,39 @@ impl Value {
|
||||
None
|
||||
}
|
||||
|
||||
pub fn replace_data_at_column_path(
|
||||
&self,
|
||||
tag: Tag,
|
||||
split_path: &Vec<Tagged<String>>,
|
||||
replaced_value: Value,
|
||||
) -> Option<Tagged<Value>> {
|
||||
let mut new_obj = self.clone();
|
||||
|
||||
if let Value::Row(ref mut o) = new_obj {
|
||||
let mut current = o;
|
||||
for idx in 0..split_path.len() {
|
||||
match current.entries.get_mut(&split_path[idx].item) {
|
||||
Some(next) => {
|
||||
if idx == (split_path.len() - 1) {
|
||||
*next = Tagged::from_item(replaced_value, tag);
|
||||
return Some(Tagged::from_item(new_obj, tag));
|
||||
} else {
|
||||
match next.item {
|
||||
Value::Row(ref mut o) => {
|
||||
current = o;
|
||||
}
|
||||
_ => return None,
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => return None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn get_data(&self, desc: &String) -> MaybeOwned<'_, Value> {
|
||||
match self {
|
||||
p @ Value::Primitive(_) => MaybeOwned::Borrowed(p),
|
||||
@ -607,22 +755,6 @@ impl Value {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn as_string(&self) -> Result<String, ShellError> {
|
||||
match self {
|
||||
Value::Primitive(Primitive::String(s)) => Ok(s.clone()),
|
||||
Value::Primitive(Primitive::Boolean(x)) => Ok(format!("{}", x)),
|
||||
Value::Primitive(Primitive::Decimal(x)) => Ok(format!("{}", x)),
|
||||
Value::Primitive(Primitive::Int(x)) => Ok(format!("{}", x)),
|
||||
Value::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)),
|
||||
Value::Primitive(Primitive::Path(x)) => Ok(format!("{}", x.display())),
|
||||
// TODO: this should definitely be more general with better errors
|
||||
other => Err(ShellError::string(format!(
|
||||
"Expected string, got {:?}",
|
||||
other
|
||||
))),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn is_true(&self) -> bool {
|
||||
match self {
|
||||
Value::Primitive(Primitive::Boolean(true)) => true,
|
||||
@ -675,9 +807,14 @@ impl Value {
|
||||
Value::Primitive(Primitive::Date(s.into()))
|
||||
}
|
||||
|
||||
pub fn date_from_str(s: &str) -> Result<Value, ShellError> {
|
||||
let date = DateTime::parse_from_rfc3339(s)
|
||||
.map_err(|err| ShellError::string(&format!("Date parse error: {}", err)))?;
|
||||
pub fn date_from_str(s: Tagged<&str>) -> Result<Value, ShellError> {
|
||||
let date = DateTime::parse_from_rfc3339(s.item).map_err(|err| {
|
||||
ShellError::labeled_error(
|
||||
&format!("Date parse error: {}", err),
|
||||
"original value",
|
||||
s.tag,
|
||||
)
|
||||
})?;
|
||||
|
||||
let date = date.with_timezone(&chrono::offset::Utc);
|
||||
|
||||
|
@ -51,8 +51,9 @@ pub fn user_data() -> Result<PathBuf, ShellError> {
|
||||
}
|
||||
|
||||
pub fn app_path(app_data_type: AppDataType, display: &str) -> Result<PathBuf, ShellError> {
|
||||
let path = app_root(app_data_type, &APP_INFO)
|
||||
.map_err(|err| ShellError::string(&format!("Couldn't open {} path:\n{}", display, err)))?;
|
||||
let path = app_root(app_data_type, &APP_INFO).map_err(|err| {
|
||||
ShellError::untagged_runtime_error(&format!("Couldn't open {} path:\n{}", display, err))
|
||||
})?;
|
||||
|
||||
Ok(path)
|
||||
}
|
||||
@ -75,10 +76,21 @@ pub fn read(
|
||||
let tag = tag.into();
|
||||
let contents = fs::read_to_string(filename)
|
||||
.map(|v| v.tagged(tag))
|
||||
.map_err(|err| ShellError::string(&format!("Couldn't read config file:\n{}", err)))?;
|
||||
.map_err(|err| {
|
||||
ShellError::labeled_error(
|
||||
&format!("Couldn't read config file:\n{}", err),
|
||||
"file name",
|
||||
tag,
|
||||
)
|
||||
})?;
|
||||
|
||||
let parsed: toml::Value = toml::from_str(&contents)
|
||||
.map_err(|err| ShellError::string(&format!("Couldn't parse config file:\n{}", err)))?;
|
||||
let parsed: toml::Value = toml::from_str(&contents).map_err(|err| {
|
||||
ShellError::labeled_error(
|
||||
&format!("Couldn't parse config file:\n{}", err),
|
||||
"file name",
|
||||
tag,
|
||||
)
|
||||
})?;
|
||||
|
||||
let value = convert_toml_value_to_nu_value(&parsed, tag);
|
||||
let tag = value.tag();
|
||||
|
152
src/data/meta.rs
152
src/data/meta.rs
@ -1,4 +1,5 @@
|
||||
use crate::context::{AnchorLocation, SourceMap};
|
||||
use crate::parser::parse::parser::TracableContext;
|
||||
use crate::prelude::*;
|
||||
use crate::Text;
|
||||
use derive_new::new;
|
||||
@ -119,10 +120,7 @@ impl From<&Tag> for Tag {
|
||||
|
||||
impl From<nom_locate::LocatedSpanEx<&str, Uuid>> for Span {
|
||||
fn from(input: nom_locate::LocatedSpanEx<&str, Uuid>) -> Span {
|
||||
Span {
|
||||
start: input.offset,
|
||||
end: input.offset + input.fragment.len(),
|
||||
}
|
||||
Span::new(input.offset, input.offset + input.fragment.len())
|
||||
}
|
||||
}
|
||||
|
||||
@ -147,10 +145,7 @@ impl<T>
|
||||
|
||||
impl From<(usize, usize)> for Span {
|
||||
fn from(input: (usize, usize)) -> Span {
|
||||
Span {
|
||||
start: input.0,
|
||||
end: input.1,
|
||||
}
|
||||
Span::new(input.0, input.1)
|
||||
}
|
||||
}
|
||||
|
||||
@ -164,7 +159,7 @@ impl From<&std::ops::Range<usize>> for Span {
|
||||
}
|
||||
|
||||
#[derive(
|
||||
Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters,
|
||||
Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters, new,
|
||||
)]
|
||||
pub struct Tag {
|
||||
pub anchor: Uuid,
|
||||
@ -189,11 +184,20 @@ impl From<&Span> for Tag {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<(usize, usize, TracableContext)> for Tag {
|
||||
fn from((start, end, context): (usize, usize, TracableContext)) -> Self {
|
||||
Tag {
|
||||
anchor: context.origin,
|
||||
span: Span::new(start, end),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<(usize, usize, Uuid)> for Tag {
|
||||
fn from((start, end, anchor): (usize, usize, Uuid)) -> Self {
|
||||
Tag {
|
||||
anchor,
|
||||
span: Span { start, end },
|
||||
span: Span::new(start, end),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -201,24 +205,17 @@ impl From<(usize, usize, Uuid)> for Tag {
|
||||
impl From<(usize, usize, Option<Uuid>)> for Tag {
|
||||
fn from((start, end, anchor): (usize, usize, Option<Uuid>)) -> Self {
|
||||
Tag {
|
||||
anchor: if let Some(uuid) = anchor {
|
||||
uuid
|
||||
} else {
|
||||
uuid::Uuid::nil()
|
||||
},
|
||||
span: Span { start, end },
|
||||
anchor: anchor.unwrap_or(uuid::Uuid::nil()),
|
||||
span: Span::new(start, end),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<nom_locate::LocatedSpanEx<&str, Uuid>> for Tag {
|
||||
fn from(input: nom_locate::LocatedSpanEx<&str, Uuid>) -> Tag {
|
||||
impl From<nom_locate::LocatedSpanEx<&str, TracableContext>> for Tag {
|
||||
fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Tag {
|
||||
Tag {
|
||||
anchor: input.extra,
|
||||
span: Span {
|
||||
start: input.offset,
|
||||
end: input.offset + input.fragment.len(),
|
||||
},
|
||||
anchor: input.extra.origin,
|
||||
span: Span::new(input.offset, input.offset + input.fragment.len()),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -243,6 +240,16 @@ impl Tag {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn for_char(pos: usize, anchor: Uuid) -> Tag {
|
||||
Tag {
|
||||
anchor,
|
||||
span: Span {
|
||||
start: pos,
|
||||
end: pos + 1,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unknown_span(anchor: Uuid) -> Tag {
|
||||
Tag {
|
||||
anchor,
|
||||
@ -265,29 +272,73 @@ impl Tag {
|
||||
);
|
||||
|
||||
Tag {
|
||||
span: Span {
|
||||
start: self.span.start,
|
||||
end: other.span.end,
|
||||
},
|
||||
span: Span::new(self.span.start, other.span.end),
|
||||
anchor: self.anchor,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn until_option(&self, other: Option<impl Into<Tag>>) -> Tag {
|
||||
match other {
|
||||
Some(other) => {
|
||||
let other = other.into();
|
||||
debug_assert!(
|
||||
self.anchor == other.anchor,
|
||||
"Can only merge two tags with the same anchor"
|
||||
);
|
||||
|
||||
Tag {
|
||||
span: Span::new(self.span.start, other.span.end),
|
||||
anchor: self.anchor,
|
||||
}
|
||||
}
|
||||
None => *self,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn slice<'a>(&self, source: &'a str) -> &'a str {
|
||||
self.span.slice(source)
|
||||
}
|
||||
|
||||
pub fn string<'a>(&self, source: &'a str) -> String {
|
||||
self.span.slice(source).to_string()
|
||||
}
|
||||
|
||||
pub fn tagged_slice<'a>(&self, source: &'a str) -> Tagged<&'a str> {
|
||||
self.span.slice(source).tagged(self)
|
||||
}
|
||||
|
||||
pub fn tagged_string<'a>(&self, source: &'a str) -> Tagged<String> {
|
||||
self.span.slice(source).to_string().tagged(self)
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
pub fn tag_for_tagged_list(mut iter: impl Iterator<Item = Tag>) -> Tag {
|
||||
let first = iter.next();
|
||||
|
||||
let first = match first {
|
||||
None => return Tag::unknown(),
|
||||
Some(first) => first,
|
||||
};
|
||||
|
||||
let last = iter.last();
|
||||
|
||||
match last {
|
||||
None => first,
|
||||
Some(last) => first.until(last),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
|
||||
pub struct Span {
|
||||
pub(crate) start: usize,
|
||||
pub(crate) end: usize,
|
||||
start: usize,
|
||||
end: usize,
|
||||
}
|
||||
|
||||
impl From<Option<Span>> for Span {
|
||||
fn from(input: Option<Span>) -> Span {
|
||||
match input {
|
||||
None => Span { start: 0, end: 0 },
|
||||
None => Span::new(0, 0),
|
||||
Some(span) => span,
|
||||
}
|
||||
}
|
||||
@ -295,7 +346,18 @@ impl From<Option<Span>> for Span {
|
||||
|
||||
impl Span {
|
||||
pub fn unknown() -> Span {
|
||||
Span { start: 0, end: 0 }
|
||||
Span::new(0, 0)
|
||||
}
|
||||
|
||||
pub fn new(start: usize, end: usize) -> Span {
|
||||
assert!(
|
||||
end >= start,
|
||||
"Can't create a Span whose end < start, start={}, end={}",
|
||||
start,
|
||||
end
|
||||
);
|
||||
|
||||
Span { start, end }
|
||||
}
|
||||
|
||||
/*
|
||||
@ -308,6 +370,14 @@ impl Span {
|
||||
}
|
||||
*/
|
||||
|
||||
pub fn start(&self) -> usize {
|
||||
self.start
|
||||
}
|
||||
|
||||
pub fn end(&self) -> usize {
|
||||
self.end
|
||||
}
|
||||
|
||||
pub fn is_unknown(&self) -> bool {
|
||||
self.start == 0 && self.end == 0
|
||||
}
|
||||
@ -319,17 +389,11 @@ impl Span {
|
||||
|
||||
impl language_reporting::ReportingSpan for Span {
|
||||
fn with_start(&self, start: usize) -> Self {
|
||||
Span {
|
||||
start,
|
||||
end: self.end,
|
||||
}
|
||||
Span::new(start, self.end)
|
||||
}
|
||||
|
||||
fn with_end(&self, end: usize) -> Self {
|
||||
Span {
|
||||
start: self.start,
|
||||
end,
|
||||
}
|
||||
Span::new(self.start, end)
|
||||
}
|
||||
|
||||
fn start(&self) -> usize {
|
||||
@ -344,20 +408,14 @@ impl language_reporting::ReportingSpan for Span {
|
||||
impl language_reporting::ReportingSpan for Tag {
|
||||
fn with_start(&self, start: usize) -> Self {
|
||||
Tag {
|
||||
span: Span {
|
||||
start,
|
||||
end: self.span.end,
|
||||
},
|
||||
span: Span::new(start, self.span.end),
|
||||
anchor: self.anchor,
|
||||
}
|
||||
}
|
||||
|
||||
fn with_end(&self, end: usize) -> Self {
|
||||
Tag {
|
||||
span: Span {
|
||||
start: self.span.start,
|
||||
end,
|
||||
},
|
||||
span: Span::new(self.span.start, end),
|
||||
anchor: self.anchor,
|
||||
}
|
||||
}
|
||||
|
137
src/errors.rs
137
src/errors.rs
@ -1,5 +1,6 @@
|
||||
use crate::prelude::*;
|
||||
|
||||
use crate::parser::parse::parser::TracableContext;
|
||||
use ansi_term::Color;
|
||||
use derive_new::new;
|
||||
use language_reporting::{Diagnostic, Label, Severity};
|
||||
@ -19,6 +20,14 @@ impl Description {
|
||||
Description::Synthetic(s) => Err(s),
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
fn tag(&self) -> Tag {
|
||||
match self {
|
||||
Description::Source(tagged) => tagged.tag,
|
||||
Description::Synthetic(_) => Tag::unknown(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)]
|
||||
@ -35,6 +44,13 @@ pub struct ShellError {
|
||||
cause: Option<Box<ProximateShellError>>,
|
||||
}
|
||||
|
||||
impl ShellError {
|
||||
#[allow(unused)]
|
||||
pub(crate) fn tag(&self) -> Option<Tag> {
|
||||
self.error.tag()
|
||||
}
|
||||
}
|
||||
|
||||
impl ToDebug for ShellError {
|
||||
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
||||
self.error.fmt_debug(f, source)
|
||||
@ -46,12 +62,12 @@ impl serde::de::Error for ShellError {
|
||||
where
|
||||
T: std::fmt::Display,
|
||||
{
|
||||
ShellError::string(msg.to_string())
|
||||
ShellError::untagged_runtime_error(msg.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
impl ShellError {
|
||||
pub(crate) fn type_error(
|
||||
pub fn type_error(
|
||||
expected: impl Into<String>,
|
||||
actual: Tagged<impl Into<String>>,
|
||||
) -> ShellError {
|
||||
@ -62,6 +78,21 @@ impl ShellError {
|
||||
.start()
|
||||
}
|
||||
|
||||
pub fn untagged_runtime_error(error: impl Into<String>) -> ShellError {
|
||||
ProximateShellError::UntaggedRuntimeError {
|
||||
reason: error.into(),
|
||||
}
|
||||
.start()
|
||||
}
|
||||
|
||||
pub(crate) fn unexpected_eof(expected: impl Into<String>, tag: Tag) -> ShellError {
|
||||
ProximateShellError::UnexpectedEof {
|
||||
expected: expected.into(),
|
||||
tag,
|
||||
}
|
||||
.start()
|
||||
}
|
||||
|
||||
pub(crate) fn range_error(
|
||||
expected: impl Into<ExpectedRange>,
|
||||
actual: &Tagged<impl fmt::Debug>,
|
||||
@ -82,6 +113,7 @@ impl ShellError {
|
||||
.start()
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
pub(crate) fn invalid_command(problem: impl Into<Tag>) -> ShellError {
|
||||
ProximateShellError::InvalidCommand {
|
||||
command: problem.into(),
|
||||
@ -133,7 +165,7 @@ impl ShellError {
|
||||
|
||||
pub(crate) fn parse_error(
|
||||
error: nom::Err<(
|
||||
nom_locate::LocatedSpanEx<&str, uuid::Uuid>,
|
||||
nom_locate::LocatedSpanEx<&str, TracableContext>,
|
||||
nom::error::ErrorKind,
|
||||
)>,
|
||||
) -> ShellError {
|
||||
@ -164,9 +196,6 @@ impl ShellError {
|
||||
|
||||
pub(crate) fn to_diagnostic(self) -> Diagnostic<Tag> {
|
||||
match self.error {
|
||||
ProximateShellError::String(StringError { title, .. }) => {
|
||||
Diagnostic::new(Severity::Error, title)
|
||||
}
|
||||
ProximateShellError::InvalidCommand { command } => {
|
||||
Diagnostic::new(Severity::Error, "Invalid command")
|
||||
.with_label(Label::new_primary(command))
|
||||
@ -235,7 +264,6 @@ impl ShellError {
|
||||
Label::new_primary(tag)
|
||||
.with_message(format!("Expected {}, found {}", expected, actual)),
|
||||
),
|
||||
|
||||
ProximateShellError::TypeError {
|
||||
expected,
|
||||
actual:
|
||||
@ -246,6 +274,11 @@ impl ShellError {
|
||||
} => Diagnostic::new(Severity::Error, "Type Error")
|
||||
.with_label(Label::new_primary(tag).with_message(expected)),
|
||||
|
||||
ProximateShellError::UnexpectedEof {
|
||||
expected, tag
|
||||
} => Diagnostic::new(Severity::Error, format!("Unexpected end of input"))
|
||||
.with_label(Label::new_primary(tag).with_message(format!("Expected {}", expected))),
|
||||
|
||||
ProximateShellError::RangeError {
|
||||
kind,
|
||||
operation,
|
||||
@ -267,12 +300,12 @@ impl ShellError {
|
||||
problem:
|
||||
Tagged {
|
||||
tag,
|
||||
..
|
||||
item
|
||||
},
|
||||
} => Diagnostic::new(Severity::Error, "Syntax Error")
|
||||
.with_label(Label::new_primary(tag).with_message("Unexpected external command")),
|
||||
.with_label(Label::new_primary(tag).with_message(item)),
|
||||
|
||||
ProximateShellError::MissingProperty { subpath, expr } => {
|
||||
ProximateShellError::MissingProperty { subpath, expr, .. } => {
|
||||
let subpath = subpath.into_label();
|
||||
let expr = expr.into_label();
|
||||
|
||||
@ -296,6 +329,8 @@ impl ShellError {
|
||||
.with_label(Label::new_primary(left.tag()).with_message(left.item))
|
||||
.with_label(Label::new_secondary(right.tag()).with_message(right.item))
|
||||
}
|
||||
|
||||
ProximateShellError::UntaggedRuntimeError { reason } => Diagnostic::new(Severity::Error, format!("Error: {}", reason))
|
||||
}
|
||||
}
|
||||
|
||||
@ -329,16 +364,16 @@ impl ShellError {
|
||||
)
|
||||
}
|
||||
|
||||
pub fn string(title: impl Into<String>) -> ShellError {
|
||||
ProximateShellError::String(StringError::new(title.into(), Value::nothing())).start()
|
||||
}
|
||||
|
||||
pub(crate) fn unimplemented(title: impl Into<String>) -> ShellError {
|
||||
ShellError::string(&format!("Unimplemented: {}", title.into()))
|
||||
ShellError::untagged_runtime_error(&format!("Unimplemented: {}", title.into()))
|
||||
}
|
||||
|
||||
pub(crate) fn unexpected(title: impl Into<String>) -> ShellError {
|
||||
ShellError::string(&format!("Unexpected: {}", title.into()))
|
||||
ShellError::untagged_runtime_error(&format!("Unexpected: {}", title.into()))
|
||||
}
|
||||
|
||||
pub(crate) fn unreachable(title: impl Into<String>) -> ShellError {
|
||||
ShellError::untagged_runtime_error(&format!("BUG: Unreachable: {}", title.into()))
|
||||
}
|
||||
}
|
||||
|
||||
@ -383,10 +418,13 @@ impl ExpectedRange {
|
||||
|
||||
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)]
|
||||
pub enum ProximateShellError {
|
||||
String(StringError),
|
||||
SyntaxError {
|
||||
problem: Tagged<String>,
|
||||
},
|
||||
UnexpectedEof {
|
||||
expected: String,
|
||||
tag: Tag,
|
||||
},
|
||||
InvalidCommand {
|
||||
command: Tag,
|
||||
},
|
||||
@ -397,6 +435,7 @@ pub enum ProximateShellError {
|
||||
MissingProperty {
|
||||
subpath: Description,
|
||||
expr: Description,
|
||||
tag: Tag,
|
||||
},
|
||||
MissingValue {
|
||||
tag: Option<Tag>,
|
||||
@ -417,6 +456,9 @@ pub enum ProximateShellError {
|
||||
left: Tagged<String>,
|
||||
right: Tagged<String>,
|
||||
},
|
||||
UntaggedRuntimeError {
|
||||
reason: String,
|
||||
},
|
||||
}
|
||||
|
||||
impl ProximateShellError {
|
||||
@ -426,6 +468,22 @@ impl ProximateShellError {
|
||||
error: self,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn tag(&self) -> Option<Tag> {
|
||||
Some(match self {
|
||||
ProximateShellError::SyntaxError { problem } => problem.tag(),
|
||||
ProximateShellError::UnexpectedEof { tag, .. } => *tag,
|
||||
ProximateShellError::InvalidCommand { command } => *command,
|
||||
ProximateShellError::TypeError { actual, .. } => actual.tag,
|
||||
ProximateShellError::MissingProperty { tag, .. } => *tag,
|
||||
ProximateShellError::MissingValue { tag, .. } => return *tag,
|
||||
ProximateShellError::ArgumentError { tag, .. } => *tag,
|
||||
ProximateShellError::RangeError { actual_kind, .. } => actual_kind.tag,
|
||||
ProximateShellError::Diagnostic(..) => return None,
|
||||
ProximateShellError::UntaggedRuntimeError { .. } => return None,
|
||||
ProximateShellError::CoerceError { left, right } => left.tag.until(right.tag),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ToDebug for ProximateShellError {
|
||||
@ -469,16 +527,17 @@ pub struct StringError {
|
||||
impl std::fmt::Display for ShellError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
match &self.error {
|
||||
ProximateShellError::String(s) => write!(f, "{}", &s.title),
|
||||
ProximateShellError::MissingValue { .. } => write!(f, "MissingValue"),
|
||||
ProximateShellError::InvalidCommand { .. } => write!(f, "InvalidCommand"),
|
||||
ProximateShellError::TypeError { .. } => write!(f, "TypeError"),
|
||||
ProximateShellError::UnexpectedEof { .. } => write!(f, "UnexpectedEof"),
|
||||
ProximateShellError::RangeError { .. } => write!(f, "RangeError"),
|
||||
ProximateShellError::SyntaxError { .. } => write!(f, "SyntaxError"),
|
||||
ProximateShellError::MissingProperty { .. } => write!(f, "MissingProperty"),
|
||||
ProximateShellError::ArgumentError { .. } => write!(f, "ArgumentError"),
|
||||
ProximateShellError::Diagnostic(_) => write!(f, "<diagnostic>"),
|
||||
ProximateShellError::CoerceError { .. } => write!(f, "CoerceError"),
|
||||
ProximateShellError::UntaggedRuntimeError { .. } => write!(f, "UntaggedRuntimeError"),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -487,71 +546,43 @@ impl std::error::Error for ShellError {}
|
||||
|
||||
impl std::convert::From<Box<dyn std::error::Error>> for ShellError {
|
||||
fn from(input: Box<dyn std::error::Error>) -> ShellError {
|
||||
ProximateShellError::String(StringError {
|
||||
title: format!("{}", input),
|
||||
error: Value::nothing(),
|
||||
})
|
||||
.start()
|
||||
ShellError::untagged_runtime_error(format!("{}", input))
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::From<std::io::Error> for ShellError {
|
||||
fn from(input: std::io::Error) -> ShellError {
|
||||
ProximateShellError::String(StringError {
|
||||
title: format!("{}", input),
|
||||
error: Value::nothing(),
|
||||
})
|
||||
.start()
|
||||
ShellError::untagged_runtime_error(format!("{}", input))
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::From<subprocess::PopenError> for ShellError {
|
||||
fn from(input: subprocess::PopenError) -> ShellError {
|
||||
ProximateShellError::String(StringError {
|
||||
title: format!("{}", input),
|
||||
error: Value::nothing(),
|
||||
})
|
||||
.start()
|
||||
ShellError::untagged_runtime_error(format!("{}", input))
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::From<serde_yaml::Error> for ShellError {
|
||||
fn from(input: serde_yaml::Error) -> ShellError {
|
||||
ProximateShellError::String(StringError {
|
||||
title: format!("{:?}", input),
|
||||
error: Value::nothing(),
|
||||
})
|
||||
.start()
|
||||
ShellError::untagged_runtime_error(format!("{:?}", input))
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::From<toml::ser::Error> for ShellError {
|
||||
fn from(input: toml::ser::Error) -> ShellError {
|
||||
ProximateShellError::String(StringError {
|
||||
title: format!("{:?}", input),
|
||||
error: Value::nothing(),
|
||||
})
|
||||
.start()
|
||||
ShellError::untagged_runtime_error(format!("{:?}", input))
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::From<serde_json::Error> for ShellError {
|
||||
fn from(input: serde_json::Error) -> ShellError {
|
||||
ProximateShellError::String(StringError {
|
||||
title: format!("{:?}", input),
|
||||
error: Value::nothing(),
|
||||
})
|
||||
.start()
|
||||
ShellError::untagged_runtime_error(format!("{:?}", input))
|
||||
}
|
||||
}
|
||||
|
||||
impl std::convert::From<Box<dyn std::error::Error + Send + Sync>> for ShellError {
|
||||
fn from(input: Box<dyn std::error::Error + Send + Sync>) -> ShellError {
|
||||
ProximateShellError::String(StringError {
|
||||
title: format!("{:?}", input),
|
||||
error: Value::nothing(),
|
||||
})
|
||||
.start()
|
||||
ShellError::untagged_runtime_error(format!("{:?}", input))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -7,6 +7,8 @@ use crate::parser::{
|
||||
use crate::prelude::*;
|
||||
use derive_new::new;
|
||||
use indexmap::IndexMap;
|
||||
use log::trace;
|
||||
use std::fmt;
|
||||
|
||||
#[derive(new)]
|
||||
pub struct Scope {
|
||||
@ -15,6 +17,15 @@ pub struct Scope {
|
||||
vars: IndexMap<String, Tagged<Value>>,
|
||||
}
|
||||
|
||||
impl fmt::Display for Scope {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_map()
|
||||
.entry(&"$it", &format!("{:?}", self.it.item))
|
||||
.entries(self.vars.iter().map(|(k, v)| (k, &v.item)))
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl Scope {
|
||||
pub(crate) fn empty() -> Scope {
|
||||
Scope {
|
||||
@ -48,12 +59,15 @@ pub(crate) fn evaluate_baseline_expr(
|
||||
RawExpression::Synthetic(hir::Synthetic::String(s)) => {
|
||||
Ok(Value::string(s).tagged_unknown())
|
||||
}
|
||||
RawExpression::Variable(var) => evaluate_reference(var, scope, source),
|
||||
RawExpression::Variable(var) => evaluate_reference(var, scope, source, expr.tag()),
|
||||
RawExpression::Command(_) => evaluate_command(expr.tag(), scope, source),
|
||||
RawExpression::ExternalCommand(external) => evaluate_external(external, scope, source),
|
||||
RawExpression::Binary(binary) => {
|
||||
let left = evaluate_baseline_expr(binary.left(), registry, scope, source)?;
|
||||
let right = evaluate_baseline_expr(binary.right(), registry, scope, source)?;
|
||||
|
||||
trace!("left={:?} right={:?}", left.item, right.item);
|
||||
|
||||
match left.compare(binary.op(), &*right) {
|
||||
Ok(result) => Ok(Value::boolean(result).tagged(expr.tag())),
|
||||
Err((left_type, right_type)) => Err(ShellError::coerce_error(
|
||||
@ -130,14 +144,16 @@ fn evaluate_reference(
|
||||
name: &hir::Variable,
|
||||
scope: &Scope,
|
||||
source: &Text,
|
||||
tag: Tag,
|
||||
) -> Result<Tagged<Value>, ShellError> {
|
||||
trace!("Evaluating {} with Scope {}", name, scope);
|
||||
match name {
|
||||
hir::Variable::It(tag) => Ok(scope.it.item.clone().tagged(*tag)),
|
||||
hir::Variable::Other(tag) => Ok(scope
|
||||
hir::Variable::It(_) => Ok(scope.it.item.clone().tagged(tag)),
|
||||
hir::Variable::Other(inner) => Ok(scope
|
||||
.vars
|
||||
.get(tag.slice(source))
|
||||
.get(inner.slice(source))
|
||||
.map(|v| v.clone())
|
||||
.unwrap_or_else(|| Value::nothing().tagged(*tag))),
|
||||
.unwrap_or_else(|| Value::nothing().tagged(tag))),
|
||||
}
|
||||
}
|
||||
|
||||
@ -150,3 +166,7 @@ fn evaluate_external(
|
||||
"Unexpected external command".tagged(*external.name()),
|
||||
))
|
||||
}
|
||||
|
||||
fn evaluate_command(tag: Tag, _scope: &Scope, _source: &Text) -> Result<Tagged<Value>, ShellError> {
|
||||
Err(ShellError::syntax_error("Unexpected command".tagged(tag)))
|
||||
}
|
||||
|
@ -31,7 +31,7 @@ pub use cli::cli;
|
||||
pub use data::base::{Primitive, Value};
|
||||
pub use data::config::{config_path, APP_INFO};
|
||||
pub use data::dict::{Dictionary, TaggedDictBuilder};
|
||||
pub use data::meta::{Tag, Tagged, TaggedItem};
|
||||
pub use data::meta::{Span, Tag, Tagged, TaggedItem};
|
||||
pub use errors::{CoerceInto, ShellError};
|
||||
pub use num_traits::cast::ToPrimitive;
|
||||
pub use parser::parse::text::Text;
|
||||
|
@ -7,18 +7,18 @@ pub(crate) mod registry;
|
||||
use crate::errors::ShellError;
|
||||
|
||||
pub(crate) use deserializer::ConfigDeserializer;
|
||||
pub(crate) use hir::baseline_parse_tokens::baseline_parse_tokens;
|
||||
pub(crate) use hir::syntax_shape::flat_shape::FlatShape;
|
||||
pub(crate) use hir::TokensIterator;
|
||||
pub(crate) use parse::call_node::CallNode;
|
||||
pub(crate) use parse::files::Files;
|
||||
pub(crate) use parse::flag::Flag;
|
||||
pub(crate) use parse::flag::{Flag, FlagKind};
|
||||
pub(crate) use parse::operator::Operator;
|
||||
pub(crate) use parse::parser::{nom_input, pipeline};
|
||||
pub(crate) use parse::pipeline::{Pipeline, PipelineElement};
|
||||
pub(crate) use parse::text::Text;
|
||||
pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, PathNode, TokenNode};
|
||||
pub(crate) use parse::tokens::{RawToken, Token};
|
||||
pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
|
||||
pub(crate) use parse::tokens::{RawNumber, RawToken};
|
||||
pub(crate) use parse::unit::Unit;
|
||||
pub(crate) use parse_command::parse_command;
|
||||
pub(crate) use registry::CommandRegistry;
|
||||
|
||||
pub fn parse(input: &str, anchor: uuid::Uuid) -> Result<TokenNode, ShellError> {
|
||||
|
@ -310,9 +310,10 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> {
|
||||
return Ok(r);
|
||||
}
|
||||
trace!(
|
||||
"deserializing struct {:?} {:?} (stack={:?})",
|
||||
"deserializing struct {:?} {:?} (saw_root={} stack={:?})",
|
||||
name,
|
||||
fields,
|
||||
self.saw_root,
|
||||
self.stack
|
||||
);
|
||||
|
||||
@ -326,6 +327,12 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> {
|
||||
let type_name = std::any::type_name::<V::Value>();
|
||||
let tagged_val_name = std::any::type_name::<Tagged<Value>>();
|
||||
|
||||
trace!(
|
||||
"type_name={} tagged_val_name={}",
|
||||
type_name,
|
||||
tagged_val_name
|
||||
);
|
||||
|
||||
if type_name == tagged_val_name {
|
||||
return visit::<Tagged<Value>, _>(value.val, name, fields, visitor);
|
||||
}
|
||||
|
@ -1,11 +1,13 @@
|
||||
pub(crate) mod baseline_parse;
|
||||
pub(crate) mod baseline_parse_tokens;
|
||||
pub(crate) mod binary;
|
||||
pub(crate) mod expand_external_tokens;
|
||||
pub(crate) mod external_command;
|
||||
pub(crate) mod named;
|
||||
pub(crate) mod path;
|
||||
pub(crate) mod syntax_shape;
|
||||
pub(crate) mod tokens_iterator;
|
||||
|
||||
use crate::parser::{registry, Unit};
|
||||
use crate::parser::{registry, Operator, Unit};
|
||||
use crate::prelude::*;
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
@ -14,27 +16,18 @@ use std::fmt;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::evaluate::Scope;
|
||||
use crate::parser::parse::tokens::RawNumber;
|
||||
use crate::traits::ToDebug;
|
||||
|
||||
pub(crate) use self::baseline_parse::{
|
||||
baseline_parse_single_token, baseline_parse_token_as_number, baseline_parse_token_as_path,
|
||||
baseline_parse_token_as_pattern, baseline_parse_token_as_string,
|
||||
};
|
||||
pub(crate) use self::baseline_parse_tokens::{baseline_parse_next_expr, TokensIterator};
|
||||
pub(crate) use self::binary::Binary;
|
||||
pub(crate) use self::external_command::ExternalCommand;
|
||||
pub(crate) use self::named::NamedArguments;
|
||||
pub(crate) use self::path::Path;
|
||||
pub(crate) use self::syntax_shape::ExpandContext;
|
||||
pub(crate) use self::tokens_iterator::debug::debug_tokens;
|
||||
pub(crate) use self::tokens_iterator::TokensIterator;
|
||||
|
||||
pub use self::baseline_parse_tokens::SyntaxShape;
|
||||
|
||||
pub fn path(head: impl Into<Expression>, tail: Vec<Tagged<impl Into<String>>>) -> Path {
|
||||
Path::new(
|
||||
head.into(),
|
||||
tail.into_iter()
|
||||
.map(|item| item.map(|string| string.into()))
|
||||
.collect(),
|
||||
)
|
||||
}
|
||||
pub use self::syntax_shape::SyntaxShape;
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)]
|
||||
pub struct Call {
|
||||
@ -93,6 +86,7 @@ pub enum RawExpression {
|
||||
|
||||
FilePath(PathBuf),
|
||||
ExternalCommand(ExternalCommand),
|
||||
Command(Tag),
|
||||
|
||||
Boolean(bool),
|
||||
}
|
||||
@ -115,13 +109,14 @@ impl RawExpression {
|
||||
match self {
|
||||
RawExpression::Literal(literal) => literal.type_name(),
|
||||
RawExpression::Synthetic(synthetic) => synthetic.type_name(),
|
||||
RawExpression::ExternalWord => "externalword",
|
||||
RawExpression::FilePath(..) => "filepath",
|
||||
RawExpression::Command(..) => "command",
|
||||
RawExpression::ExternalWord => "external word",
|
||||
RawExpression::FilePath(..) => "file path",
|
||||
RawExpression::Variable(..) => "variable",
|
||||
RawExpression::List(..) => "list",
|
||||
RawExpression::Binary(..) => "binary",
|
||||
RawExpression::Block(..) => "block",
|
||||
RawExpression::Path(..) => "path",
|
||||
RawExpression::Path(..) => "variable path",
|
||||
RawExpression::Boolean(..) => "boolean",
|
||||
RawExpression::ExternalCommand(..) => "external",
|
||||
}
|
||||
@ -130,6 +125,39 @@ impl RawExpression {
|
||||
|
||||
pub type Expression = Tagged<RawExpression>;
|
||||
|
||||
impl std::fmt::Display for Expression {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let span = self.tag.span;
|
||||
|
||||
match &self.item {
|
||||
RawExpression::Literal(literal) => write!(f, "{}", literal.tagged(self.tag)),
|
||||
RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{}", s),
|
||||
RawExpression::Command(_) => write!(f, "Command{{ {}..{} }}", span.start(), span.end()),
|
||||
RawExpression::ExternalWord => {
|
||||
write!(f, "ExternalWord{{ {}..{} }}", span.start(), span.end())
|
||||
}
|
||||
RawExpression::FilePath(file) => write!(f, "Path{{ {} }}", file.display()),
|
||||
RawExpression::Variable(variable) => write!(f, "{}", variable),
|
||||
RawExpression::List(list) => f
|
||||
.debug_list()
|
||||
.entries(list.iter().map(|e| format!("{}", e)))
|
||||
.finish(),
|
||||
RawExpression::Binary(binary) => write!(f, "{}", binary),
|
||||
RawExpression::Block(items) => {
|
||||
write!(f, "Block")?;
|
||||
f.debug_set()
|
||||
.entries(items.iter().map(|i| format!("{}", i)))
|
||||
.finish()
|
||||
}
|
||||
RawExpression::Path(path) => write!(f, "{}", path),
|
||||
RawExpression::Boolean(b) => write!(f, "${}", b),
|
||||
RawExpression::ExternalCommand(..) => {
|
||||
write!(f, "ExternalComment{{ {}..{} }}", span.start(), span.end())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Expression {
|
||||
pub(crate) fn number(i: impl Into<Number>, tag: impl Into<Tag>) -> Expression {
|
||||
RawExpression::Literal(Literal::Number(i.into())).tagged(tag.into())
|
||||
@ -151,10 +179,50 @@ impl Expression {
|
||||
RawExpression::Literal(Literal::String(inner.into())).tagged(outer.into())
|
||||
}
|
||||
|
||||
pub(crate) fn path(
|
||||
head: Expression,
|
||||
tail: Vec<Tagged<impl Into<String>>>,
|
||||
tag: impl Into<Tag>,
|
||||
) -> Expression {
|
||||
let tail = tail.into_iter().map(|t| t.map(|s| s.into())).collect();
|
||||
RawExpression::Path(Box::new(Path::new(head, tail))).tagged(tag.into())
|
||||
}
|
||||
|
||||
pub(crate) fn dot_member(head: Expression, next: Tagged<impl Into<String>>) -> Expression {
|
||||
let Tagged { item, tag } = head;
|
||||
let new_tag = head.tag.until(next.tag);
|
||||
|
||||
match item {
|
||||
RawExpression::Path(path) => {
|
||||
let (head, mut tail) = path.parts();
|
||||
|
||||
tail.push(next.map(|i| i.into()));
|
||||
Expression::path(head, tail, new_tag)
|
||||
}
|
||||
|
||||
other => Expression::path(other.tagged(tag), vec![next], new_tag),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn infix(
|
||||
left: Expression,
|
||||
op: Tagged<impl Into<Operator>>,
|
||||
right: Expression,
|
||||
) -> Expression {
|
||||
let new_tag = left.tag.until(right.tag);
|
||||
|
||||
RawExpression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right)))
|
||||
.tagged(new_tag)
|
||||
}
|
||||
|
||||
pub(crate) fn file_path(path: impl Into<PathBuf>, outer: impl Into<Tag>) -> Expression {
|
||||
RawExpression::FilePath(path.into()).tagged(outer)
|
||||
}
|
||||
|
||||
pub(crate) fn list(list: Vec<Expression>, tag: impl Into<Tag>) -> Expression {
|
||||
RawExpression::List(list).tagged(tag)
|
||||
}
|
||||
|
||||
pub(crate) fn bare(tag: impl Into<Tag>) -> Expression {
|
||||
RawExpression::Literal(Literal::Bare).tagged(tag)
|
||||
}
|
||||
@ -182,6 +250,7 @@ impl ToDebug for Expression {
|
||||
RawExpression::Literal(l) => l.tagged(self.tag()).fmt_debug(f, source),
|
||||
RawExpression::FilePath(p) => write!(f, "{}", p.display()),
|
||||
RawExpression::ExternalWord => write!(f, "{}", self.tag().slice(source)),
|
||||
RawExpression::Command(tag) => write!(f, "{}", tag.slice(source)),
|
||||
RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{:?}", s),
|
||||
RawExpression::Variable(Variable::It(_)) => write!(f, "$it"),
|
||||
RawExpression::Variable(Variable::Other(s)) => write!(f, "${}", s.slice(source)),
|
||||
@ -232,6 +301,26 @@ pub enum Literal {
|
||||
Bare,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Tagged<Literal> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", Tagged::new(self.tag, &self.item))
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Tagged<&Literal> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let span = self.tag.span;
|
||||
|
||||
match &self.item {
|
||||
Literal::Number(number) => write!(f, "{}", number),
|
||||
Literal::Size(number, unit) => write!(f, "{}{}", number, unit.as_str()),
|
||||
Literal::String(_) => write!(f, "String{{ {}..{} }}", span.start(), span.end()),
|
||||
Literal::GlobPattern => write!(f, "Glob{{ {}..{} }}", span.start(), span.end()),
|
||||
Literal::Bare => write!(f, "Bare{{ {}..{} }}", span.start(), span.end()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToDebug for Tagged<&Literal> {
|
||||
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
||||
match self.item() {
|
||||
@ -261,3 +350,12 @@ pub enum Variable {
|
||||
It(Tag),
|
||||
Other(Tag),
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Variable {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Variable::It(_) => write!(f, "$it"),
|
||||
Variable::Other(tag) => write!(f, "${{ {}..{} }}", tag.span.start(), tag.span.end()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,140 +1,2 @@
|
||||
use crate::context::Context;
|
||||
use crate::errors::ShellError;
|
||||
use crate::parser::{hir, RawToken, Token};
|
||||
use crate::TaggedItem;
|
||||
use crate::Text;
|
||||
use std::path::PathBuf;
|
||||
|
||||
pub fn baseline_parse_single_token(
|
||||
token: &Token,
|
||||
source: &Text,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
Ok(match *token.item() {
|
||||
RawToken::Number(number) => hir::Expression::number(number.to_number(source), token.tag()),
|
||||
RawToken::Size(int, unit) => {
|
||||
hir::Expression::size(int.to_number(source), unit, token.tag())
|
||||
}
|
||||
RawToken::String(tag) => hir::Expression::string(tag, token.tag()),
|
||||
RawToken::Variable(tag) if tag.slice(source) == "it" => {
|
||||
hir::Expression::it_variable(tag, token.tag())
|
||||
}
|
||||
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
|
||||
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()),
|
||||
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
|
||||
RawToken::GlobPattern => hir::Expression::pattern(token.tag()),
|
||||
RawToken::Bare => hir::Expression::bare(token.tag()),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn baseline_parse_token_as_number(
|
||||
token: &Token,
|
||||
source: &Text,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
Ok(match *token.item() {
|
||||
RawToken::Variable(tag) if tag.slice(source) == "it" => {
|
||||
hir::Expression::it_variable(tag, token.tag())
|
||||
}
|
||||
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()),
|
||||
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
|
||||
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
|
||||
RawToken::Number(number) => hir::Expression::number(number.to_number(source), token.tag()),
|
||||
RawToken::Size(number, unit) => {
|
||||
hir::Expression::size(number.to_number(source), unit, token.tag())
|
||||
}
|
||||
RawToken::Bare => hir::Expression::bare(token.tag()),
|
||||
RawToken::GlobPattern => {
|
||||
return Err(ShellError::type_error(
|
||||
"Number",
|
||||
"glob pattern".to_string().tagged(token.tag()),
|
||||
))
|
||||
}
|
||||
RawToken::String(tag) => hir::Expression::string(tag, token.tag()),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn baseline_parse_token_as_string(
|
||||
token: &Token,
|
||||
source: &Text,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
Ok(match *token.item() {
|
||||
RawToken::Variable(tag) if tag.slice(source) == "it" => {
|
||||
hir::Expression::it_variable(tag, token.tag())
|
||||
}
|
||||
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()),
|
||||
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
|
||||
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
|
||||
RawToken::Number(_) => hir::Expression::bare(token.tag()),
|
||||
RawToken::Size(_, _) => hir::Expression::bare(token.tag()),
|
||||
RawToken::Bare => hir::Expression::bare(token.tag()),
|
||||
RawToken::GlobPattern => {
|
||||
return Err(ShellError::type_error(
|
||||
"String",
|
||||
"glob pattern".tagged(token.tag()),
|
||||
))
|
||||
}
|
||||
RawToken::String(tag) => hir::Expression::string(tag, token.tag()),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn baseline_parse_token_as_path(
|
||||
token: &Token,
|
||||
context: &Context,
|
||||
source: &Text,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
Ok(match *token.item() {
|
||||
RawToken::Variable(tag) if tag.slice(source) == "it" => {
|
||||
hir::Expression::it_variable(tag, token.tag())
|
||||
}
|
||||
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()),
|
||||
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
|
||||
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
|
||||
RawToken::Number(_) => hir::Expression::bare(token.tag()),
|
||||
RawToken::Size(_, _) => hir::Expression::bare(token.tag()),
|
||||
RawToken::Bare => {
|
||||
hir::Expression::file_path(expand_path(token.tag().slice(source), context), token.tag())
|
||||
}
|
||||
RawToken::GlobPattern => {
|
||||
return Err(ShellError::type_error(
|
||||
"Path",
|
||||
"glob pattern".tagged(token.tag()),
|
||||
))
|
||||
}
|
||||
RawToken::String(tag) => {
|
||||
hir::Expression::file_path(expand_path(tag.slice(source), context), token.tag())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn baseline_parse_token_as_pattern(
|
||||
token: &Token,
|
||||
context: &Context,
|
||||
source: &Text,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
Ok(match *token.item() {
|
||||
RawToken::Variable(tag) if tag.slice(source) == "it" => {
|
||||
hir::Expression::it_variable(tag, token.tag())
|
||||
}
|
||||
RawToken::ExternalCommand(_) => {
|
||||
return Err(ShellError::syntax_error(
|
||||
"Invalid external command".to_string().tagged(token.tag()),
|
||||
))
|
||||
}
|
||||
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
|
||||
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
|
||||
RawToken::Number(_) => hir::Expression::bare(token.tag()),
|
||||
RawToken::Size(_, _) => hir::Expression::bare(token.tag()),
|
||||
RawToken::GlobPattern => hir::Expression::pattern(token.tag()),
|
||||
RawToken::Bare => {
|
||||
hir::Expression::file_path(expand_path(token.tag().slice(source), context), token.tag())
|
||||
}
|
||||
RawToken::String(tag) => {
|
||||
hir::Expression::file_path(expand_path(tag.slice(source), context), token.tag())
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn expand_path(string: &str, context: &Context) -> PathBuf {
|
||||
let expanded = shellexpand::tilde_with_context(string, || context.shell_manager.homedir());
|
||||
|
||||
PathBuf::from(expanded.as_ref())
|
||||
}
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
144
src/parser/hir/baseline_parse/tests.rs
Normal file
144
src/parser/hir/baseline_parse/tests.rs
Normal file
@ -0,0 +1,144 @@
|
||||
use crate::commands::classified::InternalCommand;
|
||||
use crate::commands::ClassifiedCommand;
|
||||
use crate::env::host::BasicHost;
|
||||
use crate::parser::hir;
|
||||
use crate::parser::hir::syntax_shape::*;
|
||||
use crate::parser::hir::TokensIterator;
|
||||
use crate::parser::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b};
|
||||
use crate::parser::TokenNode;
|
||||
use crate::{Span, Tag, Tagged, TaggedItem, Text};
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::fmt::Debug;
|
||||
use uuid::Uuid;
|
||||
|
||||
#[test]
|
||||
fn test_parse_string() {
|
||||
parse_tokens(StringShape, vec![b::string("hello")], |tokens| {
|
||||
hir::Expression::string(inner_string_tag(tokens[0].tag()), tokens[0].tag())
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_path() {
|
||||
parse_tokens(
|
||||
VariablePathShape,
|
||||
vec![b::var("it"), b::op("."), b::bare("cpu")],
|
||||
|tokens| {
|
||||
let (outer_var, inner_var) = tokens[0].expect_var();
|
||||
let bare = tokens[2].expect_bare();
|
||||
hir::Expression::path(
|
||||
hir::Expression::it_variable(inner_var, outer_var),
|
||||
vec!["cpu".tagged(bare)],
|
||||
outer_var.until(bare),
|
||||
)
|
||||
},
|
||||
);
|
||||
|
||||
parse_tokens(
|
||||
VariablePathShape,
|
||||
vec![
|
||||
b::var("cpu"),
|
||||
b::op("."),
|
||||
b::bare("amount"),
|
||||
b::op("."),
|
||||
b::string("max ghz"),
|
||||
],
|
||||
|tokens| {
|
||||
let (outer_var, inner_var) = tokens[0].expect_var();
|
||||
let amount = tokens[2].expect_bare();
|
||||
let (outer_max_ghz, _) = tokens[4].expect_string();
|
||||
|
||||
hir::Expression::path(
|
||||
hir::Expression::variable(inner_var, outer_var),
|
||||
vec!["amount".tagged(amount), "max ghz".tagged(outer_max_ghz)],
|
||||
outer_var.until(outer_max_ghz),
|
||||
)
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_command() {
|
||||
parse_tokens(
|
||||
ClassifiedCommandShape,
|
||||
vec![b::bare("ls"), b::sp(), b::pattern("*.txt")],
|
||||
|tokens| {
|
||||
let bare = tokens[0].expect_bare();
|
||||
let pat = tokens[2].tag();
|
||||
|
||||
ClassifiedCommand::Internal(InternalCommand::new(
|
||||
"ls".to_string(),
|
||||
bare,
|
||||
hir::Call {
|
||||
head: Box::new(hir::RawExpression::Command(bare).tagged(bare)),
|
||||
positional: Some(vec![hir::Expression::pattern(pat)]),
|
||||
named: None,
|
||||
},
|
||||
))
|
||||
// hir::Expression::path(
|
||||
// hir::Expression::variable(inner_var, outer_var),
|
||||
// vec!["cpu".tagged(bare)],
|
||||
// outer_var.until(bare),
|
||||
// )
|
||||
},
|
||||
);
|
||||
|
||||
parse_tokens(
|
||||
VariablePathShape,
|
||||
vec![
|
||||
b::var("cpu"),
|
||||
b::op("."),
|
||||
b::bare("amount"),
|
||||
b::op("."),
|
||||
b::string("max ghz"),
|
||||
],
|
||||
|tokens| {
|
||||
let (outer_var, inner_var) = tokens[0].expect_var();
|
||||
let amount = tokens[2].expect_bare();
|
||||
let (outer_max_ghz, _) = tokens[4].expect_string();
|
||||
|
||||
hir::Expression::path(
|
||||
hir::Expression::variable(inner_var, outer_var),
|
||||
vec!["amount".tagged(amount), "max ghz".tagged(outer_max_ghz)],
|
||||
outer_var.until(outer_max_ghz),
|
||||
)
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
fn parse_tokens<T: Eq + Debug>(
|
||||
shape: impl ExpandSyntax<Output = T>,
|
||||
tokens: Vec<CurriedToken>,
|
||||
expected: impl FnOnce(Tagged<&[TokenNode]>) -> T,
|
||||
) {
|
||||
let tokens = b::token_list(tokens);
|
||||
let (tokens, source) = b::build(test_origin(), tokens);
|
||||
|
||||
ExpandContext::with_empty(&Text::from(source), |context| {
|
||||
let tokens = tokens.expect_list();
|
||||
let mut iterator = TokensIterator::all(tokens.item, *context.tag());
|
||||
|
||||
let expr = expand_syntax(&shape, &mut iterator, &context);
|
||||
|
||||
let expr = match expr {
|
||||
Ok(expr) => expr,
|
||||
Err(err) => {
|
||||
crate::cli::print_err(err, &BasicHost, context.source().clone());
|
||||
panic!("Parse failed");
|
||||
}
|
||||
};
|
||||
|
||||
assert_eq!(expr, expected(tokens));
|
||||
})
|
||||
}
|
||||
|
||||
fn test_origin() -> Uuid {
|
||||
Uuid::nil()
|
||||
}
|
||||
|
||||
fn inner_string_tag(tag: Tag) -> Tag {
|
||||
Tag {
|
||||
span: Span::new(tag.span.start() + 1, tag.span.end() - 1),
|
||||
anchor: tag.anchor,
|
||||
}
|
||||
}
|
@ -1,459 +0,0 @@
|
||||
use crate::context::Context;
|
||||
use crate::errors::ShellError;
|
||||
use crate::parser::{
|
||||
hir,
|
||||
hir::{
|
||||
baseline_parse_single_token, baseline_parse_token_as_number, baseline_parse_token_as_path,
|
||||
baseline_parse_token_as_pattern, baseline_parse_token_as_string,
|
||||
},
|
||||
DelimitedNode, Delimiter, PathNode, RawToken, TokenNode,
|
||||
};
|
||||
use crate::{Tag, Tagged, TaggedItem, Text};
|
||||
use derive_new::new;
|
||||
use log::trace;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
pub fn baseline_parse_tokens(
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &Context,
|
||||
source: &Text,
|
||||
syntax_type: SyntaxShape,
|
||||
) -> Result<Vec<hir::Expression>, ShellError> {
|
||||
let mut exprs: Vec<hir::Expression> = vec![];
|
||||
|
||||
loop {
|
||||
if token_nodes.at_end() {
|
||||
break;
|
||||
}
|
||||
|
||||
let expr = baseline_parse_next_expr(token_nodes, context, source, syntax_type)?;
|
||||
exprs.push(expr);
|
||||
}
|
||||
|
||||
Ok(exprs)
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
|
||||
pub enum SyntaxShape {
|
||||
Any,
|
||||
List,
|
||||
Literal,
|
||||
String,
|
||||
Member,
|
||||
Variable,
|
||||
Number,
|
||||
Path,
|
||||
Pattern,
|
||||
Binary,
|
||||
Block,
|
||||
Boolean,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for SyntaxShape {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
match self {
|
||||
SyntaxShape::Any => write!(f, "Any"),
|
||||
SyntaxShape::List => write!(f, "List"),
|
||||
SyntaxShape::Literal => write!(f, "Literal"),
|
||||
SyntaxShape::String => write!(f, "String"),
|
||||
SyntaxShape::Member => write!(f, "Member"),
|
||||
SyntaxShape::Variable => write!(f, "Variable"),
|
||||
SyntaxShape::Number => write!(f, "Number"),
|
||||
SyntaxShape::Path => write!(f, "Path"),
|
||||
SyntaxShape::Pattern => write!(f, "Pattern"),
|
||||
SyntaxShape::Binary => write!(f, "Binary"),
|
||||
SyntaxShape::Block => write!(f, "Block"),
|
||||
SyntaxShape::Boolean => write!(f, "Boolean"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn baseline_parse_next_expr(
|
||||
tokens: &mut TokensIterator,
|
||||
context: &Context,
|
||||
source: &Text,
|
||||
syntax_type: SyntaxShape,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
let next = tokens
|
||||
.next()
|
||||
.ok_or_else(|| ShellError::string("Expected token, found none"))?;
|
||||
|
||||
trace!(target: "nu::parser::parse_one_expr", "syntax_type={:?}, token={:?}", syntax_type, next);
|
||||
|
||||
match (syntax_type, next) {
|
||||
(SyntaxShape::Path, TokenNode::Token(token)) => {
|
||||
return baseline_parse_token_as_path(token, context, source)
|
||||
}
|
||||
|
||||
(SyntaxShape::Path, token) => {
|
||||
return Err(ShellError::type_error(
|
||||
"Path",
|
||||
token.type_name().tagged(token.tag()),
|
||||
))
|
||||
}
|
||||
|
||||
(SyntaxShape::Pattern, TokenNode::Token(token)) => {
|
||||
return baseline_parse_token_as_pattern(token, context, source)
|
||||
}
|
||||
|
||||
(SyntaxShape::Pattern, token) => {
|
||||
return Err(ShellError::type_error(
|
||||
"Path",
|
||||
token.type_name().tagged(token.tag()),
|
||||
))
|
||||
}
|
||||
|
||||
(SyntaxShape::String, TokenNode::Token(token)) => {
|
||||
return baseline_parse_token_as_string(token, source);
|
||||
}
|
||||
|
||||
(SyntaxShape::String, token) => {
|
||||
return Err(ShellError::type_error(
|
||||
"String",
|
||||
token.type_name().tagged(token.tag()),
|
||||
))
|
||||
}
|
||||
|
||||
(SyntaxShape::Number, TokenNode::Token(token)) => {
|
||||
return Ok(baseline_parse_token_as_number(token, source)?);
|
||||
}
|
||||
|
||||
(SyntaxShape::Number, token) => {
|
||||
return Err(ShellError::type_error(
|
||||
"Numeric",
|
||||
token.type_name().tagged(token.tag()),
|
||||
))
|
||||
}
|
||||
|
||||
// TODO: More legit member processing
|
||||
(SyntaxShape::Member, TokenNode::Token(token)) => {
|
||||
return baseline_parse_token_as_string(token, source);
|
||||
}
|
||||
|
||||
(SyntaxShape::Member, token) => {
|
||||
return Err(ShellError::type_error(
|
||||
"member",
|
||||
token.type_name().tagged(token.tag()),
|
||||
))
|
||||
}
|
||||
|
||||
(SyntaxShape::Any, _) => {}
|
||||
(SyntaxShape::List, _) => {}
|
||||
(SyntaxShape::Literal, _) => {}
|
||||
(SyntaxShape::Variable, _) => {}
|
||||
(SyntaxShape::Binary, _) => {}
|
||||
(SyntaxShape::Block, _) => {}
|
||||
(SyntaxShape::Boolean, _) => {}
|
||||
};
|
||||
|
||||
let first = baseline_parse_semantic_token(next, context, source)?;
|
||||
|
||||
let possible_op = tokens.peek();
|
||||
|
||||
let op = match possible_op {
|
||||
Some(TokenNode::Operator(op)) => op.clone(),
|
||||
_ => return Ok(first),
|
||||
};
|
||||
|
||||
tokens.next();
|
||||
|
||||
let second = match tokens.next() {
|
||||
None => {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Expected something after an operator",
|
||||
"operator",
|
||||
op.tag(),
|
||||
))
|
||||
}
|
||||
Some(token) => baseline_parse_semantic_token(token, context, source)?,
|
||||
};
|
||||
|
||||
// We definitely have a binary expression here -- let's see if we should coerce it into a block
|
||||
|
||||
match syntax_type {
|
||||
SyntaxShape::Any => {
|
||||
let tag = first.tag().until(second.tag());
|
||||
let binary = hir::Binary::new(first, op, second);
|
||||
let binary = hir::RawExpression::Binary(Box::new(binary));
|
||||
let binary = binary.tagged(tag);
|
||||
|
||||
Ok(binary)
|
||||
}
|
||||
|
||||
SyntaxShape::Block => {
|
||||
let tag = first.tag().until(second.tag());
|
||||
|
||||
let path: Tagged<hir::RawExpression> = match first {
|
||||
Tagged {
|
||||
item: hir::RawExpression::Literal(hir::Literal::Bare),
|
||||
tag,
|
||||
} => {
|
||||
let string = tag.slice(source).to_string().tagged(tag);
|
||||
let path = hir::Path::new(
|
||||
// TODO: Deal with synthetic nodes that have no representation at all in source
|
||||
hir::RawExpression::Variable(hir::Variable::It(Tag::unknown()))
|
||||
.tagged(Tag::unknown()),
|
||||
vec![string],
|
||||
);
|
||||
let path = hir::RawExpression::Path(Box::new(path));
|
||||
path.tagged(first.tag())
|
||||
}
|
||||
Tagged {
|
||||
item: hir::RawExpression::Literal(hir::Literal::String(inner)),
|
||||
tag,
|
||||
} => {
|
||||
let string = inner.slice(source).to_string().tagged(tag);
|
||||
let path = hir::Path::new(
|
||||
// TODO: Deal with synthetic nodes that have no representation at all in source
|
||||
hir::RawExpression::Variable(hir::Variable::It(Tag::unknown()))
|
||||
.tagged_unknown(),
|
||||
vec![string],
|
||||
);
|
||||
let path = hir::RawExpression::Path(Box::new(path));
|
||||
path.tagged(first.tag())
|
||||
}
|
||||
Tagged {
|
||||
item: hir::RawExpression::Variable(..),
|
||||
..
|
||||
} => first,
|
||||
Tagged { tag, item } => {
|
||||
return Err(ShellError::labeled_error(
|
||||
"The first part of an un-braced block must be a column name",
|
||||
item.type_name(),
|
||||
tag,
|
||||
))
|
||||
}
|
||||
};
|
||||
|
||||
let binary = hir::Binary::new(path, op, second);
|
||||
let binary = hir::RawExpression::Binary(Box::new(binary));
|
||||
let binary = binary.tagged(tag);
|
||||
|
||||
let block = hir::RawExpression::Block(vec![binary]);
|
||||
let block = block.tagged(tag);
|
||||
|
||||
Ok(block)
|
||||
}
|
||||
|
||||
other => Err(ShellError::unimplemented(format!(
|
||||
"coerce hint {:?}",
|
||||
other
|
||||
))),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn baseline_parse_semantic_token(
|
||||
token: &TokenNode,
|
||||
context: &Context,
|
||||
source: &Text,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
match token {
|
||||
TokenNode::Token(token) => baseline_parse_single_token(token, source),
|
||||
TokenNode::Call(_call) => unimplemented!(),
|
||||
TokenNode::Delimited(delimited) => baseline_parse_delimited(delimited, context, source),
|
||||
TokenNode::Pipeline(_pipeline) => unimplemented!(),
|
||||
TokenNode::Operator(op) => Err(ShellError::syntax_error(
|
||||
"Unexpected operator".tagged(op.tag),
|
||||
)),
|
||||
TokenNode::Flag(flag) => Err(ShellError::syntax_error("Unexpected flag".tagged(flag.tag))),
|
||||
TokenNode::Member(tag) => Err(ShellError::syntax_error(
|
||||
"BUG: Top-level member".tagged(*tag),
|
||||
)),
|
||||
TokenNode::Whitespace(tag) => Err(ShellError::syntax_error(
|
||||
"BUG: Whitespace found during parse".tagged(*tag),
|
||||
)),
|
||||
TokenNode::Error(error) => Err(*error.item.clone()),
|
||||
TokenNode::Path(path) => baseline_parse_path(path, context, source),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn baseline_parse_delimited(
|
||||
token: &Tagged<DelimitedNode>,
|
||||
context: &Context,
|
||||
source: &Text,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
match token.delimiter() {
|
||||
Delimiter::Brace => {
|
||||
let children = token.children();
|
||||
let exprs = baseline_parse_tokens(
|
||||
&mut TokensIterator::new(children),
|
||||
context,
|
||||
source,
|
||||
SyntaxShape::Any,
|
||||
)?;
|
||||
|
||||
let expr = hir::RawExpression::Block(exprs);
|
||||
Ok(expr.tagged(token.tag()))
|
||||
}
|
||||
Delimiter::Paren => unimplemented!(),
|
||||
Delimiter::Square => {
|
||||
let children = token.children();
|
||||
let exprs = baseline_parse_tokens(
|
||||
&mut TokensIterator::new(children),
|
||||
context,
|
||||
source,
|
||||
SyntaxShape::Any,
|
||||
)?;
|
||||
|
||||
let expr = hir::RawExpression::List(exprs);
|
||||
Ok(expr.tagged(token.tag()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn baseline_parse_path(
|
||||
token: &Tagged<PathNode>,
|
||||
context: &Context,
|
||||
source: &Text,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
let head = baseline_parse_semantic_token(token.head(), context, source)?;
|
||||
|
||||
let mut tail = vec![];
|
||||
|
||||
for part in token.tail() {
|
||||
let string = match part {
|
||||
TokenNode::Token(token) => match token.item() {
|
||||
RawToken::Bare => token.tag().slice(source),
|
||||
RawToken::String(tag) => tag.slice(source),
|
||||
RawToken::Number(_)
|
||||
| RawToken::Size(..)
|
||||
| RawToken::Variable(_)
|
||||
| RawToken::ExternalCommand(_)
|
||||
| RawToken::GlobPattern
|
||||
| RawToken::ExternalWord => {
|
||||
return Err(ShellError::type_error(
|
||||
"String",
|
||||
token.type_name().tagged(part.tag()),
|
||||
))
|
||||
}
|
||||
},
|
||||
|
||||
TokenNode::Member(tag) => tag.slice(source),
|
||||
|
||||
// TODO: Make this impossible
|
||||
other => {
|
||||
return Err(ShellError::syntax_error(
|
||||
format!("{} in path", other.type_name()).tagged(other.tag()),
|
||||
))
|
||||
}
|
||||
}
|
||||
.to_string();
|
||||
|
||||
tail.push(string.tagged(part.tag()));
|
||||
}
|
||||
|
||||
Ok(hir::path(head, tail).tagged(token.tag()).into())
|
||||
}
|
||||
|
||||
#[derive(Debug, new)]
|
||||
pub struct TokensIterator<'a> {
|
||||
tokens: &'a [TokenNode],
|
||||
#[new(default)]
|
||||
index: usize,
|
||||
#[new(default)]
|
||||
seen: indexmap::IndexSet<usize>,
|
||||
}
|
||||
|
||||
impl TokensIterator<'_> {
|
||||
pub fn remove(&mut self, position: usize) {
|
||||
self.seen.insert(position);
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.tokens.len()
|
||||
}
|
||||
|
||||
pub fn at_end(&self) -> bool {
|
||||
for index in self.index..self.tokens.len() {
|
||||
if !self.seen.contains(&index) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
pub fn advance(&mut self) {
|
||||
self.seen.insert(self.index);
|
||||
self.index += 1;
|
||||
}
|
||||
|
||||
pub fn extract<T>(&mut self, f: impl Fn(&TokenNode) -> Option<T>) -> Option<(usize, T)> {
|
||||
for (i, item) in self.tokens.iter().enumerate() {
|
||||
if self.seen.contains(&i) {
|
||||
continue;
|
||||
}
|
||||
|
||||
match f(item) {
|
||||
None => {
|
||||
continue;
|
||||
}
|
||||
Some(value) => {
|
||||
self.seen.insert(i);
|
||||
return Some((i, value));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn move_to(&mut self, pos: usize) {
|
||||
self.index = pos;
|
||||
}
|
||||
|
||||
pub fn restart(&mut self) {
|
||||
self.index = 0;
|
||||
}
|
||||
|
||||
pub fn clone(&self) -> TokensIterator {
|
||||
TokensIterator {
|
||||
tokens: self.tokens,
|
||||
index: self.index,
|
||||
seen: self.seen.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn peek(&self) -> Option<&TokenNode> {
|
||||
let mut tokens = self.clone();
|
||||
|
||||
tokens.next()
|
||||
}
|
||||
|
||||
pub fn debug_remaining(&self) -> Vec<TokenNode> {
|
||||
let mut tokens = self.clone();
|
||||
tokens.restart();
|
||||
tokens.cloned().collect()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Iterator for TokensIterator<'a> {
|
||||
type Item = &'a TokenNode;
|
||||
|
||||
fn next(&mut self) -> Option<&'a TokenNode> {
|
||||
loop {
|
||||
if self.index >= self.tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
if self.seen.contains(&self.index) {
|
||||
self.advance();
|
||||
continue;
|
||||
}
|
||||
|
||||
if self.index >= self.tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
match &self.tokens[self.index] {
|
||||
TokenNode::Whitespace(_) => {
|
||||
self.advance();
|
||||
}
|
||||
other => {
|
||||
self.advance();
|
||||
return Some(other);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -16,6 +16,12 @@ pub struct Binary {
|
||||
right: Expression,
|
||||
}
|
||||
|
||||
impl fmt::Display for Binary {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "({} {} {})", self.op.as_str(), self.left, self.right)
|
||||
}
|
||||
}
|
||||
|
||||
impl ToDebug for Binary {
|
||||
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
||||
write!(f, "{}", self.left.debug(source))?;
|
||||
|
159
src/parser/hir/expand_external_tokens.rs
Normal file
159
src/parser/hir/expand_external_tokens.rs
Normal file
@ -0,0 +1,159 @@
|
||||
use crate::errors::ShellError;
|
||||
use crate::parser::{
|
||||
hir::syntax_shape::{
|
||||
color_syntax, expand_atom, AtomicToken, ColorSyntax, ExpandContext, ExpansionRule,
|
||||
MaybeSpaceShape,
|
||||
},
|
||||
FlatShape, TokenNode, TokensIterator,
|
||||
};
|
||||
use crate::{Tag, Tagged, Text};
|
||||
|
||||
pub fn expand_external_tokens(
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
source: &Text,
|
||||
) -> Result<Vec<Tagged<String>>, ShellError> {
|
||||
let mut out: Vec<Tagged<String>> = vec![];
|
||||
|
||||
loop {
|
||||
if let Some(tag) = expand_next_expression(token_nodes)? {
|
||||
out.push(tag.tagged_string(source));
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ExternalTokensShape;
|
||||
|
||||
impl ColorSyntax for ExternalTokensShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
) -> Self::Info {
|
||||
loop {
|
||||
// Allow a space
|
||||
color_syntax(&MaybeSpaceShape, token_nodes, context, shapes);
|
||||
|
||||
// Process an external expression. External expressions are mostly words, with a
|
||||
// few exceptions (like $variables and path expansion rules)
|
||||
match color_syntax(&ExternalExpression, token_nodes, context, shapes).1 {
|
||||
ExternalExpressionResult::Eof => break,
|
||||
ExternalExpressionResult::Processed => continue,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expand_next_expression(
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
) -> Result<Option<Tag>, ShellError> {
|
||||
let first = token_nodes.next_non_ws();
|
||||
|
||||
let first = match first {
|
||||
None => return Ok(None),
|
||||
Some(v) => v,
|
||||
};
|
||||
|
||||
let first = triage_external_head(first)?;
|
||||
let mut last = first;
|
||||
|
||||
loop {
|
||||
let continuation = triage_continuation(token_nodes)?;
|
||||
|
||||
if let Some(continuation) = continuation {
|
||||
last = continuation;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(Some(first.until(last)))
|
||||
}
|
||||
|
||||
fn triage_external_head(node: &TokenNode) -> Result<Tag, ShellError> {
|
||||
Ok(match node {
|
||||
TokenNode::Token(token) => token.tag(),
|
||||
TokenNode::Call(_call) => unimplemented!("TODO: OMG"),
|
||||
TokenNode::Nodes(_nodes) => unimplemented!("TODO: OMG"),
|
||||
TokenNode::Delimited(_delimited) => unimplemented!("TODO: OMG"),
|
||||
TokenNode::Pipeline(_pipeline) => unimplemented!("TODO: OMG"),
|
||||
TokenNode::Flag(flag) => flag.tag(),
|
||||
TokenNode::Whitespace(_whitespace) => {
|
||||
unreachable!("This function should be called after next_non_ws()")
|
||||
}
|
||||
TokenNode::Error(_error) => unimplemented!("TODO: OMG"),
|
||||
})
|
||||
}
|
||||
|
||||
fn triage_continuation<'a, 'b>(
|
||||
nodes: &'a mut TokensIterator<'b>,
|
||||
) -> Result<Option<Tag>, ShellError> {
|
||||
let mut peeked = nodes.peek_any();
|
||||
|
||||
let node = match peeked.node {
|
||||
None => return Ok(None),
|
||||
Some(node) => node,
|
||||
};
|
||||
|
||||
match &node {
|
||||
node if node.is_whitespace() => return Ok(None),
|
||||
TokenNode::Token(..) | TokenNode::Flag(..) => {}
|
||||
TokenNode::Call(..) => unimplemented!("call"),
|
||||
TokenNode::Nodes(..) => unimplemented!("nodes"),
|
||||
TokenNode::Delimited(..) => unimplemented!("delimited"),
|
||||
TokenNode::Pipeline(..) => unimplemented!("pipeline"),
|
||||
TokenNode::Whitespace(..) => unimplemented!("whitespace"),
|
||||
TokenNode::Error(..) => unimplemented!("error"),
|
||||
}
|
||||
|
||||
peeked.commit();
|
||||
Ok(Some(node.tag()))
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
enum ExternalExpressionResult {
|
||||
Eof,
|
||||
Processed,
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
struct ExternalExpression;
|
||||
|
||||
impl ColorSyntax for ExternalExpression {
|
||||
type Info = ExternalExpressionResult;
|
||||
type Input = ();
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
) -> ExternalExpressionResult {
|
||||
let atom = match expand_atom(
|
||||
token_nodes,
|
||||
"external word",
|
||||
context,
|
||||
ExpansionRule::permissive(),
|
||||
) {
|
||||
Err(_) => unreachable!("TODO: separate infallible expand_atom"),
|
||||
Ok(Tagged {
|
||||
item: AtomicToken::Eof { .. },
|
||||
..
|
||||
}) => return ExternalExpressionResult::Eof,
|
||||
Ok(atom) => atom,
|
||||
};
|
||||
|
||||
atom.color_tokens(shapes);
|
||||
return ExternalExpressionResult::Processed;
|
||||
}
|
||||
}
|
@ -9,7 +9,7 @@ use std::fmt;
|
||||
)]
|
||||
#[get = "pub(crate)"]
|
||||
pub struct ExternalCommand {
|
||||
name: Tag,
|
||||
pub(crate) name: Tag,
|
||||
}
|
||||
|
||||
impl ToDebug for ExternalCommand {
|
||||
|
@ -2,19 +2,49 @@ use crate::parser::hir::Expression;
|
||||
use crate::prelude::*;
|
||||
use crate::Tagged;
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use getset::{Getters, MutGetters};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fmt;
|
||||
|
||||
#[derive(
|
||||
Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new,
|
||||
Debug,
|
||||
Clone,
|
||||
Eq,
|
||||
PartialEq,
|
||||
Ord,
|
||||
PartialOrd,
|
||||
Hash,
|
||||
Getters,
|
||||
MutGetters,
|
||||
Serialize,
|
||||
Deserialize,
|
||||
new,
|
||||
)]
|
||||
#[get = "pub(crate)"]
|
||||
pub struct Path {
|
||||
head: Expression,
|
||||
#[get_mut = "pub(crate)"]
|
||||
tail: Vec<Tagged<String>>,
|
||||
}
|
||||
|
||||
impl fmt::Display for Path {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "{}", self.head)?;
|
||||
|
||||
for entry in &self.tail {
|
||||
write!(f, ".{}", entry.item)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Path {
|
||||
pub(crate) fn parts(self) -> (Expression, Vec<Tagged<String>>) {
|
||||
(self.head, self.tail)
|
||||
}
|
||||
}
|
||||
|
||||
impl ToDebug for Path {
|
||||
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
||||
write!(f, "{}", self.head.debug(source))?;
|
||||
|
1261
src/parser/hir/syntax_shape.rs
Normal file
1261
src/parser/hir/syntax_shape.rs
Normal file
File diff suppressed because it is too large
Load Diff
330
src/parser/hir/syntax_shape/block.rs
Normal file
330
src/parser/hir/syntax_shape/block.rs
Normal file
@ -0,0 +1,330 @@
|
||||
use crate::errors::ShellError;
|
||||
use crate::parser::{
|
||||
hir,
|
||||
hir::syntax_shape::{
|
||||
color_fallible_syntax, color_syntax_with, continue_expression, expand_expr, expand_syntax,
|
||||
DelimitedShape, ExpandContext, ExpandExpression, ExpressionContinuationShape,
|
||||
ExpressionListShape, FallibleColorSyntax, FlatShape, MemberShape, PathTailShape,
|
||||
VariablePathShape,
|
||||
},
|
||||
hir::tokens_iterator::TokensIterator,
|
||||
parse::token_tree::Delimiter,
|
||||
RawToken, TokenNode,
|
||||
};
|
||||
use crate::{Tag, Tagged, TaggedItem};
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct AnyBlockShape;
|
||||
|
||||
impl FallibleColorSyntax for AnyBlockShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let block = token_nodes.peek_non_ws().not_eof("block");
|
||||
|
||||
let block = match block {
|
||||
Err(_) => return Ok(()),
|
||||
Ok(block) => block,
|
||||
};
|
||||
|
||||
// is it just a block?
|
||||
let block = block.node.as_block();
|
||||
|
||||
match block {
|
||||
// If so, color it as a block
|
||||
Some((children, tags)) => {
|
||||
let mut token_nodes = TokensIterator::new(children.item, context.tag, false);
|
||||
color_syntax_with(
|
||||
&DelimitedShape,
|
||||
&(Delimiter::Brace, tags.0, tags.1),
|
||||
&mut token_nodes,
|
||||
context,
|
||||
shapes,
|
||||
);
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
// Otherwise, look for a shorthand block. If none found, fail
|
||||
color_fallible_syntax(&ShorthandBlock, token_nodes, context, shapes)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpandExpression for AnyBlockShape {
|
||||
fn expand_expr<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
let block = token_nodes.peek_non_ws().not_eof("block")?;
|
||||
|
||||
// is it just a block?
|
||||
let block = block.node.as_block();
|
||||
|
||||
match block {
|
||||
Some((block, _tags)) => {
|
||||
let mut iterator = TokensIterator::new(&block.item, context.tag, false);
|
||||
|
||||
let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?;
|
||||
|
||||
return Ok(hir::RawExpression::Block(exprs).tagged(block.tag));
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
expand_syntax(&ShorthandBlock, token_nodes, context)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ShorthandBlock;
|
||||
|
||||
impl FallibleColorSyntax for ShorthandBlock {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
// Try to find a shorthand head. If none found, fail
|
||||
color_fallible_syntax(&ShorthandPath, token_nodes, context, shapes)?;
|
||||
|
||||
loop {
|
||||
// Check to see whether there's any continuation after the head expression
|
||||
let result =
|
||||
color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes);
|
||||
|
||||
match result {
|
||||
// if no continuation was found, we're done
|
||||
Err(_) => break,
|
||||
// if a continuation was found, look for another one
|
||||
Ok(_) => continue,
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpandExpression for ShorthandBlock {
|
||||
fn expand_expr<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
let path = expand_expr(&ShorthandPath, token_nodes, context)?;
|
||||
let start = path.tag;
|
||||
let expr = continue_expression(path, token_nodes, context)?;
|
||||
let end = expr.tag;
|
||||
let block = hir::RawExpression::Block(vec![expr]).tagged(start.until(end));
|
||||
|
||||
Ok(block)
|
||||
}
|
||||
}
|
||||
|
||||
/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ShorthandPath;
|
||||
|
||||
impl FallibleColorSyntax for ShorthandPath {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
token_nodes.atomic(|token_nodes| {
|
||||
let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context, shapes);
|
||||
|
||||
match variable {
|
||||
Ok(_) => {
|
||||
// if it's a variable path, that's the head part
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
Err(_) => {
|
||||
// otherwise, we'll try to find a member path
|
||||
}
|
||||
}
|
||||
|
||||
// look for a member (`<member>` -> `$it.<member>`)
|
||||
color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
|
||||
|
||||
// Now that we've synthesized the head, of the path, proceed to expand the tail of the path
|
||||
// like any other path.
|
||||
let tail = color_fallible_syntax(&PathTailShape, token_nodes, context, shapes);
|
||||
|
||||
match tail {
|
||||
Ok(_) => {}
|
||||
Err(_) => {
|
||||
// It's ok if there's no path tail; a single member is sufficient
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpandExpression for ShorthandPath {
|
||||
fn expand_expr<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
// if it's a variable path, that's the head part
|
||||
let path = expand_expr(&VariablePathShape, token_nodes, context);
|
||||
|
||||
match path {
|
||||
Ok(path) => return Ok(path),
|
||||
Err(_) => {}
|
||||
}
|
||||
|
||||
// Synthesize the head of the shorthand path (`<member>` -> `$it.<member>`)
|
||||
let mut head = expand_expr(&ShorthandHeadShape, token_nodes, context)?;
|
||||
|
||||
// Now that we've synthesized the head, of the path, proceed to expand the tail of the path
|
||||
// like any other path.
|
||||
let tail = expand_syntax(&PathTailShape, token_nodes, context);
|
||||
|
||||
match tail {
|
||||
Err(_) => return Ok(head),
|
||||
Ok((tail, _)) => {
|
||||
// For each member that `PathTailShape` expanded, join it onto the existing expression
|
||||
// to form a new path
|
||||
for member in tail {
|
||||
head = hir::Expression::dot_member(head, member);
|
||||
}
|
||||
|
||||
Ok(head)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ShorthandHeadShape;
|
||||
|
||||
impl FallibleColorSyntax for ShorthandHeadShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
_context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
// A shorthand path must not be at EOF
|
||||
let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?;
|
||||
|
||||
match peeked.node {
|
||||
// If the head of a shorthand path is a bare token, it expands to `$it.bare`
|
||||
TokenNode::Token(Tagged {
|
||||
item: RawToken::Bare,
|
||||
tag,
|
||||
}) => {
|
||||
peeked.commit();
|
||||
shapes.push(FlatShape::BareMember.tagged(tag));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// If the head of a shorthand path is a string, it expands to `$it."some string"`
|
||||
TokenNode::Token(Tagged {
|
||||
item: RawToken::String(_),
|
||||
tag: outer,
|
||||
}) => {
|
||||
peeked.commit();
|
||||
shapes.push(FlatShape::StringMember.tagged(outer));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
other => Err(ShellError::type_error(
|
||||
"shorthand head",
|
||||
other.tagged_type_name(),
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpandExpression for ShorthandHeadShape {
    /// Expand the head of a shorthand path into a HIR path expression rooted
    /// at a synthetic `$it` variable (`foo` becomes `$it.foo`, `"bar"`
    /// becomes `$it."bar"`).
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        // A shorthand path must not be at EOF
        let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?;

        match peeked.node {
            // If the head of a shorthand path is a bare token, it expands to `$it.bare`
            TokenNode::Token(Tagged {
                item: RawToken::Bare,
                tag,
            }) => {
                // Commit the peeked token
                peeked.commit();

                // Synthesize an `$it` expression
                let it = synthetic_it(token_nodes.anchor());

                // Make a path out of `$it` and the bare token as a member
                Ok(hir::Expression::path(
                    it,
                    vec![tag.tagged_string(context.source)],
                    tag,
                ))
            }

            // If the head of a shorthand path is a string, it expands to `$it."some string"`
            TokenNode::Token(Tagged {
                item: RawToken::String(inner),
                tag: outer,
            }) => {
                // Commit the peeked token
                peeked.commit();

                // Synthesize an `$it` expression
                let it = synthetic_it(token_nodes.anchor());

                // Make a path out of `$it` and the string contents as a member
                Ok(hir::Expression::path(
                    it,
                    vec![inner.string(context.source).tagged(outer)],
                    outer,
                ))
            }

            // Any other token is not a valid bare head
            other => {
                return Err(ShellError::type_error(
                    "shorthand path",
                    other.tagged_type_name(),
                ))
            }
        }
    }
}
|
||||
|
||||
fn synthetic_it(origin: uuid::Uuid) -> hir::Expression {
|
||||
hir::Expression::it_variable(Tag::unknown_span(origin), Tag::unknown_span(origin))
|
||||
}
|
305
src/parser/hir/syntax_shape/expression.rs
Normal file
305
src/parser/hir/syntax_shape/expression.rs
Normal file
@ -0,0 +1,305 @@
|
||||
pub(crate) mod atom;
|
||||
pub(crate) mod delimited;
|
||||
pub(crate) mod file_path;
|
||||
pub(crate) mod list;
|
||||
pub(crate) mod number;
|
||||
pub(crate) mod pattern;
|
||||
pub(crate) mod string;
|
||||
pub(crate) mod unit;
|
||||
pub(crate) mod variable_path;
|
||||
|
||||
use crate::parser::hir::syntax_shape::{
|
||||
color_delimited_square, color_fallible_syntax, color_fallible_syntax_with, expand_atom,
|
||||
expand_delimited_square, expand_expr, expand_syntax, AtomicToken, BareShape, ColorableDotShape,
|
||||
DotShape, ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, ExpressionContinuation,
|
||||
ExpressionContinuationShape, FallibleColorSyntax, FlatShape,
|
||||
};
|
||||
use crate::parser::{
|
||||
hir,
|
||||
hir::{Expression, TokensIterator},
|
||||
};
|
||||
use crate::prelude::*;
|
||||
use std::path::PathBuf;
|
||||
|
||||
/// The most general expression shape: an expression start followed by any
/// number of expression continuations (`.member` paths or infix operators).
#[derive(Debug, Copy, Clone)]
pub struct AnyExpressionShape;

impl ExpandExpression for AnyExpressionShape {
    /// Expand the expression at the cursor, then fold in any continuations.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        // Look for an expression at the cursor
        let head = expand_expr(&AnyExpressionStartShape, token_nodes, context)?;

        continue_expression(head, token_nodes, context)
    }
}
|
||||
|
||||
impl FallibleColorSyntax for AnyExpressionShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
// Look for an expression at the cursor
|
||||
color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context, shapes)?;
|
||||
|
||||
match continue_coloring_expression(token_nodes, context, shapes) {
|
||||
Err(_) => {
|
||||
// it's fine for there to be no continuation
|
||||
}
|
||||
|
||||
Ok(()) => {}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Given an already-expanded head expression, repeatedly apply expression
/// continuations (`.member` dot-paths or `op expr` infix suffixes), folding
/// each into `head` until no continuation matches, then return the result.
pub(crate) fn continue_expression(
    mut head: hir::Expression,
    token_nodes: &mut TokensIterator<'_>,
    context: &ExpandContext,
) -> Result<hir::Expression, ShellError> {
    loop {
        // Check to see whether there's any continuation after the head expression
        let continuation = expand_syntax(&ExpressionContinuationShape, token_nodes, context);

        match continuation {
            // If there's no continuation, return the head
            Err(_) => return Ok(head),
            // Otherwise, form a new expression by combining the head with the continuation
            Ok(continuation) => match continuation {
                // If the continuation is a `.member`, form a path with the new member
                ExpressionContinuation::DotSuffix(_dot, member) => {
                    head = Expression::dot_member(head, member);
                }

                // Otherwise, if the continuation is an infix suffix, form an infix expression
                ExpressionContinuation::InfixSuffix(op, expr) => {
                    head = Expression::infix(head, op, expr);
                }
            },
        }
    }
}
|
||||
|
||||
pub(crate) fn continue_coloring_expression(
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
// if there's not even one expression continuation, fail
|
||||
color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes)?;
|
||||
|
||||
loop {
|
||||
// Check to see whether there's any continuation after the head expression
|
||||
let result =
|
||||
color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes);
|
||||
|
||||
match result {
|
||||
Err(_) => {
|
||||
// We already saw one continuation, so just return
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
Ok(_) => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// The leading portion of an expression, before any continuation is applied.
#[derive(Debug, Copy, Clone)]
pub struct AnyExpressionStartShape;

impl ExpandExpression for AnyExpressionStartShape {
    /// Expand a single atom into the start of an expression: sizes, square
    /// delimited lists, and bare words/dots get special handling; everything
    /// else goes through the generic atom-to-HIR conversion.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        let atom = expand_atom(token_nodes, "expression", context, ExpansionRule::new())?;

        match atom.item {
            // A size like `10kb` becomes a size expression
            AtomicToken::Size { number, unit } => {
                return Ok(hir::Expression::size(
                    number.to_number(context.source),
                    unit.item,
                    atom.tag,
                ))
            }

            // `[ ... ]` becomes a list expression
            AtomicToken::SquareDelimited { nodes, .. } => {
                expand_delimited_square(&nodes, atom.tag, context)
            }

            // A word or dot starts a bare path; consume the rest of the bare
            // tail and span the whole thing as one bare expression
            AtomicToken::Word { .. } | AtomicToken::Dot { .. } => {
                let end = expand_syntax(&BareTailShape, token_nodes, context)?;
                Ok(hir::Expression::bare(atom.tag.until_option(end)))
            }

            other => return other.tagged(atom.tag).into_hir(context, "expression"),
        }
    }
}
|
||||
|
||||
impl FallibleColorSyntax for AnyExpressionStartShape {
    type Info = ();
    type Input = ();

    /// Color the start of an expression. Uses a permissive expansion rule so
    /// that even invalid input produces an atom; a failed expansion is
    /// colored as an error over the consumed span rather than propagated.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Tagged<FlatShape>>,
    ) -> Result<(), ShellError> {
        // `spanned` records the tag of whatever the closure consumed
        let atom = token_nodes.spanned(|token_nodes| {
            expand_atom(
                token_nodes,
                "expression",
                context,
                ExpansionRule::permissive(),
            )
        });

        let atom = match atom {
            // Expansion failed: mark the consumed span as an error and
            // report success (coloring is best-effort)
            Tagged {
                item: Err(_err),
                tag,
            } => {
                shapes.push(FlatShape::Error.tagged(tag));
                return Ok(());
            }

            Tagged {
                item: Ok(value), ..
            } => value,
        };

        match atom.item {
            // A size colors as a single shape carrying both sub-spans
            AtomicToken::Size { number, unit } => shapes.push(
                FlatShape::Size {
                    number: number.tag,
                    unit: unit.tag,
                }
                .tagged(atom.tag),
            ),

            // `[ ... ]` recursively colors the delimiters and contents
            AtomicToken::SquareDelimited { nodes, tags } => {
                color_delimited_square(tags, &nodes, atom.tag, context, shapes)
            }

            // Bare words and dots color as words
            AtomicToken::Word { .. } | AtomicToken::Dot { .. } => {
                shapes.push(FlatShape::Word.tagged(atom.tag));
            }

            // Everything else uses the atom's own default coloring
            _ => atom.color_tokens(shapes),
        }

        Ok(())
    }
}
|
||||
|
||||
/// The tail of a bare path: any run of alternating bare words and dots
/// following the initial word (e.g. the `.bar.baz` in `foo.bar.baz`).
#[derive(Debug, Copy, Clone)]
pub struct BareTailShape;

impl FallibleColorSyntax for BareTailShape {
    type Info = ();
    type Input = ();

    /// Color a bare tail. Fails if no tokens were consumed at all, so callers
    /// can distinguish "no tail" from "tail colored".
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Tagged<FlatShape>>,
    ) -> Result<(), ShellError> {
        // Remember how many shapes existed so we can tell whether we matched
        let len = shapes.len();

        loop {
            let word = color_fallible_syntax_with(
                &BareShape,
                &FlatShape::Word,
                token_nodes,
                context,
                shapes,
            );

            match word {
                // if a word was found, continue
                Ok(_) => continue,
                // if a word wasn't found, try to find a dot
                Err(_) => {}
            }

            // try to find a dot
            // NOTE(review): the dot is colored as FlatShape::Word, not
            // FlatShape::Dot — presumably so the whole bare path reads as a
            // single word in the highlighter; confirm before changing.
            let dot = color_fallible_syntax_with(
                &ColorableDotShape,
                &FlatShape::Word,
                token_nodes,
                context,
                shapes,
            );

            match dot {
                // if a dot was found, try to find another word
                Ok(_) => continue,
                // otherwise, we're done
                Err(_) => break,
            }
        }

        if shapes.len() > len {
            Ok(())
        } else {
            Err(ShellError::syntax_error(
                "No tokens matched BareTailShape".tagged_unknown(),
            ))
        }
    }
}
|
||||
|
||||
impl ExpandSyntax for BareTailShape {
|
||||
type Output = Option<Tag>;
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Option<Tag>, ShellError> {
|
||||
let mut end: Option<Tag> = None;
|
||||
|
||||
loop {
|
||||
match expand_syntax(&BareShape, token_nodes, context) {
|
||||
Ok(bare) => {
|
||||
end = Some(bare.tag);
|
||||
continue;
|
||||
}
|
||||
|
||||
Err(_) => match expand_syntax(&DotShape, token_nodes, context) {
|
||||
Ok(dot) => {
|
||||
end = Some(dot);
|
||||
continue;
|
||||
}
|
||||
|
||||
Err(_) => break,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
Ok(end)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expand_file_path(string: &str, context: &ExpandContext) -> PathBuf {
|
||||
let expanded = shellexpand::tilde_with_context(string, || context.homedir());
|
||||
|
||||
PathBuf::from(expanded.as_ref())
|
||||
}
|
541
src/parser/hir/syntax_shape/expression/atom.rs
Normal file
541
src/parser/hir/syntax_shape/expression/atom.rs
Normal file
@ -0,0 +1,541 @@
|
||||
use crate::parser::hir::syntax_shape::{
|
||||
expand_syntax, expression::expand_file_path, parse_single_node, BarePathShape,
|
||||
BarePatternShape, ExpandContext, UnitShape,
|
||||
};
|
||||
use crate::parser::{
|
||||
hir,
|
||||
hir::{Expression, RawNumber, TokensIterator},
|
||||
parse::flag::{Flag, FlagKind},
|
||||
DelimitedNode, Delimiter, FlatShape, RawToken, TokenNode, Unit,
|
||||
};
|
||||
use crate::prelude::*;
|
||||
|
||||
/// The smallest unit the expansion system works with: a single token (or a
/// small cluster of tokens, like a size) classified by what it can become.
#[derive(Debug)]
pub enum AtomicToken<'tokens> {
    /// End of input (only produced when the expansion rule allows EOF)
    Eof {
        tag: Tag,
    },
    /// A lexer/parser error carried through as a token
    Error {
        error: Tagged<ShellError>,
    },
    /// An integer or decimal literal
    Number {
        number: RawNumber,
    },
    /// A number immediately followed by a unit, e.g. `10kb`
    Size {
        number: Tagged<RawNumber>,
        unit: Tagged<Unit>,
    },
    /// A quoted string; `body` is the span of its contents
    String {
        body: Tag,
    },
    /// The special `$it` variable
    ItVariable {
        name: Tag,
    },
    /// Any other `$var`
    Variable {
        name: Tag,
    },
    /// An external (`^cmd`) command name
    ExternalCommand {
        command: Tag,
    },
    /// A word only valid in external-command position
    ExternalWord {
        text: Tag,
    },
    /// A glob such as `*.rs`
    GlobPattern {
        pattern: Tag,
    },
    /// A token already known to be a file path
    FilePath {
        path: Tag,
    },
    /// A bare word
    Word {
        text: Tag,
    },
    /// `[ ... ]`; `tags` are the open/close delimiter spans
    SquareDelimited {
        tags: (Tag, Tag),
        nodes: &'tokens Vec<TokenNode>,
    },
    /// `( ... )`
    ParenDelimited {
        tags: (Tag, Tag),
        nodes: &'tokens Vec<TokenNode>,
    },
    /// `{ ... }`
    BraceDelimited {
        tags: (Tag, Tag),
        nodes: &'tokens Vec<TokenNode>,
    },
    /// A pipeline fragment; `pipe` is the `|` span if present
    Pipeline {
        pipe: Option<Tag>,
        elements: Tagged<&'tokens Vec<TokenNode>>,
    },
    /// `-f` style flag
    ShorthandFlag {
        name: Tag,
    },
    /// `--flag` style flag
    LonghandFlag {
        name: Tag,
    },
    /// A bare `.`
    Dot {
        text: Tag,
    },
    /// An infix operator token
    Operator {
        text: Tag,
    },
    /// A run of whitespace (only produced when the rule allows it)
    Whitespace {
        text: Tag,
    },
}
|
||||
|
||||
pub type TaggedAtomicToken<'tokens> = Tagged<AtomicToken<'tokens>>;
|
||||
|
||||
impl<'tokens> TaggedAtomicToken<'tokens> {
|
||||
pub fn into_hir(
|
||||
&self,
|
||||
context: &ExpandContext,
|
||||
expected: &'static str,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
Ok(match &self.item {
|
||||
AtomicToken::Eof { .. } => {
|
||||
return Err(ShellError::type_error(
|
||||
expected,
|
||||
"eof atomic token".tagged(self.tag),
|
||||
))
|
||||
}
|
||||
AtomicToken::Error { .. } => {
|
||||
return Err(ShellError::type_error(
|
||||
expected,
|
||||
"eof atomic token".tagged(self.tag),
|
||||
))
|
||||
}
|
||||
AtomicToken::Operator { .. } => {
|
||||
return Err(ShellError::type_error(
|
||||
expected,
|
||||
"operator".tagged(self.tag),
|
||||
))
|
||||
}
|
||||
AtomicToken::ShorthandFlag { .. } => {
|
||||
return Err(ShellError::type_error(
|
||||
expected,
|
||||
"shorthand flag".tagged(self.tag),
|
||||
))
|
||||
}
|
||||
AtomicToken::LonghandFlag { .. } => {
|
||||
return Err(ShellError::type_error(expected, "flag".tagged(self.tag)))
|
||||
}
|
||||
AtomicToken::Whitespace { .. } => {
|
||||
return Err(ShellError::unimplemented("whitespace in AtomicToken"))
|
||||
}
|
||||
AtomicToken::Dot { .. } => {
|
||||
return Err(ShellError::type_error(expected, "dot".tagged(self.tag)))
|
||||
}
|
||||
AtomicToken::Number { number } => {
|
||||
Expression::number(number.to_number(context.source), self.tag)
|
||||
}
|
||||
AtomicToken::FilePath { path } => Expression::file_path(
|
||||
expand_file_path(path.slice(context.source), context),
|
||||
self.tag,
|
||||
),
|
||||
AtomicToken::Size { number, unit } => {
|
||||
Expression::size(number.to_number(context.source), **unit, self.tag)
|
||||
}
|
||||
AtomicToken::String { body } => Expression::string(body, self.tag),
|
||||
AtomicToken::ItVariable { name } => Expression::it_variable(name, self.tag),
|
||||
AtomicToken::Variable { name } => Expression::variable(name, self.tag),
|
||||
AtomicToken::ExternalCommand { command } => {
|
||||
Expression::external_command(command, self.tag)
|
||||
}
|
||||
AtomicToken::ExternalWord { text } => Expression::string(text, self.tag),
|
||||
AtomicToken::GlobPattern { pattern } => Expression::pattern(pattern),
|
||||
AtomicToken::Word { text } => Expression::string(text, text),
|
||||
AtomicToken::SquareDelimited { .. } => unimplemented!("into_hir"),
|
||||
AtomicToken::ParenDelimited { .. } => unimplemented!("into_hir"),
|
||||
AtomicToken::BraceDelimited { .. } => unimplemented!("into_hir"),
|
||||
AtomicToken::Pipeline { .. } => unimplemented!("into_hir"),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn tagged_type_name(&self) -> Tagged<&'static str> {
|
||||
match &self.item {
|
||||
AtomicToken::Eof { .. } => "eof",
|
||||
AtomicToken::Error { .. } => "error",
|
||||
AtomicToken::Operator { .. } => "operator",
|
||||
AtomicToken::ShorthandFlag { .. } => "shorthand flag",
|
||||
AtomicToken::LonghandFlag { .. } => "flag",
|
||||
AtomicToken::Whitespace { .. } => "whitespace",
|
||||
AtomicToken::Dot { .. } => "dot",
|
||||
AtomicToken::Number { .. } => "number",
|
||||
AtomicToken::FilePath { .. } => "file path",
|
||||
AtomicToken::Size { .. } => "size",
|
||||
AtomicToken::String { .. } => "string",
|
||||
AtomicToken::ItVariable { .. } => "$it",
|
||||
AtomicToken::Variable { .. } => "variable",
|
||||
AtomicToken::ExternalCommand { .. } => "external command",
|
||||
AtomicToken::ExternalWord { .. } => "external word",
|
||||
AtomicToken::GlobPattern { .. } => "file pattern",
|
||||
AtomicToken::Word { .. } => "word",
|
||||
AtomicToken::SquareDelimited { .. } => "array literal",
|
||||
AtomicToken::ParenDelimited { .. } => "parenthesized expression",
|
||||
AtomicToken::BraceDelimited { .. } => "block",
|
||||
AtomicToken::Pipeline { .. } => "pipeline",
|
||||
}
|
||||
.tagged(self.tag)
|
||||
}
|
||||
|
||||
pub(crate) fn color_tokens(&self, shapes: &mut Vec<Tagged<FlatShape>>) {
|
||||
match &self.item {
|
||||
AtomicToken::Eof { .. } => {}
|
||||
AtomicToken::Error { .. } => return shapes.push(FlatShape::Error.tagged(self.tag)),
|
||||
AtomicToken::Operator { .. } => {
|
||||
return shapes.push(FlatShape::Operator.tagged(self.tag));
|
||||
}
|
||||
AtomicToken::ShorthandFlag { .. } => {
|
||||
return shapes.push(FlatShape::ShorthandFlag.tagged(self.tag));
|
||||
}
|
||||
AtomicToken::LonghandFlag { .. } => {
|
||||
return shapes.push(FlatShape::Flag.tagged(self.tag));
|
||||
}
|
||||
AtomicToken::Whitespace { .. } => {
|
||||
return shapes.push(FlatShape::Whitespace.tagged(self.tag));
|
||||
}
|
||||
AtomicToken::FilePath { .. } => return shapes.push(FlatShape::Path.tagged(self.tag)),
|
||||
AtomicToken::Dot { .. } => return shapes.push(FlatShape::Dot.tagged(self.tag)),
|
||||
AtomicToken::Number {
|
||||
number: RawNumber::Decimal(_),
|
||||
} => {
|
||||
return shapes.push(FlatShape::Decimal.tagged(self.tag));
|
||||
}
|
||||
AtomicToken::Number {
|
||||
number: RawNumber::Int(_),
|
||||
} => {
|
||||
return shapes.push(FlatShape::Int.tagged(self.tag));
|
||||
}
|
||||
AtomicToken::Size { number, unit } => {
|
||||
return shapes.push(
|
||||
FlatShape::Size {
|
||||
number: number.tag,
|
||||
unit: unit.tag,
|
||||
}
|
||||
.tagged(self.tag),
|
||||
);
|
||||
}
|
||||
AtomicToken::String { .. } => return shapes.push(FlatShape::String.tagged(self.tag)),
|
||||
AtomicToken::ItVariable { .. } => {
|
||||
return shapes.push(FlatShape::ItVariable.tagged(self.tag))
|
||||
}
|
||||
AtomicToken::Variable { .. } => {
|
||||
return shapes.push(FlatShape::Variable.tagged(self.tag))
|
||||
}
|
||||
AtomicToken::ExternalCommand { .. } => {
|
||||
return shapes.push(FlatShape::ExternalCommand.tagged(self.tag));
|
||||
}
|
||||
AtomicToken::ExternalWord { .. } => {
|
||||
return shapes.push(FlatShape::ExternalWord.tagged(self.tag))
|
||||
}
|
||||
AtomicToken::GlobPattern { .. } => {
|
||||
return shapes.push(FlatShape::GlobPattern.tagged(self.tag))
|
||||
}
|
||||
AtomicToken::Word { .. } => return shapes.push(FlatShape::Word.tagged(self.tag)),
|
||||
_ => return shapes.push(FlatShape::Error.tagged(self.tag)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Whether `expand_atom` may yield a whitespace atom or must treat
/// whitespace as a syntax error.
#[derive(Debug)]
pub enum WhitespaceHandling {
    // Whitespace becomes an `AtomicToken::Whitespace`
    #[allow(unused)]
    AllowWhitespace,
    // Whitespace produces a syntax error
    RejectWhitespace,
}
|
||||
|
||||
/// Configuration flags controlling which token kinds `expand_atom` accepts
/// and how it reacts to edge cases. Build with `new()` (strict) or
/// `permissive()` (accept everything, for error-correcting passes).
#[derive(Debug)]
pub struct ExpansionRule {
    // Accept `^cmd` external commands
    pub(crate) allow_external_command: bool,
    // Accept external-only words
    pub(crate) allow_external_word: bool,
    // Accept infix operator tokens
    pub(crate) allow_operator: bool,
    // Yield an Eof atom at end of input instead of erroring
    pub(crate) allow_eof: bool,
    // Skip size parsing so `10kb` stays a plain word
    pub(crate) treat_size_as_word: bool,
    // Consume (commit) error tokens instead of leaving them in the stream
    pub(crate) commit_errors: bool,
    // Whitespace policy; see `WhitespaceHandling`
    pub(crate) whitespace: WhitespaceHandling,
}
|
||||
|
||||
impl ExpansionRule {
    /// The strict default: reject externals, operators, EOF, errors and
    /// whitespace; parse sizes as sizes.
    pub fn new() -> ExpansionRule {
        ExpansionRule {
            allow_external_command: false,
            allow_external_word: false,
            allow_operator: false,
            allow_eof: false,
            treat_size_as_word: false,
            commit_errors: false,
            whitespace: WhitespaceHandling::RejectWhitespace,
        }
    }

    /// The intent of permissive mode is to return an atomic token for every possible
    /// input token. This is important for error-correcting parsing, such as the
    /// syntax highlighter.
    pub fn permissive() -> ExpansionRule {
        ExpansionRule {
            allow_external_command: true,
            allow_external_word: true,
            allow_operator: true,
            allow_eof: true,
            treat_size_as_word: false,
            commit_errors: true,
            whitespace: WhitespaceHandling::AllowWhitespace,
        }
    }

    /// Builder: accept `^cmd` external commands.
    #[allow(unused)]
    pub fn allow_external_command(mut self) -> ExpansionRule {
        self.allow_external_command = true;
        self
    }

    /// Builder: accept infix operator tokens.
    #[allow(unused)]
    pub fn allow_operator(mut self) -> ExpansionRule {
        self.allow_operator = true;
        self
    }

    /// Builder: reject infix operator tokens.
    #[allow(unused)]
    pub fn no_operator(mut self) -> ExpansionRule {
        self.allow_operator = false;
        self
    }

    /// Builder: reject `^cmd` external commands.
    #[allow(unused)]
    pub fn no_external_command(mut self) -> ExpansionRule {
        self.allow_external_command = false;
        self
    }

    /// Builder: accept external-only words.
    #[allow(unused)]
    pub fn allow_external_word(mut self) -> ExpansionRule {
        self.allow_external_word = true;
        self
    }

    /// Builder: reject external-only words.
    #[allow(unused)]
    pub fn no_external_word(mut self) -> ExpansionRule {
        self.allow_external_word = false;
        self
    }

    /// Builder: parse `10kb` as a plain word rather than a size.
    #[allow(unused)]
    pub fn treat_size_as_word(mut self) -> ExpansionRule {
        self.treat_size_as_word = true;
        self
    }

    /// Builder: consume error tokens rather than leaving them unconsumed.
    #[allow(unused)]
    pub fn commit_errors(mut self) -> ExpansionRule {
        self.commit_errors = true;
        self
    }

    /// Builder: yield whitespace atoms instead of erroring on whitespace.
    #[allow(unused)]
    pub fn allow_whitespace(mut self) -> ExpansionRule {
        self.whitespace = WhitespaceHandling::AllowWhitespace;
        self
    }

    /// Builder: error on whitespace.
    #[allow(unused)]
    pub fn reject_whitespace(mut self) -> ExpansionRule {
        self.whitespace = WhitespaceHandling::RejectWhitespace;
        self
    }
}
|
||||
|
||||
/// If the caller of expand_atom throws away the returned atomic token returned, it
|
||||
/// must use a checkpoint to roll it back.
|
||||
pub fn expand_atom<'me, 'content>(
|
||||
token_nodes: &'me mut TokensIterator<'content>,
|
||||
expected: &'static str,
|
||||
context: &ExpandContext,
|
||||
rule: ExpansionRule,
|
||||
) -> Result<TaggedAtomicToken<'content>, ShellError> {
|
||||
if token_nodes.at_end() {
|
||||
match rule.allow_eof {
|
||||
true => {
|
||||
return Ok(AtomicToken::Eof {
|
||||
tag: Tag::unknown(),
|
||||
}
|
||||
.tagged_unknown())
|
||||
}
|
||||
false => return Err(ShellError::unexpected_eof("anything", Tag::unknown())),
|
||||
}
|
||||
}
|
||||
|
||||
// First, we'll need to handle the situation where more than one token corresponds
|
||||
// to a single atomic token
|
||||
|
||||
// If treat_size_as_word, don't try to parse the head of the token stream
|
||||
// as a size.
|
||||
match rule.treat_size_as_word {
|
||||
true => {}
|
||||
false => match expand_syntax(&UnitShape, token_nodes, context) {
|
||||
// If the head of the stream isn't a valid unit, we'll try to parse
|
||||
// it again next as a word
|
||||
Err(_) => {}
|
||||
|
||||
// But if it was a valid unit, we're done here
|
||||
Ok(Tagged {
|
||||
item: (number, unit),
|
||||
tag,
|
||||
}) => return Ok(AtomicToken::Size { number, unit }.tagged(tag)),
|
||||
},
|
||||
}
|
||||
|
||||
// Try to parse the head of the stream as a bare path. A bare path includes
|
||||
// words as well as `.`s, connected together without whitespace.
|
||||
match expand_syntax(&BarePathShape, token_nodes, context) {
|
||||
// If we didn't find a bare path
|
||||
Err(_) => {}
|
||||
Ok(tag) => {
|
||||
let next = token_nodes.peek_any();
|
||||
|
||||
match next.node {
|
||||
Some(token) if token.is_pattern() => {
|
||||
// if the very next token is a pattern, we're looking at a glob, not a
|
||||
// word, and we should try to parse it as a glob next
|
||||
}
|
||||
|
||||
_ => return Ok(AtomicToken::Word { text: tag }.tagged(tag)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Try to parse the head of the stream as a pattern. A pattern includes
|
||||
// words, words with `*` as well as `.`s, connected together without whitespace.
|
||||
match expand_syntax(&BarePatternShape, token_nodes, context) {
|
||||
// If we didn't find a bare path
|
||||
Err(_) => {}
|
||||
Ok(tag) => return Ok(AtomicToken::GlobPattern { pattern: tag }.tagged(tag)),
|
||||
}
|
||||
|
||||
// The next token corresponds to at most one atomic token
|
||||
|
||||
// We need to `peek` because `parse_single_node` doesn't cover all of the
|
||||
// cases that `expand_atom` covers. We should probably collapse the two
|
||||
// if possible.
|
||||
let peeked = token_nodes.peek_any().not_eof(expected)?;
|
||||
|
||||
match peeked.node {
|
||||
TokenNode::Token(_) => {
|
||||
// handle this next
|
||||
}
|
||||
|
||||
TokenNode::Error(error) => {
|
||||
peeked.commit();
|
||||
return Ok(AtomicToken::Error {
|
||||
error: error.clone(),
|
||||
}
|
||||
.tagged(error.tag));
|
||||
}
|
||||
|
||||
// [ ... ]
|
||||
TokenNode::Delimited(Tagged {
|
||||
item:
|
||||
DelimitedNode {
|
||||
delimiter: Delimiter::Square,
|
||||
tags,
|
||||
children,
|
||||
},
|
||||
tag,
|
||||
}) => {
|
||||
peeked.commit();
|
||||
return Ok(AtomicToken::SquareDelimited {
|
||||
nodes: children,
|
||||
tags: *tags,
|
||||
}
|
||||
.tagged(tag));
|
||||
}
|
||||
|
||||
TokenNode::Flag(Tagged {
|
||||
item:
|
||||
Flag {
|
||||
kind: FlagKind::Shorthand,
|
||||
name,
|
||||
},
|
||||
tag,
|
||||
}) => {
|
||||
peeked.commit();
|
||||
return Ok(AtomicToken::ShorthandFlag { name: *name }.tagged(tag));
|
||||
}
|
||||
|
||||
TokenNode::Flag(Tagged {
|
||||
item:
|
||||
Flag {
|
||||
kind: FlagKind::Longhand,
|
||||
name,
|
||||
},
|
||||
tag,
|
||||
}) => {
|
||||
peeked.commit();
|
||||
return Ok(AtomicToken::ShorthandFlag { name: *name }.tagged(tag));
|
||||
}
|
||||
|
||||
// If we see whitespace, process the whitespace according to the whitespace
|
||||
// handling rules
|
||||
TokenNode::Whitespace(tag) => match rule.whitespace {
|
||||
// if whitespace is allowed, return a whitespace token
|
||||
WhitespaceHandling::AllowWhitespace => {
|
||||
peeked.commit();
|
||||
return Ok(AtomicToken::Whitespace { text: *tag }.tagged(tag));
|
||||
}
|
||||
|
||||
// if whitespace is disallowed, return an error
|
||||
WhitespaceHandling::RejectWhitespace => {
|
||||
return Err(ShellError::syntax_error(
|
||||
"Unexpected whitespace".tagged(tag),
|
||||
))
|
||||
}
|
||||
},
|
||||
|
||||
other => {
|
||||
let tag = peeked.node.tag();
|
||||
|
||||
peeked.commit();
|
||||
return Ok(AtomicToken::Error {
|
||||
error: ShellError::type_error("token", other.tagged_type_name()).tagged(tag),
|
||||
}
|
||||
.tagged(tag));
|
||||
}
|
||||
}
|
||||
|
||||
parse_single_node(token_nodes, expected, |token, token_tag, err| {
|
||||
Ok(match token {
|
||||
// First, the error cases. Each error case corresponds to a expansion rule
|
||||
// flag that can be used to allow the case
|
||||
|
||||
// rule.allow_operator
|
||||
RawToken::Operator(_) if !rule.allow_operator => return Err(err.error()),
|
||||
// rule.allow_external_command
|
||||
RawToken::ExternalCommand(_) if !rule.allow_external_command => {
|
||||
return Err(ShellError::type_error(
|
||||
expected,
|
||||
token.type_name().tagged(token_tag),
|
||||
))
|
||||
}
|
||||
// rule.allow_external_word
|
||||
RawToken::ExternalWord if !rule.allow_external_word => {
|
||||
return Err(ShellError::invalid_external_word(token_tag))
|
||||
}
|
||||
|
||||
RawToken::Number(number) => AtomicToken::Number { number }.tagged(token_tag),
|
||||
RawToken::Operator(_) => AtomicToken::Operator { text: token_tag }.tagged(token_tag),
|
||||
RawToken::String(body) => AtomicToken::String { body }.tagged(token_tag),
|
||||
RawToken::Variable(name) if name.slice(context.source) == "it" => {
|
||||
AtomicToken::ItVariable { name }.tagged(token_tag)
|
||||
}
|
||||
RawToken::Variable(name) => AtomicToken::Variable { name }.tagged(token_tag),
|
||||
RawToken::ExternalCommand(command) => {
|
||||
AtomicToken::ExternalCommand { command }.tagged(token_tag)
|
||||
}
|
||||
RawToken::ExternalWord => {
|
||||
AtomicToken::ExternalWord { text: token_tag }.tagged(token_tag)
|
||||
}
|
||||
RawToken::GlobPattern => {
|
||||
AtomicToken::GlobPattern { pattern: token_tag }.tagged(token_tag)
|
||||
}
|
||||
RawToken::Bare => AtomicToken::Word { text: token_tag }.tagged(token_tag),
|
||||
})
|
||||
})
|
||||
}
|
49
src/parser/hir/syntax_shape/expression/delimited.rs
Normal file
49
src/parser/hir/syntax_shape/expression/delimited.rs
Normal file
@ -0,0 +1,49 @@
|
||||
use crate::parser::hir::syntax_shape::{
|
||||
color_syntax, expand_syntax, ColorSyntax, ExpandContext, ExpressionListShape, TokenNode,
|
||||
};
|
||||
use crate::parser::{hir, hir::TokensIterator, Delimiter, FlatShape};
|
||||
use crate::prelude::*;
|
||||
|
||||
pub fn expand_delimited_square(
|
||||
children: &Vec<TokenNode>,
|
||||
tag: Tag,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
let mut tokens = TokensIterator::new(&children, tag, false);
|
||||
|
||||
let list = expand_syntax(&ExpressionListShape, &mut tokens, context);
|
||||
|
||||
Ok(hir::Expression::list(list?, tag))
|
||||
}
|
||||
|
||||
/// Color a `[ ... ]` node: open delimiter, then the inner expression list,
/// then the close delimiter. Push order determines highlight order, so the
/// three steps must stay in this sequence.
pub fn color_delimited_square(
    (open, close): (Tag, Tag),
    children: &Vec<TokenNode>,
    tag: Tag,
    context: &ExpandContext,
    shapes: &mut Vec<Tagged<FlatShape>>,
) {
    shapes.push(FlatShape::OpenDelimiter(Delimiter::Square).tagged(open));
    let mut tokens = TokensIterator::new(&children, tag, false);
    // Coloring is best-effort; the list result is intentionally ignored
    let _list = color_syntax(&ExpressionListShape, &mut tokens, context, shapes);
    shapes.push(FlatShape::CloseDelimiter(Delimiter::Square).tagged(close));
}
|
||||
|
||||
/// Colors any delimited region, parameterized by delimiter kind and the
/// open/close spans.
#[derive(Debug, Copy, Clone)]
pub struct DelimitedShape;

impl ColorSyntax for DelimitedShape {
    type Info = ();
    type Input = (Delimiter, Tag, Tag);
    /// Color open delimiter, contents (as an expression list), then close
    /// delimiter, in that order.
    fn color_syntax<'a, 'b>(
        &self,
        (delimiter, open, close): &(Delimiter, Tag, Tag),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Tagged<FlatShape>>,
    ) -> Self::Info {
        shapes.push(FlatShape::OpenDelimiter(*delimiter).tagged(open));
        color_syntax(&ExpressionListShape, token_nodes, context, shapes);
        shapes.push(FlatShape::CloseDelimiter(*delimiter).tagged(close));
    }
}
|
71
src/parser/hir/syntax_shape/expression/file_path.rs
Normal file
71
src/parser/hir/syntax_shape/expression/file_path.rs
Normal file
@ -0,0 +1,71 @@
|
||||
use crate::parser::hir::syntax_shape::expression::atom::{expand_atom, AtomicToken, ExpansionRule};
|
||||
use crate::parser::hir::syntax_shape::{
|
||||
expression::expand_file_path, ExpandContext, ExpandExpression, FallibleColorSyntax, FlatShape,
|
||||
};
|
||||
use crate::parser::{hir, hir::TokensIterator};
|
||||
use crate::prelude::*;
|
||||
|
||||
/// A shape for arguments that denote file paths.
#[derive(Debug, Copy, Clone)]
pub struct FilePathShape;

impl FallibleColorSyntax for FilePathShape {
    type Info = ();
    type Input = ();

    /// Color a file-path argument: path-like atoms (words, strings, numbers,
    /// sizes) color as paths; anything else falls back to its default
    /// coloring. A failed expansion is silently ignored (best-effort).
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Tagged<FlatShape>>,
    ) -> Result<(), ShellError> {
        let atom = expand_atom(
            token_nodes,
            "file path",
            context,
            ExpansionRule::permissive(),
        );

        let atom = match atom {
            // Nothing to color if expansion failed
            Err(_) => return Ok(()),
            Ok(atom) => atom,
        };

        match atom.item {
            AtomicToken::Word { .. }
            | AtomicToken::String { .. }
            | AtomicToken::Number { .. }
            | AtomicToken::Size { .. } => {
                shapes.push(FlatShape::Path.tagged(atom.tag));
            }

            _ => atom.color_tokens(shapes),
        }

        Ok(())
    }
}
|
||||
|
||||
impl ExpandExpression for FilePathShape {
|
||||
fn expand_expr<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
let atom = expand_atom(token_nodes, "file path", context, ExpansionRule::new())?;
|
||||
|
||||
match atom.item {
|
||||
AtomicToken::Word { text: body } | AtomicToken::String { body } => {
|
||||
let path = expand_file_path(body.slice(context.source), context);
|
||||
return Ok(hir::Expression::file_path(path, atom.tag));
|
||||
}
|
||||
|
||||
AtomicToken::Number { .. } | AtomicToken::Size { .. } => {
|
||||
let path = atom.tag.slice(context.source);
|
||||
return Ok(hir::Expression::file_path(path, atom.tag));
|
||||
}
|
||||
|
||||
_ => return atom.into_hir(context, "file path"),
|
||||
}
|
||||
}
|
||||
}
|
176
src/parser/hir/syntax_shape/expression/list.rs
Normal file
176
src/parser/hir/syntax_shape/expression/list.rs
Normal file
@ -0,0 +1,176 @@
|
||||
use crate::errors::ShellError;
|
||||
use crate::parser::{
|
||||
hir,
|
||||
hir::syntax_shape::{
|
||||
color_fallible_syntax, color_syntax, expand_atom, expand_expr, maybe_spaced, spaced,
|
||||
AnyExpressionShape, ColorSyntax, ExpandContext, ExpandSyntax, ExpansionRule,
|
||||
MaybeSpaceShape, SpaceShape,
|
||||
},
|
||||
hir::TokensIterator,
|
||||
FlatShape,
|
||||
};
|
||||
use crate::Tagged;
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ExpressionListShape;
|
||||
|
||||
impl ExpandSyntax for ExpressionListShape {
|
||||
type Output = Vec<hir::Expression>;
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Vec<hir::Expression>, ShellError> {
|
||||
let mut exprs = vec![];
|
||||
|
||||
if token_nodes.at_end_possible_ws() {
|
||||
return Ok(exprs);
|
||||
}
|
||||
|
||||
let expr = expand_expr(&maybe_spaced(AnyExpressionShape), token_nodes, context)?;
|
||||
|
||||
exprs.push(expr);
|
||||
|
||||
loop {
|
||||
if token_nodes.at_end_possible_ws() {
|
||||
return Ok(exprs);
|
||||
}
|
||||
|
||||
let expr = expand_expr(&spaced(AnyExpressionShape), token_nodes, context)?;
|
||||
|
||||
exprs.push(expr);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ColorSyntax for ExpressionListShape {
    type Info = ();
    type Input = ();

    /// The intent of this method is to fully color an expression list shape infallibly.
    /// This means that if we can't expand a token into an expression, we fall back to
    /// a simpler coloring strategy.
    ///
    /// This would apply to something like `where x >`, which includes an incomplete
    /// binary operator. Since we will fail to process it as a binary operator, we'll
    /// fall back to a simpler coloring and move on.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Tagged<FlatShape>>,
    ) {
        // We encountered a parsing error and will continue with simpler coloring ("backoff
        // coloring mode")
        let mut backoff = false;

        // Consume any leading whitespace
        color_syntax(&MaybeSpaceShape, token_nodes, context, shapes);

        loop {
            // If we reached the very end of the token stream, we're done
            if token_nodes.at_end() {
                return;
            }

            if backoff {
                // Track progress so an iteration that colors nothing is caught
                // instead of looping forever.
                let len = shapes.len();

                // If we previously encountered a parsing error, use backoff coloring mode
                color_syntax(&SimplestExpression, token_nodes, context, shapes);

                if len == shapes.len() && !token_nodes.at_end() {
                    // This should never happen, but if it does, a panic is better than an infinite loop
                    panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression")
                }
            } else {
                // Try to color the head of the stream as an expression
                match color_fallible_syntax(&AnyExpressionShape, token_nodes, context, shapes) {
                    // If no expression was found, switch to backoff coloring mode
                    Err(_) => {
                        backoff = true;
                        continue;
                    }
                    Ok(_) => {}
                }

                // If an expression was found, consume a space
                match color_fallible_syntax(&SpaceShape, token_nodes, context, shapes) {
                    Err(_) => {
                        // If no space was found, we're either at the end or there's an error.
                        // Either way, switch to backoff coloring mode. If we're at the end
                        // it won't have any consequences.
                        backoff = true;
                    }
                    Ok(_) => {
                        // Otherwise, move on to the next expression
                    }
                }
            }
        }
    }
}
|
||||
|
||||
/// BackoffColoringMode consumes all of the remaining tokens in an infallible way
#[derive(Debug, Copy, Clone)]
pub struct BackoffColoringMode;

impl ColorSyntax for BackoffColoringMode {
    type Info = ();
    type Input = ();

    /// Repeatedly color the head of the stream with `SimplestExpression` until
    /// the token stream is exhausted. Exists to paint whatever remains after a
    /// parse error; never fails.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &Self::Input,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Tagged<FlatShape>>,
    ) -> Self::Info {
        loop {
            if token_nodes.at_end() {
                break;
            }

            // Track progress so a pass that consumes nothing is caught instead
            // of looping forever.
            let len = shapes.len();
            color_syntax(&SimplestExpression, token_nodes, context, shapes);

            if len == shapes.len() && !token_nodes.at_end() {
                // This shouldn't happen, but if it does, a panic is better than an infinite loop
                panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, shapes);
            }
        }
    }
}
|
||||
|
||||
/// The point of `SimplestExpression` is to serve as an infallible base case for coloring.
|
||||
/// As a last ditch effort, if we can't find any way to parse the head of the stream as an
|
||||
/// expression, fall back to simple coloring.
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct SimplestExpression;
|
||||
|
||||
impl ColorSyntax for SimplestExpression {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
) {
|
||||
let atom = expand_atom(
|
||||
token_nodes,
|
||||
"any token",
|
||||
context,
|
||||
ExpansionRule::permissive(),
|
||||
);
|
||||
|
||||
match atom {
|
||||
Err(_) => {}
|
||||
Ok(atom) => atom.color_tokens(shapes),
|
||||
}
|
||||
}
|
||||
}
|
125
src/parser/hir/syntax_shape/expression/number.rs
Normal file
125
src/parser/hir/syntax_shape/expression/number.rs
Normal file
@ -0,0 +1,125 @@
|
||||
use crate::parser::hir::syntax_shape::{
|
||||
expand_atom, parse_single_node, ExpandContext, ExpandExpression, ExpansionRule,
|
||||
FallibleColorSyntax, FlatShape,
|
||||
};
|
||||
use crate::parser::{
|
||||
hir,
|
||||
hir::{RawNumber, TokensIterator},
|
||||
RawToken,
|
||||
};
|
||||
use crate::prelude::*;
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
pub struct NumberShape;

impl ExpandExpression for NumberShape {
    /// Expand a single token into a number expression. Globs and operators are
    /// rejected; other token kinds are expanded to their natural HIR form
    /// rather than being treated as errors.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        parse_single_node(token_nodes, "Number", |token, token_tag, err| {
            Ok(match token {
                // Globs and operators can never be numbers.
                RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()),
                // `$it` has its own expression variant.
                RawToken::Variable(tag) if tag.slice(context.source) == "it" => {
                    hir::Expression::it_variable(tag, token_tag)
                }
                RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag),
                RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)),
                RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag),
                RawToken::Number(number) => {
                    hir::Expression::number(number.to_number(context.source), token_tag)
                }
                RawToken::Bare => hir::Expression::bare(token_tag),
                RawToken::String(tag) => hir::Expression::string(tag, token_tag),
            })
        })
    }
}
|
||||
|
||||
impl FallibleColorSyntax for NumberShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let atom = token_nodes.spanned(|token_nodes| {
|
||||
expand_atom(token_nodes, "number", context, ExpansionRule::permissive())
|
||||
});
|
||||
|
||||
let atom = match atom {
|
||||
Tagged { item: Err(_), tag } => {
|
||||
shapes.push(FlatShape::Error.tagged(tag));
|
||||
return Ok(());
|
||||
}
|
||||
Tagged { item: Ok(atom), .. } => atom,
|
||||
};
|
||||
|
||||
atom.color_tokens(shapes);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
pub struct IntShape;

impl ExpandExpression for IntShape {
    /// Like `NumberShape`, but only accepts integer literals: a non-integer
    /// `RawToken::Number` (i.e. a decimal) is rejected.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        parse_single_node(token_nodes, "Integer", |token, token_tag, err| {
            Ok(match token {
                RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()),
                RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)),
                RawToken::Variable(tag) if tag.slice(context.source) == "it" => {
                    hir::Expression::it_variable(tag, token_tag)
                }
                RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag),
                RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag),
                // Only integer raw numbers qualify …
                RawToken::Number(number @ RawNumber::Int(_)) => {
                    hir::Expression::number(number.to_number(context.source), token_tag)
                }
                // … any other number (a decimal) is a type error for this shape.
                RawToken::Number(_) => return Err(err.error()),
                RawToken::Bare => hir::Expression::bare(token_tag),
                RawToken::String(tag) => hir::Expression::string(tag, token_tag),
            })
        })
    }
}
|
||||
|
||||
impl FallibleColorSyntax for IntShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let atom = token_nodes.spanned(|token_nodes| {
|
||||
expand_atom(token_nodes, "integer", context, ExpansionRule::permissive())
|
||||
});
|
||||
|
||||
let atom = match atom {
|
||||
Tagged { item: Err(_), tag } => {
|
||||
shapes.push(FlatShape::Error.tagged(tag));
|
||||
return Ok(());
|
||||
}
|
||||
Tagged { item: Ok(atom), .. } => atom,
|
||||
};
|
||||
|
||||
atom.color_tokens(shapes);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
112
src/parser/hir/syntax_shape/expression/pattern.rs
Normal file
112
src/parser/hir/syntax_shape/expression/pattern.rs
Normal file
@ -0,0 +1,112 @@
|
||||
use crate::parser::hir::syntax_shape::{
|
||||
expand_atom, expand_bare, expand_syntax, expression::expand_file_path, parse_single_node,
|
||||
AtomicToken, ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, FallibleColorSyntax,
|
||||
FlatShape,
|
||||
};
|
||||
use crate::parser::{hir, hir::TokensIterator, Operator, RawToken, TokenNode};
|
||||
use crate::prelude::*;
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct PatternShape;
|
||||
|
||||
impl FallibleColorSyntax for PatternShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
token_nodes.atomic(|token_nodes| {
|
||||
let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?;
|
||||
|
||||
match &atom.item {
|
||||
AtomicToken::GlobPattern { .. } | AtomicToken::Word { .. } => {
|
||||
shapes.push(FlatShape::GlobPattern.tagged(atom.tag));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
_ => Err(ShellError::type_error("pattern", atom.tagged_type_name())),
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpandExpression for PatternShape {
    /// Expand the next tokens into a glob-pattern expression. The multi-token
    /// bare form is tried first; otherwise a single token is parsed, where
    /// quoted strings become file paths and several kinds are impossible
    /// because the bare-pattern pass would already have claimed them.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        // First try the multi-token bare form (`foo*.txt`, `a.b`, …).
        let pattern = expand_syntax(&BarePatternShape, token_nodes, context);

        match pattern {
            Ok(tag) => {
                return Ok(hir::Expression::pattern(tag));
            }
            Err(_) => {}
        }

        parse_single_node(token_nodes, "Pattern", |token, token_tag, _| {
            Ok(match token {
                // These three kinds are consumed by BarePatternShape above, so
                // seeing them here means a parser invariant was broken.
                RawToken::GlobPattern => {
                    return Err(ShellError::unreachable(
                        "glob pattern after glob already returned",
                    ))
                }
                RawToken::Operator(..) => {
                    return Err(ShellError::unreachable("dot after glob already returned"))
                }
                RawToken::Bare => {
                    return Err(ShellError::unreachable("bare after glob already returned"))
                }

                RawToken::Variable(tag) if tag.slice(context.source) == "it" => {
                    hir::Expression::it_variable(tag, token_tag)
                }
                RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag),
                RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag),
                RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)),
                // A numeric token used where a pattern is expected is treated
                // as a bare word.
                RawToken::Number(_) => hir::Expression::bare(token_tag),

                // Quoted strings go through file-path expansion (tilde etc.).
                RawToken::String(tag) => hir::Expression::file_path(
                    expand_file_path(tag.slice(context.source), context),
                    token_tag,
                ),
            })
        })
    }
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct BarePatternShape;
|
||||
|
||||
impl ExpandSyntax for BarePatternShape {
|
||||
type Output = Tag;
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Tag, ShellError> {
|
||||
expand_bare(token_nodes, context, |token| match token {
|
||||
TokenNode::Token(Tagged {
|
||||
item: RawToken::Bare,
|
||||
..
|
||||
})
|
||||
| TokenNode::Token(Tagged {
|
||||
item: RawToken::Operator(Operator::Dot),
|
||||
..
|
||||
})
|
||||
| TokenNode::Token(Tagged {
|
||||
item: RawToken::GlobPattern,
|
||||
..
|
||||
}) => true,
|
||||
|
||||
_ => false,
|
||||
})
|
||||
}
|
||||
}
|
90
src/parser/hir/syntax_shape/expression/string.rs
Normal file
90
src/parser/hir/syntax_shape/expression/string.rs
Normal file
@ -0,0 +1,90 @@
|
||||
use crate::parser::hir::syntax_shape::{
|
||||
expand_atom, expand_variable, parse_single_node, AtomicToken, ExpandContext, ExpandExpression,
|
||||
ExpansionRule, FallibleColorSyntax, FlatShape, TestSyntax,
|
||||
};
|
||||
use crate::parser::hir::tokens_iterator::Peeked;
|
||||
use crate::parser::{hir, hir::TokensIterator, RawToken, TokenNode};
|
||||
use crate::prelude::*;
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct StringShape;
|
||||
|
||||
impl FallibleColorSyntax for StringShape {
|
||||
type Info = ();
|
||||
type Input = FlatShape;
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
input: &FlatShape,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive());
|
||||
|
||||
let atom = match atom {
|
||||
Err(_) => return Ok(()),
|
||||
Ok(atom) => atom,
|
||||
};
|
||||
|
||||
match atom {
|
||||
Tagged {
|
||||
item: AtomicToken::String { .. },
|
||||
tag,
|
||||
} => shapes.push((*input).tagged(tag)),
|
||||
other => other.color_tokens(shapes),
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpandExpression for StringShape {
    /// Expand a single token into a string expression. Globs and operators are
    /// type errors; bare words and numbers are accepted as bare expressions.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        parse_single_node(token_nodes, "String", |token, token_tag, _| {
            Ok(match token {
                RawToken::GlobPattern => {
                    return Err(ShellError::type_error(
                        "String",
                        "glob pattern".tagged(token_tag),
                    ))
                }
                RawToken::Operator(..) => {
                    return Err(ShellError::type_error(
                        "String",
                        "operator".tagged(token_tag),
                    ))
                }
                // Variables (including `$it`) expand through the shared helper.
                RawToken::Variable(tag) => expand_variable(tag, token_tag, &context.source),
                RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag),
                RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)),
                RawToken::Number(_) => hir::Expression::bare(token_tag),
                RawToken::Bare => hir::Expression::bare(token_tag),
                RawToken::String(tag) => hir::Expression::string(tag, token_tag),
            })
        })
    }
}
|
||||
|
||||
impl TestSyntax for StringShape {
|
||||
fn test<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
_context: &ExpandContext,
|
||||
) -> Option<Peeked<'a, 'b>> {
|
||||
let peeked = token_nodes.peek_any();
|
||||
|
||||
match peeked.node {
|
||||
Some(TokenNode::Token(token)) => match token.item {
|
||||
RawToken::String(_) => Some(peeked),
|
||||
_ => None,
|
||||
},
|
||||
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
96
src/parser/hir/syntax_shape/expression/unit.rs
Normal file
96
src/parser/hir/syntax_shape/expression/unit.rs
Normal file
@ -0,0 +1,96 @@
|
||||
use crate::data::meta::Span;
|
||||
use crate::parser::hir::syntax_shape::{ExpandContext, ExpandSyntax};
|
||||
use crate::parser::parse::tokens::RawNumber;
|
||||
use crate::parser::parse::unit::Unit;
|
||||
use crate::parser::{hir::TokensIterator, RawToken, TokenNode};
|
||||
use crate::prelude::*;
|
||||
use nom::branch::alt;
|
||||
use nom::bytes::complete::tag;
|
||||
use nom::character::complete::digit1;
|
||||
use nom::combinator::{all_consuming, opt, value};
|
||||
use nom::IResult;
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
pub struct UnitShape;

impl ExpandSyntax for UnitShape {
    // A unit is a number immediately followed by a size suffix, e.g. `10KB`,
    // which lexes as a single bare word.
    type Output = Tagged<(Tagged<RawNumber>, Tagged<Unit>)>;

    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Tagged<(Tagged<RawNumber>, Tagged<Unit>)>, ShellError> {
        let peeked = token_nodes.peek_any().not_eof("unit")?;

        // Only a bare token can hold a unit.
        let tag = match peeked.node {
            TokenNode::Token(Tagged {
                item: RawToken::Bare,
                tag,
            }) => tag,
            _ => return Err(peeked.type_error("unit")),
        };

        let unit = unit_size(tag.slice(context.source), *tag);

        let (_, (number, unit)) = match unit {
            Err(_) => {
                // NOTE(review): this error is tagged with Tag::unknown() rather
                // than the bare token's tag — consider `*tag` so the error
                // points at the offending word.
                return Err(ShellError::type_error(
                    "unit",
                    "word".tagged(Tag::unknown()),
                ))
            }
            Ok((number, unit)) => (number, unit),
        };

        // Only consume the token once we know it parses as a unit.
        peeked.commit();
        Ok((number, unit).tagged(tag))
    }
}
|
||||
|
||||
fn unit_size(input: &str, bare_tag: Tag) -> IResult<&str, (Tagged<RawNumber>, Tagged<Unit>)> {
|
||||
let (input, digits) = digit1(input)?;
|
||||
|
||||
let (input, dot) = opt(tag("."))(input)?;
|
||||
|
||||
let (input, number) = match dot {
|
||||
Some(dot) => {
|
||||
let (input, rest) = digit1(input)?;
|
||||
(
|
||||
input,
|
||||
RawNumber::decimal((
|
||||
bare_tag.span.start(),
|
||||
bare_tag.span.start() + digits.len() + dot.len() + rest.len(),
|
||||
bare_tag.anchor,
|
||||
)),
|
||||
)
|
||||
}
|
||||
|
||||
None => (
|
||||
input,
|
||||
RawNumber::int((
|
||||
bare_tag.span.start(),
|
||||
bare_tag.span.start() + digits.len(),
|
||||
bare_tag.anchor,
|
||||
)),
|
||||
),
|
||||
};
|
||||
|
||||
let (input, unit) = all_consuming(alt((
|
||||
value(Unit::B, alt((tag("B"), tag("b")))),
|
||||
value(Unit::KB, alt((tag("KB"), tag("kb"), tag("Kb")))),
|
||||
value(Unit::MB, alt((tag("MB"), tag("mb"), tag("Mb")))),
|
||||
value(Unit::MB, alt((tag("GB"), tag("gb"), tag("Gb")))),
|
||||
value(Unit::MB, alt((tag("TB"), tag("tb"), tag("Tb")))),
|
||||
value(Unit::MB, alt((tag("PB"), tag("pb"), tag("Pb")))),
|
||||
)))(input)?;
|
||||
|
||||
let start_span = number.tag.span.end();
|
||||
|
||||
let unit_tag = Tag::new(
|
||||
bare_tag.anchor,
|
||||
Span::from((start_span, bare_tag.span.end())),
|
||||
);
|
||||
|
||||
Ok((input, (number, unit.tagged(unit_tag))))
|
||||
}
|
728
src/parser/hir/syntax_shape/expression/variable_path.rs
Normal file
728
src/parser/hir/syntax_shape/expression/variable_path.rs
Normal file
@ -0,0 +1,728 @@
|
||||
use crate::parser::hir::syntax_shape::{
|
||||
color_fallible_syntax, color_fallible_syntax_with, expand_atom, expand_expr, expand_syntax,
|
||||
parse_single_node, AnyExpressionShape, AtomicToken, BareShape, ExpandContext, ExpandExpression,
|
||||
ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, Peeked, SkipSyntax, StringShape,
|
||||
TestSyntax, WhitespaceShape,
|
||||
};
|
||||
use crate::parser::{hir, hir::Expression, hir::TokensIterator, Operator, RawToken};
|
||||
use crate::prelude::*;
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
pub struct VariablePathShape;

impl ExpandExpression for VariablePathShape {
    /// Expand a variable followed by any number of `.member` accesses into a
    /// path expression (e.g. `$it.name.first`).
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        // 1. let the head be the first token, expecting a variable
        // 2. let the tail be an empty list of members
        // 2. while the next token (excluding ws) is a dot:
        //    1. consume the dot
        //    2. consume the next token as a member and push it onto tail

        let head = expand_expr(&VariableShape, token_nodes, context)?;
        let start = head.tag();
        let mut end = start;
        let mut tail: Vec<Tagged<String>> = vec![];

        loop {
            // Stop at the first token that is not a dot.
            match DotShape.skip(token_nodes, context) {
                Err(_) => break,
                Ok(_) => {}
            }

            let syntax = expand_syntax(&MemberShape, token_nodes, context)?;
            let member = syntax.to_tagged_string(context.source);

            // Extend the expression's span to cover the newest member.
            end = member.tag();
            tail.push(member);
        }

        Ok(hir::Expression::path(head, tail, start.until(end)))
    }
}
||||
|
||||
impl FallibleColorSyntax for VariablePathShape {
    type Info = ();
    type Input = ();

    /// Color a variable head followed by `.member` repetitions. Runs inside
    /// `atomic` so a partial match (e.g. a dot with no member) rolls back
    /// without leaving stray colors behind.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Tagged<FlatShape>>,
    ) -> Result<(), ShellError> {
        token_nodes.atomic(|token_nodes| {
            // If the head of the token stream is not a variable, fail
            color_fallible_syntax(&VariableShape, token_nodes, context, shapes)?;

            loop {
                // look for a dot at the head of a stream
                let dot = color_fallible_syntax_with(
                    &ColorableDotShape,
                    &FlatShape::Dot,
                    token_nodes,
                    context,
                    shapes,
                );

                // if there's no dot, we're done
                match dot {
                    Err(_) => break,
                    Ok(_) => {}
                }

                // otherwise, look for a member, and if you don't find one, fail
                color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
            }

            Ok(())
        })
    }
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
pub struct PathTailShape;

/// The failure mode of `PathTailShape` is a dot followed by a non-member
impl FallibleColorSyntax for PathTailShape {
    type Info = ();
    type Input = ();

    /// Color zero or more `.member` pairs. Succeeds as soon as the next token
    /// is not a dot; fails (atomically, rolling back colors) when a dot is not
    /// followed by a member.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Tagged<FlatShape>>,
    ) -> Result<(), ShellError> {
        token_nodes.atomic(|token_nodes| loop {
            let result = color_fallible_syntax_with(
                &ColorableDotShape,
                &FlatShape::Dot,
                token_nodes,
                context,
                shapes,
            );

            match result {
                // No leading dot: the (possibly empty) tail is complete.
                Err(_) => return Ok(()),
                Ok(_) => {}
            }

            // If we've seen a dot but not a member, fail
            color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
        })
    }
}
|
||||
|
||||
impl ExpandSyntax for PathTailShape {
    // The members of the tail plus the tag of the last member (used by callers
    // to extend the whole path's tag).
    type Output = (Vec<Tagged<String>>, Tag);
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ShellError> {
        let mut end: Option<Tag> = None;
        let mut tail = vec![];

        loop {
            // Stop at the first token that is not a dot.
            match DotShape.skip(token_nodes, context) {
                Err(_) => break,
                Ok(_) => {}
            }

            let syntax = expand_syntax(&MemberShape, token_nodes, context)?;
            let member = syntax.to_tagged_string(context.source);
            end = Some(member.tag());
            tail.push(member);
        }

        match end {
            // A path tail requires at least one `.member`.
            None => {
                return Err(ShellError::type_error(
                    "path tail",
                    token_nodes.typed_tag_at_cursor(),
                ))
            }

            Some(end) => Ok((tail, end)),
        }
    }
}
|
||||
|
||||
// A suffix that continues a preceding expression: either a `.member` path
// access or an infix operator with its right-hand side.
#[derive(Debug)]
pub enum ExpressionContinuation {
    DotSuffix(Tag, Tagged<String>),
    InfixSuffix(Tagged<Operator>, Expression),
}

/// An expression continuation
#[derive(Debug, Copy, Clone)]
pub struct ExpressionContinuationShape;

impl ExpandSyntax for ExpressionContinuationShape {
    type Output = ExpressionContinuation;

    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<ExpressionContinuation, ShellError> {
        // Try to expand a `.`
        let dot = expand_syntax(&DotShape, token_nodes, context);

        match dot {
            // If a `.` was matched, it's a `Path`, and we expect a `Member` next
            Ok(dot) => {
                let syntax = expand_syntax(&MemberShape, token_nodes, context)?;
                let member = syntax.to_tagged_string(context.source);

                Ok(ExpressionContinuation::DotSuffix(dot, member))
            }

            // Otherwise, we expect an infix operator and an expression next
            Err(_) => {
                let (_, op, _) = expand_syntax(&InfixShape, token_nodes, context)?;
                let next = expand_expr(&AnyExpressionShape, token_nodes, context)?;

                Ok(ExpressionContinuation::InfixSuffix(op, next))
            }
        }
    }
}
|
||||
|
||||
// Which kind of continuation was colored: a `.member` or an infix operation.
pub enum ContinuationInfo {
    Dot,
    Infix,
}

impl FallibleColorSyntax for ExpressionContinuationShape {
    type Info = ContinuationInfo;
    type Input = ();

    /// Color an expression continuation: either `.member`, or an infix
    /// operator followed by a full expression. Wrapped in `atomic` so any
    /// partially-consumed tokens roll back on failure.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Tagged<FlatShape>>,
    ) -> Result<ContinuationInfo, ShellError> {
        token_nodes.atomic(|token_nodes| {
            // Try to expand a `.`
            let dot = color_fallible_syntax_with(
                &ColorableDotShape,
                &FlatShape::Dot,
                token_nodes,
                context,
                shapes,
            );

            match dot {
                Ok(_) => {
                    // we found a dot, so let's keep looking for a member; if no member was found, fail
                    color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;

                    Ok(ContinuationInfo::Dot)
                }
                Err(_) => {
                    // Collect colors into a scratch buffer so a failed infix
                    // attempt doesn't leave partial colors in `shapes`.
                    let mut new_shapes = vec![];
                    let result = token_nodes.atomic(|token_nodes| {
                        // we didn't find a dot, so let's see if we're looking at an infix. If not found, fail
                        color_fallible_syntax(&InfixShape, token_nodes, context, &mut new_shapes)?;

                        // now that we've seen an infix shape, look for any expression. If not found, fail
                        color_fallible_syntax(
                            &AnyExpressionShape,
                            token_nodes,
                            context,
                            &mut new_shapes,
                        )?;

                        Ok(ContinuationInfo::Infix)
                    })?;
                    shapes.extend(new_shapes);
                    Ok(result)
                }
            }
        })
    }
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
pub struct VariableShape;

impl ExpandExpression for VariableShape {
    /// Expand a single variable token, giving `$it` its own expression variant.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        parse_single_node(token_nodes, "variable", |token, token_tag, _| {
            Ok(match token {
                RawToken::Variable(tag) => {
                    // `$it` refers to the current pipeline input.
                    if tag.slice(context.source) == "it" {
                        hir::Expression::it_variable(tag, token_tag)
                    } else {
                        hir::Expression::variable(tag, token_tag)
                    }
                }
                _ => {
                    return Err(ShellError::type_error(
                        "variable",
                        token.type_name().tagged(token_tag),
                    ))
                }
            })
        })
    }
}
|
||||
|
||||
impl FallibleColorSyntax for VariableShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let atom = expand_atom(
|
||||
token_nodes,
|
||||
"variable",
|
||||
context,
|
||||
ExpansionRule::permissive(),
|
||||
);
|
||||
|
||||
let atom = match atom {
|
||||
Err(err) => return Err(err),
|
||||
Ok(atom) => atom,
|
||||
};
|
||||
|
||||
match &atom.item {
|
||||
AtomicToken::Variable { .. } => {
|
||||
shapes.push(FlatShape::Variable.tagged(atom.tag));
|
||||
Ok(())
|
||||
}
|
||||
AtomicToken::ItVariable { .. } => {
|
||||
shapes.push(FlatShape::ItVariable.tagged(atom.tag));
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(ShellError::type_error("variable", atom.tagged_type_name())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// A column-path member: either a quoted string (outer tag includes the quotes,
// inner tag is the contents) or a bare word.
#[derive(Debug, Clone, Copy)]
pub enum Member {
    String(/* outer */ Tag, /* inner */ Tag),
    Bare(Tag),
}

impl Member {
    // Convert the member into a string expression (bare members use the same
    // tag for both contents and expression).
    pub(crate) fn to_expr(&self) -> hir::Expression {
        match self {
            Member::String(outer, inner) => hir::Expression::string(inner, outer),
            Member::Bare(tag) => hir::Expression::string(tag, tag),
        }
    }

    // The tag covering the member's full source text (including any quotes).
    pub(crate) fn tag(&self) -> Tag {
        match self {
            Member::String(outer, _inner) => *outer,
            Member::Bare(tag) => *tag,
        }
    }

    // The member's textual contents, tagged with its outer span.
    pub(crate) fn to_tagged_string(&self, source: &str) -> Tagged<String> {
        match self {
            Member::String(outer, inner) => inner.string(source).tagged(outer),
            Member::Bare(tag) => tag.tagged_string(source),
        }
    }

    // A human-readable type name for error messages.
    pub(crate) fn tagged_type_name(&self) -> Tagged<&'static str> {
        match self {
            Member::String(outer, _inner) => "string".tagged(outer),
            Member::Bare(tag) => "word".tagged(tag),
        }
    }
}
|
||||
|
||||
// State machine for parsing a column path (`a.b."c d"`): members separated by
// dots, where leading/trailing dots and adjacent members are both illegal.
enum ColumnPathState {
    // Nothing consumed yet.
    Initial,
    // A dot with no preceding member (only legal if a member follows).
    LeadingDot(Tag),
    // Members followed by a trailing dot, awaiting the next member.
    Dot(Tag, Vec<Member>, Tag),
    // A well-formed path so far: its overall tag and its members.
    Member(Tag, Vec<Member>),
    // A terminal error state; sticky once entered.
    Error(ShellError),
}

impl ColumnPathState {
    // Transition on a dot token.
    pub fn dot(self, dot: Tag) -> ColumnPathState {
        match self {
            ColumnPathState::Initial => ColumnPathState::LeadingDot(dot),
            // Two dots in a row is an error.
            ColumnPathState::LeadingDot(_) => {
                ColumnPathState::Error(ShellError::type_error("column", "dot".tagged(dot)))
            }
            ColumnPathState::Dot(..) => {
                ColumnPathState::Error(ShellError::type_error("column", "dot".tagged(dot)))
            }
            ColumnPathState::Member(tag, members) => ColumnPathState::Dot(tag, members, dot),
            ColumnPathState::Error(err) => ColumnPathState::Error(err),
        }
    }

    // Transition on a member token.
    pub fn member(self, member: Member) -> ColumnPathState {
        match self {
            ColumnPathState::Initial => ColumnPathState::Member(member.tag(), vec![member]),
            ColumnPathState::LeadingDot(tag) => {
                ColumnPathState::Member(tag.until(member.tag()), vec![member])
            }

            ColumnPathState::Dot(tag, mut tags, _) => {
                ColumnPathState::Member(tag.until(member.tag()), {
                    tags.push(member);
                    tags
                })
            }
            // Two members with no dot between them is an error.
            ColumnPathState::Member(..) => {
                ColumnPathState::Error(ShellError::type_error("column", member.tagged_type_name()))
            }
            ColumnPathState::Error(err) => ColumnPathState::Error(err),
        }
    }

    // Finalize: only the Member state is a complete, valid path. `next`
    // supplies the error position when nothing was consumed at all.
    pub fn into_path(self, next: Peeked) -> Result<Tagged<Vec<Member>>, ShellError> {
        match self {
            ColumnPathState::Initial => Err(next.type_error("column path")),
            ColumnPathState::LeadingDot(dot) => {
                Err(ShellError::type_error("column", "dot".tagged(dot)))
            }
            ColumnPathState::Dot(_tag, _members, dot) => {
                Err(ShellError::type_error("column", "dot".tagged(dot)))
            }
            ColumnPathState::Member(tag, tags) => Ok(tags.tagged(tag)),
            ColumnPathState::Error(err) => Err(err),
        }
    }
}
|
||||
|
||||
pub fn expand_column_path<'a, 'b>(
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Tagged<Vec<Member>>, ShellError> {
|
||||
let mut state = ColumnPathState::Initial;
|
||||
|
||||
loop {
|
||||
let member = MemberShape.expand_syntax(token_nodes, context);
|
||||
|
||||
match member {
|
||||
Err(_) => break,
|
||||
Ok(member) => state = state.member(member),
|
||||
}
|
||||
|
||||
let dot = DotShape.expand_syntax(token_nodes, context);
|
||||
|
||||
match dot {
|
||||
Err(_) => break,
|
||||
Ok(dot) => state = state.dot(dot),
|
||||
}
|
||||
}
|
||||
|
||||
state.into_path(token_nodes.peek_non_ws())
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ColumnPathShape;
|
||||
|
||||
impl FallibleColorSyntax for ColumnPathShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
// If there's not even one member shape, fail
|
||||
color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
|
||||
|
||||
loop {
|
||||
let checkpoint = token_nodes.checkpoint();
|
||||
|
||||
match color_fallible_syntax_with(
|
||||
&ColorableDotShape,
|
||||
&FlatShape::Dot,
|
||||
checkpoint.iterator,
|
||||
context,
|
||||
shapes,
|
||||
) {
|
||||
Err(_) => {
|
||||
// we already saw at least one member shape, so return successfully
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
Ok(_) => {
|
||||
match color_fallible_syntax(&MemberShape, checkpoint.iterator, context, shapes)
|
||||
{
|
||||
Err(_) => {
|
||||
// we saw a dot but not a member (but we saw at least one member),
|
||||
// so don't commit the dot but return successfully
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
Ok(_) => {
|
||||
// we saw a dot and a member, so commit it and continue on
|
||||
checkpoint.commit();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpandSyntax for ColumnPathShape {
|
||||
type Output = Tagged<Vec<Member>>;
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ShellError> {
|
||||
expand_column_path(token_nodes, context)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct MemberShape;
|
||||
|
||||
impl FallibleColorSyntax for MemberShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let bare = color_fallible_syntax_with(
|
||||
&BareShape,
|
||||
&FlatShape::BareMember,
|
||||
token_nodes,
|
||||
context,
|
||||
shapes,
|
||||
);
|
||||
|
||||
match bare {
|
||||
Ok(_) => return Ok(()),
|
||||
Err(_) => {
|
||||
// If we don't have a bare word, we'll look for a string
|
||||
}
|
||||
}
|
||||
|
||||
// Look for a string token. If we don't find one, fail
|
||||
color_fallible_syntax_with(
|
||||
&StringShape,
|
||||
&FlatShape::StringMember,
|
||||
token_nodes,
|
||||
context,
|
||||
shapes,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpandSyntax for MemberShape {
|
||||
type Output = Member;
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Member, ShellError> {
|
||||
let bare = BareShape.test(token_nodes, context);
|
||||
if let Some(peeked) = bare {
|
||||
let node = peeked.not_eof("column")?.commit();
|
||||
return Ok(Member::Bare(node.tag()));
|
||||
}
|
||||
|
||||
let string = StringShape.test(token_nodes, context);
|
||||
|
||||
if let Some(peeked) = string {
|
||||
let node = peeked.not_eof("column")?.commit();
|
||||
let (outer, inner) = node.expect_string();
|
||||
|
||||
return Ok(Member::String(outer, inner));
|
||||
}
|
||||
|
||||
Err(token_nodes.peek_any().type_error("column"))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct DotShape;
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ColorableDotShape;
|
||||
|
||||
impl FallibleColorSyntax for ColorableDotShape {
|
||||
type Info = ();
|
||||
type Input = FlatShape;
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
input: &FlatShape,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
_context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let peeked = token_nodes.peek_any().not_eof("dot")?;
|
||||
|
||||
match peeked.node {
|
||||
node if node.is_dot() => {
|
||||
peeked.commit();
|
||||
shapes.push((*input).tagged(node.tag()));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
other => Err(ShellError::type_error("dot", other.tagged_type_name())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl SkipSyntax for DotShape {
|
||||
fn skip<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<(), ShellError> {
|
||||
expand_syntax(self, token_nodes, context)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpandSyntax for DotShape {
|
||||
type Output = Tag;
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
_context: &ExpandContext,
|
||||
) -> Result<Self::Output, ShellError> {
|
||||
parse_single_node(token_nodes, "dot", |token, token_tag, _| {
|
||||
Ok(match token {
|
||||
RawToken::Operator(Operator::Dot) => token_tag,
|
||||
_ => {
|
||||
return Err(ShellError::type_error(
|
||||
"dot",
|
||||
token.type_name().tagged(token_tag),
|
||||
))
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct InfixShape;
|
||||
|
||||
impl FallibleColorSyntax for InfixShape {
|
||||
type Info = ();
|
||||
type Input = ();
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
outer_shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let checkpoint = token_nodes.checkpoint();
|
||||
let mut shapes = vec![];
|
||||
|
||||
// An infix operator must be prefixed by whitespace. If no whitespace was found, fail
|
||||
color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context, &mut shapes)?;
|
||||
|
||||
// Parse the next TokenNode after the whitespace
|
||||
parse_single_node(
|
||||
checkpoint.iterator,
|
||||
"infix operator",
|
||||
|token, token_tag, _| {
|
||||
match token {
|
||||
// If it's an operator (and not `.`), it's a match
|
||||
RawToken::Operator(operator) if operator != Operator::Dot => {
|
||||
shapes.push(FlatShape::Operator.tagged(token_tag));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Otherwise, it's not a match
|
||||
_ => Err(ShellError::type_error(
|
||||
"infix operator",
|
||||
token.type_name().tagged(token_tag),
|
||||
)),
|
||||
}
|
||||
},
|
||||
)?;
|
||||
|
||||
// An infix operator must be followed by whitespace. If no whitespace was found, fail
|
||||
color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context, &mut shapes)?;
|
||||
|
||||
outer_shapes.extend(shapes);
|
||||
checkpoint.commit();
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl ExpandSyntax for InfixShape {
|
||||
type Output = (Tag, Tagged<Operator>, Tag);
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ShellError> {
|
||||
let checkpoint = token_nodes.checkpoint();
|
||||
|
||||
// An infix operator must be prefixed by whitespace
|
||||
let start = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?;
|
||||
|
||||
// Parse the next TokenNode after the whitespace
|
||||
let operator = parse_single_node(
|
||||
checkpoint.iterator,
|
||||
"infix operator",
|
||||
|token, token_tag, _| {
|
||||
Ok(match token {
|
||||
// If it's an operator (and not `.`), it's a match
|
||||
RawToken::Operator(operator) if operator != Operator::Dot => {
|
||||
operator.tagged(token_tag)
|
||||
}
|
||||
|
||||
// Otherwise, it's not a match
|
||||
_ => {
|
||||
return Err(ShellError::type_error(
|
||||
"infix operator",
|
||||
token.type_name().tagged(token_tag),
|
||||
))
|
||||
}
|
||||
})
|
||||
},
|
||||
)?;
|
||||
|
||||
// An infix operator must be followed by whitespace
|
||||
let end = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?;
|
||||
|
||||
checkpoint.commit();
|
||||
|
||||
Ok((start, operator, end))
|
||||
}
|
||||
}
|
95
src/parser/hir/syntax_shape/flat_shape.rs
Normal file
95
src/parser/hir/syntax_shape/flat_shape.rs
Normal file
@ -0,0 +1,95 @@
|
||||
use crate::parser::{Delimiter, Flag, FlagKind, Operator, RawNumber, RawToken, TokenNode};
|
||||
use crate::{Tag, Tagged, TaggedItem, Text};
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub enum FlatShape {
|
||||
OpenDelimiter(Delimiter),
|
||||
CloseDelimiter(Delimiter),
|
||||
ItVariable,
|
||||
Variable,
|
||||
Operator,
|
||||
Dot,
|
||||
InternalCommand,
|
||||
ExternalCommand,
|
||||
ExternalWord,
|
||||
BareMember,
|
||||
StringMember,
|
||||
String,
|
||||
Path,
|
||||
Word,
|
||||
Pipe,
|
||||
GlobPattern,
|
||||
Flag,
|
||||
ShorthandFlag,
|
||||
Int,
|
||||
Decimal,
|
||||
Whitespace,
|
||||
Error,
|
||||
Size { number: Tag, unit: Tag },
|
||||
}
|
||||
|
||||
impl FlatShape {
|
||||
pub fn from(token: &TokenNode, source: &Text, shapes: &mut Vec<Tagged<FlatShape>>) -> () {
|
||||
match token {
|
||||
TokenNode::Token(token) => match token.item {
|
||||
RawToken::Number(RawNumber::Int(_)) => {
|
||||
shapes.push(FlatShape::Int.tagged(token.tag))
|
||||
}
|
||||
RawToken::Number(RawNumber::Decimal(_)) => {
|
||||
shapes.push(FlatShape::Decimal.tagged(token.tag))
|
||||
}
|
||||
RawToken::Operator(Operator::Dot) => shapes.push(FlatShape::Dot.tagged(token.tag)),
|
||||
RawToken::Operator(_) => shapes.push(FlatShape::Operator.tagged(token.tag)),
|
||||
RawToken::String(_) => shapes.push(FlatShape::String.tagged(token.tag)),
|
||||
RawToken::Variable(v) if v.slice(source) == "it" => {
|
||||
shapes.push(FlatShape::ItVariable.tagged(token.tag))
|
||||
}
|
||||
RawToken::Variable(_) => shapes.push(FlatShape::Variable.tagged(token.tag)),
|
||||
RawToken::ExternalCommand(_) => {
|
||||
shapes.push(FlatShape::ExternalCommand.tagged(token.tag))
|
||||
}
|
||||
RawToken::ExternalWord => shapes.push(FlatShape::ExternalWord.tagged(token.tag)),
|
||||
RawToken::GlobPattern => shapes.push(FlatShape::GlobPattern.tagged(token.tag)),
|
||||
RawToken::Bare => shapes.push(FlatShape::Word.tagged(token.tag)),
|
||||
},
|
||||
TokenNode::Call(_) => unimplemented!(),
|
||||
TokenNode::Nodes(nodes) => {
|
||||
for node in &nodes.item {
|
||||
FlatShape::from(node, source, shapes);
|
||||
}
|
||||
}
|
||||
TokenNode::Delimited(v) => {
|
||||
shapes.push(FlatShape::OpenDelimiter(v.item.delimiter).tagged(v.item.tags.0));
|
||||
for token in &v.item.children {
|
||||
FlatShape::from(token, source, shapes);
|
||||
}
|
||||
shapes.push(FlatShape::CloseDelimiter(v.item.delimiter).tagged(v.item.tags.1));
|
||||
}
|
||||
TokenNode::Pipeline(pipeline) => {
|
||||
for part in &pipeline.parts {
|
||||
if let Some(_) = part.pipe {
|
||||
shapes.push(FlatShape::Pipe.tagged(part.tag));
|
||||
}
|
||||
}
|
||||
}
|
||||
TokenNode::Flag(Tagged {
|
||||
item:
|
||||
Flag {
|
||||
kind: FlagKind::Longhand,
|
||||
..
|
||||
},
|
||||
tag,
|
||||
}) => shapes.push(FlatShape::Flag.tagged(tag)),
|
||||
TokenNode::Flag(Tagged {
|
||||
item:
|
||||
Flag {
|
||||
kind: FlagKind::Shorthand,
|
||||
..
|
||||
},
|
||||
tag,
|
||||
}) => shapes.push(FlatShape::ShorthandFlag.tagged(tag)),
|
||||
TokenNode::Whitespace(_) => shapes.push(FlatShape::Whitespace.tagged(token.tag())),
|
||||
TokenNode::Error(v) => shapes.push(FlatShape::Error.tagged(v.tag)),
|
||||
}
|
||||
}
|
||||
}
|
477
src/parser/hir/tokens_iterator.rs
Normal file
477
src/parser/hir/tokens_iterator.rs
Normal file
@ -0,0 +1,477 @@
|
||||
pub(crate) mod debug;
|
||||
|
||||
use crate::errors::ShellError;
|
||||
use crate::parser::TokenNode;
|
||||
use crate::{Tag, Tagged, TaggedItem};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct TokensIterator<'content> {
|
||||
tokens: &'content [TokenNode],
|
||||
tag: Tag,
|
||||
skip_ws: bool,
|
||||
index: usize,
|
||||
seen: indexmap::IndexSet<usize>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Checkpoint<'content, 'me> {
|
||||
pub(crate) iterator: &'me mut TokensIterator<'content>,
|
||||
index: usize,
|
||||
seen: indexmap::IndexSet<usize>,
|
||||
committed: bool,
|
||||
}
|
||||
|
||||
impl<'content, 'me> Checkpoint<'content, 'me> {
|
||||
pub(crate) fn commit(mut self) {
|
||||
self.committed = true;
|
||||
}
|
||||
}
|
||||
|
||||
impl<'content, 'me> std::ops::Drop for Checkpoint<'content, 'me> {
|
||||
fn drop(&mut self) {
|
||||
if !self.committed {
|
||||
self.iterator.index = self.index;
|
||||
self.iterator.seen = self.seen.clone();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Peeked<'content, 'me> {
|
||||
pub(crate) node: Option<&'content TokenNode>,
|
||||
iterator: &'me mut TokensIterator<'content>,
|
||||
from: usize,
|
||||
to: usize,
|
||||
}
|
||||
|
||||
impl<'content, 'me> Peeked<'content, 'me> {
|
||||
pub fn commit(&mut self) -> Option<&'content TokenNode> {
|
||||
let Peeked {
|
||||
node,
|
||||
iterator,
|
||||
from,
|
||||
to,
|
||||
} = self;
|
||||
|
||||
let node = (*node)?;
|
||||
iterator.commit(*from, *to);
|
||||
Some(node)
|
||||
}
|
||||
|
||||
pub fn not_eof(
|
||||
self,
|
||||
expected: impl Into<String>,
|
||||
) -> Result<PeekedNode<'content, 'me>, ShellError> {
|
||||
match self.node {
|
||||
None => Err(ShellError::unexpected_eof(
|
||||
expected,
|
||||
self.iterator.eof_tag(),
|
||||
)),
|
||||
Some(node) => Ok(PeekedNode {
|
||||
node,
|
||||
iterator: self.iterator,
|
||||
from: self.from,
|
||||
to: self.to,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn type_error(&self, expected: impl Into<String>) -> ShellError {
|
||||
peek_error(&self.node, self.iterator.eof_tag(), expected)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct PeekedNode<'content, 'me> {
|
||||
pub(crate) node: &'content TokenNode,
|
||||
iterator: &'me mut TokensIterator<'content>,
|
||||
from: usize,
|
||||
to: usize,
|
||||
}
|
||||
|
||||
impl<'content, 'me> PeekedNode<'content, 'me> {
|
||||
pub fn commit(self) -> &'content TokenNode {
|
||||
let PeekedNode {
|
||||
node,
|
||||
iterator,
|
||||
from,
|
||||
to,
|
||||
} = self;
|
||||
|
||||
iterator.commit(from, to);
|
||||
node
|
||||
}
|
||||
|
||||
pub fn rollback(self) {}
|
||||
|
||||
pub fn type_error(&self, expected: impl Into<String>) -> ShellError {
|
||||
peek_error(&Some(self.node), self.iterator.eof_tag(), expected)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn peek_error(
|
||||
node: &Option<&TokenNode>,
|
||||
eof_tag: Tag,
|
||||
expected: impl Into<String>,
|
||||
) -> ShellError {
|
||||
match node {
|
||||
None => ShellError::unexpected_eof(expected, eof_tag),
|
||||
Some(node) => ShellError::type_error(expected, node.tagged_type_name()),
|
||||
}
|
||||
}
|
||||
|
||||
impl<'content> TokensIterator<'content> {
|
||||
pub fn new(items: &'content [TokenNode], tag: Tag, skip_ws: bool) -> TokensIterator<'content> {
|
||||
TokensIterator {
|
||||
tokens: items,
|
||||
tag,
|
||||
skip_ws,
|
||||
index: 0,
|
||||
seen: indexmap::IndexSet::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn anchor(&self) -> uuid::Uuid {
|
||||
self.tag.anchor
|
||||
}
|
||||
|
||||
pub fn all(tokens: &'content [TokenNode], tag: Tag) -> TokensIterator<'content> {
|
||||
TokensIterator::new(tokens, tag, false)
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
self.tokens.len()
|
||||
}
|
||||
|
||||
pub fn spanned<T>(
|
||||
&mut self,
|
||||
block: impl FnOnce(&mut TokensIterator<'content>) -> T,
|
||||
) -> Tagged<T> {
|
||||
let start = self.tag_at_cursor();
|
||||
|
||||
let result = block(self);
|
||||
|
||||
let end = self.tag_at_cursor();
|
||||
|
||||
result.tagged(start.until(end))
|
||||
}
|
||||
|
||||
/// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
|
||||
/// that you'll succeed.
|
||||
pub fn checkpoint<'me>(&'me mut self) -> Checkpoint<'content, 'me> {
|
||||
let index = self.index;
|
||||
let seen = self.seen.clone();
|
||||
|
||||
Checkpoint {
|
||||
iterator: self,
|
||||
index,
|
||||
seen,
|
||||
committed: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
|
||||
/// that you'll succeed.
|
||||
pub fn atomic<'me, T>(
|
||||
&'me mut self,
|
||||
block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, ShellError>,
|
||||
) -> Result<T, ShellError> {
|
||||
let index = self.index;
|
||||
let seen = self.seen.clone();
|
||||
|
||||
let checkpoint = Checkpoint {
|
||||
iterator: self,
|
||||
index,
|
||||
seen,
|
||||
committed: false,
|
||||
};
|
||||
|
||||
let value = block(checkpoint.iterator)?;
|
||||
|
||||
checkpoint.commit();
|
||||
return Ok(value);
|
||||
}
|
||||
|
||||
fn eof_tag(&self) -> Tag {
|
||||
Tag::from((self.tag.span.end(), self.tag.span.end(), self.tag.anchor))
|
||||
}
|
||||
|
||||
pub fn typed_tag_at_cursor(&mut self) -> Tagged<&'static str> {
|
||||
let next = self.peek_any();
|
||||
|
||||
match next.node {
|
||||
None => "end".tagged(self.eof_tag()),
|
||||
Some(node) => node.tagged_type_name(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn tag_at_cursor(&mut self) -> Tag {
|
||||
let next = self.peek_any();
|
||||
|
||||
match next.node {
|
||||
None => self.eof_tag(),
|
||||
Some(node) => node.tag(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn remove(&mut self, position: usize) {
|
||||
self.seen.insert(position);
|
||||
}
|
||||
|
||||
pub fn at_end(&self) -> bool {
|
||||
peek(self, self.skip_ws).is_none()
|
||||
}
|
||||
|
||||
pub fn at_end_possible_ws(&self) -> bool {
|
||||
peek(self, true).is_none()
|
||||
}
|
||||
|
||||
pub fn advance(&mut self) {
|
||||
self.seen.insert(self.index);
|
||||
self.index += 1;
|
||||
}
|
||||
|
||||
pub fn extract<T>(&mut self, f: impl Fn(&TokenNode) -> Option<T>) -> Option<(usize, T)> {
|
||||
for (i, item) in self.tokens.iter().enumerate() {
|
||||
if self.seen.contains(&i) {
|
||||
continue;
|
||||
}
|
||||
|
||||
match f(item) {
|
||||
None => {
|
||||
continue;
|
||||
}
|
||||
Some(value) => {
|
||||
self.seen.insert(i);
|
||||
return Some((i, value));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn move_to(&mut self, pos: usize) {
|
||||
self.index = pos;
|
||||
}
|
||||
|
||||
pub fn restart(&mut self) {
|
||||
self.index = 0;
|
||||
}
|
||||
|
||||
pub fn clone(&self) -> TokensIterator<'content> {
|
||||
TokensIterator {
|
||||
tokens: self.tokens,
|
||||
tag: self.tag,
|
||||
index: self.index,
|
||||
seen: self.seen.clone(),
|
||||
skip_ws: self.skip_ws,
|
||||
}
|
||||
}
|
||||
|
||||
// Get the next token, not including whitespace
|
||||
pub fn next_non_ws(&mut self) -> Option<&TokenNode> {
|
||||
let mut peeked = start_next(self, true);
|
||||
peeked.commit()
|
||||
}
|
||||
|
||||
// Peek the next token, not including whitespace
|
||||
pub fn peek_non_ws<'me>(&'me mut self) -> Peeked<'content, 'me> {
|
||||
start_next(self, true)
|
||||
}
|
||||
|
||||
// Peek the next token, including whitespace
|
||||
pub fn peek_any<'me>(&'me mut self) -> Peeked<'content, 'me> {
|
||||
start_next(self, false)
|
||||
}
|
||||
|
||||
// Peek the next token, including whitespace, but not EOF
|
||||
pub fn peek_any_token<'me, T>(
|
||||
&'me mut self,
|
||||
block: impl FnOnce(&'content TokenNode) -> Result<T, ShellError>,
|
||||
) -> Result<T, ShellError> {
|
||||
let peeked = start_next(self, false);
|
||||
let peeked = peeked.not_eof("invariant");
|
||||
|
||||
match peeked {
|
||||
Err(err) => return Err(err),
|
||||
Ok(peeked) => match block(peeked.node) {
|
||||
Err(err) => return Err(err),
|
||||
Ok(val) => {
|
||||
peeked.commit();
|
||||
return Ok(val);
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn commit(&mut self, from: usize, to: usize) {
|
||||
for index in from..to {
|
||||
self.seen.insert(index);
|
||||
}
|
||||
|
||||
self.index = to;
|
||||
}
|
||||
|
||||
pub fn pos(&self, skip_ws: bool) -> Option<usize> {
|
||||
peek_pos(self, skip_ws)
|
||||
}
|
||||
|
||||
pub fn debug_remaining(&self) -> Vec<TokenNode> {
|
||||
let mut tokens = self.clone();
|
||||
tokens.restart();
|
||||
tokens.cloned().collect()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'content> Iterator for TokensIterator<'content> {
|
||||
type Item = &'content TokenNode;
|
||||
|
||||
fn next(&mut self) -> Option<&'content TokenNode> {
|
||||
next(self, self.skip_ws)
|
||||
}
|
||||
}
|
||||
|
||||
fn peek<'content, 'me>(
|
||||
iterator: &'me TokensIterator<'content>,
|
||||
skip_ws: bool,
|
||||
) -> Option<&'me TokenNode> {
|
||||
let mut to = iterator.index;
|
||||
|
||||
loop {
|
||||
if to >= iterator.tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
if iterator.seen.contains(&to) {
|
||||
to += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if to >= iterator.tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let node = &iterator.tokens[to];
|
||||
|
||||
match node {
|
||||
TokenNode::Whitespace(_) if skip_ws => {
|
||||
to += 1;
|
||||
}
|
||||
_ => {
|
||||
return Some(node);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn peek_pos<'content, 'me>(
|
||||
iterator: &'me TokensIterator<'content>,
|
||||
skip_ws: bool,
|
||||
) -> Option<usize> {
|
||||
let mut to = iterator.index;
|
||||
|
||||
loop {
|
||||
if to >= iterator.tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
if iterator.seen.contains(&to) {
|
||||
to += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if to >= iterator.tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let node = &iterator.tokens[to];
|
||||
|
||||
match node {
|
||||
TokenNode::Whitespace(_) if skip_ws => {
|
||||
to += 1;
|
||||
}
|
||||
_ => return Some(to),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn start_next<'content, 'me>(
|
||||
iterator: &'me mut TokensIterator<'content>,
|
||||
skip_ws: bool,
|
||||
) -> Peeked<'content, 'me> {
|
||||
let from = iterator.index;
|
||||
let mut to = iterator.index;
|
||||
|
||||
loop {
|
||||
if to >= iterator.tokens.len() {
|
||||
return Peeked {
|
||||
node: None,
|
||||
iterator,
|
||||
from,
|
||||
to,
|
||||
};
|
||||
}
|
||||
|
||||
if iterator.seen.contains(&to) {
|
||||
to += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
if to >= iterator.tokens.len() {
|
||||
return Peeked {
|
||||
node: None,
|
||||
iterator,
|
||||
from,
|
||||
to,
|
||||
};
|
||||
}
|
||||
|
||||
let node = &iterator.tokens[to];
|
||||
|
||||
match node {
|
||||
TokenNode::Whitespace(_) if skip_ws => {
|
||||
to += 1;
|
||||
}
|
||||
_ => {
|
||||
to += 1;
|
||||
return Peeked {
|
||||
node: Some(node),
|
||||
iterator,
|
||||
from,
|
||||
to,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn next<'me, 'content>(
|
||||
iterator: &'me mut TokensIterator<'content>,
|
||||
skip_ws: bool,
|
||||
) -> Option<&'content TokenNode> {
|
||||
loop {
|
||||
if iterator.index >= iterator.tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
if iterator.seen.contains(&iterator.index) {
|
||||
iterator.advance();
|
||||
continue;
|
||||
}
|
||||
|
||||
if iterator.index >= iterator.tokens.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
match &iterator.tokens[iterator.index] {
|
||||
TokenNode::Whitespace(_) if skip_ws => {
|
||||
iterator.advance();
|
||||
}
|
||||
other => {
|
||||
iterator.advance();
|
||||
return Some(other);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
30
src/parser/hir/tokens_iterator/debug.rs
Normal file
30
src/parser/hir/tokens_iterator/debug.rs
Normal file
@ -0,0 +1,30 @@
|
||||
use crate::parser::hir::tokens_iterator::TokensIterator;
|
||||
use crate::traits::ToDebug;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) enum DebugIteratorToken {
|
||||
Seen(String),
|
||||
Unseen(String),
|
||||
Cursor,
|
||||
}
|
||||
|
||||
pub(crate) fn debug_tokens(iterator: &TokensIterator, source: &str) -> Vec<DebugIteratorToken> {
|
||||
let mut out = vec![];
|
||||
|
||||
for (i, token) in iterator.tokens.iter().enumerate() {
|
||||
if iterator.index == i {
|
||||
out.push(DebugIteratorToken::Cursor);
|
||||
}
|
||||
|
||||
if iterator.seen.contains(&i) {
|
||||
out.push(DebugIteratorToken::Seen(format!("{}", token.debug(source))));
|
||||
} else {
|
||||
out.push(DebugIteratorToken::Unseen(format!(
|
||||
"{}",
|
||||
token.debug(source)
|
||||
)));
|
||||
}
|
||||
}
|
||||
|
||||
out
|
||||
}
|
@ -1,6 +1,7 @@
|
||||
use crate::Tag;
|
||||
use derive_new::new;
|
||||
use language_reporting::{FileName, Location};
|
||||
use log::trace;
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(new, Debug, Clone)]
|
||||
@ -18,7 +19,7 @@ impl language_reporting::ReportingFiles for Files {
|
||||
from_index: usize,
|
||||
to_index: usize,
|
||||
) -> Option<Self::Span> {
|
||||
Some(Tag::from((from_index, to_index, file)))
|
||||
Some(Tag::new(file, (from_index, to_index).into()))
|
||||
}
|
||||
|
||||
fn file_id(&self, tag: Self::Span) -> Self::FileId {
|
||||
@ -38,8 +39,18 @@ impl language_reporting::ReportingFiles for Files {
|
||||
let mut seen_lines = 0;
|
||||
let mut seen_bytes = 0;
|
||||
|
||||
for (pos, _) in source.match_indices('\n') {
|
||||
if pos > byte_index {
|
||||
for (pos, slice) in source.match_indices('\n') {
|
||||
trace!(
|
||||
"SEARCH={} SEEN={} POS={} SLICE={:?} LEN={} ALL={:?}",
|
||||
byte_index,
|
||||
seen_bytes,
|
||||
pos,
|
||||
slice,
|
||||
source.len(),
|
||||
source
|
||||
);
|
||||
|
||||
if pos >= byte_index {
|
||||
return Some(language_reporting::Location::new(
|
||||
seen_lines,
|
||||
byte_index - seen_bytes,
|
||||
@ -53,7 +64,7 @@ impl language_reporting::ReportingFiles for Files {
|
||||
if seen_lines == 0 {
|
||||
Some(language_reporting::Location::new(0, byte_index))
|
||||
} else {
|
||||
None
|
||||
panic!("byte index {} wasn't valid", byte_index);
|
||||
}
|
||||
}
|
||||
|
||||
@ -64,7 +75,7 @@ impl language_reporting::ReportingFiles for Files {
|
||||
|
||||
for (pos, _) in source.match_indices('\n') {
|
||||
if seen_lines == lineno {
|
||||
return Some(Tag::from((seen_bytes, pos, file)));
|
||||
return Some(Tag::new(file, (seen_bytes, pos + 1).into()));
|
||||
} else {
|
||||
seen_lines += 1;
|
||||
seen_bytes = pos + 1;
|
||||
@ -72,16 +83,18 @@ impl language_reporting::ReportingFiles for Files {
|
||||
}
|
||||
|
||||
if seen_lines == 0 {
|
||||
Some(Tag::from((0, self.snippet.len() - 1, file)))
|
||||
Some(Tag::new(file, (0, self.snippet.len() - 1).into()))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn source(&self, tag: Self::Span) -> Option<String> {
|
||||
if tag.span.start > tag.span.end {
|
||||
trace!("source(tag={:?}) snippet={:?}", tag, self.snippet);
|
||||
|
||||
if tag.span.start() > tag.span.end() {
|
||||
return None;
|
||||
} else if tag.span.end >= self.snippet.len() {
|
||||
} else if tag.span.end() > self.snippet.len() {
|
||||
return None;
|
||||
}
|
||||
Some(tag.slice(&self.snippet).to_string())
|
||||
|
@ -1,4 +1,5 @@
|
||||
use crate::Tag;
|
||||
use crate::parser::hir::syntax_shape::flat_shape::FlatShape;
|
||||
use crate::{Tag, Tagged, TaggedItem};
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@ -12,6 +13,15 @@ pub enum FlagKind {
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Getters, new)]
|
||||
#[get = "pub(crate)"]
|
||||
pub struct Flag {
|
||||
kind: FlagKind,
|
||||
name: Tag,
|
||||
pub(crate) kind: FlagKind,
|
||||
pub(crate) name: Tag,
|
||||
}
|
||||
|
||||
impl Tagged<Flag> {
|
||||
pub fn color(&self) -> Tagged<FlatShape> {
|
||||
match self.item.kind {
|
||||
FlagKind::Longhand => FlatShape::Flag.tagged(self.tag),
|
||||
FlagKind::Shorthand => FlatShape::ShorthandFlag.tagged(self.tag),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -11,6 +11,7 @@ pub enum Operator {
|
||||
GreaterThan,
|
||||
LessThanOrEqual,
|
||||
GreaterThanOrEqual,
|
||||
Dot,
|
||||
}
|
||||
|
||||
impl ToDebug for Operator {
|
||||
@ -32,6 +33,7 @@ impl Operator {
|
||||
Operator::GreaterThan => ">",
|
||||
Operator::LessThanOrEqual => "<=",
|
||||
Operator::GreaterThanOrEqual => ">=",
|
||||
Operator::Dot => ".",
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -52,6 +54,7 @@ impl FromStr for Operator {
|
||||
">" => Ok(Operator::GreaterThan),
|
||||
"<=" => Ok(Operator::LessThanOrEqual),
|
||||
">=" => Ok(Operator::GreaterThanOrEqual),
|
||||
"." => Ok(Operator::Dot),
|
||||
_ => Err(()),
|
||||
}
|
||||
}
|
||||
|
File diff suppressed because it is too large
Load Diff
@ -1,4 +1,4 @@
|
||||
use crate::parser::CallNode;
|
||||
use crate::parser::TokenNode;
|
||||
use crate::traits::ToDebug;
|
||||
use crate::{Tag, Tagged};
|
||||
use derive_new::new;
|
||||
@ -7,20 +7,16 @@ use std::fmt;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, new)]
|
||||
pub struct Pipeline {
|
||||
pub(crate) parts: Vec<PipelineElement>,
|
||||
pub(crate) post_ws: Option<Tag>,
|
||||
pub(crate) parts: Vec<Tagged<PipelineElement>>,
|
||||
// pub(crate) post_ws: Option<Tag>,
|
||||
}
|
||||
|
||||
impl ToDebug for Pipeline {
|
||||
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
||||
for part in &self.parts {
|
||||
for part in self.parts.iter() {
|
||||
write!(f, "{}", part.debug(source))?;
|
||||
}
|
||||
|
||||
if let Some(post_ws) = self.post_ws {
|
||||
write!(f, "{}", post_ws.slice(source))?
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@ -28,10 +24,7 @@ impl ToDebug for Pipeline {
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
|
||||
pub struct PipelineElement {
|
||||
pub pipe: Option<Tag>,
|
||||
pub pre_ws: Option<Tag>,
|
||||
#[get = "pub(crate)"]
|
||||
call: Tagged<CallNode>,
|
||||
pub post_ws: Option<Tag>,
|
||||
pub tokens: Tagged<Vec<TokenNode>>,
|
||||
}
|
||||
|
||||
impl ToDebug for PipelineElement {
|
||||
@ -40,14 +33,8 @@ impl ToDebug for PipelineElement {
|
||||
write!(f, "{}", pipe.slice(source))?;
|
||||
}
|
||||
|
||||
if let Some(pre_ws) = self.pre_ws {
|
||||
write!(f, "{}", pre_ws.slice(source))?;
|
||||
}
|
||||
|
||||
write!(f, "{}", self.call.debug(source))?;
|
||||
|
||||
if let Some(post_ws) = self.post_ws {
|
||||
write!(f, "{}", post_ws.slice(source))?;
|
||||
for token in &self.tokens.item {
|
||||
write!(f, "{}", token.debug(source))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
@ -1,5 +1,6 @@
|
||||
use crate::errors::ShellError;
|
||||
use crate::parser::parse::{call_node::*, flag::*, operator::*, pipeline::*, tokens::*};
|
||||
use crate::prelude::*;
|
||||
use crate::traits::ToDebug;
|
||||
use crate::{Tag, Tagged, Text};
|
||||
use derive_new::new;
|
||||
@ -12,15 +13,13 @@ pub enum TokenNode {
|
||||
Token(Token),
|
||||
|
||||
Call(Tagged<CallNode>),
|
||||
Nodes(Tagged<Vec<TokenNode>>),
|
||||
Delimited(Tagged<DelimitedNode>),
|
||||
Pipeline(Tagged<Pipeline>),
|
||||
Operator(Tagged<Operator>),
|
||||
Flag(Tagged<Flag>),
|
||||
Member(Tag),
|
||||
Whitespace(Tag),
|
||||
|
||||
Error(Tagged<Box<ShellError>>),
|
||||
Path(Tagged<PathNode>),
|
||||
Error(Tagged<ShellError>),
|
||||
}
|
||||
|
||||
impl ToDebug for TokenNode {
|
||||
@ -78,7 +77,7 @@ impl fmt::Debug for DebugTokenNode<'_> {
|
||||
)
|
||||
}
|
||||
TokenNode::Pipeline(pipeline) => write!(f, "{}", pipeline.debug(self.source)),
|
||||
TokenNode::Error(s) => write!(f, "<error> for {:?}", s.tag().slice(self.source)),
|
||||
TokenNode::Error(_) => write!(f, "<error>"),
|
||||
rest => write!(f, "{}", rest.tag().slice(self.source)),
|
||||
}
|
||||
}
|
||||
@ -94,32 +93,31 @@ impl TokenNode {
|
||||
pub fn tag(&self) -> Tag {
|
||||
match self {
|
||||
TokenNode::Token(t) => t.tag(),
|
||||
TokenNode::Nodes(t) => t.tag(),
|
||||
TokenNode::Call(s) => s.tag(),
|
||||
TokenNode::Delimited(s) => s.tag(),
|
||||
TokenNode::Pipeline(s) => s.tag(),
|
||||
TokenNode::Operator(s) => s.tag(),
|
||||
TokenNode::Flag(s) => s.tag(),
|
||||
TokenNode::Member(s) => *s,
|
||||
TokenNode::Whitespace(s) => *s,
|
||||
TokenNode::Error(s) => s.tag(),
|
||||
TokenNode::Path(s) => s.tag(),
|
||||
TokenNode::Error(s) => return s.tag,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn type_name(&self) -> String {
|
||||
pub fn type_name(&self) -> &'static str {
|
||||
match self {
|
||||
TokenNode::Token(t) => t.type_name(),
|
||||
TokenNode::Nodes(_) => "nodes",
|
||||
TokenNode::Call(_) => "command",
|
||||
TokenNode::Delimited(d) => d.type_name(),
|
||||
TokenNode::Pipeline(_) => "pipeline",
|
||||
TokenNode::Operator(_) => "operator",
|
||||
TokenNode::Flag(_) => "flag",
|
||||
TokenNode::Member(_) => "member",
|
||||
TokenNode::Whitespace(_) => "whitespace",
|
||||
TokenNode::Error(_) => "error",
|
||||
TokenNode::Path(_) => "path",
|
||||
}
|
||||
.to_string()
|
||||
}
|
||||
|
||||
pub fn tagged_type_name(&self) -> Tagged<&'static str> {
|
||||
self.type_name().tagged(self.tag())
|
||||
}
|
||||
|
||||
pub fn old_debug<'a>(&'a self, source: &'a Text) -> DebugTokenNode<'a> {
|
||||
@ -134,6 +132,16 @@ impl TokenNode {
|
||||
self.tag().slice(source)
|
||||
}
|
||||
|
||||
pub fn get_variable(&self) -> Result<(Tag, Tag), ShellError> {
|
||||
match self {
|
||||
TokenNode::Token(Tagged {
|
||||
item: RawToken::Variable(inner_tag),
|
||||
tag: outer_tag,
|
||||
}) => Ok((*outer_tag, *inner_tag)),
|
||||
_ => Err(ShellError::type_error("variable", self.tagged_type_name())),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_bare(&self) -> bool {
|
||||
match self {
|
||||
TokenNode::Token(Tagged {
|
||||
@ -144,6 +152,41 @@ impl TokenNode {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_pattern(&self) -> bool {
|
||||
match self {
|
||||
TokenNode::Token(Tagged {
|
||||
item: RawToken::GlobPattern,
|
||||
..
|
||||
}) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_dot(&self) -> bool {
|
||||
match self {
|
||||
TokenNode::Token(Tagged {
|
||||
item: RawToken::Operator(Operator::Dot),
|
||||
..
|
||||
}) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_block(&self) -> Option<(Tagged<&[TokenNode]>, (Tag, Tag))> {
|
||||
match self {
|
||||
TokenNode::Delimited(Tagged {
|
||||
item:
|
||||
DelimitedNode {
|
||||
delimiter,
|
||||
children,
|
||||
tags,
|
||||
},
|
||||
tag,
|
||||
}) if *delimiter == Delimiter::Brace => Some(((&children[..]).tagged(tag), *tags)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_external(&self) -> bool {
|
||||
match self {
|
||||
TokenNode::Token(Tagged {
|
||||
@ -178,7 +221,54 @@ impl TokenNode {
|
||||
pub fn as_pipeline(&self) -> Result<Pipeline, ShellError> {
|
||||
match self {
|
||||
TokenNode::Pipeline(Tagged { item, .. }) => Ok(item.clone()),
|
||||
_ => Err(ShellError::string("unimplemented")),
|
||||
_ => Err(ShellError::unimplemented("unimplemented")),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_whitespace(&self) -> bool {
|
||||
match self {
|
||||
TokenNode::Whitespace(_) => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect_string(&self) -> (Tag, Tag) {
|
||||
match self {
|
||||
TokenNode::Token(Tagged {
|
||||
item: RawToken::String(inner_tag),
|
||||
tag: outer_tag,
|
||||
}) => (*outer_tag, *inner_tag),
|
||||
other => panic!("Expected string, found {:?}", other),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
impl TokenNode {
|
||||
pub fn expect_list(&self) -> Tagged<&[TokenNode]> {
|
||||
match self {
|
||||
TokenNode::Nodes(Tagged { item, tag }) => (&item[..]).tagged(tag),
|
||||
other => panic!("Expected list, found {:?}", other),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect_var(&self) -> (Tag, Tag) {
|
||||
match self {
|
||||
TokenNode::Token(Tagged {
|
||||
item: RawToken::Variable(inner_tag),
|
||||
tag: outer_tag,
|
||||
}) => (*outer_tag, *inner_tag),
|
||||
other => panic!("Expected var, found {:?}", other),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect_bare(&self) -> Tag {
|
||||
match self {
|
||||
TokenNode::Token(Tagged {
|
||||
item: RawToken::Bare,
|
||||
tag,
|
||||
}) => *tag,
|
||||
other => panic!("Expected var, found {:?}", other),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -186,8 +276,9 @@ impl TokenNode {
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
|
||||
#[get = "pub(crate)"]
|
||||
pub struct DelimitedNode {
|
||||
delimiter: Delimiter,
|
||||
children: Vec<TokenNode>,
|
||||
pub(crate) delimiter: Delimiter,
|
||||
pub(crate) tags: (Tag, Tag),
|
||||
pub(crate) children: Vec<TokenNode>,
|
||||
}
|
||||
|
||||
impl DelimitedNode {
|
||||
@ -207,6 +298,24 @@ pub enum Delimiter {
|
||||
Square,
|
||||
}
|
||||
|
||||
impl Delimiter {
|
||||
pub(crate) fn open(&self) -> &'static str {
|
||||
match self {
|
||||
Delimiter::Paren => "(",
|
||||
Delimiter::Brace => "{",
|
||||
Delimiter::Square => "[",
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn close(&self) -> &'static str {
|
||||
match self {
|
||||
Delimiter::Paren => ")",
|
||||
Delimiter::Brace => "}",
|
||||
Delimiter::Square => "]",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
|
||||
#[get = "pub(crate)"]
|
||||
pub struct PathNode {
|
||||
|
@ -3,9 +3,8 @@ use crate::prelude::*;
|
||||
use crate::parser::parse::flag::{Flag, FlagKind};
|
||||
use crate::parser::parse::operator::Operator;
|
||||
use crate::parser::parse::pipeline::{Pipeline, PipelineElement};
|
||||
use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, PathNode, TokenNode};
|
||||
use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
|
||||
use crate::parser::parse::tokens::{RawNumber, RawToken};
|
||||
use crate::parser::parse::unit::Unit;
|
||||
use crate::parser::CallNode;
|
||||
use derive_new::new;
|
||||
use uuid::Uuid;
|
||||
@ -31,60 +30,68 @@ impl TokenTreeBuilder {
|
||||
(node, builder.output)
|
||||
}
|
||||
|
||||
pub fn pipeline(input: Vec<(Option<&str>, CurriedCall, Option<&str>)>) -> CurriedToken {
|
||||
let input: Vec<(Option<String>, CurriedCall, Option<String>)> = input
|
||||
.into_iter()
|
||||
.map(|(pre, call, post)| {
|
||||
(
|
||||
pre.map(|s| s.to_string()),
|
||||
call,
|
||||
post.map(|s| s.to_string()),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
fn build_tagged<T>(&mut self, callback: impl FnOnce(&mut TokenTreeBuilder) -> T) -> Tagged<T> {
|
||||
let start = self.pos;
|
||||
let ret = callback(self);
|
||||
let end = self.pos;
|
||||
|
||||
ret.tagged((start, end, self.anchor))
|
||||
}
|
||||
|
||||
pub fn pipeline(input: Vec<Vec<CurriedToken>>) -> CurriedToken {
|
||||
Box::new(move |b| {
|
||||
let start = b.pos;
|
||||
|
||||
let mut out: Vec<PipelineElement> = vec![];
|
||||
let mut out: Vec<Tagged<PipelineElement>> = vec![];
|
||||
|
||||
let mut input = input.into_iter().peekable();
|
||||
let (pre, call, post) = input
|
||||
let head = input
|
||||
.next()
|
||||
.expect("A pipeline must contain at least one element");
|
||||
|
||||
let pipe = None;
|
||||
let pre_tag = pre.map(|pre| b.consume_tag(&pre));
|
||||
let call = call(b);
|
||||
let post_tag = post.map(|post| b.consume_tag(&post));
|
||||
let head = b.build_tagged(|b| head.into_iter().map(|node| node(b)).collect());
|
||||
|
||||
out.push(PipelineElement::new(pipe, pre_tag, call, post_tag));
|
||||
let head_tag: Tag = head.tag;
|
||||
out.push(PipelineElement::new(pipe, head).tagged(head_tag));
|
||||
|
||||
loop {
|
||||
match input.next() {
|
||||
None => break,
|
||||
Some((pre, call, post)) => {
|
||||
Some(node) => {
|
||||
let start = b.pos;
|
||||
let pipe = Some(b.consume_tag("|"));
|
||||
let pre_span = pre.map(|pre| b.consume_tag(&pre));
|
||||
let call = call(b);
|
||||
let post_span = post.map(|post| b.consume_tag(&post));
|
||||
let node =
|
||||
b.build_tagged(|b| node.into_iter().map(|node| node(b)).collect());
|
||||
let end = b.pos;
|
||||
|
||||
out.push(PipelineElement::new(pipe, pre_span, call, post_span));
|
||||
out.push(PipelineElement::new(pipe, node).tagged((start, end, b.anchor)));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let end = b.pos;
|
||||
|
||||
TokenTreeBuilder::tagged_pipeline((out, None), (start, end, b.anchor))
|
||||
TokenTreeBuilder::tagged_pipeline(out, (start, end, b.anchor))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn tagged_pipeline(
|
||||
input: (Vec<PipelineElement>, Option<Tag>),
|
||||
tag: impl Into<Tag>,
|
||||
) -> TokenNode {
|
||||
TokenNode::Pipeline(Pipeline::new(input.0, input.1.into()).tagged(tag.into()))
|
||||
pub fn tagged_pipeline(input: Vec<Tagged<PipelineElement>>, tag: impl Into<Tag>) -> TokenNode {
|
||||
TokenNode::Pipeline(Pipeline::new(input).tagged(tag.into()))
|
||||
}
|
||||
|
||||
pub fn token_list(input: Vec<CurriedToken>) -> CurriedToken {
|
||||
Box::new(move |b| {
|
||||
let start = b.pos;
|
||||
let tokens = input.into_iter().map(|i| i(b)).collect();
|
||||
let end = b.pos;
|
||||
|
||||
TokenTreeBuilder::tagged_token_list(tokens, (start, end, b.anchor))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn tagged_token_list(input: Vec<TokenNode>, tag: impl Into<Tag>) -> TokenNode {
|
||||
TokenNode::Nodes(input.tagged(tag))
|
||||
}
|
||||
|
||||
pub fn op(input: impl Into<Operator>) -> CurriedToken {
|
||||
@ -100,7 +107,7 @@ impl TokenTreeBuilder {
|
||||
}
|
||||
|
||||
pub fn tagged_op(input: impl Into<Operator>, tag: impl Into<Tag>) -> TokenNode {
|
||||
TokenNode::Operator(input.into().tagged(tag.into()))
|
||||
TokenNode::Token(RawToken::Operator(input.into()).tagged(tag.into()))
|
||||
}
|
||||
|
||||
pub fn string(input: impl Into<String>) -> CurriedToken {
|
||||
@ -168,8 +175,23 @@ impl TokenTreeBuilder {
|
||||
TokenNode::Token(RawToken::ExternalWord.tagged(input.into()))
|
||||
}
|
||||
|
||||
pub fn tagged_external(input: impl Into<Tag>, tag: impl Into<Tag>) -> TokenNode {
|
||||
TokenNode::Token(RawToken::ExternalCommand(input.into()).tagged(tag.into()))
|
||||
pub fn external_command(input: impl Into<String>) -> CurriedToken {
|
||||
let input = input.into();
|
||||
|
||||
Box::new(move |b| {
|
||||
let (outer_start, _) = b.consume("^");
|
||||
let (inner_start, end) = b.consume(&input);
|
||||
b.pos = end;
|
||||
|
||||
TokenTreeBuilder::tagged_external_command(
|
||||
(inner_start, end, b.anchor),
|
||||
(outer_start, end, b.anchor),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn tagged_external_command(inner: impl Into<Tag>, outer: impl Into<Tag>) -> TokenNode {
|
||||
TokenNode::Token(RawToken::ExternalCommand(inner.into()).tagged(outer.into()))
|
||||
}
|
||||
|
||||
pub fn int(input: impl Into<BigInt>) -> CurriedToken {
|
||||
@ -204,54 +226,6 @@ impl TokenTreeBuilder {
|
||||
TokenNode::Token(RawToken::Number(input.into()).tagged(tag.into()))
|
||||
}
|
||||
|
||||
pub fn size(int: impl Into<i64>, unit: impl Into<Unit>) -> CurriedToken {
|
||||
let int = int.into();
|
||||
let unit = unit.into();
|
||||
|
||||
Box::new(move |b| {
|
||||
let (start_int, end_int) = b.consume(&int.to_string());
|
||||
let (_, end_unit) = b.consume(unit.as_str());
|
||||
b.pos = end_unit;
|
||||
|
||||
TokenTreeBuilder::tagged_size(
|
||||
(RawNumber::Int((start_int, end_int, b.anchor).into()), unit),
|
||||
(start_int, end_unit, b.anchor),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn tagged_size(
|
||||
input: (impl Into<RawNumber>, impl Into<Unit>),
|
||||
tag: impl Into<Tag>,
|
||||
) -> TokenNode {
|
||||
let (int, unit) = (input.0.into(), input.1.into());
|
||||
|
||||
TokenNode::Token(RawToken::Size(int, unit).tagged(tag.into()))
|
||||
}
|
||||
|
||||
pub fn path(head: CurriedToken, tail: Vec<CurriedToken>) -> CurriedToken {
|
||||
Box::new(move |b| {
|
||||
let start = b.pos;
|
||||
let head = head(b);
|
||||
|
||||
let mut output = vec![];
|
||||
|
||||
for item in tail {
|
||||
b.consume(".");
|
||||
|
||||
output.push(item(b));
|
||||
}
|
||||
|
||||
let end = b.pos;
|
||||
|
||||
TokenTreeBuilder::tagged_path((head, output), (start, end, b.anchor))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn tagged_path(input: (TokenNode, Vec<TokenNode>), tag: impl Into<Tag>) -> TokenNode {
|
||||
TokenNode::Path(PathNode::new(Box::new(input.0), input.1).tagged(tag.into()))
|
||||
}
|
||||
|
||||
pub fn var(input: impl Into<String>) -> CurriedToken {
|
||||
let input = input.into();
|
||||
|
||||
@ -297,19 +271,6 @@ impl TokenTreeBuilder {
|
||||
TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into()).tagged(tag.into()))
|
||||
}
|
||||
|
||||
pub fn member(input: impl Into<String>) -> CurriedToken {
|
||||
let input = input.into();
|
||||
|
||||
Box::new(move |b| {
|
||||
let (start, end) = b.consume(&input);
|
||||
TokenTreeBuilder::tagged_member((start, end, b.anchor))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn tagged_member(tag: impl Into<Tag>) -> TokenNode {
|
||||
TokenNode::Member(tag.into())
|
||||
}
|
||||
|
||||
pub fn call(head: CurriedToken, input: Vec<CurriedToken>) -> CurriedCall {
|
||||
Box::new(move |b| {
|
||||
let start = b.pos;
|
||||
@ -340,58 +301,79 @@ impl TokenTreeBuilder {
|
||||
CallNode::new(Box::new(head), tail).tagged(tag.into())
|
||||
}
|
||||
|
||||
fn consume_delimiter(
|
||||
&mut self,
|
||||
input: Vec<CurriedToken>,
|
||||
_open: &str,
|
||||
_close: &str,
|
||||
) -> (Tag, Tag, Tag, Vec<TokenNode>) {
|
||||
let (start_open_paren, end_open_paren) = self.consume("(");
|
||||
let mut output = vec![];
|
||||
for item in input {
|
||||
output.push(item(self));
|
||||
}
|
||||
|
||||
let (start_close_paren, end_close_paren) = self.consume(")");
|
||||
|
||||
let open = Tag::from((start_open_paren, end_open_paren, self.anchor));
|
||||
let close = Tag::from((start_close_paren, end_close_paren, self.anchor));
|
||||
let whole = Tag::from((start_open_paren, end_close_paren, self.anchor));
|
||||
|
||||
(open, close, whole, output)
|
||||
}
|
||||
|
||||
pub fn parens(input: Vec<CurriedToken>) -> CurriedToken {
|
||||
Box::new(move |b| {
|
||||
let (start, _) = b.consume("(");
|
||||
let mut output = vec![];
|
||||
for item in input {
|
||||
output.push(item(b));
|
||||
}
|
||||
let (open, close, whole, output) = b.consume_delimiter(input, "(", ")");
|
||||
|
||||
let (_, end) = b.consume(")");
|
||||
|
||||
TokenTreeBuilder::tagged_parens(output, (start, end, b.anchor))
|
||||
TokenTreeBuilder::tagged_parens(output, (open, close), whole)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn tagged_parens(input: impl Into<Vec<TokenNode>>, tag: impl Into<Tag>) -> TokenNode {
|
||||
TokenNode::Delimited(DelimitedNode::new(Delimiter::Paren, input.into()).tagged(tag.into()))
|
||||
pub fn tagged_parens(
|
||||
input: impl Into<Vec<TokenNode>>,
|
||||
tags: (Tag, Tag),
|
||||
tag: impl Into<Tag>,
|
||||
) -> TokenNode {
|
||||
TokenNode::Delimited(
|
||||
DelimitedNode::new(Delimiter::Paren, tags, input.into()).tagged(tag.into()),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn square(input: Vec<CurriedToken>) -> CurriedToken {
|
||||
Box::new(move |b| {
|
||||
let (start, _) = b.consume("[");
|
||||
let mut output = vec![];
|
||||
for item in input {
|
||||
output.push(item(b));
|
||||
}
|
||||
let (open, close, whole, tokens) = b.consume_delimiter(input, "[", "]");
|
||||
|
||||
let (_, end) = b.consume("]");
|
||||
|
||||
TokenTreeBuilder::tagged_square(output, (start, end, b.anchor))
|
||||
TokenTreeBuilder::tagged_square(tokens, (open, close), whole)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn tagged_square(input: impl Into<Vec<TokenNode>>, tag: impl Into<Tag>) -> TokenNode {
|
||||
TokenNode::Delimited(DelimitedNode::new(Delimiter::Square, input.into()).tagged(tag.into()))
|
||||
pub fn tagged_square(
|
||||
input: impl Into<Vec<TokenNode>>,
|
||||
tags: (Tag, Tag),
|
||||
tag: impl Into<Tag>,
|
||||
) -> TokenNode {
|
||||
TokenNode::Delimited(
|
||||
DelimitedNode::new(Delimiter::Square, tags, input.into()).tagged(tag.into()),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn braced(input: Vec<CurriedToken>) -> CurriedToken {
|
||||
Box::new(move |b| {
|
||||
let (start, _) = b.consume("{ ");
|
||||
let mut output = vec![];
|
||||
for item in input {
|
||||
output.push(item(b));
|
||||
}
|
||||
let (open, close, whole, tokens) = b.consume_delimiter(input, "{", "}");
|
||||
|
||||
let (_, end) = b.consume(" }");
|
||||
|
||||
TokenTreeBuilder::tagged_brace(output, (start, end, b.anchor))
|
||||
TokenTreeBuilder::tagged_brace(tokens, (open, close), whole)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn tagged_brace(input: impl Into<Vec<TokenNode>>, tag: impl Into<Tag>) -> TokenNode {
|
||||
TokenNode::Delimited(DelimitedNode::new(Delimiter::Brace, input.into()).tagged(tag.into()))
|
||||
pub fn tagged_brace(
|
||||
input: impl Into<Vec<TokenNode>>,
|
||||
tags: (Tag, Tag),
|
||||
tag: impl Into<Tag>,
|
||||
) -> TokenNode {
|
||||
TokenNode::Delimited(
|
||||
DelimitedNode::new(Delimiter::Brace, tags, input.into()).tagged(tag.into()),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn sp() -> CurriedToken {
|
||||
|
@ -1,4 +1,4 @@
|
||||
use crate::parser::parse::unit::*;
|
||||
use crate::parser::Operator;
|
||||
use crate::prelude::*;
|
||||
use crate::{Tagged, Text};
|
||||
use std::fmt;
|
||||
@ -7,7 +7,7 @@ use std::str::FromStr;
|
||||
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
|
||||
pub enum RawToken {
|
||||
Number(RawNumber),
|
||||
Size(RawNumber, Unit),
|
||||
Operator(Operator),
|
||||
String(Tag),
|
||||
Variable(Tag),
|
||||
ExternalCommand(Tag),
|
||||
@ -16,6 +16,21 @@ pub enum RawToken {
|
||||
Bare,
|
||||
}
|
||||
|
||||
impl RawToken {
|
||||
pub fn type_name(&self) -> &'static str {
|
||||
match self {
|
||||
RawToken::Number(_) => "Number",
|
||||
RawToken::Operator(..) => "operator",
|
||||
RawToken::String(_) => "String",
|
||||
RawToken::Variable(_) => "variable",
|
||||
RawToken::ExternalCommand(_) => "external command",
|
||||
RawToken::ExternalWord => "external word",
|
||||
RawToken::GlobPattern => "glob pattern",
|
||||
RawToken::Bare => "String",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
|
||||
pub enum RawNumber {
|
||||
Int(Tag),
|
||||
@ -45,21 +60,6 @@ impl RawNumber {
|
||||
}
|
||||
}
|
||||
|
||||
impl RawToken {
|
||||
pub fn type_name(&self) -> &'static str {
|
||||
match self {
|
||||
RawToken::Number(_) => "Number",
|
||||
RawToken::Size(..) => "Size",
|
||||
RawToken::String(_) => "String",
|
||||
RawToken::Variable(_) => "Variable",
|
||||
RawToken::ExternalCommand(_) => "ExternalCommand",
|
||||
RawToken::ExternalWord => "ExternalWord",
|
||||
RawToken::GlobPattern => "GlobPattern",
|
||||
RawToken::Bare => "String",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub type Token = Tagged<RawToken>;
|
||||
|
||||
impl Token {
|
||||
@ -69,6 +69,76 @@ impl Token {
|
||||
source,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn extract_number(&self) -> Option<Tagged<RawNumber>> {
|
||||
match self.item {
|
||||
RawToken::Number(number) => Some((number).tagged(self.tag)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn extract_int(&self) -> Option<(Tag, Tag)> {
|
||||
match self.item {
|
||||
RawToken::Number(RawNumber::Int(int)) => Some((int, self.tag)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn extract_decimal(&self) -> Option<(Tag, Tag)> {
|
||||
match self.item {
|
||||
RawToken::Number(RawNumber::Decimal(decimal)) => Some((decimal, self.tag)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn extract_operator(&self) -> Option<Tagged<Operator>> {
|
||||
match self.item {
|
||||
RawToken::Operator(operator) => Some(operator.tagged(self.tag)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn extract_string(&self) -> Option<(Tag, Tag)> {
|
||||
match self.item {
|
||||
RawToken::String(tag) => Some((tag, self.tag)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn extract_variable(&self) -> Option<(Tag, Tag)> {
|
||||
match self.item {
|
||||
RawToken::Variable(tag) => Some((tag, self.tag)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn extract_external_command(&self) -> Option<(Tag, Tag)> {
|
||||
match self.item {
|
||||
RawToken::ExternalCommand(tag) => Some((tag, self.tag)),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn extract_external_word(&self) -> Option<Tag> {
|
||||
match self.item {
|
||||
RawToken::ExternalWord => Some(self.tag),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn extract_glob_pattern(&self) -> Option<Tag> {
|
||||
match self.item {
|
||||
RawToken::GlobPattern => Some(self.tag),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn extract_bare(&self) -> Option<Tag> {
|
||||
match self.item {
|
||||
RawToken::Bare => Some(self.tag),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DebugToken<'a> {
|
||||
|
@ -1,92 +1,38 @@
|
||||
use crate::context::Context;
|
||||
use crate::errors::{ArgumentError, ShellError};
|
||||
use crate::parser::hir::syntax_shape::{
|
||||
color_fallible_syntax, color_syntax, expand_expr, flat_shape::FlatShape, spaced,
|
||||
BackoffColoringMode, ColorSyntax, MaybeSpaceShape,
|
||||
};
|
||||
use crate::parser::registry::{NamedType, PositionalType, Signature};
|
||||
use crate::parser::{baseline_parse_tokens, CallNode};
|
||||
use crate::parser::TokensIterator;
|
||||
use crate::parser::{
|
||||
hir::{self, NamedArguments},
|
||||
Flag, RawToken, TokenNode,
|
||||
hir::{self, ExpandContext, NamedArguments},
|
||||
Flag,
|
||||
};
|
||||
use crate::traits::ToDebug;
|
||||
use crate::{Tag, Tagged, TaggedItem, Text};
|
||||
use crate::{Tag, Tagged, Text};
|
||||
use log::trace;
|
||||
|
||||
pub fn parse_command(
|
||||
pub fn parse_command_tail(
|
||||
config: &Signature,
|
||||
context: &Context,
|
||||
call: &Tagged<CallNode>,
|
||||
source: &Text,
|
||||
) -> Result<hir::Call, ShellError> {
|
||||
let Tagged { item: raw_call, .. } = call;
|
||||
|
||||
trace!("Processing {:?}", config);
|
||||
|
||||
let head = parse_command_head(call.head())?;
|
||||
|
||||
let children: Option<Vec<TokenNode>> = raw_call.children().as_ref().map(|nodes| {
|
||||
nodes
|
||||
.iter()
|
||||
.cloned()
|
||||
.filter(|node| match node {
|
||||
TokenNode::Whitespace(_) => false,
|
||||
_ => true,
|
||||
})
|
||||
.collect()
|
||||
});
|
||||
|
||||
match parse_command_tail(&config, context, children, source, call.tag())? {
|
||||
None => Ok(hir::Call::new(Box::new(head), None, None)),
|
||||
Some((positional, named)) => Ok(hir::Call::new(Box::new(head), positional, named)),
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_command_head(head: &TokenNode) -> Result<hir::Expression, ShellError> {
|
||||
match head {
|
||||
TokenNode::Token(
|
||||
spanned @ Tagged {
|
||||
item: RawToken::Bare,
|
||||
..
|
||||
},
|
||||
) => Ok(spanned.map(|_| hir::RawExpression::Literal(hir::Literal::Bare))),
|
||||
|
||||
TokenNode::Token(Tagged {
|
||||
item: RawToken::String(inner_tag),
|
||||
tag,
|
||||
}) => Ok(hir::RawExpression::Literal(hir::Literal::String(*inner_tag)).tagged(*tag)),
|
||||
|
||||
other => Err(ShellError::unexpected(&format!(
|
||||
"command head -> {:?}",
|
||||
other
|
||||
))),
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_command_tail(
|
||||
config: &Signature,
|
||||
context: &Context,
|
||||
tail: Option<Vec<TokenNode>>,
|
||||
source: &Text,
|
||||
context: &ExpandContext,
|
||||
tail: &mut TokensIterator,
|
||||
command_tag: Tag,
|
||||
) -> Result<Option<(Option<Vec<hir::Expression>>, Option<NamedArguments>)>, ShellError> {
|
||||
let tail = &mut match &tail {
|
||||
None => hir::TokensIterator::new(&[]),
|
||||
Some(tail) => hir::TokensIterator::new(tail),
|
||||
};
|
||||
|
||||
let mut named = NamedArguments::new();
|
||||
|
||||
trace_remaining("nodes", tail.clone(), source);
|
||||
trace_remaining("nodes", tail.clone(), context.source());
|
||||
|
||||
for (name, kind) in &config.named {
|
||||
trace!(target: "nu::parse", "looking for {} : {:?}", name, kind);
|
||||
|
||||
match kind {
|
||||
NamedType::Switch => {
|
||||
let flag = extract_switch(name, tail, source);
|
||||
let flag = extract_switch(name, tail, context.source());
|
||||
|
||||
named.insert_switch(name, flag);
|
||||
}
|
||||
NamedType::Mandatory(syntax_type) => {
|
||||
match extract_mandatory(config, name, tail, source, command_tag) {
|
||||
match extract_mandatory(config, name, tail, context.source(), command_tag) {
|
||||
Err(err) => return Err(err), // produce a correct diagnostic
|
||||
Ok((pos, flag)) => {
|
||||
tail.move_to(pos);
|
||||
@ -99,42 +45,47 @@ fn parse_command_tail(
|
||||
));
|
||||
}
|
||||
|
||||
let expr =
|
||||
hir::baseline_parse_next_expr(tail, context, source, *syntax_type)?;
|
||||
let expr = expand_expr(&spaced(*syntax_type), tail, context)?;
|
||||
|
||||
tail.restart();
|
||||
named.insert_mandatory(name, expr);
|
||||
}
|
||||
}
|
||||
}
|
||||
NamedType::Optional(syntax_type) => match extract_optional(name, tail, source) {
|
||||
Err(err) => return Err(err), // produce a correct diagnostic
|
||||
Ok(Some((pos, flag))) => {
|
||||
tail.move_to(pos);
|
||||
NamedType::Optional(syntax_type) => {
|
||||
match extract_optional(name, tail, context.source()) {
|
||||
Err(err) => return Err(err), // produce a correct diagnostic
|
||||
Ok(Some((pos, flag))) => {
|
||||
tail.move_to(pos);
|
||||
|
||||
if tail.at_end() {
|
||||
return Err(ShellError::argument_error(
|
||||
config.name.clone(),
|
||||
ArgumentError::MissingValueForName(name.to_string()),
|
||||
flag.tag(),
|
||||
));
|
||||
if tail.at_end() {
|
||||
return Err(ShellError::argument_error(
|
||||
config.name.clone(),
|
||||
ArgumentError::MissingValueForName(name.to_string()),
|
||||
flag.tag(),
|
||||
));
|
||||
}
|
||||
|
||||
let expr = expand_expr(&spaced(*syntax_type), tail, context);
|
||||
|
||||
match expr {
|
||||
Err(_) => named.insert_optional(name, None),
|
||||
Ok(expr) => named.insert_optional(name, Some(expr)),
|
||||
}
|
||||
|
||||
tail.restart();
|
||||
}
|
||||
|
||||
let expr = hir::baseline_parse_next_expr(tail, context, source, *syntax_type)?;
|
||||
|
||||
tail.restart();
|
||||
named.insert_optional(name, Some(expr));
|
||||
Ok(None) => {
|
||||
tail.restart();
|
||||
named.insert_optional(name, None);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(None) => {
|
||||
tail.restart();
|
||||
named.insert_optional(name, None);
|
||||
}
|
||||
},
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
trace_remaining("after named", tail.clone(), source);
|
||||
trace_remaining("after named", tail.clone(), context.source());
|
||||
|
||||
let mut positional = vec![];
|
||||
|
||||
@ -143,7 +94,7 @@ fn parse_command_tail(
|
||||
|
||||
match arg {
|
||||
PositionalType::Mandatory(..) => {
|
||||
if tail.len() == 0 {
|
||||
if tail.at_end() {
|
||||
return Err(ShellError::argument_error(
|
||||
config.name.clone(),
|
||||
ArgumentError::MissingMandatoryPositional(arg.name().to_string()),
|
||||
@ -153,25 +104,36 @@ fn parse_command_tail(
|
||||
}
|
||||
|
||||
PositionalType::Optional(..) => {
|
||||
if tail.len() == 0 {
|
||||
if tail.at_end() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let result = hir::baseline_parse_next_expr(tail, context, source, arg.syntax_type())?;
|
||||
let result = expand_expr(&spaced(arg.syntax_type()), tail, context)?;
|
||||
|
||||
positional.push(result);
|
||||
}
|
||||
|
||||
trace_remaining("after positional", tail.clone(), source);
|
||||
trace_remaining("after positional", tail.clone(), context.source());
|
||||
|
||||
if let Some(syntax_type) = config.rest_positional {
|
||||
let remainder = baseline_parse_tokens(tail, context, source, syntax_type)?;
|
||||
positional.extend(remainder);
|
||||
let mut out = vec![];
|
||||
|
||||
loop {
|
||||
if tail.at_end_possible_ws() {
|
||||
break;
|
||||
}
|
||||
|
||||
let next = expand_expr(&spaced(syntax_type), tail, context)?;
|
||||
|
||||
out.push(next);
|
||||
}
|
||||
|
||||
positional.extend(out);
|
||||
}
|
||||
|
||||
trace_remaining("after rest", tail.clone(), source);
|
||||
trace_remaining("after rest", tail.clone(), context.source());
|
||||
|
||||
trace!("Constructed positional={:?} named={:?}", positional, named);
|
||||
|
||||
@ -194,6 +156,232 @@ fn parse_command_tail(
|
||||
Ok(Some((positional, named)))
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct ColoringArgs {
|
||||
vec: Vec<Option<Vec<Tagged<FlatShape>>>>,
|
||||
}
|
||||
|
||||
impl ColoringArgs {
|
||||
fn new(len: usize) -> ColoringArgs {
|
||||
let vec = vec![None; len];
|
||||
ColoringArgs { vec }
|
||||
}
|
||||
|
||||
fn insert(&mut self, pos: usize, shapes: Vec<Tagged<FlatShape>>) {
|
||||
self.vec[pos] = Some(shapes);
|
||||
}
|
||||
|
||||
fn spread_shapes(self, shapes: &mut Vec<Tagged<FlatShape>>) {
|
||||
for item in self.vec {
|
||||
match item {
|
||||
None => {}
|
||||
Some(vec) => {
|
||||
shapes.extend(vec);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct CommandTailShape;
|
||||
|
||||
impl ColorSyntax for CommandTailShape {
|
||||
type Info = ();
|
||||
type Input = Signature;
|
||||
|
||||
fn color_syntax<'a, 'b>(
|
||||
&self,
|
||||
signature: &Signature,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
) -> Self::Info {
|
||||
let mut args = ColoringArgs::new(token_nodes.len());
|
||||
trace_remaining("nodes", token_nodes.clone(), context.source());
|
||||
|
||||
for (name, kind) in &signature.named {
|
||||
trace!(target: "nu::color_syntax", "looking for {} : {:?}", name, kind);
|
||||
|
||||
match kind {
|
||||
NamedType::Switch => {
|
||||
match token_nodes.extract(|t| t.as_flag(name, context.source())) {
|
||||
Some((pos, flag)) => args.insert(pos, vec![flag.color()]),
|
||||
None => {}
|
||||
}
|
||||
}
|
||||
NamedType::Mandatory(syntax_type) => {
|
||||
match extract_mandatory(
|
||||
signature,
|
||||
name,
|
||||
token_nodes,
|
||||
context.source(),
|
||||
Tag::unknown(),
|
||||
) {
|
||||
Err(_) => {
|
||||
// The mandatory flag didn't exist at all, so there's nothing to color
|
||||
}
|
||||
Ok((pos, flag)) => {
|
||||
let mut shapes = vec![flag.color()];
|
||||
token_nodes.move_to(pos);
|
||||
|
||||
if token_nodes.at_end() {
|
||||
args.insert(pos, shapes);
|
||||
token_nodes.restart();
|
||||
continue;
|
||||
}
|
||||
|
||||
// We can live with unmatched syntax after a mandatory flag
|
||||
let _ = token_nodes.atomic(|token_nodes| {
|
||||
color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes);
|
||||
|
||||
// If the part after a mandatory flag isn't present, that's ok, but we
|
||||
// should roll back any whitespace we chomped
|
||||
color_fallible_syntax(
|
||||
syntax_type,
|
||||
token_nodes,
|
||||
context,
|
||||
&mut shapes,
|
||||
)
|
||||
});
|
||||
|
||||
args.insert(pos, shapes);
|
||||
token_nodes.restart();
|
||||
}
|
||||
}
|
||||
}
|
||||
NamedType::Optional(syntax_type) => {
|
||||
match extract_optional(name, token_nodes, context.source()) {
|
||||
Err(_) => {
|
||||
// The optional flag didn't exist at all, so there's nothing to color
|
||||
}
|
||||
Ok(Some((pos, flag))) => {
|
||||
let mut shapes = vec![flag.color()];
|
||||
token_nodes.move_to(pos);
|
||||
|
||||
if token_nodes.at_end() {
|
||||
args.insert(pos, shapes);
|
||||
token_nodes.restart();
|
||||
continue;
|
||||
}
|
||||
|
||||
// We can live with unmatched syntax after an optional flag
|
||||
let _ = token_nodes.atomic(|token_nodes| {
|
||||
color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes);
|
||||
|
||||
// If the part after a mandatory flag isn't present, that's ok, but we
|
||||
// should roll back any whitespace we chomped
|
||||
color_fallible_syntax(
|
||||
syntax_type,
|
||||
token_nodes,
|
||||
context,
|
||||
&mut shapes,
|
||||
)
|
||||
});
|
||||
|
||||
args.insert(pos, shapes);
|
||||
token_nodes.restart();
|
||||
}
|
||||
|
||||
Ok(None) => {
|
||||
token_nodes.restart();
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
trace_remaining("after named", token_nodes.clone(), context.source());
|
||||
|
||||
for arg in &signature.positional {
|
||||
trace!("Processing positional {:?}", arg);
|
||||
|
||||
match arg {
|
||||
PositionalType::Mandatory(..) => {
|
||||
if token_nodes.at_end() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
PositionalType::Optional(..) => {
|
||||
if token_nodes.at_end() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut shapes = vec![];
|
||||
let pos = token_nodes.pos(false);
|
||||
|
||||
match pos {
|
||||
None => break,
|
||||
Some(pos) => {
|
||||
// We can live with an unmatched positional argument. Hopefully it will be
|
||||
// matched by a future token
|
||||
let _ = token_nodes.atomic(|token_nodes| {
|
||||
color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes);
|
||||
|
||||
// If no match, we should roll back any whitespace we chomped
|
||||
color_fallible_syntax(
|
||||
&arg.syntax_type(),
|
||||
token_nodes,
|
||||
context,
|
||||
&mut shapes,
|
||||
)?;
|
||||
|
||||
args.insert(pos, shapes);
|
||||
|
||||
Ok(())
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
trace_remaining("after positional", token_nodes.clone(), context.source());
|
||||
|
||||
if let Some(syntax_type) = signature.rest_positional {
|
||||
loop {
|
||||
if token_nodes.at_end_possible_ws() {
|
||||
break;
|
||||
}
|
||||
|
||||
let pos = token_nodes.pos(false);
|
||||
|
||||
match pos {
|
||||
None => break,
|
||||
Some(pos) => {
|
||||
let mut shapes = vec![];
|
||||
|
||||
// If any arguments don't match, we'll fall back to backoff coloring mode
|
||||
let result = token_nodes.atomic(|token_nodes| {
|
||||
color_syntax(&MaybeSpaceShape, token_nodes, context, &mut shapes);
|
||||
|
||||
// If no match, we should roll back any whitespace we chomped
|
||||
color_fallible_syntax(&syntax_type, token_nodes, context, &mut shapes)?;
|
||||
|
||||
args.insert(pos, shapes);
|
||||
|
||||
Ok(())
|
||||
});
|
||||
|
||||
match result {
|
||||
Err(_) => break,
|
||||
Ok(_) => continue,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
args.spread_shapes(shapes);
|
||||
|
||||
// Consume any remaining tokens with backoff coloring mode
|
||||
color_syntax(&BackoffColoringMode, token_nodes, context, shapes);
|
||||
|
||||
shapes.sort_by(|a, b| a.tag.span.start().cmp(&b.tag.span.start()));
|
||||
}
|
||||
}
|
||||
|
||||
fn extract_switch(name: &str, tokens: &mut hir::TokensIterator<'_>, source: &Text) -> Option<Flag> {
|
||||
tokens
|
||||
.extract(|t| t.as_flag(name, source))
|
||||
@ -241,6 +429,7 @@ fn extract_optional(
|
||||
|
||||
pub fn trace_remaining(desc: &'static str, tail: hir::TokensIterator<'_>, source: &Text) {
|
||||
trace!(
|
||||
target: "nu::expand_args",
|
||||
"{} = {:?}",
|
||||
desc,
|
||||
itertools::join(
|
||||
|
@ -1,11 +1,11 @@
|
||||
// TODO: Temporary redirect
|
||||
pub(crate) use crate::context::CommandRegistry;
|
||||
use crate::evaluate::{evaluate_baseline_expr, Scope};
|
||||
use crate::parser::{hir, hir::SyntaxShape, parse_command, CallNode};
|
||||
use crate::parser::{hir, hir::SyntaxShape};
|
||||
use crate::prelude::*;
|
||||
use derive_new::new;
|
||||
use indexmap::IndexMap;
|
||||
use log::trace;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fmt;
|
||||
|
||||
@ -271,21 +271,6 @@ impl<'a> Iterator for PositionalIter<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
impl Signature {
|
||||
pub(crate) fn parse_args(
|
||||
&self,
|
||||
call: &Tagged<CallNode>,
|
||||
context: &Context,
|
||||
source: &Text,
|
||||
) -> Result<hir::Call, ShellError> {
|
||||
let args = parse_command(self, context, call, source)?;
|
||||
|
||||
trace!("parsed args: {:?}", args);
|
||||
|
||||
Ok(args)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn evaluate_args(
|
||||
call: &hir::Call,
|
||||
registry: &CommandRegistry,
|
||||
|
@ -32,7 +32,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) {
|
||||
let input = match input {
|
||||
Some(arg) => std::fs::read_to_string(arg),
|
||||
None => {
|
||||
send_response(ShellError::string(format!("No input given.")));
|
||||
send_response(ShellError::untagged_runtime_error("No input given."));
|
||||
return;
|
||||
}
|
||||
};
|
||||
@ -64,7 +64,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) {
|
||||
return;
|
||||
}
|
||||
e => {
|
||||
send_response(ShellError::string(format!(
|
||||
send_response(ShellError::untagged_runtime_error(format!(
|
||||
"Could not handle plugin message: {} {:?}",
|
||||
input, e
|
||||
)));
|
||||
@ -102,7 +102,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) {
|
||||
break;
|
||||
}
|
||||
e => {
|
||||
send_response(ShellError::string(format!(
|
||||
send_response(ShellError::untagged_runtime_error(format!(
|
||||
"Could not handle plugin message: {} {:?}",
|
||||
input, e
|
||||
)));
|
||||
@ -111,7 +111,7 @@ pub fn serve_plugin(plugin: &mut dyn Plugin) {
|
||||
}
|
||||
}
|
||||
e => {
|
||||
send_response(ShellError::string(format!(
|
||||
send_response(ShellError::untagged_runtime_error(format!(
|
||||
"Could not handle plugin message: {:?}",
|
||||
e,
|
||||
)));
|
||||
|
@ -1,10 +1,13 @@
|
||||
use itertools::Itertools;
|
||||
use nu::{
|
||||
serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature,
|
||||
SyntaxShape, Tagged, Value,
|
||||
serve_plugin, CallInfo, Plugin, ReturnSuccess, ReturnValue, ShellError, Signature, SyntaxShape,
|
||||
Tagged, TaggedItem, Value,
|
||||
};
|
||||
|
||||
pub type ColumnPath = Vec<Tagged<String>>;
|
||||
|
||||
struct Add {
|
||||
field: Option<String>,
|
||||
field: Option<ColumnPath>,
|
||||
value: Option<Value>,
|
||||
}
|
||||
impl Add {
|
||||
@ -19,23 +22,30 @@ impl Add {
|
||||
let value_tag = value.tag();
|
||||
match (value.item, self.value.clone()) {
|
||||
(obj @ Value::Row(_), Some(v)) => match &self.field {
|
||||
Some(f) => match obj.insert_data_at_path(value_tag, &f, v) {
|
||||
Some(f) => match obj.insert_data_at_column_path(value_tag, &f, v) {
|
||||
Some(v) => return Ok(v),
|
||||
None => {
|
||||
return Err(ShellError::string(format!(
|
||||
"add could not find place to insert field {:?} {}",
|
||||
obj, f
|
||||
)))
|
||||
return Err(ShellError::labeled_error(
|
||||
format!(
|
||||
"add could not find place to insert field {:?} {}",
|
||||
obj,
|
||||
f.iter().map(|i| &i.item).join(".")
|
||||
),
|
||||
"column name",
|
||||
value_tag,
|
||||
))
|
||||
}
|
||||
},
|
||||
None => Err(ShellError::string(
|
||||
None => Err(ShellError::labeled_error(
|
||||
"add needs a column name when adding a value to a table",
|
||||
"column name",
|
||||
value_tag,
|
||||
)),
|
||||
},
|
||||
x => Err(ShellError::string(format!(
|
||||
"Unrecognized type in stream: {:?}",
|
||||
x
|
||||
))),
|
||||
(value, _) => Err(ShellError::type_error(
|
||||
"row",
|
||||
value.type_name().tagged(value_tag),
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -44,7 +54,7 @@ impl Plugin for Add {
|
||||
fn config(&mut self) -> Result<Signature, ShellError> {
|
||||
Ok(Signature::build("add")
|
||||
.desc("Add a new field to the table.")
|
||||
.required("Field", SyntaxShape::String)
|
||||
.required("Field", SyntaxShape::ColumnPath)
|
||||
.required("Value", SyntaxShape::String)
|
||||
.rest(SyntaxShape::String)
|
||||
.filter())
|
||||
@ -53,18 +63,14 @@ impl Plugin for Add {
|
||||
fn begin_filter(&mut self, call_info: CallInfo) -> Result<Vec<ReturnValue>, ShellError> {
|
||||
if let Some(args) = call_info.args.positional {
|
||||
match &args[0] {
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::String(s)),
|
||||
table @ Tagged {
|
||||
item: Value::Table(_),
|
||||
..
|
||||
} => {
|
||||
self.field = Some(s.clone());
|
||||
}
|
||||
_ => {
|
||||
return Err(ShellError::string(format!(
|
||||
"Unrecognized type in params: {:?}",
|
||||
args[0]
|
||||
)))
|
||||
self.field = Some(table.as_column_path()?.item);
|
||||
}
|
||||
|
||||
value => return Err(ShellError::type_error("table", value.tagged_type_name())),
|
||||
}
|
||||
match &args[1] {
|
||||
Tagged { item: v, .. } => {
|
||||
|
@ -1,10 +1,12 @@
|
||||
use nu::{
|
||||
serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature,
|
||||
SyntaxShape, Tagged, Value,
|
||||
serve_plugin, CallInfo, Plugin, ReturnSuccess, ReturnValue, ShellError, Signature, SyntaxShape,
|
||||
Tagged, Value,
|
||||
};
|
||||
|
||||
pub type ColumnPath = Tagged<Vec<Tagged<String>>>;
|
||||
|
||||
struct Edit {
|
||||
field: Option<String>,
|
||||
field: Option<ColumnPath>,
|
||||
value: Option<Value>,
|
||||
}
|
||||
impl Edit {
|
||||
@ -19,22 +21,25 @@ impl Edit {
|
||||
let value_tag = value.tag();
|
||||
match (value.item, self.value.clone()) {
|
||||
(obj @ Value::Row(_), Some(v)) => match &self.field {
|
||||
Some(f) => match obj.replace_data_at_path(value_tag, &f, v) {
|
||||
Some(f) => match obj.replace_data_at_column_path(value_tag, &f, v) {
|
||||
Some(v) => return Ok(v),
|
||||
None => {
|
||||
return Err(ShellError::string(
|
||||
return Err(ShellError::labeled_error(
|
||||
"edit could not find place to insert column",
|
||||
"column name",
|
||||
f.tag,
|
||||
))
|
||||
}
|
||||
},
|
||||
None => Err(ShellError::string(
|
||||
None => Err(ShellError::untagged_runtime_error(
|
||||
"edit needs a column when changing a value in a table",
|
||||
)),
|
||||
},
|
||||
x => Err(ShellError::string(format!(
|
||||
"Unrecognized type in stream: {:?}",
|
||||
x
|
||||
))),
|
||||
_ => Err(ShellError::labeled_error(
|
||||
"Unrecognized type in stream",
|
||||
"original value",
|
||||
value_tag,
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -43,7 +48,7 @@ impl Plugin for Edit {
|
||||
fn config(&mut self) -> Result<Signature, ShellError> {
|
||||
Ok(Signature::build("edit")
|
||||
.desc("Edit an existing column to have a new value.")
|
||||
.required("Field", SyntaxShape::String)
|
||||
.required("Field", SyntaxShape::ColumnPath)
|
||||
.required("Value", SyntaxShape::String)
|
||||
.filter())
|
||||
}
|
||||
@ -51,18 +56,13 @@ impl Plugin for Edit {
|
||||
fn begin_filter(&mut self, call_info: CallInfo) -> Result<Vec<ReturnValue>, ShellError> {
|
||||
if let Some(args) = call_info.args.positional {
|
||||
match &args[0] {
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::String(s)),
|
||||
table @ Tagged {
|
||||
item: Value::Table(_),
|
||||
..
|
||||
} => {
|
||||
self.field = Some(s.clone());
|
||||
}
|
||||
_ => {
|
||||
return Err(ShellError::string(format!(
|
||||
"Unrecognized type in params: {:?}",
|
||||
args[0]
|
||||
)))
|
||||
self.field = Some(table.as_column_path()?);
|
||||
}
|
||||
value => return Err(ShellError::type_error("table", value.tagged_type_name())),
|
||||
}
|
||||
match &args[1] {
|
||||
Tagged { item: v, .. } => {
|
||||
|
@ -25,8 +25,10 @@ impl Embed {
|
||||
});
|
||||
Ok(())
|
||||
}
|
||||
None => Err(ShellError::string(
|
||||
None => Err(ShellError::labeled_error(
|
||||
"embed needs a field when embedding a value",
|
||||
"original value",
|
||||
value.tag,
|
||||
)),
|
||||
},
|
||||
}
|
||||
@ -52,12 +54,7 @@ impl Plugin for Embed {
|
||||
self.field = Some(s.clone());
|
||||
self.values = Vec::new();
|
||||
}
|
||||
_ => {
|
||||
return Err(ShellError::string(format!(
|
||||
"Unrecognized type in params: {:?}",
|
||||
args[0]
|
||||
)))
|
||||
}
|
||||
value => return Err(ShellError::type_error("string", value.tagged_type_name())),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -14,8 +14,10 @@ pub enum SemVerAction {
|
||||
Patch,
|
||||
}
|
||||
|
||||
pub type ColumnPath = Tagged<Vec<Tagged<String>>>;
|
||||
|
||||
struct Inc {
|
||||
field: Option<String>,
|
||||
field: Option<ColumnPath>,
|
||||
error: Option<String>,
|
||||
action: Option<Action>,
|
||||
}
|
||||
@ -85,30 +87,39 @@ impl Inc {
|
||||
}
|
||||
Value::Row(_) => match self.field {
|
||||
Some(ref f) => {
|
||||
let replacement = match value.item.get_data_by_path(value.tag(), f) {
|
||||
let replacement = match value.item.get_data_by_column_path(value.tag(), f) {
|
||||
Some(result) => self.inc(result.map(|x| x.clone()))?,
|
||||
None => {
|
||||
return Err(ShellError::string("inc could not find field to replace"))
|
||||
return Err(ShellError::labeled_error(
|
||||
"inc could not find field to replace",
|
||||
"column name",
|
||||
f.tag,
|
||||
))
|
||||
}
|
||||
};
|
||||
match value
|
||||
.item
|
||||
.replace_data_at_path(value.tag(), f, replacement.item.clone())
|
||||
{
|
||||
match value.item.replace_data_at_column_path(
|
||||
value.tag(),
|
||||
f,
|
||||
replacement.item.clone(),
|
||||
) {
|
||||
Some(v) => return Ok(v),
|
||||
None => {
|
||||
return Err(ShellError::string("inc could not find field to replace"))
|
||||
return Err(ShellError::labeled_error(
|
||||
"inc could not find field to replace",
|
||||
"column name",
|
||||
f.tag,
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
None => Err(ShellError::string(
|
||||
None => Err(ShellError::untagged_runtime_error(
|
||||
"inc needs a field when incrementing a column in a table",
|
||||
)),
|
||||
},
|
||||
x => Err(ShellError::string(format!(
|
||||
"Unrecognized type in stream: {:?}",
|
||||
x
|
||||
))),
|
||||
_ => Err(ShellError::type_error(
|
||||
"incrementable value",
|
||||
value.tagged_type_name(),
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -120,7 +131,7 @@ impl Plugin for Inc {
|
||||
.switch("major")
|
||||
.switch("minor")
|
||||
.switch("patch")
|
||||
.rest(SyntaxShape::String)
|
||||
.rest(SyntaxShape::ColumnPath)
|
||||
.filter())
|
||||
}
|
||||
|
||||
@ -138,18 +149,13 @@ impl Plugin for Inc {
|
||||
if let Some(args) = call_info.args.positional {
|
||||
for arg in args {
|
||||
match arg {
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::String(s)),
|
||||
table @ Tagged {
|
||||
item: Value::Table(_),
|
||||
..
|
||||
} => {
|
||||
self.field = Some(s);
|
||||
}
|
||||
_ => {
|
||||
return Err(ShellError::string(format!(
|
||||
"Unrecognized type in params: {:?}",
|
||||
arg
|
||||
)))
|
||||
self.field = Some(table.as_column_path()?);
|
||||
}
|
||||
value => return Err(ShellError::type_error("table", value.tagged_type_name())),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -160,7 +166,11 @@ impl Plugin for Inc {
|
||||
|
||||
match &self.error {
|
||||
Some(reason) => {
|
||||
return Err(ShellError::string(format!("{}: {}", reason, Inc::usage())))
|
||||
return Err(ShellError::untagged_runtime_error(format!(
|
||||
"{}: {}",
|
||||
reason,
|
||||
Inc::usage()
|
||||
)))
|
||||
}
|
||||
None => Ok(vec![]),
|
||||
}
|
||||
@ -209,8 +219,13 @@ mod tests {
|
||||
}
|
||||
|
||||
fn with_parameter(&mut self, name: &str) -> &mut Self {
|
||||
let fields: Vec<Tagged<Value>> = name
|
||||
.split(".")
|
||||
.map(|s| Value::string(s.to_string()).tagged(Tag::unknown_span(self.anchor)))
|
||||
.collect();
|
||||
|
||||
self.positionals
|
||||
.push(Value::string(name.to_string()).tagged(Tag::unknown_span(self.anchor)));
|
||||
.push(Value::Table(fields).tagged(Tag::unknown_span(self.anchor)));
|
||||
self
|
||||
}
|
||||
|
||||
@ -297,7 +312,12 @@ mod tests {
|
||||
)
|
||||
.is_ok());
|
||||
|
||||
assert_eq!(plugin.field, Some("package.version".to_string()));
|
||||
assert_eq!(
|
||||
plugin
|
||||
.field
|
||||
.map(|f| f.iter().map(|f| f.item.clone()).collect()),
|
||||
Some(vec!["package".to_string(), "version".to_string()])
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -35,11 +35,12 @@ impl Plugin for Match {
|
||||
} => {
|
||||
self.column = s.clone();
|
||||
}
|
||||
_ => {
|
||||
return Err(ShellError::string(format!(
|
||||
"Unrecognized type in params: {:?}",
|
||||
args[0]
|
||||
)));
|
||||
Tagged { tag, .. } => {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Unrecognized type in params",
|
||||
"value",
|
||||
tag,
|
||||
));
|
||||
}
|
||||
}
|
||||
match &args[1] {
|
||||
@ -49,11 +50,12 @@ impl Plugin for Match {
|
||||
} => {
|
||||
self.regex = Regex::new(s).unwrap();
|
||||
}
|
||||
_ => {
|
||||
return Err(ShellError::string(format!(
|
||||
"Unrecognized type in params: {:?}",
|
||||
args[1]
|
||||
)));
|
||||
Tagged { tag, .. } => {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Unrecognized type in params",
|
||||
"value",
|
||||
tag,
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -65,7 +67,7 @@ impl Plugin for Match {
|
||||
match &input {
|
||||
Tagged {
|
||||
item: Value::Row(dict),
|
||||
..
|
||||
tag,
|
||||
} => {
|
||||
if let Some(val) = dict.entries.get(&self.column) {
|
||||
match val {
|
||||
@ -75,22 +77,20 @@ impl Plugin for Match {
|
||||
} => {
|
||||
flag = self.regex.is_match(s);
|
||||
}
|
||||
_ => {
|
||||
return Err(ShellError::string(format!(
|
||||
"value is not a string! {:?}",
|
||||
&val
|
||||
)));
|
||||
Tagged { tag, .. } => {
|
||||
return Err(ShellError::labeled_error("expected string", "value", tag));
|
||||
}
|
||||
}
|
||||
} else {
|
||||
return Err(ShellError::string(format!(
|
||||
"column not in row! {:?} {:?}",
|
||||
&self.column, dict
|
||||
)));
|
||||
return Err(ShellError::labeled_error(
|
||||
format!("column not in row! {:?} {:?}", &self.column, dict),
|
||||
"row",
|
||||
tag,
|
||||
));
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
return Err(ShellError::string(format!("Not a row! {:?}", &input)));
|
||||
Tagged { tag, .. } => {
|
||||
return Err(ShellError::labeled_error("Expected row", "value", tag));
|
||||
}
|
||||
}
|
||||
if flag {
|
||||
|
@ -1,6 +1,6 @@
|
||||
use nu::{
|
||||
serve_plugin, CallInfo, Plugin, Primitive, ReturnSuccess, ReturnValue, ShellError, Signature,
|
||||
SyntaxShape, Tagged, Value,
|
||||
SyntaxShape, Tagged, TaggedItem, Value,
|
||||
};
|
||||
|
||||
#[derive(Debug, Eq, PartialEq)]
|
||||
@ -10,8 +10,10 @@ enum Action {
|
||||
ToInteger,
|
||||
}
|
||||
|
||||
pub type ColumnPath = Vec<Tagged<String>>;
|
||||
|
||||
struct Str {
|
||||
field: Option<String>,
|
||||
field: Option<ColumnPath>,
|
||||
params: Option<Vec<String>>,
|
||||
error: Option<String>,
|
||||
action: Option<Action>,
|
||||
@ -43,8 +45,8 @@ impl Str {
|
||||
Ok(applied)
|
||||
}
|
||||
|
||||
fn for_field(&mut self, field: &str) {
|
||||
self.field = Some(String::from(field));
|
||||
fn for_field(&mut self, column_path: ColumnPath) {
|
||||
self.field = Some(column_path);
|
||||
}
|
||||
|
||||
fn permit(&mut self) -> bool {
|
||||
@ -92,30 +94,35 @@ impl Str {
|
||||
}
|
||||
Value::Row(_) => match self.field {
|
||||
Some(ref f) => {
|
||||
let replacement = match value.item.get_data_by_path(value.tag(), f) {
|
||||
let replacement = match value.item.get_data_by_column_path(value.tag(), f) {
|
||||
Some(result) => self.strutils(result.map(|x| x.clone()))?,
|
||||
None => return Ok(Tagged::from_item(Value::nothing(), value.tag)),
|
||||
};
|
||||
match value
|
||||
.item
|
||||
.replace_data_at_path(value.tag(), f, replacement.item.clone())
|
||||
{
|
||||
match value.item.replace_data_at_column_path(
|
||||
value.tag(),
|
||||
f,
|
||||
replacement.item.clone(),
|
||||
) {
|
||||
Some(v) => return Ok(v),
|
||||
None => {
|
||||
return Err(ShellError::string("str could not find field to replace"))
|
||||
return Err(ShellError::type_error(
|
||||
"column name",
|
||||
value.tagged_type_name(),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
None => Err(ShellError::string(format!(
|
||||
None => Err(ShellError::untagged_runtime_error(format!(
|
||||
"{}: {}",
|
||||
"str needs a column when applied to a value in a row",
|
||||
Str::usage()
|
||||
))),
|
||||
},
|
||||
x => Err(ShellError::string(format!(
|
||||
"Unrecognized type in stream: {:?}",
|
||||
x
|
||||
))),
|
||||
_ => Err(ShellError::labeled_error(
|
||||
"Unrecognized type in stream",
|
||||
value.type_name(),
|
||||
value.tag,
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -127,7 +134,7 @@ impl Plugin for Str {
|
||||
.switch("downcase")
|
||||
.switch("upcase")
|
||||
.switch("to-int")
|
||||
.rest(SyntaxShape::Member)
|
||||
.rest(SyntaxShape::ColumnPath)
|
||||
.filter())
|
||||
}
|
||||
|
||||
@ -148,20 +155,27 @@ impl Plugin for Str {
|
||||
match possible_field {
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::String(s)),
|
||||
..
|
||||
tag,
|
||||
} => match self.action {
|
||||
Some(Action::Downcase)
|
||||
| Some(Action::Upcase)
|
||||
| Some(Action::ToInteger)
|
||||
| None => {
|
||||
self.for_field(&s);
|
||||
self.for_field(vec![s.clone().tagged(tag)]);
|
||||
}
|
||||
},
|
||||
table @ Tagged {
|
||||
item: Value::Table(_),
|
||||
..
|
||||
} => {
|
||||
self.field = Some(table.as_column_path()?.item);
|
||||
}
|
||||
_ => {
|
||||
return Err(ShellError::string(format!(
|
||||
"Unrecognized type in params: {:?}",
|
||||
possible_field
|
||||
)))
|
||||
return Err(ShellError::labeled_error(
|
||||
"Unrecognized type in params",
|
||||
possible_field.type_name(),
|
||||
possible_field.tag,
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -178,7 +192,11 @@ impl Plugin for Str {
|
||||
|
||||
match &self.error {
|
||||
Some(reason) => {
|
||||
return Err(ShellError::string(format!("{}: {}", reason, Str::usage())))
|
||||
return Err(ShellError::untagged_runtime_error(format!(
|
||||
"{}: {}",
|
||||
reason,
|
||||
Str::usage()
|
||||
)))
|
||||
}
|
||||
None => Ok(vec![]),
|
||||
}
|
||||
@ -227,8 +245,13 @@ mod tests {
|
||||
}
|
||||
|
||||
fn with_parameter(&mut self, name: &str) -> &mut Self {
|
||||
let fields: Vec<Tagged<Value>> = name
|
||||
.split(".")
|
||||
.map(|s| Value::string(s.to_string()).tagged(Tag::unknown_span(self.anchor)))
|
||||
.collect();
|
||||
|
||||
self.positionals
|
||||
.push(Value::string(name.to_string()).tagged(Tag::unknown()));
|
||||
.push(Value::Table(fields).tagged(Tag::unknown_span(self.anchor)));
|
||||
self
|
||||
}
|
||||
|
||||
@ -303,7 +326,12 @@ mod tests {
|
||||
)
|
||||
.is_ok());
|
||||
|
||||
assert_eq!(plugin.field, Some("package.description".to_string()));
|
||||
assert_eq!(
|
||||
plugin
|
||||
.field
|
||||
.map(|f| f.into_iter().map(|f| f.item).collect()),
|
||||
Some(vec!["package".to_string(), "description".to_string()])
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -28,9 +28,11 @@ impl Sum {
|
||||
self.total = Some(value.clone());
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(ShellError::string(format!(
|
||||
"Could not sum non-integer or unrelated types"
|
||||
))),
|
||||
_ => Err(ShellError::labeled_error(
|
||||
"Could not sum non-integer or unrelated types",
|
||||
"source",
|
||||
value.tag,
|
||||
)),
|
||||
}
|
||||
}
|
||||
Value::Primitive(Primitive::Bytes(b)) => {
|
||||
@ -47,15 +49,18 @@ impl Sum {
|
||||
self.total = Some(value);
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(ShellError::string(format!(
|
||||
"Could not sum non-integer or unrelated types"
|
||||
))),
|
||||
_ => Err(ShellError::labeled_error(
|
||||
"Could not sum non-integer or unrelated types",
|
||||
"source",
|
||||
value.tag,
|
||||
)),
|
||||
}
|
||||
}
|
||||
x => Err(ShellError::string(format!(
|
||||
"Unrecognized type in stream: {:?}",
|
||||
x
|
||||
))),
|
||||
x => Err(ShellError::labeled_error(
|
||||
format!("Unrecognized type in stream: {:?}", x),
|
||||
"source",
|
||||
value.tag,
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1,3 +1,13 @@
|
||||
#[macro_export]
|
||||
macro_rules! return_err {
|
||||
($expr:expr) => {
|
||||
match $expr {
|
||||
Err(_) => return,
|
||||
Ok(expr) => expr,
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! stream {
|
||||
($($expr:expr),*) => {{
|
||||
|
@ -145,7 +145,7 @@ impl Shell for FilesystemShell {
|
||||
source.tag(),
|
||||
));
|
||||
} else {
|
||||
return Err(ShellError::string("Invalid pattern."));
|
||||
return Err(ShellError::untagged_runtime_error("Invalid pattern."));
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -1,10 +1,11 @@
|
||||
use crate::context::Context;
|
||||
use crate::parser::hir::syntax_shape::{color_fallible_syntax, FlatShape, PipelineShape};
|
||||
use crate::parser::hir::TokensIterator;
|
||||
use crate::parser::nom_input;
|
||||
use crate::parser::parse::token_tree::TokenNode;
|
||||
use crate::parser::parse::tokens::RawToken;
|
||||
use crate::parser::{Pipeline, PipelineElement};
|
||||
use crate::shell::shell_manager::ShellManager;
|
||||
use crate::Tagged;
|
||||
use crate::{Tag, Tagged, TaggedItem, Text};
|
||||
use ansi_term::Color;
|
||||
use log::trace;
|
||||
use rustyline::completion::Completer;
|
||||
use rustyline::error::ReadlineError;
|
||||
use rustyline::highlight::Highlighter;
|
||||
@ -12,12 +13,12 @@ use rustyline::hint::Hinter;
|
||||
use std::borrow::Cow::{self, Owned};
|
||||
|
||||
pub(crate) struct Helper {
|
||||
helper: ShellManager,
|
||||
context: Context,
|
||||
}
|
||||
|
||||
impl Helper {
|
||||
pub(crate) fn new(helper: ShellManager) -> Helper {
|
||||
Helper { helper }
|
||||
pub(crate) fn new(context: Context) -> Helper {
|
||||
Helper { context }
|
||||
}
|
||||
}
|
||||
|
||||
@ -29,7 +30,7 @@ impl Completer for Helper {
|
||||
pos: usize,
|
||||
ctx: &rustyline::Context<'_>,
|
||||
) -> Result<(usize, Vec<rustyline::completion::Pair>), ReadlineError> {
|
||||
self.helper.complete(line, pos, ctx)
|
||||
self.context.shell_manager.complete(line, pos, ctx)
|
||||
}
|
||||
}
|
||||
|
||||
@ -52,7 +53,7 @@ impl Completer for Helper {
|
||||
|
||||
impl Hinter for Helper {
|
||||
fn hint(&self, line: &str, pos: usize, ctx: &rustyline::Context<'_>) -> Option<String> {
|
||||
self.helper.hint(line, pos, ctx)
|
||||
self.context.shell_manager.hint(line, pos, ctx)
|
||||
}
|
||||
}
|
||||
|
||||
@ -77,24 +78,42 @@ impl Highlighter for Helper {
|
||||
Ok(v) => v,
|
||||
};
|
||||
|
||||
let Pipeline { parts, post_ws } = pipeline;
|
||||
let mut iter = parts.into_iter();
|
||||
let tokens = vec![TokenNode::Pipeline(pipeline.clone().tagged(v.tag()))];
|
||||
let mut tokens = TokensIterator::all(&tokens[..], v.tag());
|
||||
|
||||
loop {
|
||||
match iter.next() {
|
||||
None => {
|
||||
if let Some(ws) = post_ws {
|
||||
out.push_str(ws.slice(line));
|
||||
}
|
||||
let text = Text::from(line);
|
||||
let expand_context = self
|
||||
.context
|
||||
.expand_context(&text, Tag::from((0, line.len() - 1, uuid::Uuid::nil())));
|
||||
let mut shapes = vec![];
|
||||
|
||||
return Cow::Owned(out);
|
||||
}
|
||||
Some(token) => {
|
||||
let styled = paint_pipeline_element(&token, line);
|
||||
out.push_str(&styled.to_string());
|
||||
}
|
||||
}
|
||||
// We just constructed a token list that only contains a pipeline, so it can't fail
|
||||
color_fallible_syntax(&PipelineShape, &mut tokens, &expand_context, &mut shapes)
|
||||
.unwrap();
|
||||
|
||||
trace!(target: "nu::shapes",
|
||||
"SHAPES :: {:?}",
|
||||
shapes.iter().map(|shape| shape.item).collect::<Vec<_>>()
|
||||
);
|
||||
|
||||
for shape in shapes {
|
||||
let styled = paint_flat_shape(shape, line);
|
||||
out.push_str(&styled);
|
||||
}
|
||||
|
||||
Cow::Owned(out)
|
||||
|
||||
// loop {
|
||||
// match iter.next() {
|
||||
// None => {
|
||||
// return Cow::Owned(out);
|
||||
// }
|
||||
// Some(token) => {
|
||||
// let styled = paint_pipeline_element(&token, line);
|
||||
// out.push_str(&styled.to_string());
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -104,83 +123,55 @@ impl Highlighter for Helper {
|
||||
}
|
||||
}
|
||||
|
||||
fn paint_token_node(token_node: &TokenNode, line: &str) -> String {
|
||||
let styled = match token_node {
|
||||
TokenNode::Call(..) => Color::Cyan.bold().paint(token_node.tag().slice(line)),
|
||||
TokenNode::Whitespace(..) => Color::White.normal().paint(token_node.tag().slice(line)),
|
||||
TokenNode::Flag(..) => Color::Black.bold().paint(token_node.tag().slice(line)),
|
||||
TokenNode::Member(..) => Color::Yellow.bold().paint(token_node.tag().slice(line)),
|
||||
TokenNode::Path(..) => Color::Green.bold().paint(token_node.tag().slice(line)),
|
||||
TokenNode::Error(..) => Color::Red.bold().paint(token_node.tag().slice(line)),
|
||||
TokenNode::Delimited(..) => Color::White.paint(token_node.tag().slice(line)),
|
||||
TokenNode::Operator(..) => Color::White.normal().paint(token_node.tag().slice(line)),
|
||||
TokenNode::Pipeline(..) => Color::Blue.normal().paint(token_node.tag().slice(line)),
|
||||
TokenNode::Token(Tagged {
|
||||
item: RawToken::Number(..),
|
||||
..
|
||||
}) => Color::Purple.bold().paint(token_node.tag().slice(line)),
|
||||
TokenNode::Token(Tagged {
|
||||
item: RawToken::Size(..),
|
||||
..
|
||||
}) => Color::Purple.bold().paint(token_node.tag().slice(line)),
|
||||
TokenNode::Token(Tagged {
|
||||
item: RawToken::GlobPattern,
|
||||
..
|
||||
}) => Color::Cyan.normal().paint(token_node.tag().slice(line)),
|
||||
TokenNode::Token(Tagged {
|
||||
item: RawToken::String(..),
|
||||
..
|
||||
}) => Color::Green.normal().paint(token_node.tag().slice(line)),
|
||||
TokenNode::Token(Tagged {
|
||||
item: RawToken::Variable(..),
|
||||
..
|
||||
}) => Color::Yellow.bold().paint(token_node.tag().slice(line)),
|
||||
TokenNode::Token(Tagged {
|
||||
item: RawToken::Bare,
|
||||
..
|
||||
}) => Color::Green.normal().paint(token_node.tag().slice(line)),
|
||||
TokenNode::Token(Tagged {
|
||||
item: RawToken::ExternalCommand(..),
|
||||
..
|
||||
}) => Color::Cyan.bold().paint(token_node.tag().slice(line)),
|
||||
TokenNode::Token(Tagged {
|
||||
item: RawToken::ExternalWord,
|
||||
..
|
||||
}) => Color::Black.bold().paint(token_node.tag().slice(line)),
|
||||
};
|
||||
#[allow(unused)]
|
||||
fn vec_tag<T>(input: Vec<Tagged<T>>) -> Option<Tag> {
|
||||
let mut iter = input.iter();
|
||||
let first = iter.next()?.tag;
|
||||
let last = iter.last();
|
||||
|
||||
styled.to_string()
|
||||
Some(match last {
|
||||
None => first,
|
||||
Some(last) => first.until(last.tag),
|
||||
})
|
||||
}
|
||||
|
||||
fn paint_pipeline_element(pipeline_element: &PipelineElement, line: &str) -> String {
|
||||
let mut styled = String::new();
|
||||
|
||||
if let Some(_) = pipeline_element.pipe {
|
||||
styled.push_str(&Color::Purple.paint("|"));
|
||||
}
|
||||
|
||||
if let Some(ws) = pipeline_element.pre_ws {
|
||||
styled.push_str(&Color::White.normal().paint(ws.slice(line)));
|
||||
}
|
||||
|
||||
styled.push_str(
|
||||
&Color::Cyan
|
||||
.bold()
|
||||
.paint(pipeline_element.call().head().tag().slice(line))
|
||||
.to_string(),
|
||||
);
|
||||
|
||||
if let Some(children) = pipeline_element.call().children() {
|
||||
for child in children {
|
||||
styled.push_str(&paint_token_node(child, line));
|
||||
fn paint_flat_shape(flat_shape: Tagged<FlatShape>, line: &str) -> String {
|
||||
let style = match &flat_shape.item {
|
||||
FlatShape::OpenDelimiter(_) => Color::White.normal(),
|
||||
FlatShape::CloseDelimiter(_) => Color::White.normal(),
|
||||
FlatShape::ItVariable => Color::Purple.bold(),
|
||||
FlatShape::Variable => Color::Purple.normal(),
|
||||
FlatShape::Operator => Color::Yellow.normal(),
|
||||
FlatShape::Dot => Color::White.normal(),
|
||||
FlatShape::InternalCommand => Color::Cyan.bold(),
|
||||
FlatShape::ExternalCommand => Color::Cyan.normal(),
|
||||
FlatShape::ExternalWord => Color::Black.bold(),
|
||||
FlatShape::BareMember => Color::Yellow.bold(),
|
||||
FlatShape::StringMember => Color::Yellow.bold(),
|
||||
FlatShape::String => Color::Green.normal(),
|
||||
FlatShape::Path => Color::Cyan.normal(),
|
||||
FlatShape::GlobPattern => Color::Cyan.bold(),
|
||||
FlatShape::Word => Color::Green.normal(),
|
||||
FlatShape::Pipe => Color::Purple.bold(),
|
||||
FlatShape::Flag => Color::Black.bold(),
|
||||
FlatShape::ShorthandFlag => Color::Black.bold(),
|
||||
FlatShape::Int => Color::Purple.bold(),
|
||||
FlatShape::Decimal => Color::Purple.bold(),
|
||||
FlatShape::Whitespace => Color::White.normal(),
|
||||
FlatShape::Error => Color::Red.bold(),
|
||||
FlatShape::Size { number, unit } => {
|
||||
let number = number.slice(line);
|
||||
let unit = unit.slice(line);
|
||||
return format!(
|
||||
"{}{}",
|
||||
Color::Purple.bold().paint(number),
|
||||
Color::Cyan.bold().paint(unit)
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if let Some(ws) = pipeline_element.post_ws {
|
||||
styled.push_str(&Color::White.normal().paint(ws.slice(line)));
|
||||
}
|
||||
|
||||
styled.to_string()
|
||||
let body = flat_shape.tag.slice(line);
|
||||
style.paint(body).to_string()
|
||||
}
|
||||
|
||||
impl rustyline::Helper for Helper {}
|
||||
|
@ -212,7 +212,7 @@ fn open_can_parse_ini() {
|
||||
fn open_can_parse_utf16_ini() {
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats",
|
||||
"open utf16.ini | get .ShellClassInfo | get IconIndex | echo $it"
|
||||
"open utf16.ini | get '.ShellClassInfo' | get IconIndex | echo $it"
|
||||
);
|
||||
|
||||
assert_eq!(actual, "-236")
|
||||
|
@ -93,6 +93,7 @@ macro_rules! nu {
|
||||
.write_all(commands.as_bytes())
|
||||
.expect("couldn't write to stdin");
|
||||
|
||||
|
||||
let output = process
|
||||
.wait_with_output()
|
||||
.expect("couldn't read from stdout");
|
||||
|
Loading…
Reference in New Issue
Block a user