Pull in upstream changes.

This commit is contained in:
Thomas Hartmann 2019-10-14 23:05:52 +02:00
commit 65546646a7
125 changed files with 9271 additions and 4140 deletions

View File

@ -5,10 +5,25 @@ strategy:
matrix:
linux-nightly:
image: ubuntu-16.04
style: 'unflagged'
macos-nightly:
image: macos-10.14
style: 'unflagged'
windows-nightly:
image: vs2017-win2016
style: 'unflagged'
linux-nightly-canary:
image: ubuntu-16.04
style: 'canary'
macos-nightly-canary:
image: macos-10.14
style: 'canary'
windows-nightly-canary:
image: vs2017-win2016
style: 'canary'
fmt:
image: ubuntu-16.04
style: 'fmt'
pool:
vmImage: $(image)
@ -27,6 +42,11 @@ steps:
rustup component add rustfmt --toolchain `cat rust-toolchain`
displayName: Install Rust
- bash: RUSTFLAGS="-D warnings" cargo test --all-features
condition: eq(variables['style'], 'unflagged')
displayName: Run tests
- bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all-features
condition: eq(variables['style'], 'canary')
displayName: Run tests
- bash: cargo fmt --all -- --check
condition: eq(variables['style'], 'fmt')
displayName: Lint

1206
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -55,7 +55,7 @@ surf = "1.0.2"
url = "2.1.0"
roxmltree = "0.7.0"
nom_locate = "1.0.0"
enum-utils = "0.1.1"
nom-tracable = "0.4.0"
unicode-xid = "0.2.0"
serde_ini = "0.2.0"
subprocess = "0.1.18"
@ -65,7 +65,6 @@ hex = "0.3.2"
tempfile = "3.1.0"
semver = "0.9.0"
which = "2.0.1"
uuid = {version = "0.7.4", features = [ "v4", "serde" ]}
textwrap = {version = "0.11.0", features = ["term_size"]}
shellexpand = "1.0.0"
futures-timer = "0.4.0"
@ -75,13 +74,13 @@ bigdecimal = { version = "0.1.0", features = ["serde"] }
natural = "0.3.0"
serde_urlencoded = "0.6.1"
sublime_fuzzy = "0.5"
regex = "1"
regex = {version = "1", optional = true }
neso = { version = "0.5.0", optional = true }
crossterm = { version = "0.10.2", optional = true }
syntect = {version = "3.2.0", optional = true }
onig_sys = {version = "=69.1.0", optional = true }
heim = {version = "0.0.8-alpha.1", optional = true }
heim = {version = "0.0.8", optional = true }
battery = {version = "0.7.4", optional = true }
rawkey = {version = "0.1.2", optional = true }
clipboard = {version = "0.5", optional = true }
@ -95,6 +94,8 @@ textview = ["syntect", "onig_sys", "crossterm"]
binaryview = ["image", "crossterm"]
sys = ["heim", "battery"]
ps = ["heim"]
# trace = ["nom-tracable/trace"]
all = ["raw-key", "textview", "binaryview", "sys", "ps", "clipboard", "ptree"]
[dependencies.rusqlite]
version = "0.20.0"
@ -103,6 +104,10 @@ features = ["bundled", "blob"]
[dev-dependencies]
pretty_assertions = "0.6.1"
[build-dependencies]
toml = "0.5.3"
serde = { version = "1.0.101", features = ["derive"] }
[lib]
name = "nu"
path = "src/lib.rs"
@ -138,6 +143,7 @@ path = "src/plugins/skip.rs"
[[bin]]
name = "nu_plugin_match"
path = "src/plugins/match.rs"
required-features = ["regex"]
[[bin]]
name = "nu_plugin_sys"

39
build.rs Normal file
View File

@ -0,0 +1,39 @@
use serde::Deserialize;
use std::collections::HashMap;
use std::collections::HashSet;
use std::env;
use std::path::Path;
/// One entry in `features.toml`: an experimental feature flag that the
/// build script may turn into a `cargo:rustc-cfg` flag.
#[derive(Deserialize)]
struct Feature {
// Human-readable explanation of the feature; deserialized from the toml
// but not otherwise read by the build script.
#[allow(unused)]
description: String,
// Whether the flag is emitted by default (can be overridden at build time
// via NUSHELL_ENABLE_ALL_FLAGS / NUSHELL_ENABLE_FLAGS).
enabled: bool,
}
/// Build script entry point: reads `features.toml` from the manifest
/// directory and emits a `cargo:rustc-cfg=<name>` flag for every feature
/// that is enabled in the toml, force-enabled via `NUSHELL_ENABLE_ALL_FLAGS`,
/// or listed in the comma-separated `NUSHELL_ENABLE_FLAGS` variable.
///
/// Returns an error (failing the build) if the manifest directory is not
/// set, the toml file cannot be read, or it fails to parse.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Set by cargo for build scripts; propagate failure instead of panicking
    // since this fn already returns Result.
    let manifest_dir = env::var("CARGO_MANIFEST_DIR")?;

    let all_on = env::var("NUSHELL_ENABLE_ALL_FLAGS").is_ok();

    // Comma-separated list of feature names. Empty segments (an empty
    // variable or a trailing comma) are ignored so they do not count as a
    // "flag" when warning about redundant configuration below.
    let flags: HashSet<String> = env::var("NUSHELL_ENABLE_FLAGS")
        .map(|s| {
            s.split(',')
                .filter(|f| !f.is_empty())
                .map(|f| f.to_string())
                .collect()
        })
        .unwrap_or_else(|_| HashSet::new());

    if all_on && !flags.is_empty() {
        println!(
            "cargo:warning=Both NUSHELL_ENABLE_ALL_FLAGS and NUSHELL_ENABLE_FLAGS were set. You don't need both."
        );
    }

    let path = Path::new(&manifest_dir).join("features.toml");
    let toml: HashMap<String, Feature> = toml::from_str(&std::fs::read_to_string(path)?)?;

    for (key, value) in toml.iter() {
        if value.enabled || all_on || flags.contains(key) {
            println!("cargo:rustc-cfg={}", key);
        }
    }

    Ok(())
}

4
features.toml Normal file
View File

@ -0,0 +1,4 @@
[hintsv1]
description = "Adding hints based upon error states in the syntax highlighter"
enabled = false

View File

@ -1,4 +1,3 @@
use crate::commands::autoview;
use crate::commands::classified::{
ClassifiedCommand, ClassifiedInputStream, ClassifiedPipeline, ExternalCommand, InternalCommand,
StreamNext,
@ -13,7 +12,12 @@ pub(crate) use crate::errors::ShellError;
use crate::fuzzysearch::{interactive_fuzzy_search, SelectionResult};
use crate::git::current_branch;
use crate::parser::registry::Signature;
use crate::parser::{hir, CallNode, Pipeline, PipelineElement, TokenNode};
use crate::parser::{
hir,
hir::syntax_shape::{expand_syntax, PipelineShape},
hir::{expand_external_tokens::expand_external_tokens, tokens_iterator::TokensIterator},
TokenNode,
};
use crate::prelude::*;
use log::{debug, trace};
@ -24,7 +28,7 @@ use std::error::Error;
use std::io::{BufRead, BufReader, Write};
use std::iter::Iterator;
use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::atomic::Ordering;
#[derive(Debug)]
pub enum MaybeOwned<'a, T> {
@ -75,7 +79,7 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel
let name = params.name.clone();
let fname = fname.to_string();
if context.has_command(&name) {
if let Some(_) = context.get_command(&name) {
trace!("plugin {:?} already loaded.", &name);
} else {
if params.is_filter {
@ -94,11 +98,17 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel
},
Err(e) => {
trace!("incompatible plugin {:?}", input);
Err(ShellError::string(format!("Error: {:?}", e)))
Err(ShellError::untagged_runtime_error(format!(
"Error: {:?}",
e
)))
}
}
}
Err(e) => Err(ShellError::string(format!("Error: {:?}", e))),
Err(e) => Err(ShellError::untagged_runtime_error(format!(
"Error: {:?}",
e
))),
};
let _ = child.wait();
@ -315,6 +325,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
)]);
}
}
let _ = load_plugins(&mut context);
let config = Config::builder().color_mode(ColorMode::Forced).build();
@ -328,24 +339,21 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
// we are ok if history does not exist
let _ = rl.load_history(&History::path());
let ctrl_c = Arc::new(AtomicBool::new(false));
let cc = ctrl_c.clone();
let cc = context.ctrl_c.clone();
ctrlc::set_handler(move || {
cc.store(true, Ordering::SeqCst);
})
.expect("Error setting Ctrl-C handler");
let mut ctrlcbreak = false;
loop {
if ctrl_c.load(Ordering::SeqCst) {
ctrl_c.store(false, Ordering::SeqCst);
if context.ctrl_c.load(Ordering::SeqCst) {
context.ctrl_c.store(false, Ordering::SeqCst);
continue;
}
let cwd = context.shell_manager.path();
rl.set_helper(Some(crate::shell::Helper::new(
context.shell_manager.clone(),
)));
rl.set_helper(Some(crate::shell::Helper::new(context.clone())));
let edit_mode = config::config(Tag::unknown())?
.get("edit_mode")
@ -429,21 +437,11 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
}
}
LineResult::Error(mut line, err) => {
LineResult::Error(line, err) => {
rl.add_history_entry(line.clone());
let diag = err.to_diagnostic();
context.with_host(|host| {
let writer = host.err_termcolor();
line.push_str(" ");
let files = crate::parser::Files::new(line);
let _ = std::panic::catch_unwind(move || {
let _ = language_reporting::emit(
&mut writer.lock(),
&files,
&diag,
&language_reporting::DefaultConfig,
);
});
print_err(err, host, &Text::from(line));
})
}
@ -460,6 +458,14 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
Ok(())
}
/// Remove a single trailing `'\n'` from `s`, if one is present.
///
/// Only the final newline byte is stripped; a preceding `'\r'` (Windows
/// line endings) or any earlier newlines are intentionally left in place.
fn chomp_newline(s: &str) -> &str {
    match s.ends_with('\n') {
        true => &s[..s.len() - 1],
        false => s,
    }
}
enum LineResult {
Success(String),
Error(String, ShellError),
@ -472,9 +478,11 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
Ok(line) if line.trim() == "" => LineResult::Success(line.clone()),
Ok(line) => {
let result = match crate::parser::parse(&line, uuid::Uuid::nil()) {
let line = chomp_newline(line);
let result = match crate::parser::parse(&line) {
Err(err) => {
return LineResult::Error(line.clone(), err);
return LineResult::Error(line.to_string(), err);
}
Ok(val) => val,
@ -485,7 +493,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
let mut pipeline = match classify_pipeline(&result, ctx, &Text::from(line)) {
Ok(pipeline) => pipeline,
Err(err) => return LineResult::Error(line.clone(), err),
Err(err) => return LineResult::Error(line.to_string(), err),
};
match pipeline.commands.last() {
@ -493,7 +501,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
_ => pipeline
.commands
.push(ClassifiedCommand::Internal(InternalCommand {
command: whole_stream_command(autoview::Autoview),
name: "autoview".to_string(),
name_tag: Tag::unknown(),
args: hir::Call::new(
Box::new(hir::Expression::synthetic_string("autoview")),
@ -515,16 +523,24 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
input = match (item, next) {
(None, _) => break,
(Some(ClassifiedCommand::Dynamic(_)), _)
| (_, Some(ClassifiedCommand::Dynamic(_))) => {
return LineResult::Error(
line.to_string(),
ShellError::unimplemented("Dynamic commands"),
)
}
(Some(ClassifiedCommand::Expr(_)), _) => {
return LineResult::Error(
line.clone(),
line.to_string(),
ShellError::unimplemented("Expression-only commands"),
)
}
(_, Some(ClassifiedCommand::Expr(_))) => {
return LineResult::Error(
line.clone(),
line.to_string(),
ShellError::unimplemented("Expression-only commands"),
)
}
@ -532,31 +548,46 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
(
Some(ClassifiedCommand::Internal(left)),
Some(ClassifiedCommand::External(_)),
) => match left
.run(ctx, input, Text::from(line), is_first_command)
.await
{
) => match left.run(ctx, input, Text::from(line), is_first_command) {
Ok(val) => ClassifiedInputStream::from_input_stream(val),
Err(err) => return LineResult::Error(line.clone(), err),
Err(err) => return LineResult::Error(line.to_string(), err),
},
(Some(ClassifiedCommand::Internal(left)), Some(_)) => {
match left
.run(ctx, input, Text::from(line), is_first_command)
.await
{
match left.run(ctx, input, Text::from(line), is_first_command) {
Ok(val) => ClassifiedInputStream::from_input_stream(val),
Err(err) => return LineResult::Error(line.clone(), err),
Err(err) => return LineResult::Error(line.to_string(), err),
}
}
(Some(ClassifiedCommand::Internal(left)), None) => {
match left
.run(ctx, input, Text::from(line), is_first_command)
.await
{
Ok(val) => ClassifiedInputStream::from_input_stream(val),
Err(err) => return LineResult::Error(line.clone(), err),
match left.run(ctx, input, Text::from(line), is_first_command) {
Ok(val) => {
use futures::stream::TryStreamExt;
let mut output_stream: OutputStream = val.into();
loop {
match output_stream.try_next().await {
Ok(Some(ReturnSuccess::Value(Tagged {
item: Value::Error(e),
..
}))) => {
return LineResult::Error(line.to_string(), e);
}
Ok(Some(_item)) => {
if ctx.ctrl_c.load(Ordering::SeqCst) {
break;
}
}
_ => {
break;
}
}
}
return LineResult::Success(line.to_string());
}
Err(err) => return LineResult::Error(line.to_string(), err),
}
}
@ -565,20 +596,20 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
Some(ClassifiedCommand::External(_)),
) => match left.run(ctx, input, StreamNext::External).await {
Ok(val) => val,
Err(err) => return LineResult::Error(line.clone(), err),
Err(err) => return LineResult::Error(line.to_string(), err),
},
(Some(ClassifiedCommand::External(left)), Some(_)) => {
match left.run(ctx, input, StreamNext::Internal).await {
Ok(val) => val,
Err(err) => return LineResult::Error(line.clone(), err),
Err(err) => return LineResult::Error(line.to_string(), err),
}
}
(Some(ClassifiedCommand::External(left)), None) => {
match left.run(ctx, input, StreamNext::Last).await {
Ok(val) => val,
Err(err) => return LineResult::Error(line.clone(), err),
Err(err) => return LineResult::Error(line.to_string(), err),
}
}
};
@ -586,7 +617,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
is_first_command = false;
}
LineResult::Success(line.clone())
LineResult::Success(line.to_string())
}
Err(ReadlineError::Interrupted) => LineResult::CtrlC,
Err(ReadlineError::Eof) => LineResult::Break,
@ -602,95 +633,52 @@ fn classify_pipeline(
context: &Context,
source: &Text,
) -> Result<ClassifiedPipeline, ShellError> {
let pipeline = pipeline.as_pipeline()?;
let mut pipeline_list = vec![pipeline.clone()];
let mut iterator = TokensIterator::all(&mut pipeline_list, pipeline.span());
let Pipeline { parts, .. } = pipeline;
let commands: Result<Vec<_>, ShellError> = parts
.iter()
.map(|item| classify_command(&item, context, &source))
.collect();
Ok(ClassifiedPipeline {
commands: commands?,
})
}
fn classify_command(
command: &PipelineElement,
context: &Context,
source: &Text,
) -> Result<ClassifiedCommand, ShellError> {
let call = command.call();
match call {
// If the command starts with `^`, treat it as an external command no matter what
call if call.head().is_external() => {
let name_tag = call.head().expect_external();
let name = name_tag.slice(source);
Ok(external_command(call, source, name.tagged(name_tag)))
}
// Otherwise, if the command is a bare word, we'll need to triage it
call if call.head().is_bare() => {
let head = call.head();
let name = head.source(source);
match context.has_command(name) {
// if the command is in the registry, it's an internal command
true => {
let command = context.get_command(name);
let config = command.signature();
trace!(target: "nu::build_pipeline", "classifying {:?}", config);
let args: hir::Call = config.parse_args(call, &context, source)?;
trace!(target: "nu::build_pipeline", "args :: {}", args.debug(source));
Ok(ClassifiedCommand::Internal(InternalCommand {
command,
name_tag: head.tag(),
args,
}))
}
// otherwise, it's an external command
false => Ok(external_command(call, source, name.tagged(head.tag()))),
}
}
// If the command is something else (like a number or a variable), that is currently unsupported.
// We might support `$somevar` as a curried command in the future.
call => Err(ShellError::invalid_command(call.head().tag())),
}
expand_syntax(
&PipelineShape,
&mut iterator,
&context.expand_context(source, pipeline.span()),
)
}
// Classify this command as an external command, which doesn't give special meaning
// to nu syntactic constructs, and passes all arguments to the external command as
// strings.
fn external_command(
call: &Tagged<CallNode>,
pub(crate) fn external_command(
tokens: &mut TokensIterator,
source: &Text,
name: Tagged<&str>,
) -> ClassifiedCommand {
let arg_list_strings: Vec<Tagged<String>> = match call.children() {
Some(args) => args
) -> Result<ClassifiedCommand, ShellError> {
let arg_list_strings = expand_external_tokens(tokens, source)?;
Ok(ClassifiedCommand::External(ExternalCommand {
name: name.to_string(),
name_tag: name.tag(),
args: arg_list_strings
.iter()
.filter_map(|i| match i {
TokenNode::Whitespace(_) => None,
other => Some(other.as_external_arg(source).tagged(other.tag())),
.map(|x| Tagged {
tag: x.span.into(),
item: x.item.clone(),
})
.collect(),
None => vec![],
};
let (name, tag) = name.into_parts();
ClassifiedCommand::External(ExternalCommand {
name: name.to_string(),
name_tag: tag,
args: arg_list_strings,
})
}))
}
/// Render a `ShellError` as a diagnostic on the host's error stream.
///
/// The error is converted to a diagnostic and emitted through
/// `language_reporting` against a `Files` view of the source text.
pub fn print_err(err: ShellError, host: &dyn Host, source: &Text) {
    let diagnostic = err.to_diagnostic();
    let writer = host.err_termcolor();

    // A single trailing space is appended before building `Files` —
    // presumably this avoids an end-of-input edge case in the reporting
    // engine; preserve it.
    let mut padded_source = source.to_string();
    padded_source.push_str(" ");
    let files = crate::parser::Files::new(padded_source);

    // Rendering must never take down the shell: swallow both panics from
    // the reporter and any emit error.
    let _ = std::panic::catch_unwind(move || {
        let _ = language_reporting::emit(
            &mut writer.lock(),
            &files,
            &diagnostic,
            &language_reporting::DefaultConfig,
        );
    });
}

View File

@ -76,6 +76,7 @@ pub(crate) use command::{
UnevaluatedCallInfo, WholeStreamCommand,
};
pub(crate) use classified::ClassifiedCommand;
pub(crate) use config::Config;
pub(crate) use cp::Cpy;
pub(crate) use date::Date;

View File

@ -1,9 +1,14 @@
use crate::commands::{RawCommandArgs, WholeStreamCommand};
use crate::errors::ShellError;
use crate::parser::hir::{Expression, NamedArguments};
use crate::prelude::*;
use futures::stream::TryStreamExt;
use std::sync::atomic::Ordering;
pub struct Autoview;
const STREAM_PAGE_SIZE: u64 = 50;
#[derive(Deserialize)]
pub struct AutoviewArgs {}
@ -31,61 +36,132 @@ impl WholeStreamCommand for Autoview {
pub fn autoview(
AutoviewArgs {}: AutoviewArgs,
mut context: RunnableContext,
context: RunnableContext,
raw: RawCommandArgs,
) -> Result<OutputStream, ShellError> {
Ok(OutputStream::new(async_stream! {
let input = context.input.drain_vec().await;
let binary = context.get_command("binaryview");
let text = context.get_command("textview");
let table = context.get_command("table");
if input.len() > 0 {
if let Tagged {
item: Value::Primitive(Primitive::Binary(_)),
..
} = input[0usize]
{
let binary = context.get_command("binaryview");
if let Some(binary) = binary {
let result = binary.run(raw.with_input(input), &context.commands, false);
result.collect::<Vec<_>>().await;
} else {
for i in input {
match i.item {
Value::Primitive(Primitive::Binary(b)) => {
use pretty_hex::*;
println!("{:?}", b.hex_dump());
Ok(OutputStream::new(async_stream! {
let mut output_stream: OutputStream = context.input.into();
match output_stream.try_next().await {
Ok(Some(x)) => {
match output_stream.try_next().await {
Ok(Some(y)) => {
let ctrl_c = context.ctrl_c.clone();
let stream = async_stream! {
yield Ok(x);
yield Ok(y);
loop {
match output_stream.try_next().await {
Ok(Some(z)) => {
if ctrl_c.load(Ordering::SeqCst) {
break;
}
yield Ok(z);
}
_ => break,
}
}
};
if let Some(table) = table {
let mut new_output_stream: OutputStream = stream.to_output_stream();
let mut finished = false;
let mut current_idx = 0;
loop {
let mut new_input = VecDeque::new();
for _ in 0..STREAM_PAGE_SIZE {
match new_output_stream.try_next().await {
Ok(Some(a)) => {
if let ReturnSuccess::Value(v) = a {
new_input.push_back(v);
}
}
_ => {
finished = true;
break;
}
}
}
let raw = raw.clone();
let mut command_args = raw.with_input(new_input.into());
let mut named_args = NamedArguments::new();
named_args.insert_optional("start_number", Some(Expression::number(current_idx, Tag::unknown())));
command_args.call_info.args.named = Some(named_args);
let result = table.run(command_args, &context.commands, false);
result.collect::<Vec<_>>().await;
if finished {
break;
} else {
current_idx += STREAM_PAGE_SIZE;
}
}
_ => {}
}
}
};
} else if is_single_anchored_text_value(&input) {
let text = context.get_command("textview");
if let Some(text) = text {
let result = text.run(raw.with_input(input), &context.commands, false);
result.collect::<Vec<_>>().await;
} else {
for i in input {
match i.item {
Value::Primitive(Primitive::String(s)) => {
println!("{}", s);
_ => {
if let ReturnSuccess::Value(x) = x {
match x {
Tagged {
item: Value::Primitive(Primitive::String(ref s)),
tag: Tag { anchor, span },
} if anchor.is_some() => {
if let Some(text) = text {
let mut stream = VecDeque::new();
stream.push_back(Value::string(s).tagged(Tag { anchor, span }));
let result = text.run(raw.with_input(stream.into()), &context.commands, false);
result.collect::<Vec<_>>().await;
} else {
println!("{}", s);
}
}
Tagged {
item: Value::Primitive(Primitive::String(s)),
..
} => {
println!("{}", s);
}
Tagged { item: Value::Primitive(Primitive::Binary(ref b)), .. } => {
if let Some(binary) = binary {
let mut stream = VecDeque::new();
stream.push_back(x.clone());
let result = binary.run(raw.with_input(stream.into()), &context.commands, false);
result.collect::<Vec<_>>().await;
} else {
use pretty_hex::*;
println!("{:?}", b.hex_dump());
}
}
Tagged { item: Value::Error(e), .. } => {
yield Err(e);
}
Tagged { item: ref item, .. } => {
if let Some(table) = table {
let mut stream = VecDeque::new();
stream.push_back(x.clone());
let result = table.run(raw.with_input(stream.into()), &context.commands, false);
result.collect::<Vec<_>>().await;
} else {
println!("{:?}", item);
}
}
}
_ => {}
}
}
}
} else if is_single_text_value(&input) {
for i in input {
match i.item {
Value::Primitive(Primitive::String(s)) => {
println!("{}", s);
}
_ => {}
}
}
} else {
let table = context.expect_command("table");
let result = table.run(raw.with_input(input), &context.commands, false);
result.collect::<Vec<_>>().await;
}
_ => {
//println!("<no results>");
}
}
@ -95,34 +171,3 @@ pub fn autoview(
}
}))
}
/// Returns `true` when `input` is exactly one element and that element is a
/// primitive string value.
///
/// Takes a slice rather than `&Vec<_>` so callers with any contiguous
/// collection can use it; existing `&vec` call sites still work via deref
/// coercion.
fn is_single_text_value(input: &[Tagged<Value>]) -> bool {
    // A fixed-length slice pattern covers both the "exactly one element"
    // and "that element is a string" checks in one match.
    match input {
        [Tagged {
            item: Value::Primitive(Primitive::String(_)),
            ..
        }] => true,
        _ => false,
    }
}
/// Returns `true` when `input` is exactly one primitive string value whose
/// tag carries a non-nil anchor (i.e. the value is traceable to a source
/// location).
///
/// Takes a slice rather than `&Vec<_>`; existing `&vec` call sites still
/// work via deref coercion.
fn is_single_anchored_text_value(input: &[Tagged<Value>]) -> bool {
    match input {
        [Tagged {
            item: Value::Primitive(Primitive::String(_)),
            tag: Tag { anchor, .. },
        }] => *anchor != uuid::Uuid::nil(),
        _ => false,
    }
}

View File

@ -1,12 +1,11 @@
use crate::commands::Command;
use crate::parser::{hir, TokenNode};
use crate::prelude::*;
use bytes::{BufMut, BytesMut};
use derive_new::new;
use futures::stream::StreamExt;
use futures_codec::{Decoder, Encoder, Framed};
use log::{log_enabled, trace};
use std::io::{Error, ErrorKind};
use std::sync::Arc;
use subprocess::Exec;
/// A simple `Codec` implementation that splits up data into lines.
@ -73,25 +72,35 @@ impl ClassifiedInputStream {
}
}
#[derive(Debug)]
pub(crate) struct ClassifiedPipeline {
pub(crate) commands: Vec<ClassifiedCommand>,
}
#[derive(Debug, Eq, PartialEq)]
pub(crate) enum ClassifiedCommand {
#[allow(unused)]
Expr(TokenNode),
Internal(InternalCommand),
#[allow(unused)]
Dynamic(hir::Call),
External(ExternalCommand),
}
#[derive(new, Debug, Eq, PartialEq)]
pub(crate) struct InternalCommand {
pub(crate) command: Arc<Command>,
pub(crate) name: String,
pub(crate) name_tag: Tag,
pub(crate) args: hir::Call,
}
#[derive(new, Debug, Eq, PartialEq)]
pub(crate) struct DynamicCommand {
pub(crate) args: hir::Call,
}
impl InternalCommand {
pub(crate) async fn run(
pub(crate) fn run(
self,
context: &mut Context,
input: ClassifiedInputStream,
@ -100,91 +109,99 @@ impl InternalCommand {
) -> Result<InputStream, ShellError> {
if log_enabled!(log::Level::Trace) {
trace!(target: "nu::run::internal", "->");
trace!(target: "nu::run::internal", "{}", self.command.name());
trace!(target: "nu::run::internal", "{}", self.name);
trace!(target: "nu::run::internal", "{}", self.args.debug(&source));
}
let objects: InputStream =
trace_stream!(target: "nu::trace_stream::internal", "input" = input.objects);
let result = context.run_command(
self.command,
self.name_tag.clone(),
context.source_map.clone(),
self.args,
&source,
objects,
is_first_command,
);
let command = context.expect_command(&self.name);
let result = {
context.run_command(
command,
self.name_tag.clone(),
self.args,
&source,
objects,
is_first_command,
)
};
let result = trace_out_stream!(target: "nu::trace_stream::internal", source: &source, "output" = result);
let mut result = result.values;
let mut context = context.clone();
let mut stream = VecDeque::new();
while let Some(item) = result.next().await {
match item? {
ReturnSuccess::Action(action) => match action {
CommandAction::ChangePath(path) => {
context.shell_manager.set_path(path);
}
CommandAction::AddAnchorLocation(uuid, anchor_location) => {
context.add_anchor_location(uuid, anchor_location);
}
CommandAction::Exit => std::process::exit(0), // TODO: save history.txt
CommandAction::EnterHelpShell(value) => {
match value {
Tagged {
item: Value::Primitive(Primitive::String(cmd)),
tag,
} => {
context.shell_manager.insert_at_current(Box::new(
HelpShell::for_command(
Value::string(cmd).tagged(tag),
&context.registry(),
)?,
));
}
_ => {
context.shell_manager.insert_at_current(Box::new(
HelpShell::index(&context.registry())?,
));
let stream = async_stream! {
while let Some(item) = result.next().await {
match item {
Ok(ReturnSuccess::Action(action)) => match action {
CommandAction::ChangePath(path) => {
context.shell_manager.set_path(path);
}
CommandAction::Exit => std::process::exit(0), // TODO: save history.txt
CommandAction::EnterHelpShell(value) => {
match value {
Tagged {
item: Value::Primitive(Primitive::String(cmd)),
tag,
} => {
context.shell_manager.insert_at_current(Box::new(
HelpShell::for_command(
Value::string(cmd).tagged(tag),
&context.registry(),
).unwrap(),
));
}
_ => {
context.shell_manager.insert_at_current(Box::new(
HelpShell::index(&context.registry()).unwrap(),
));
}
}
}
}
CommandAction::EnterValueShell(value) => {
context
.shell_manager
.insert_at_current(Box::new(ValueShell::new(value)));
}
CommandAction::EnterShell(location) => {
context.shell_manager.insert_at_current(Box::new(
FilesystemShell::with_location(location, context.registry().clone())?,
));
}
CommandAction::PreviousShell => {
context.shell_manager.prev();
}
CommandAction::NextShell => {
context.shell_manager.next();
}
CommandAction::LeaveShell => {
context.shell_manager.remove_at_current();
if context.shell_manager.is_empty() {
std::process::exit(0); // TODO: save history.txt
CommandAction::EnterValueShell(value) => {
context
.shell_manager
.insert_at_current(Box::new(ValueShell::new(value)));
}
}
},
CommandAction::EnterShell(location) => {
context.shell_manager.insert_at_current(Box::new(
FilesystemShell::with_location(location, context.registry().clone()).unwrap(),
));
}
CommandAction::PreviousShell => {
context.shell_manager.prev();
}
CommandAction::NextShell => {
context.shell_manager.next();
}
CommandAction::LeaveShell => {
context.shell_manager.remove_at_current();
if context.shell_manager.is_empty() {
std::process::exit(0); // TODO: save history.txt
}
}
},
ReturnSuccess::Value(v) => {
stream.push_back(v);
Ok(ReturnSuccess::Value(v)) => {
yield Ok(v);
}
Err(x) => {
yield Ok(Value::Error(x).tagged_unknown());
break;
}
}
}
}
};
Ok(stream.into())
Ok(stream.to_input_stream())
}
}
#[derive(Debug, Eq, PartialEq)]
pub(crate) struct ExternalCommand {
pub(crate) name: String,
@ -192,6 +209,7 @@ pub(crate) struct ExternalCommand {
pub(crate) args: Vec<Tagged<String>>,
}
#[derive(Debug)]
pub(crate) enum StreamNext {
Last,
External,
@ -221,6 +239,8 @@ impl ExternalCommand {
process = Exec::cmd(&self.name);
trace!(target: "nu::run::external", "command = {:?}", process);
if arg_string.contains("$it") {
let mut first = true;
@ -239,7 +259,11 @@ impl ExternalCommand {
tag,
));
} else {
return Err(ShellError::string("Error: $it needs string data"));
return Err(ShellError::labeled_error(
"Error: $it needs string data",
"given something else",
name_tag,
));
}
}
if !first {
@ -275,6 +299,8 @@ impl ExternalCommand {
process = process.cwd(context.shell_manager.path());
trace!(target: "nu::run::external", "cwd = {:?}", context.shell_manager.path());
let mut process = match stream_next {
StreamNext::Last => process,
StreamNext::External | StreamNext::Internal => {
@ -282,43 +308,59 @@ impl ExternalCommand {
}
};
trace!(target: "nu::run::external", "set up stdout pipe");
if let Some(stdin) = stdin {
process = process.stdin(stdin);
}
let mut popen = process.popen()?;
trace!(target: "nu::run::external", "set up stdin pipe");
trace!(target: "nu::run::external", "built process {:?}", process);
match stream_next {
StreamNext::Last => {
let _ = popen.detach();
loop {
match popen.poll() {
None => {
let _ = std::thread::sleep(std::time::Duration::new(0, 100000000));
}
_ => {
let _ = popen.terminate();
break;
let popen = process.popen();
trace!(target: "nu::run::external", "next = {:?}", stream_next);
if let Ok(mut popen) = popen {
match stream_next {
StreamNext::Last => {
let _ = popen.detach();
loop {
match popen.poll() {
None => {
let _ = std::thread::sleep(std::time::Duration::new(0, 100000000));
}
_ => {
let _ = popen.terminate();
break;
}
}
}
Ok(ClassifiedInputStream::new())
}
StreamNext::External => {
let _ = popen.detach();
let stdout = popen.stdout.take().unwrap();
Ok(ClassifiedInputStream::from_stdout(stdout))
}
StreamNext::Internal => {
let _ = popen.detach();
let stdout = popen.stdout.take().unwrap();
let file = futures::io::AllowStdIo::new(stdout);
let stream = Framed::new(file, LinesCodec {});
let stream =
stream.map(move |line| Value::string(line.unwrap()).tagged(&name_tag));
Ok(ClassifiedInputStream::from_input_stream(
stream.boxed() as BoxStream<'static, Tagged<Value>>
))
}
Ok(ClassifiedInputStream::new())
}
StreamNext::External => {
let _ = popen.detach();
let stdout = popen.stdout.take().unwrap();
Ok(ClassifiedInputStream::from_stdout(stdout))
}
StreamNext::Internal => {
let _ = popen.detach();
let stdout = popen.stdout.take().unwrap();
let file = futures::io::AllowStdIo::new(stdout);
let stream = Framed::new(file, LinesCodec {});
let stream = stream.map(move |line| Value::string(line.unwrap()).tagged(name_tag));
Ok(ClassifiedInputStream::from_input_stream(
stream.boxed() as BoxStream<'static, Tagged<Value>>
))
}
} else {
return Err(ShellError::labeled_error(
"Command not found",
"command not found",
name_tag,
));
}
}
}

View File

@ -1,4 +1,3 @@
use crate::context::{AnchorLocation, SourceMap};
use crate::data::Value;
use crate::errors::ShellError;
use crate::evaluate::Scope;
@ -11,13 +10,12 @@ use serde::{Deserialize, Serialize};
use std::fmt;
use std::ops::Deref;
use std::path::PathBuf;
use uuid::Uuid;
use std::sync::atomic::AtomicBool;
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct UnevaluatedCallInfo {
pub args: hir::Call,
pub source: Text,
pub source_map: SourceMap,
pub name_tag: Tag,
}
@ -37,7 +35,6 @@ impl UnevaluatedCallInfo {
Ok(CallInfo {
args,
source_map: self.source_map,
name_tag: self.name_tag,
})
}
@ -46,7 +43,6 @@ impl UnevaluatedCallInfo {
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct CallInfo {
pub args: registry::EvaluatedArgs,
pub source_map: SourceMap,
pub name_tag: Tag,
}
@ -62,7 +58,7 @@ impl CallInfo {
args: T::deserialize(&mut deserializer)?,
context: RunnablePerItemContext {
shell_manager: shell_manager.clone(),
name: self.name_tag,
name: self.name_tag.clone(),
},
callback,
})
@ -73,6 +69,7 @@ impl CallInfo {
#[get = "pub(crate)"]
pub struct CommandArgs {
pub host: Arc<Mutex<dyn Host>>,
pub ctrl_c: Arc<AtomicBool>,
pub shell_manager: ShellManager,
pub call_info: UnevaluatedCallInfo,
pub input: InputStream,
@ -82,6 +79,7 @@ pub struct CommandArgs {
#[get = "pub(crate)"]
pub struct RawCommandArgs {
pub host: Arc<Mutex<dyn Host>>,
pub ctrl_c: Arc<AtomicBool>,
pub shell_manager: ShellManager,
pub call_info: UnevaluatedCallInfo,
}
@ -90,6 +88,7 @@ impl RawCommandArgs {
pub fn with_input(self, input: Vec<Tagged<Value>>) -> CommandArgs {
CommandArgs {
host: self.host,
ctrl_c: self.ctrl_c,
shell_manager: self.shell_manager,
call_info: self.call_info,
input: input.into(),
@ -109,12 +108,14 @@ impl CommandArgs {
registry: &registry::CommandRegistry,
) -> Result<EvaluatedWholeStreamCommandArgs, ShellError> {
let host = self.host.clone();
let ctrl_c = self.ctrl_c.clone();
let shell_manager = self.shell_manager.clone();
let input = self.input;
let call_info = self.call_info.evaluate(registry, &Scope::empty())?;
Ok(EvaluatedWholeStreamCommandArgs::new(
host,
ctrl_c,
shell_manager,
call_info,
input,
@ -127,12 +128,13 @@ impl CommandArgs {
callback: fn(T, RunnableContext) -> Result<OutputStream, ShellError>,
) -> Result<RunnableArgs<T>, ShellError> {
let shell_manager = self.shell_manager.clone();
let source_map = self.call_info.source_map.clone();
let host = self.host.clone();
let ctrl_c = self.ctrl_c.clone();
let args = self.evaluate_once(registry)?;
let call_info = args.call_info.clone();
let (input, args) = args.split();
let name_tag = args.call_info.name_tag;
let mut deserializer = ConfigDeserializer::from_call_info(args.call_info);
let mut deserializer = ConfigDeserializer::from_call_info(call_info);
Ok(RunnableArgs {
args: T::deserialize(&mut deserializer)?,
@ -141,8 +143,8 @@ impl CommandArgs {
commands: registry.clone(),
shell_manager,
name: name_tag,
source_map,
host,
ctrl_c,
},
callback,
})
@ -155,17 +157,20 @@ impl CommandArgs {
) -> Result<RunnableRawArgs<T>, ShellError> {
let raw_args = RawCommandArgs {
host: self.host.clone(),
ctrl_c: self.ctrl_c.clone(),
shell_manager: self.shell_manager.clone(),
call_info: self.call_info.clone(),
};
let shell_manager = self.shell_manager.clone();
let source_map = self.call_info.source_map.clone();
let host = self.host.clone();
let ctrl_c = self.ctrl_c.clone();
let args = self.evaluate_once(registry)?;
let call_info = args.call_info.clone();
let (input, args) = args.split();
let name_tag = args.call_info.name_tag;
let mut deserializer = ConfigDeserializer::from_call_info(args.call_info);
let mut deserializer = ConfigDeserializer::from_call_info(call_info.clone());
Ok(RunnableRawArgs {
args: T::deserialize(&mut deserializer)?,
@ -174,8 +179,8 @@ impl CommandArgs {
commands: registry.clone(),
shell_manager,
name: name_tag,
source_map,
host,
ctrl_c,
},
raw_args,
callback,
@ -198,18 +203,12 @@ pub struct RunnableContext {
pub input: InputStream,
pub shell_manager: ShellManager,
pub host: Arc<Mutex<dyn Host>>,
pub ctrl_c: Arc<AtomicBool>,
pub commands: CommandRegistry,
pub source_map: SourceMap,
pub name: Tag,
}
impl RunnableContext {
pub fn expect_command(&self, name: &str) -> Arc<Command> {
self.commands
.get_command(name)
.expect(&format!("Expected command {}", name))
}
pub fn get_command(&self, name: &str) -> Option<Arc<Command>> {
self.commands.get_command(name)
}
@ -270,6 +269,7 @@ impl Deref for EvaluatedWholeStreamCommandArgs {
impl EvaluatedWholeStreamCommandArgs {
pub fn new(
host: Arc<Mutex<dyn Host>>,
ctrl_c: Arc<AtomicBool>,
shell_manager: ShellManager,
call_info: CallInfo,
input: impl Into<InputStream>,
@ -277,6 +277,7 @@ impl EvaluatedWholeStreamCommandArgs {
EvaluatedWholeStreamCommandArgs {
args: EvaluatedCommandArgs {
host,
ctrl_c,
shell_manager,
call_info,
},
@ -285,7 +286,7 @@ impl EvaluatedWholeStreamCommandArgs {
}
pub fn name_tag(&self) -> Tag {
self.args.call_info.name_tag
self.args.call_info.name_tag.clone()
}
pub fn parts(self) -> (InputStream, registry::EvaluatedArgs) {
@ -317,12 +318,14 @@ impl Deref for EvaluatedFilterCommandArgs {
impl EvaluatedFilterCommandArgs {
pub fn new(
host: Arc<Mutex<dyn Host>>,
ctrl_c: Arc<AtomicBool>,
shell_manager: ShellManager,
call_info: CallInfo,
) -> EvaluatedFilterCommandArgs {
EvaluatedFilterCommandArgs {
args: EvaluatedCommandArgs {
host,
ctrl_c,
shell_manager,
call_info,
},
@ -334,6 +337,7 @@ impl EvaluatedFilterCommandArgs {
#[get = "pub(crate)"]
pub struct EvaluatedCommandArgs {
pub host: Arc<Mutex<dyn Host>>,
pub ctrl_c: Arc<AtomicBool>,
pub shell_manager: ShellManager,
pub call_info: CallInfo,
}
@ -376,7 +380,6 @@ impl EvaluatedCommandArgs {
#[derive(Debug, Serialize, Deserialize)]
pub enum CommandAction {
ChangePath(String),
AddAnchorLocation(Uuid, AnchorLocation),
Exit,
EnterShell(String),
EnterValueShell(Tagged<Value>),
@ -390,9 +393,6 @@ impl ToDebug for CommandAction {
fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result {
match self {
CommandAction::ChangePath(s) => write!(f, "action:change-path={}", s),
CommandAction::AddAnchorLocation(u, source) => {
write!(f, "action:add-span-source={}@{:?}", u, source)
}
CommandAction::Exit => write!(f, "action:exit"),
CommandAction::EnterShell(s) => write!(f, "action:enter-shell={}", s),
CommandAction::EnterValueShell(t) => {
@ -507,6 +507,15 @@ pub enum Command {
PerItem(Arc<dyn PerItemCommand>),
}
impl std::fmt::Debug for Command {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Command::WholeStream(command) => write!(f, "WholeStream({})", command.name()),
Command::PerItem(command) => write!(f, "PerItem({})", command.name()),
}
}
}
impl Command {
pub fn name(&self) -> &str {
match self {
@ -555,6 +564,7 @@ impl Command {
) -> OutputStream {
let raw_args = RawCommandArgs {
host: args.host,
ctrl_c: args.ctrl_c,
shell_manager: args.shell_manager,
call_info: args.call_info,
};
@ -624,6 +634,7 @@ impl WholeStreamCommand for FnFilterCommand {
) -> Result<OutputStream, ShellError> {
let CommandArgs {
host,
ctrl_c,
shell_manager,
call_info,
input,
@ -641,8 +652,12 @@ impl WholeStreamCommand for FnFilterCommand {
Ok(args) => args,
};
let args =
EvaluatedFilterCommandArgs::new(host.clone(), shell_manager.clone(), call_info);
let args = EvaluatedFilterCommandArgs::new(
host.clone(),
ctrl_c.clone(),
shell_manager.clone(),
call_info,
);
match func(args) {
Err(err) => return OutputStream::from(vec![Err(err)]).values,

View File

@ -58,7 +58,7 @@ pub fn config(
}: ConfigArgs,
RunnableContext { name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let name_span = name;
let name_span = name.clone();
let configuration = if let Some(supplied) = load {
Some(supplied.item().clone())
@ -70,9 +70,9 @@ pub fn config(
if let Some(v) = get {
let key = v.to_string();
let value = result
.get(&key)
.ok_or_else(|| ShellError::string(&format!("Missing key {} in config", key)))?;
let value = result.get(&key).ok_or_else(|| {
ShellError::labeled_error(&format!("Missing key in config"), "key", v.tag())
})?;
let mut results = VecDeque::new();
@ -120,10 +120,11 @@ pub fn config(
result.swap_remove(&key);
config::write(&result, &configuration)?;
} else {
return Err(ShellError::string(&format!(
return Err(ShellError::labeled_error(
"{} does not exist in config",
key
)));
"key",
v.tag(),
));
}
let obj = VecDeque::from_iter(vec![Value::Row(result.into()).tagged(v.tag())]);

View File

@ -39,27 +39,27 @@ where
{
let mut indexmap = IndexMap::new();
indexmap.insert("year".to_string(), Value::int(dt.year()).tagged(tag));
indexmap.insert("month".to_string(), Value::int(dt.month()).tagged(tag));
indexmap.insert("day".to_string(), Value::int(dt.day()).tagged(tag));
indexmap.insert("hour".to_string(), Value::int(dt.hour()).tagged(tag));
indexmap.insert("minute".to_string(), Value::int(dt.minute()).tagged(tag));
indexmap.insert("second".to_string(), Value::int(dt.second()).tagged(tag));
indexmap.insert("year".to_string(), Value::int(dt.year()).tagged(&tag));
indexmap.insert("month".to_string(), Value::int(dt.month()).tagged(&tag));
indexmap.insert("day".to_string(), Value::int(dt.day()).tagged(&tag));
indexmap.insert("hour".to_string(), Value::int(dt.hour()).tagged(&tag));
indexmap.insert("minute".to_string(), Value::int(dt.minute()).tagged(&tag));
indexmap.insert("second".to_string(), Value::int(dt.second()).tagged(&tag));
let tz = dt.offset();
indexmap.insert(
"timezone".to_string(),
Value::string(format!("{}", tz)).tagged(tag),
Value::string(format!("{}", tz)).tagged(&tag),
);
Value::Row(Dictionary::from(indexmap)).tagged(tag)
Value::Row(Dictionary::from(indexmap)).tagged(&tag)
}
pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;
let mut date_out = VecDeque::new();
let tag = args.call_info.name_tag;
let tag = args.call_info.name_tag.clone();
let value = if args.has("utc") {
let utc: DateTime<Utc> = Utc::now();

View File

@ -35,7 +35,7 @@ fn run(
_registry: &CommandRegistry,
_raw_args: &RawCommandArgs,
) -> Result<OutputStream, ShellError> {
let name = call_info.name_tag;
let name = call_info.name_tag.clone();
let mut output = String::new();
@ -54,11 +54,10 @@ fn run(
output.push_str(&s);
}
_ => {
return Err(ShellError::labeled_error(
"Expect a string from pipeline",
"not a string-compatible value",
i.tag(),
));
return Err(ShellError::type_error(
"a string-compatible value",
i.tagged_type_name(),
))
}
}
}

View File

@ -15,7 +15,7 @@ impl PerItemCommand for Enter {
}
fn signature(&self) -> registry::Signature {
Signature::build("enter").required("location", SyntaxShape::Block)
Signature::build("enter").required("location", SyntaxShape::Path)
}
fn usage(&self) -> &str {
@ -33,14 +33,14 @@ impl PerItemCommand for Enter {
let raw_args = raw_args.clone();
match call_info.args.expect_nth(0)? {
Tagged {
item: Value::Primitive(Primitive::String(location)),
item: Value::Primitive(Primitive::Path(location)),
..
} => {
let location = location.to_string();
let location_clone = location.to_string();
let location_string = location.display().to_string();
let location_clone = location_string.clone();
if location.starts_with("help") {
let spec = location.split(":").collect::<Vec<&str>>();
let spec = location_string.split(":").collect::<Vec<&str>>();
let (_, command) = (spec[0], spec[1]);
@ -67,7 +67,7 @@ impl PerItemCommand for Enter {
let full_path = std::path::PathBuf::from(cwd);
let (file_extension, contents, contents_tag, anchor_location) =
let (file_extension, contents, contents_tag) =
crate::commands::open::fetch(
&full_path,
&location_clone,
@ -75,18 +75,9 @@ impl PerItemCommand for Enter {
)
.await.unwrap();
if contents_tag.anchor != uuid::Uuid::nil() {
// If we have loaded something, track its source
yield ReturnSuccess::action(CommandAction::AddAnchorLocation(
contents_tag.anchor,
anchor_location,
));
}
match contents {
Value::Primitive(Primitive::String(_)) => {
let tagged_contents = contents.tagged(contents_tag);
let tagged_contents = contents.tagged(&contents_tag);
if let Some(extension) = file_extension {
let command_name = format!("from-{}", extension);
@ -95,6 +86,7 @@ impl PerItemCommand for Enter {
{
let new_args = RawCommandArgs {
host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call {
@ -103,7 +95,6 @@ impl PerItemCommand for Enter {
named: None,
},
source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag,
},
};
@ -123,7 +114,7 @@ impl PerItemCommand for Enter {
yield Ok(ReturnSuccess::Action(CommandAction::EnterValueShell(
Tagged {
item,
tag: contents_tag,
tag: contents_tag.clone(),
})));
}
x => yield x,

View File

@ -37,22 +37,22 @@ pub fn get_environment(tag: Tag) -> Result<Tagged<Value>, Box<dyn std::error::Er
let mut indexmap = IndexMap::new();
let path = std::env::current_dir()?;
indexmap.insert("cwd".to_string(), Value::path(path).tagged(tag));
indexmap.insert("cwd".to_string(), Value::path(path).tagged(&tag));
if let Some(home) = dirs::home_dir() {
indexmap.insert("home".to_string(), Value::path(home).tagged(tag));
indexmap.insert("home".to_string(), Value::path(home).tagged(&tag));
}
let config = config::default_path()?;
indexmap.insert("config".to_string(), Value::path(config).tagged(tag));
indexmap.insert("config".to_string(), Value::path(config).tagged(&tag));
let history = History::path();
indexmap.insert("history".to_string(), Value::path(history).tagged(tag));
indexmap.insert("history".to_string(), Value::path(history).tagged(&tag));
let temp = std::env::temp_dir();
indexmap.insert("temp".to_string(), Value::path(temp).tagged(tag));
indexmap.insert("temp".to_string(), Value::path(temp).tagged(&tag));
let mut dict = TaggedDictBuilder::new(tag);
let mut dict = TaggedDictBuilder::new(&tag);
for v in std::env::vars() {
dict.insert(v.0, Value::string(v.1));
}
@ -60,14 +60,14 @@ pub fn get_environment(tag: Tag) -> Result<Tagged<Value>, Box<dyn std::error::Er
indexmap.insert("vars".to_string(), dict.into_tagged_value());
}
Ok(Value::Row(Dictionary::from(indexmap)).tagged(tag))
Ok(Value::Row(Dictionary::from(indexmap)).tagged(&tag))
}
pub fn env(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;
let mut env_out = VecDeque::new();
let tag = args.call_info.name_tag;
let tag = args.call_info.name_tag.clone();
let value = get_environment(tag)?;
env_out.push_back(value);

View File

@ -10,7 +10,6 @@ use mime::Mime;
use std::path::PathBuf;
use std::str::FromStr;
use surf::mime;
use uuid::Uuid;
pub struct Fetch;
impl PerItemCommand for Fetch {
@ -44,16 +43,18 @@ fn run(
registry: &CommandRegistry,
raw_args: &RawCommandArgs,
) -> Result<OutputStream, ShellError> {
let path = match call_info
.args
.nth(0)
.ok_or_else(|| ShellError::string(&format!("No file or directory specified")))?
{
let path = match call_info.args.nth(0).ok_or_else(|| {
ShellError::labeled_error(
"No file or directory specified",
"for command",
&call_info.name_tag,
)
})? {
file => file,
};
let path_buf = path.as_path()?;
let path_str = path_buf.display().to_string();
let path_span = path.span();
let path_span = path.tag.span;
let has_raw = call_info.args.has("raw");
let registry = registry.clone();
let raw_args = raw_args.clone();
@ -66,7 +67,7 @@ fn run(
yield Err(e);
return;
}
let (file_extension, contents, contents_tag, anchor_location) = result.unwrap();
let (file_extension, contents, contents_tag) = result.unwrap();
let file_extension = if has_raw {
None
@ -76,21 +77,14 @@ fn run(
file_extension.or(path_str.split('.').last().map(String::from))
};
if contents_tag.anchor != uuid::Uuid::nil() {
// If we have loaded something, track its source
yield ReturnSuccess::action(CommandAction::AddAnchorLocation(
contents_tag.anchor,
anchor_location,
));
}
let tagged_contents = contents.tagged(contents_tag);
let tagged_contents = contents.tagged(&contents_tag);
if let Some(extension) = file_extension {
let command_name = format!("from-{}", extension);
if let Some(converter) = registry.get_command(&command_name) {
let new_args = RawCommandArgs {
host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call {
@ -99,7 +93,6 @@ fn run(
named: None
},
source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag,
}
};
@ -113,7 +106,7 @@ fn run(
}
}
Ok(ReturnSuccess::Value(Tagged { item, .. })) => {
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag }));
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() }));
}
x => yield x,
}
@ -129,10 +122,7 @@ fn run(
Ok(stream.to_output_stream())
}
pub async fn fetch(
location: &str,
span: Span,
) -> Result<(Option<String>, Value, Tag, AnchorLocation), ShellError> {
pub async fn fetch(location: &str, span: Span) -> Result<(Option<String>, Value, Tag), ShellError> {
if let Err(_) = url::Url::parse(location) {
return Err(ShellError::labeled_error(
"Incomplete or incorrect url",
@ -158,9 +148,8 @@ pub async fn fetch(
})?),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
AnchorLocation::Url(location.to_string()),
)),
(mime::APPLICATION, mime::JSON) => Ok((
Some("json".to_string()),
@ -173,9 +162,8 @@ pub async fn fetch(
})?),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
AnchorLocation::Url(location.to_string()),
)),
(mime::APPLICATION, mime::OCTET_STREAM) => {
let buf: Vec<u8> = r.body_bytes().await.map_err(|_| {
@ -190,9 +178,8 @@ pub async fn fetch(
Value::binary(buf),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
AnchorLocation::Url(location.to_string()),
))
}
(mime::IMAGE, mime::SVG) => Ok((
@ -206,9 +193,8 @@ pub async fn fetch(
})?),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
AnchorLocation::Url(location.to_string()),
)),
(mime::IMAGE, image_ty) => {
let buf: Vec<u8> = r.body_bytes().await.map_err(|_| {
@ -223,9 +209,8 @@ pub async fn fetch(
Value::binary(buf),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
AnchorLocation::Url(location.to_string()),
))
}
(mime::TEXT, mime::HTML) => Ok((
@ -239,9 +224,8 @@ pub async fn fetch(
})?),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
AnchorLocation::Url(location.to_string()),
)),
(mime::TEXT, mime::PLAIN) => {
let path_extension = url::Url::parse(location)
@ -266,9 +250,8 @@ pub async fn fetch(
})?),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
AnchorLocation::Url(location.to_string()),
))
}
(ty, sub_ty) => Ok((
@ -276,9 +259,8 @@ pub async fn fetch(
Value::string(format!("Not yet supported MIME type: {} {}", ty, sub_ty)),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
AnchorLocation::Url(location.to_string()),
)),
}
}
@ -287,9 +269,8 @@ pub async fn fetch(
Value::string(format!("No content type found")),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
AnchorLocation::Url(location.to_string()),
)),
},
Err(_) => {

View File

@ -16,7 +16,7 @@ impl WholeStreamCommand for First {
}
fn signature(&self) -> Signature {
Signature::build("first").required("amount", SyntaxShape::Literal)
Signature::build("first").required("amount", SyntaxShape::Int)
}
fn usage(&self) -> &str {

View File

@ -33,7 +33,7 @@ fn bson_array(input: &Vec<Bson>, tag: Tag) -> Result<Vec<Tagged<Value>>, ShellEr
let mut out = vec![];
for value in input {
out.push(convert_bson_value_to_nu_value(value, tag)?);
out.push(convert_bson_value_to_nu_value(value, &tag)?);
}
Ok(out)
@ -46,100 +46,100 @@ fn convert_bson_value_to_nu_value(
let tag = tag.into();
Ok(match v {
Bson::FloatingPoint(n) => Value::Primitive(Primitive::from(*n)).tagged(tag),
Bson::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag),
Bson::Array(a) => Value::Table(bson_array(a, tag)?).tagged(tag),
Bson::FloatingPoint(n) => Value::Primitive(Primitive::from(*n)).tagged(&tag),
Bson::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(&tag),
Bson::Array(a) => Value::Table(bson_array(a, tag.clone())?).tagged(&tag),
Bson::Document(doc) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(tag.clone());
for (k, v) in doc.iter() {
collected.insert_tagged(k.clone(), convert_bson_value_to_nu_value(v, tag)?);
collected.insert_tagged(k.clone(), convert_bson_value_to_nu_value(v, &tag)?);
}
collected.into_tagged_value()
}
Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(tag),
Bson::Null => Value::Primitive(Primitive::Nothing).tagged(tag),
Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(&tag),
Bson::Null => Value::Primitive(Primitive::Nothing).tagged(&tag),
Bson::RegExp(r, opts) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
"$regex".to_string(),
Value::Primitive(Primitive::String(String::from(r))).tagged(tag),
Value::Primitive(Primitive::String(String::from(r))).tagged(&tag),
);
collected.insert_tagged(
"$options".to_string(),
Value::Primitive(Primitive::String(String::from(opts))).tagged(tag),
Value::Primitive(Primitive::String(String::from(opts))).tagged(&tag),
);
collected.into_tagged_value()
}
Bson::I32(n) => Value::number(n).tagged(tag),
Bson::I64(n) => Value::number(n).tagged(tag),
Bson::I32(n) => Value::number(n).tagged(&tag),
Bson::I64(n) => Value::number(n).tagged(&tag),
Bson::Decimal128(n) => {
// TODO: this really isn't great, and we should update this to do a higher
// fidelity translation
let decimal = BigDecimal::from_str(&format!("{}", n)).map_err(|_| {
ShellError::range_error(
ExpectedRange::BigDecimal,
&n.tagged(tag),
&n.tagged(&tag),
format!("converting BSON Decimal128 to BigDecimal"),
)
})?;
Value::Primitive(Primitive::Decimal(decimal)).tagged(tag)
Value::Primitive(Primitive::Decimal(decimal)).tagged(&tag)
}
Bson::JavaScriptCode(js) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
"$javascript".to_string(),
Value::Primitive(Primitive::String(String::from(js))).tagged(tag),
Value::Primitive(Primitive::String(String::from(js))).tagged(&tag),
);
collected.into_tagged_value()
}
Bson::JavaScriptCodeWithScope(js, doc) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
"$javascript".to_string(),
Value::Primitive(Primitive::String(String::from(js))).tagged(tag),
Value::Primitive(Primitive::String(String::from(js))).tagged(&tag),
);
collected.insert_tagged(
"$scope".to_string(),
convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag)?,
convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag.clone())?,
);
collected.into_tagged_value()
}
Bson::TimeStamp(ts) => {
let mut collected = TaggedDictBuilder::new(tag);
collected.insert_tagged("$timestamp".to_string(), Value::number(ts).tagged(tag));
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged("$timestamp".to_string(), Value::number(ts).tagged(&tag));
collected.into_tagged_value()
}
Bson::Binary(bst, bytes) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
"$binary_subtype".to_string(),
match bst {
BinarySubtype::UserDefined(u) => Value::number(u),
_ => Value::Primitive(Primitive::String(binary_subtype_to_string(*bst))),
}
.tagged(tag),
.tagged(&tag),
);
collected.insert_tagged(
"$binary".to_string(),
Value::Primitive(Primitive::Binary(bytes.to_owned())).tagged(tag),
Value::Primitive(Primitive::Binary(bytes.to_owned())).tagged(&tag),
);
collected.into_tagged_value()
}
Bson::ObjectId(obj_id) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
"$object_id".to_string(),
Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(tag),
Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(&tag),
);
collected.into_tagged_value()
}
Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(tag),
Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(&tag),
Bson::Symbol(s) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
"$symbol".to_string(),
Value::Primitive(Primitive::String(String::from(s))).tagged(tag),
Value::Primitive(Primitive::String(String::from(s))).tagged(&tag),
);
collected.into_tagged_value()
}
@ -208,13 +208,13 @@ fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
let value_tag = value.tag();
match value.item {
Value::Primitive(Primitive::Binary(vb)) =>
match from_bson_bytes_to_value(vb, tag) {
match from_bson_bytes_to_value(vb, tag.clone()) {
Ok(x) => yield ReturnSuccess::value(x),
Err(_) => {
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as BSON",
"input cannot be parsed as BSON",
tag,
tag.clone(),
"value originates from here",
value_tag,
))
@ -223,7 +223,7 @@ fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
tag.clone(),
"value originates from here",
value_tag,
)),

View File

@ -62,12 +62,12 @@ pub fn from_csv_string_to_value(
if let Some(row_values) = iter.next() {
let row_values = row_values?;
let mut row = TaggedDictBuilder::new(tag);
let mut row = TaggedDictBuilder::new(tag.clone());
for (idx, entry) in row_values.iter().enumerate() {
row.insert_tagged(
fields.get(idx).unwrap(),
Value::Primitive(Primitive::String(String::from(entry))).tagged(tag),
Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag),
);
}
@ -77,7 +77,7 @@ pub fn from_csv_string_to_value(
}
}
Ok(Tagged::from_item(Value::Table(rows), tag))
Ok(Value::Table(rows).tagged(&tag))
}
fn from_csv(
@ -96,7 +96,7 @@ fn from_csv(
for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@ -105,15 +105,15 @@ fn from_csv(
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
name_tag,
name_tag.clone(),
"value originates from here",
value_tag,
value_tag.clone(),
)),
}
}
match from_csv_string_to_value(concat_string, skip_headers, name_tag) {
match from_csv_string_to_value(concat_string, skip_headers, name_tag.clone()) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
@ -126,9 +126,9 @@ fn from_csv(
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as CSV",
"input cannot be parsed as CSV",
name_tag,
name_tag.clone(),
"value originates from here",
last_tag,
last_tag.clone(),
))
} ,
}

View File

@ -45,10 +45,13 @@ fn convert_ini_top_to_nu_value(
tag: impl Into<Tag>,
) -> Tagged<Value> {
let tag = tag.into();
let mut top_level = TaggedDictBuilder::new(tag);
let mut top_level = TaggedDictBuilder::new(tag.clone());
for (key, value) in v.iter() {
top_level.insert_tagged(key.clone(), convert_ini_second_to_nu_value(value, tag));
top_level.insert_tagged(
key.clone(),
convert_ini_second_to_nu_value(value, tag.clone()),
);
}
top_level.into_tagged_value()
@ -75,7 +78,7 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@ -84,15 +87,15 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
value_tag,
&value_tag,
)),
}
}
match from_ini_string_to_value(concat_string, tag) {
match from_ini_string_to_value(concat_string, tag.clone()) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
@ -105,7 +108,7 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as INI",
"input cannot be parsed as INI",
tag,
&tag,
"value originates from here",
last_tag,
))

View File

@ -35,24 +35,24 @@ fn convert_json_value_to_nu_value(v: &serde_hjson::Value, tag: impl Into<Tag>) -
let tag = tag.into();
match v {
serde_hjson::Value::Null => Value::Primitive(Primitive::Nothing).tagged(tag),
serde_hjson::Value::Bool(b) => Value::boolean(*b).tagged(tag),
serde_hjson::Value::F64(n) => Value::number(n).tagged(tag),
serde_hjson::Value::U64(n) => Value::number(n).tagged(tag),
serde_hjson::Value::I64(n) => Value::number(n).tagged(tag),
serde_hjson::Value::Null => Value::Primitive(Primitive::Nothing).tagged(&tag),
serde_hjson::Value::Bool(b) => Value::boolean(*b).tagged(&tag),
serde_hjson::Value::F64(n) => Value::number(n).tagged(&tag),
serde_hjson::Value::U64(n) => Value::number(n).tagged(&tag),
serde_hjson::Value::I64(n) => Value::number(n).tagged(&tag),
serde_hjson::Value::String(s) => {
Value::Primitive(Primitive::String(String::from(s))).tagged(tag)
Value::Primitive(Primitive::String(String::from(s))).tagged(&tag)
}
serde_hjson::Value::Array(a) => Value::Table(
a.iter()
.map(|x| convert_json_value_to_nu_value(x, tag))
.map(|x| convert_json_value_to_nu_value(x, &tag))
.collect(),
)
.tagged(tag),
serde_hjson::Value::Object(o) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(&tag);
for (k, v) in o.iter() {
collected.insert_tagged(k.clone(), convert_json_value_to_nu_value(v, tag));
collected.insert_tagged(k.clone(), convert_json_value_to_nu_value(v, &tag));
}
collected.into_tagged_value()
@ -82,7 +82,7 @@ fn from_json(
for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@ -91,9 +91,9 @@ fn from_json(
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
name_tag,
&name_tag,
"value originates from here",
value_tag,
&value_tag,
)),
}
@ -106,15 +106,15 @@ fn from_json(
continue;
}
match from_json_string_to_value(json_str.to_string(), name_tag) {
match from_json_string_to_value(json_str.to_string(), &name_tag) {
Ok(x) =>
yield ReturnSuccess::value(x),
Err(_) => {
if let Some(last_tag) = latest_tag {
if let Some(ref last_tag) = latest_tag {
yield Err(ShellError::labeled_error_with_secondary(
"Could nnot parse as JSON",
"input cannot be parsed as JSON",
name_tag,
&name_tag,
"value originates from here",
last_tag))
}
@ -122,7 +122,7 @@ fn from_json(
}
}
} else {
match from_json_string_to_value(concat_string, name_tag) {
match from_json_string_to_value(concat_string, name_tag.clone()) {
Ok(x) =>
match x {
Tagged { item: Value::Table(list), .. } => {

View File

@ -138,7 +138,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
let value_tag = value.tag();
match value.item {
Value::Primitive(Primitive::Binary(vb)) =>
match from_sqlite_bytes_to_value(vb, tag) {
match from_sqlite_bytes_to_value(vb, tag.clone()) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
@ -151,7 +151,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as SQLite",
"input cannot be parsed as SQLite",
tag,
&tag,
"value originates from here",
value_tag,
))
@ -160,7 +160,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
value_tag,
)),

View File

@ -36,7 +36,7 @@ pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into<Tag>) -> T
toml::Value::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag),
toml::Value::Array(a) => Value::Table(
a.iter()
.map(|x| convert_toml_value_to_nu_value(x, tag))
.map(|x| convert_toml_value_to_nu_value(x, &tag))
.collect(),
)
.tagged(tag),
@ -44,10 +44,10 @@ pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into<Tag>) -> T
Value::Primitive(Primitive::String(dt.to_string())).tagged(tag)
}
toml::Value::Table(t) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(&tag);
for (k, v) in t.iter() {
collected.insert_tagged(k.clone(), convert_toml_value_to_nu_value(v, tag));
collected.insert_tagged(k.clone(), convert_toml_value_to_nu_value(v, &tag));
}
collected.into_tagged_value()
@ -79,7 +79,7 @@ pub fn from_toml(
for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@ -88,15 +88,15 @@ pub fn from_toml(
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
value_tag,
&value_tag,
)),
}
}
match from_toml_string_to_value(concat_string, tag) {
match from_toml_string_to_value(concat_string, tag.clone()) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
@ -109,7 +109,7 @@ pub fn from_toml(
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as TOML",
"input cannot be parsed as TOML",
tag,
&tag,
"value originates from here",
last_tag,
))

View File

@ -63,12 +63,12 @@ pub fn from_tsv_string_to_value(
if let Some(row_values) = iter.next() {
let row_values = row_values?;
let mut row = TaggedDictBuilder::new(tag);
let mut row = TaggedDictBuilder::new(&tag);
for (idx, entry) in row_values.iter().enumerate() {
row.insert_tagged(
fields.get(idx).unwrap(),
Value::Primitive(Primitive::String(String::from(entry))).tagged(tag),
Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag),
);
}
@ -78,7 +78,7 @@ pub fn from_tsv_string_to_value(
}
}
Ok(Tagged::from_item(Value::Table(rows), tag))
Ok(Value::Table(rows).tagged(&tag))
}
fn from_tsv(
@ -97,7 +97,7 @@ fn from_tsv(
for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@ -106,15 +106,15 @@ fn from_tsv(
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
name_tag,
&name_tag,
"value originates from here",
value_tag,
&value_tag,
)),
}
}
match from_tsv_string_to_value(concat_string, skip_headers, name_tag) {
match from_tsv_string_to_value(concat_string, skip_headers, name_tag.clone()) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
@ -127,9 +127,9 @@ fn from_tsv(
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as TSV",
"input cannot be parsed as TSV",
name_tag,
&name_tag,
"value originates from here",
last_tag,
&last_tag,
))
} ,
}

View File

@ -39,7 +39,7 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@ -47,9 +47,9 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
value_tag,
&value_tag,
)),
}

View File

@ -34,7 +34,7 @@ fn from_node_to_value<'a, 'd>(n: &roxmltree::Node<'a, 'd>, tag: impl Into<Tag>)
let mut children_values = vec![];
for c in n.children() {
children_values.push(from_node_to_value(&c, tag));
children_values.push(from_node_to_value(&c, &tag));
}
let children_values: Vec<Tagged<Value>> = children_values
@ -94,7 +94,7 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@ -103,15 +103,15 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
value_tag,
&value_tag,
)),
}
}
match from_xml_string_to_value(concat_string, tag) {
match from_xml_string_to_value(concat_string, tag.clone()) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
@ -124,9 +124,9 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as XML",
"input cannot be parsed as XML",
tag,
&tag,
"value originates from here",
last_tag,
&last_tag,
))
} ,
}

View File

@ -64,17 +64,17 @@ fn convert_yaml_value_to_nu_value(v: &serde_yaml::Value, tag: impl Into<Tag>) ->
serde_yaml::Value::String(s) => Value::string(s).tagged(tag),
serde_yaml::Value::Sequence(a) => Value::Table(
a.iter()
.map(|x| convert_yaml_value_to_nu_value(x, tag))
.map(|x| convert_yaml_value_to_nu_value(x, &tag))
.collect(),
)
.tagged(tag),
serde_yaml::Value::Mapping(t) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(&tag);
for (k, v) in t.iter() {
match k {
serde_yaml::Value::String(k) => {
collected.insert_tagged(k.clone(), convert_yaml_value_to_nu_value(v, tag));
collected.insert_tagged(k.clone(), convert_yaml_value_to_nu_value(v, &tag));
}
_ => unimplemented!("Unknown key type"),
}
@ -108,7 +108,7 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@ -117,15 +117,15 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
value_tag,
&value_tag,
)),
}
}
match from_yaml_string_to_value(concat_string, tag) {
match from_yaml_string_to_value(concat_string, tag.clone()) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
@ -138,9 +138,9 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as YAML",
"input cannot be parsed as YAML",
tag,
&tag,
"value originates from here",
last_tag,
&last_tag,
))
} ,
}

View File

@ -1,14 +1,16 @@
use crate::commands::WholeStreamCommand;
use crate::data::meta::tag_for_tagged_list;
use crate::data::Value;
use crate::errors::ShellError;
use crate::prelude::*;
use log::trace;
pub struct Get;
#[derive(Deserialize)]
pub struct GetArgs {
member: Tagged<String>,
rest: Vec<Tagged<String>>,
member: ColumnPath,
rest: Vec<ColumnPath>,
}
impl WholeStreamCommand for Get {
@ -18,8 +20,8 @@ impl WholeStreamCommand for Get {
fn signature(&self) -> Signature {
Signature::build("get")
.required("member", SyntaxShape::Member)
.rest(SyntaxShape::Member)
.required("member", SyntaxShape::ColumnPath)
.rest(SyntaxShape::ColumnPath)
}
fn usage(&self) -> &str {
@ -35,38 +37,41 @@ impl WholeStreamCommand for Get {
}
}
fn get_member(path: &Tagged<String>, obj: &Tagged<Value>) -> Result<Tagged<Value>, ShellError> {
pub type ColumnPath = Vec<Tagged<String>>;
pub fn get_column_path(
path: &ColumnPath,
obj: &Tagged<Value>,
) -> Result<Tagged<Value>, ShellError> {
let mut current = Some(obj);
for p in path.split(".") {
for p in path.iter() {
if let Some(obj) = current {
current = match obj.get_data_by_key(p) {
current = match obj.get_data_by_key(&p) {
Some(v) => Some(v),
None =>
// Before we give up, see if they gave us a path that matches a field name by itself
{
match obj.get_data_by_key(&path.item) {
Some(v) => return Ok(v.clone()),
None => {
let possibilities = obj.data_descriptors();
let possibilities = obj.data_descriptors();
let mut possible_matches: Vec<_> = possibilities
.iter()
.map(|x| {
(natural::distance::levenshtein_distance(x, &path.item), x)
})
.collect();
let mut possible_matches: Vec<_> = possibilities
.iter()
.map(|x| (natural::distance::levenshtein_distance(x, &p), x))
.collect();
possible_matches.sort();
possible_matches.sort();
if possible_matches.len() > 0 {
return Err(ShellError::labeled_error(
"Unknown column",
format!("did you mean '{}'?", possible_matches[0].1),
path.tag(),
));
}
None
}
if possible_matches.len() > 0 {
return Err(ShellError::labeled_error(
"Unknown column",
format!("did you mean '{}'?", possible_matches[0].1),
tag_for_tagged_list(path.iter().map(|p| p.tag())),
));
} else {
return Err(ShellError::labeled_error(
"Unknown column",
"row does not contain this column",
tag_for_tagged_list(path.iter().map(|p| p.tag())),
));
}
}
}
@ -85,7 +90,7 @@ fn get_member(path: &Tagged<String>, obj: &Tagged<Value>) -> Result<Tagged<Value
item: Value::Primitive(Primitive::Path(_)),
..
} => Ok(obj.clone()),
_ => Ok(Value::nothing().tagged(obj.tag)),
_ => Ok(Value::nothing().tagged(&obj.tag)),
},
}
}
@ -97,6 +102,8 @@ pub fn get(
}: GetArgs,
RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
trace!("get {:?} {:?}", member, fields);
let stream = input
.values
.map(move |item| {
@ -107,10 +114,10 @@ pub fn get(
let fields = vec![&member, &fields]
.into_iter()
.flatten()
.collect::<Vec<&Tagged<String>>>();
.collect::<Vec<&ColumnPath>>();
for field in &fields {
match get_member(field, &item) {
for column_path in &fields {
match get_column_path(column_path, &item) {
Ok(Tagged {
item: Value::Table(l),
..

View File

@ -26,7 +26,7 @@ impl PerItemCommand for Help {
_raw_args: &RawCommandArgs,
_input: Tagged<Value>,
) -> Result<OutputStream, ShellError> {
let tag = call_info.name_tag;
let tag = &call_info.name_tag;
match call_info.args.nth(0) {
Some(Tagged {

View File

@ -58,7 +58,7 @@ fn lines(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
result.push_back(Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
v.tag(),
)));

View File

@ -34,5 +34,5 @@ impl WholeStreamCommand for LS {
}
fn ls(LsArgs { path }: LsArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
context.shell_manager.ls(path, context.name)
context.shell_manager.ls(path, &context)
}

View File

@ -7,7 +7,6 @@ use crate::parser::hir::SyntaxShape;
use crate::parser::registry::Signature;
use crate::prelude::*;
use std::path::{Path, PathBuf};
use uuid::Uuid;
pub struct Open;
impl PerItemCommand for Open {
@ -45,16 +44,18 @@ fn run(
let cwd = PathBuf::from(shell_manager.path());
let full_path = PathBuf::from(cwd);
let path = match call_info
.args
.nth(0)
.ok_or_else(|| ShellError::string(&format!("No file or directory specified")))?
{
let path = match call_info.args.nth(0).ok_or_else(|| {
ShellError::labeled_error(
"No file or directory specified",
"for command",
&call_info.name_tag,
)
})? {
file => file,
};
let path_buf = path.as_path()?;
let path_str = path_buf.display().to_string();
let path_span = path.span();
let path_span = path.tag.span;
let has_raw = call_info.args.has("raw");
let registry = registry.clone();
let raw_args = raw_args.clone();
@ -67,7 +68,7 @@ fn run(
yield Err(e);
return;
}
let (file_extension, contents, contents_tag, anchor_location) = result.unwrap();
let (file_extension, contents, contents_tag) = result.unwrap();
let file_extension = if has_raw {
None
@ -77,21 +78,14 @@ fn run(
file_extension.or(path_str.split('.').last().map(String::from))
};
if contents_tag.anchor != uuid::Uuid::nil() {
// If we have loaded something, track its source
yield ReturnSuccess::action(CommandAction::AddAnchorLocation(
contents_tag.anchor,
anchor_location,
));
}
let tagged_contents = contents.tagged(contents_tag);
let tagged_contents = contents.tagged(&contents_tag);
if let Some(extension) = file_extension {
let command_name = format!("from-{}", extension);
if let Some(converter) = registry.get_command(&command_name) {
let new_args = RawCommandArgs {
host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call {
@ -100,7 +94,6 @@ fn run(
named: None
},
source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag,
}
};
@ -114,7 +107,7 @@ fn run(
}
}
Ok(ReturnSuccess::Value(Tagged { item, .. })) => {
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag }));
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() }));
}
x => yield x,
}
@ -134,7 +127,7 @@ pub async fn fetch(
cwd: &PathBuf,
location: &str,
span: Span,
) -> Result<(Option<String>, Value, Tag, AnchorLocation), ShellError> {
) -> Result<(Option<String>, Value, Tag), ShellError> {
let mut cwd = cwd.clone();
cwd.push(Path::new(location));
@ -147,9 +140,8 @@ pub async fn fetch(
Value::string(s),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::File(cwd.to_string_lossy().to_string())),
},
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)),
Err(_) => {
//Non utf8 data.
@ -166,18 +158,20 @@ pub async fn fetch(
Value::string(s),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
},
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)),
Err(_) => Ok((
None,
Value::binary(bytes),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
},
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)),
}
} else {
@ -186,9 +180,10 @@ pub async fn fetch(
Value::binary(bytes),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
},
AnchorLocation::File(cwd.to_string_lossy().to_string()),
))
}
}
@ -204,18 +199,20 @@ pub async fn fetch(
Value::string(s),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
},
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)),
Err(_) => Ok((
None,
Value::binary(bytes),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
},
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)),
}
} else {
@ -224,9 +221,10 @@ pub async fn fetch(
Value::binary(bytes),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
},
AnchorLocation::File(cwd.to_string_lossy().to_string()),
))
}
}
@ -235,9 +233,10 @@ pub async fn fetch(
Value::binary(bytes),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
},
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)),
}
}

View File

@ -104,7 +104,7 @@ pub fn pivot(args: PivotArgs, context: RunnableContext) -> Result<OutputStream,
for desc in descs {
let mut column_num: usize = 0;
let mut dict = TaggedDictBuilder::new(context.name);
let mut dict = TaggedDictBuilder::new(&context.name);
if !args.ignore_titles && !args.header_row {
dict.insert(headers[column_num].clone(), Value::string(desc.clone()));

View File

@ -128,7 +128,7 @@ pub fn filter_plugin(
},
Err(e) => {
let mut result = VecDeque::new();
result.push_back(Err(ShellError::string(format!(
result.push_back(Err(ShellError::untagged_runtime_error(format!(
"Error while processing begin_filter response: {:?} {}",
e, input
))));
@ -138,7 +138,7 @@ pub fn filter_plugin(
}
Err(e) => {
let mut result = VecDeque::new();
result.push_back(Err(ShellError::string(format!(
result.push_back(Err(ShellError::untagged_runtime_error(format!(
"Error while reading begin_filter response: {:?}",
e
))));
@ -189,7 +189,7 @@ pub fn filter_plugin(
},
Err(e) => {
let mut result = VecDeque::new();
result.push_back(Err(ShellError::string(format!(
result.push_back(Err(ShellError::untagged_runtime_error(format!(
"Error while processing end_filter response: {:?} {}",
e, input
))));
@ -199,7 +199,7 @@ pub fn filter_plugin(
}
Err(e) => {
let mut result = VecDeque::new();
result.push_back(Err(ShellError::string(format!(
result.push_back(Err(ShellError::untagged_runtime_error(format!(
"Error while reading end_filter: {:?}",
e
))));
@ -236,7 +236,7 @@ pub fn filter_plugin(
},
Err(e) => {
let mut result = VecDeque::new();
result.push_back(Err(ShellError::string(format!(
result.push_back(Err(ShellError::untagged_runtime_error(format!(
"Error while processing filter response: {:?} {}",
e, input
))));
@ -246,7 +246,7 @@ pub fn filter_plugin(
}
Err(e) => {
let mut result = VecDeque::new();
result.push_back(Err(ShellError::string(format!(
result.push_back(Err(ShellError::untagged_runtime_error(format!(
"Error while reading filter response: {:?}",
e
))));

View File

@ -54,21 +54,20 @@ fn run(
registry: &CommandRegistry,
raw_args: &RawCommandArgs,
) -> Result<OutputStream, ShellError> {
let name_tag = call_info.name_tag.clone();
let call_info = call_info.clone();
let path = match call_info
.args
.nth(0)
.ok_or_else(|| ShellError::string(&format!("No url specified")))?
{
file => file.clone(),
};
let body = match call_info
.args
.nth(1)
.ok_or_else(|| ShellError::string(&format!("No body specified")))?
{
file => file.clone(),
};
let path =
match call_info.args.nth(0).ok_or_else(|| {
ShellError::labeled_error("No url specified", "for command", &name_tag)
})? {
file => file.clone(),
};
let body =
match call_info.args.nth(1).ok_or_else(|| {
ShellError::labeled_error("No body specified", "for command", &name_tag)
})? {
file => file.clone(),
};
let path_str = path.as_string()?;
let path_span = path.tag();
let has_raw = call_info.args.has("raw");
@ -83,7 +82,7 @@ fn run(
let headers = get_headers(&call_info)?;
let stream = async_stream! {
let (file_extension, contents, contents_tag, anchor_location) =
let (file_extension, contents, contents_tag) =
post(&path_str, &body, user, password, &headers, path_span, &registry, &raw_args).await.unwrap();
let file_extension = if has_raw {
@ -94,21 +93,14 @@ fn run(
file_extension.or(path_str.split('.').last().map(String::from))
};
if contents_tag.anchor != uuid::Uuid::nil() {
// If we have loaded something, track its source
yield ReturnSuccess::action(CommandAction::AddAnchorLocation(
contents_tag.anchor,
anchor_location,
));
}
let tagged_contents = contents.tagged(contents_tag);
let tagged_contents = contents.tagged(&contents_tag);
if let Some(extension) = file_extension {
let command_name = format!("from-{}", extension);
if let Some(converter) = registry.get_command(&command_name) {
let new_args = RawCommandArgs {
host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call {
@ -117,7 +109,6 @@ fn run(
named: None
},
source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag,
}
};
@ -131,7 +122,7 @@ fn run(
}
}
Ok(ReturnSuccess::Value(Tagged { item, .. })) => {
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag }));
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() }));
}
x => yield x,
}
@ -211,7 +202,7 @@ pub async fn post(
tag: Tag,
registry: &CommandRegistry,
raw_args: &RawCommandArgs,
) -> Result<(Option<String>, Value, Tag, AnchorLocation), ShellError> {
) -> Result<(Option<String>, Value, Tag), ShellError> {
let registry = registry.clone();
let raw_args = raw_args.clone();
if location.starts_with("http:") || location.starts_with("https:") {
@ -252,6 +243,7 @@ pub async fn post(
if let Some(converter) = registry.get_command("to-json") {
let new_args = RawCommandArgs {
host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call {
@ -260,7 +252,6 @@ pub async fn post(
named: None,
},
source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag,
},
};
@ -284,7 +275,7 @@ pub async fn post(
return Err(ShellError::labeled_error(
"Save could not successfully save",
"unexpected data during save",
*tag,
tag,
));
}
}
@ -300,7 +291,7 @@ pub async fn post(
return Err(ShellError::labeled_error(
"Could not automatically convert table",
"needs manual conversion",
*tag,
tag,
));
}
}
@ -316,11 +307,13 @@ pub async fn post(
ShellError::labeled_error(
"Could not load text from remote url",
"could not load",
tag,
&tag,
)
})?),
tag,
AnchorLocation::Url(location.to_string()),
Tag {
anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)),
(mime::APPLICATION, mime::JSON) => Ok((
Some("json".to_string()),
@ -328,25 +321,29 @@ pub async fn post(
ShellError::labeled_error(
"Could not load text from remote url",
"could not load",
tag,
&tag,
)
})?),
tag,
AnchorLocation::Url(location.to_string()),
Tag {
anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)),
(mime::APPLICATION, mime::OCTET_STREAM) => {
let buf: Vec<u8> = r.body_bytes().await.map_err(|_| {
ShellError::labeled_error(
"Could not load binary file",
"could not load",
tag,
&tag,
)
})?;
Ok((
None,
Value::binary(buf),
tag,
AnchorLocation::Url(location.to_string()),
Tag {
anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
))
}
(mime::IMAGE, image_ty) => {
@ -354,14 +351,16 @@ pub async fn post(
ShellError::labeled_error(
"Could not load image file",
"could not load",
tag,
&tag,
)
})?;
Ok((
Some(image_ty.to_string()),
Value::binary(buf),
tag,
AnchorLocation::Url(location.to_string()),
Tag {
anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
))
}
(mime::TEXT, mime::HTML) => Ok((
@ -370,11 +369,13 @@ pub async fn post(
ShellError::labeled_error(
"Could not load text from remote url",
"could not load",
tag,
&tag,
)
})?),
tag,
AnchorLocation::Url(location.to_string()),
Tag {
anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)),
(mime::TEXT, mime::PLAIN) => {
let path_extension = url::Url::parse(location)
@ -394,11 +395,13 @@ pub async fn post(
ShellError::labeled_error(
"Could not load text from remote url",
"could not load",
tag,
&tag,
)
})?),
tag,
AnchorLocation::Url(location.to_string()),
Tag {
anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
))
}
(ty, sub_ty) => Ok((
@ -407,16 +410,20 @@ pub async fn post(
"Not yet supported MIME type: {} {}",
ty, sub_ty
)),
tag,
AnchorLocation::Url(location.to_string()),
Tag {
anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)),
}
}
None => Ok((
None,
Value::string(format!("No content type found")),
tag,
AnchorLocation::Url(location.to_string()),
Tag {
anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)),
},
Err(_) => {

View File

@ -119,49 +119,48 @@ fn save(
input,
name,
shell_manager,
source_map,
host,
ctrl_c,
commands: registry,
..
}: RunnableContext,
raw_args: RawCommandArgs,
) -> Result<OutputStream, ShellError> {
let mut full_path = PathBuf::from(shell_manager.path());
let name_tag = name;
let name_tag = name.clone();
let source_map = source_map.clone();
let stream = async_stream! {
let input: Vec<Tagged<Value>> = input.values.collect().await;
if path.is_none() {
// If there is no filename, check the metadata for the anchor filename
if input.len() > 0 {
let anchor = input[0].anchor();
match source_map.get(&anchor) {
match anchor {
Some(path) => match path {
AnchorLocation::File(file) => {
full_path.push(Path::new(file));
full_path.push(Path::new(&file));
}
_ => {
yield Err(ShellError::labeled_error(
"Save requires a filepath",
"Save requires a filepath (1)",
"needs path",
name_tag,
name_tag.clone(),
));
}
},
None => {
yield Err(ShellError::labeled_error(
"Save requires a filepath",
"Save requires a filepath (2)",
"needs path",
name_tag,
name_tag.clone(),
));
}
}
} else {
yield Err(ShellError::labeled_error(
"Save requires a filepath",
"Save requires a filepath (3)",
"needs path",
name_tag,
name_tag.clone(),
));
}
} else {
@ -179,6 +178,7 @@ fn save(
if let Some(converter) = registry.get_command(&command_name) {
let new_args = RawCommandArgs {
host,
ctrl_c,
shell_manager,
call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call {
@ -187,7 +187,6 @@ fn save(
named: None
},
source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag,
}
};
@ -212,9 +211,9 @@ fn save(
match content {
Ok(save_data) => match std::fs::write(full_path, save_data) {
Ok(o) => o,
Err(e) => yield Err(ShellError::string(e.to_string())),
Err(e) => yield Err(ShellError::labeled_error(e.to_string(), "for command", name)),
},
Err(e) => yield Err(ShellError::string(e.to_string())),
Err(e) => yield Err(ShellError::labeled_error(e.to_string(), "for command", name)),
}
};

View File

@ -2,6 +2,7 @@ use crate::commands::WholeStreamCommand;
use crate::data::TaggedDictBuilder;
use crate::errors::ShellError;
use crate::prelude::*;
use std::sync::atomic::Ordering;
pub struct Shells;
@ -32,14 +33,14 @@ fn shells(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream
let tag = args.call_info.name_tag;
for (index, shell) in args.shell_manager.shells.lock().unwrap().iter().enumerate() {
let mut dict = TaggedDictBuilder::new(tag);
let mut dict = TaggedDictBuilder::new(&tag);
if index == args.shell_manager.current_shell {
if index == (*args.shell_manager.current_shell).load(Ordering::SeqCst) {
dict.insert(" ", "X".to_string());
} else {
dict.insert(" ", " ".to_string());
}
dict.insert("name", shell.name(&args.call_info.source_map));
dict.insert("name", shell.name());
dict.insert("path", shell.path());
shells_out.push_back(dict.into_tagged_value());

View File

@ -37,7 +37,7 @@ fn size(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream,
_ => Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
v.tag(),
)),

View File

@ -1,6 +1,7 @@
use crate::commands::WholeStreamCommand;
use crate::errors::ShellError;
use crate::prelude::*;
use log::trace;
pub struct SkipWhile;
@ -38,7 +39,9 @@ pub fn skip_while(
RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let objects = input.values.skip_while(move |item| {
trace!("ITEM = {:?}", item);
let result = condition.invoke(&item);
trace!("RESULT = {:?}", result);
let return_value = match result {
Ok(ref v) if v.is_true() => true,

View File

@ -94,7 +94,7 @@ fn split_column(
_ => Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
name,
&name,
"value originates from here",
v.tag(),
)),

View File

@ -60,7 +60,7 @@ fn split_row(
result.push_back(Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
name,
&name,
"value originates from here",
v.tag(),
)));

View File

@ -5,16 +5,13 @@ use crate::prelude::*;
pub struct Table;
#[derive(Deserialize)]
pub struct TableArgs {}
impl WholeStreamCommand for Table {
fn name(&self) -> &str {
"table"
}
fn signature(&self) -> Signature {
Signature::build("table")
Signature::build("table").named("start_number", SyntaxShape::Number)
}
fn usage(&self) -> &str {
@ -26,16 +23,29 @@ impl WholeStreamCommand for Table {
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, table)?.run()
table(args, registry)
}
}
pub fn table(_args: TableArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
fn table(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;
let stream = async_stream! {
let input: Vec<Tagged<Value>> = context.input.into_vec().await;
let host = args.host.clone();
let start_number = match args.get("start_number") {
Some(Tagged { item: Value::Primitive(Primitive::Int(i)), .. }) => {
i.to_usize().unwrap()
}
_ => {
0
}
};
let input: Vec<Tagged<Value>> = args.input.into_vec().await;
if input.len() > 0 {
let mut host = context.host.lock().unwrap();
let view = TableView::from_list(&input);
let mut host = host.lock().unwrap();
let view = TableView::from_list(&input, start_number);
if let Some(view) = view {
handle_unexpected(&mut *host, |host| crate::format::print_view(&view, host));
}

View File

@ -28,7 +28,6 @@ impl WholeStreamCommand for Tags {
}
fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let source_map = args.call_info.source_map.clone();
Ok(args
.input
.values
@ -38,11 +37,11 @@ fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream,
let anchor = v.anchor();
let span = v.tag().span;
let mut dict = TaggedDictBuilder::new(v.tag());
dict.insert("start", Value::int(span.start as i64));
dict.insert("end", Value::int(span.end as i64));
dict.insert("start", Value::int(span.start() as i64));
dict.insert("end", Value::int(span.end() as i64));
tags.insert_tagged("span", dict.into_tagged_value());
match source_map.get(&anchor) {
match anchor {
Some(AnchorLocation::File(source)) => {
tags.insert("anchor", Value::string(source));
}

View File

@ -46,7 +46,7 @@ pub fn value_to_bson_value(v: &Tagged<Value>) -> Result<Bson, ShellError> {
Value::Primitive(Primitive::BeginningOfStream) => Bson::Null,
Value::Primitive(Primitive::Decimal(d)) => Bson::FloatingPoint(d.to_f64().unwrap()),
Value::Primitive(Primitive::Int(i)) => {
Bson::I64(i.tagged(v.tag).coerce_into("converting to BSON")?)
Bson::I64(i.tagged(&v.tag).coerce_into("converting to BSON")?)
}
Value::Primitive(Primitive::Nothing) => Bson::Null,
Value::Primitive(Primitive::String(s)) => Bson::String(s.clone()),
@ -58,6 +58,7 @@ pub fn value_to_bson_value(v: &Tagged<Value>) -> Result<Bson, ShellError> {
.collect::<Result<_, _>>()?,
),
Value::Block(_) => Bson::Null,
Value::Error(e) => return Err(e.clone()),
Value::Primitive(Primitive::Binary(b)) => Bson::Binary(BinarySubtype::Generic, b.clone()),
Value::Row(o) => object_value_to_bson(o)?,
})
@ -170,7 +171,7 @@ fn get_binary_subtype<'a>(tagged_value: &'a Tagged<Value>) -> Result<BinarySubty
_ => unreachable!(),
}),
Value::Primitive(Primitive::Int(i)) => Ok(BinarySubtype::UserDefined(
i.tagged(tagged_value.tag)
i.tagged(&tagged_value.tag)
.coerce_into("converting to BSON binary subtype")?,
)),
_ => Err(ShellError::type_error(
@ -207,12 +208,12 @@ fn bson_value_to_bytes(bson: Bson, tag: Tag) -> Result<Vec<u8>, ShellError> {
Bson::Array(a) => {
for v in a.into_iter() {
match v {
Bson::Document(d) => shell_encode_document(&mut out, d, tag)?,
Bson::Document(d) => shell_encode_document(&mut out, d, tag.clone())?,
_ => {
return Err(ShellError::labeled_error(
format!("All top level values must be Documents, got {:?}", v),
"requires BSON-compatible document",
tag,
&tag,
))
}
}
@ -237,7 +238,7 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
let input: Vec<Tagged<Value>> = args.input.values.collect().await;
let to_process_input = if input.len() > 1 {
let tag = input[0].tag;
let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 {
input
@ -248,14 +249,14 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
for value in to_process_input {
match value_to_bson_value(&value) {
Ok(bson_value) => {
match bson_value_to_bytes(bson_value, name_tag) {
match bson_value_to_bytes(bson_value, name_tag.clone()) {
Ok(x) => yield ReturnSuccess::value(
Value::binary(x).tagged(name_tag),
Value::binary(x).tagged(&name_tag),
),
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with BSON-compatible structure.tag() from pipeline",
"requires BSON-compatible input",
name_tag,
&name_tag,
"originates from here".to_string(),
value.tag(),
)),
@ -264,7 +265,7 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
_ => yield Err(ShellError::labeled_error(
"Expected a table with BSON-compatible structure from pipeline",
"requires BSON-compatible input",
name_tag))
&name_tag))
}
}
};

View File

@ -32,8 +32,8 @@ impl WholeStreamCommand for ToCSV {
}
}
pub fn value_to_csv_value(v: &Value) -> Value {
match v {
pub fn value_to_csv_value(v: &Tagged<Value>) -> Tagged<Value> {
match &v.item {
Value::Primitive(Primitive::String(s)) => Value::Primitive(Primitive::String(s.clone())),
Value::Primitive(Primitive::Nothing) => Value::Primitive(Primitive::Nothing),
Value::Primitive(Primitive::Boolean(b)) => Value::Primitive(Primitive::Boolean(b.clone())),
@ -47,10 +47,11 @@ pub fn value_to_csv_value(v: &Value) -> Value {
Value::Block(_) => Value::Primitive(Primitive::Nothing),
_ => Value::Primitive(Primitive::Nothing),
}
.tagged(v.tag.clone())
}
fn to_string_helper(v: &Value) -> Result<String, ShellError> {
match v {
fn to_string_helper(v: &Tagged<Value>) -> Result<String, ShellError> {
match &v.item {
Value::Primitive(Primitive::Date(d)) => Ok(d.to_string()),
Value::Primitive(Primitive::Bytes(b)) => Ok(format!("{}", b)),
Value::Primitive(Primitive::Boolean(_)) => Ok(v.as_string()?),
@ -60,7 +61,13 @@ fn to_string_helper(v: &Value) -> Result<String, ShellError> {
Value::Table(_) => return Ok(String::from("[Table]")),
Value::Row(_) => return Ok(String::from("[Row]")),
Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()),
_ => return Err(ShellError::string("Unexpected value")),
_ => {
return Err(ShellError::labeled_error(
"Unexpected value",
"",
v.tag.clone(),
))
}
}
}
@ -76,7 +83,9 @@ fn merge_descriptors(values: &[Tagged<Value>]) -> Vec<String> {
ret
}
pub fn to_string(v: &Value) -> Result<String, ShellError> {
pub fn to_string(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
let v = &tagged_value.item;
match v {
Value::Row(o) => {
let mut wtr = WriterBuilder::new().from_writer(vec![]);
@ -92,11 +101,20 @@ pub fn to_string(v: &Value) -> Result<String, ShellError> {
wtr.write_record(fields).expect("can not write.");
wtr.write_record(values).expect("can not write.");
return Ok(String::from_utf8(
wtr.into_inner()
.map_err(|_| ShellError::string("Could not convert record"))?,
)
.map_err(|_| ShellError::string("Could not convert record"))?);
return Ok(String::from_utf8(wtr.into_inner().map_err(|_| {
ShellError::labeled_error(
"Could not convert record",
"original value",
&tagged_value.tag,
)
})?)
.map_err(|_| {
ShellError::labeled_error(
"Could not convert record",
"original value",
&tagged_value.tag,
)
})?);
}
Value::Table(list) => {
let mut wtr = WriterBuilder::new().from_writer(vec![]);
@ -120,13 +138,22 @@ pub fn to_string(v: &Value) -> Result<String, ShellError> {
wtr.write_record(&row).expect("can not write");
}
return Ok(String::from_utf8(
wtr.into_inner()
.map_err(|_| ShellError::string("Could not convert record"))?,
)
.map_err(|_| ShellError::string("Could not convert record"))?);
return Ok(String::from_utf8(wtr.into_inner().map_err(|_| {
ShellError::labeled_error(
"Could not convert record",
"original value",
&tagged_value.tag,
)
})?)
.map_err(|_| {
ShellError::labeled_error(
"Could not convert record",
"original value",
&tagged_value.tag,
)
})?);
}
_ => return to_string_helper(&v),
_ => return to_string_helper(tagged_value),
}
}
@ -139,7 +166,7 @@ fn to_csv(
let input: Vec<Tagged<Value>> = input.values.collect().await;
let to_process_input = if input.len() > 1 {
let tag = input[0].tag;
let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 {
input
@ -148,20 +175,20 @@ fn to_csv(
};
for value in to_process_input {
match to_string(&value_to_csv_value(&value.item)) {
match to_string(&value_to_csv_value(&value)) {
Ok(x) => {
let converted = if headerless {
x.lines().skip(1).collect()
} else {
x
};
yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(name_tag))
yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(&name_tag))
}
_ => {
yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with CSV-compatible structure.tag() from pipeline",
"requires CSV-compatible input",
name_tag,
&name_tag,
"originates from here".to_string(),
value.tag(),
))

View File

@ -42,7 +42,7 @@ pub fn value_to_json_value(v: &Tagged<Value>) -> Result<serde_json::Value, Shell
.unwrap(),
),
Value::Primitive(Primitive::Int(i)) => serde_json::Value::Number(serde_json::Number::from(
CoerceInto::<i64>::coerce_into(i.tagged(v.tag), "converting to JSON number")?,
CoerceInto::<i64>::coerce_into(i.tagged(&v.tag), "converting to JSON number")?,
)),
Value::Primitive(Primitive::Nothing) => serde_json::Value::Null,
Value::Primitive(Primitive::Pattern(s)) => serde_json::Value::String(s.clone()),
@ -50,6 +50,7 @@ pub fn value_to_json_value(v: &Tagged<Value>) -> Result<serde_json::Value, Shell
Value::Primitive(Primitive::Path(s)) => serde_json::Value::String(s.display().to_string()),
Value::Table(l) => serde_json::Value::Array(json_list(l)?),
Value::Error(e) => return Err(e.clone()),
Value::Block(_) => serde_json::Value::Null,
Value::Primitive(Primitive::Binary(b)) => serde_json::Value::Array(
b.iter()
@ -85,7 +86,7 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
let input: Vec<Tagged<Value>> = args.input.values.collect().await;
let to_process_input = if input.len() > 1 {
let tag = input[0].tag;
let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 {
input
@ -98,12 +99,12 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
Ok(json_value) => {
match serde_json::to_string(&json_value) {
Ok(x) => yield ReturnSuccess::value(
Value::Primitive(Primitive::String(x)).tagged(name_tag),
Value::Primitive(Primitive::String(x)).tagged(&name_tag),
),
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with JSON-compatible structure.tag() from pipeline",
"requires JSON-compatible input",
name_tag,
&name_tag,
"originates from here".to_string(),
value.tag(),
)),
@ -112,7 +113,7 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
_ => yield Err(ShellError::labeled_error(
"Expected a table with JSON-compatible structure from pipeline",
"requires JSON-compatible input",
name_tag))
&name_tag))
}
}
};

View File

@ -38,10 +38,10 @@ pub fn value_to_toml_value(v: &Tagged<Value>) -> Result<toml::Value, ShellError>
toml::Value::String("<Beginning of Stream>".to_string())
}
Value::Primitive(Primitive::Decimal(f)) => {
toml::Value::Float(f.tagged(v.tag).coerce_into("converting to TOML float")?)
toml::Value::Float(f.tagged(&v.tag).coerce_into("converting to TOML float")?)
}
Value::Primitive(Primitive::Int(i)) => {
toml::Value::Integer(i.tagged(v.tag).coerce_into("converting to TOML integer")?)
toml::Value::Integer(i.tagged(&v.tag).coerce_into("converting to TOML integer")?)
}
Value::Primitive(Primitive::Nothing) => toml::Value::String("<Nothing>".to_string()),
Value::Primitive(Primitive::Pattern(s)) => toml::Value::String(s.clone()),
@ -49,6 +49,7 @@ pub fn value_to_toml_value(v: &Tagged<Value>) -> Result<toml::Value, ShellError>
Value::Primitive(Primitive::Path(s)) => toml::Value::String(s.display().to_string()),
Value::Table(l) => toml::Value::Array(collect_values(l)?),
Value::Error(e) => return Err(e.clone()),
Value::Block(_) => toml::Value::String("<Block>".to_string()),
Value::Primitive(Primitive::Binary(b)) => {
toml::Value::Array(b.iter().map(|x| toml::Value::Integer(*x as i64)).collect())
@ -80,7 +81,7 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
let input: Vec<Tagged<Value>> = args.input.values.collect().await;
let to_process_input = if input.len() > 1 {
let tag = input[0].tag;
let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 {
input
@ -93,12 +94,12 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
Ok(toml_value) => {
match toml::to_string(&toml_value) {
Ok(x) => yield ReturnSuccess::value(
Value::Primitive(Primitive::String(x)).tagged(name_tag),
Value::Primitive(Primitive::String(x)).tagged(&name_tag),
),
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with TOML-compatible structure.tag() from pipeline",
"requires TOML-compatible input",
name_tag,
&name_tag,
"originates from here".to_string(),
value.tag(),
)),
@ -107,7 +108,7 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
_ => yield Err(ShellError::labeled_error(
"Expected a table with TOML-compatible structure from pipeline",
"requires TOML-compatible input",
name_tag))
&name_tag))
}
}
};

View File

@ -32,7 +32,9 @@ impl WholeStreamCommand for ToTSV {
}
}
pub fn value_to_tsv_value(v: &Value) -> Value {
pub fn value_to_tsv_value(tagged_value: &Tagged<Value>) -> Tagged<Value> {
let v = &tagged_value.item;
match v {
Value::Primitive(Primitive::String(s)) => Value::Primitive(Primitive::String(s.clone())),
Value::Primitive(Primitive::Nothing) => Value::Primitive(Primitive::Nothing),
@ -47,20 +49,28 @@ pub fn value_to_tsv_value(v: &Value) -> Value {
Value::Block(_) => Value::Primitive(Primitive::Nothing),
_ => Value::Primitive(Primitive::Nothing),
}
.tagged(&tagged_value.tag)
}
fn to_string_helper(v: &Value) -> Result<String, ShellError> {
fn to_string_helper(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
let v = &tagged_value.item;
match v {
Value::Primitive(Primitive::Date(d)) => Ok(d.to_string()),
Value::Primitive(Primitive::Bytes(b)) => Ok(format!("{}", b)),
Value::Primitive(Primitive::Boolean(_)) => Ok(v.as_string()?),
Value::Primitive(Primitive::Decimal(_)) => Ok(v.as_string()?),
Value::Primitive(Primitive::Int(_)) => Ok(v.as_string()?),
Value::Primitive(Primitive::Path(_)) => Ok(v.as_string()?),
Value::Primitive(Primitive::Boolean(_)) => Ok(tagged_value.as_string()?),
Value::Primitive(Primitive::Decimal(_)) => Ok(tagged_value.as_string()?),
Value::Primitive(Primitive::Int(_)) => Ok(tagged_value.as_string()?),
Value::Primitive(Primitive::Path(_)) => Ok(tagged_value.as_string()?),
Value::Table(_) => return Ok(String::from("[table]")),
Value::Row(_) => return Ok(String::from("[row]")),
Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()),
_ => return Err(ShellError::string("Unexpected value")),
_ => {
return Err(ShellError::labeled_error(
"Unexpected value",
"original value",
&tagged_value.tag,
))
}
}
}
@ -76,7 +86,9 @@ fn merge_descriptors(values: &[Tagged<Value>]) -> Vec<String> {
ret
}
pub fn to_string(v: &Value) -> Result<String, ShellError> {
pub fn to_string(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
let v = &tagged_value.item;
match v {
Value::Row(o) => {
let mut wtr = WriterBuilder::new().delimiter(b'\t').from_writer(vec![]);
@ -91,11 +103,20 @@ pub fn to_string(v: &Value) -> Result<String, ShellError> {
wtr.write_record(fields).expect("can not write.");
wtr.write_record(values).expect("can not write.");
return Ok(String::from_utf8(
wtr.into_inner()
.map_err(|_| ShellError::string("Could not convert record"))?,
)
.map_err(|_| ShellError::string("Could not convert record"))?);
return Ok(String::from_utf8(wtr.into_inner().map_err(|_| {
ShellError::labeled_error(
"Could not convert record",
"original value",
&tagged_value.tag,
)
})?)
.map_err(|_| {
ShellError::labeled_error(
"Could not convert record",
"original value",
&tagged_value.tag,
)
})?);
}
Value::Table(list) => {
let mut wtr = WriterBuilder::new().delimiter(b'\t').from_writer(vec![]);
@ -119,13 +140,22 @@ pub fn to_string(v: &Value) -> Result<String, ShellError> {
wtr.write_record(&row).expect("can not write");
}
return Ok(String::from_utf8(
wtr.into_inner()
.map_err(|_| ShellError::string("Could not convert record"))?,
)
.map_err(|_| ShellError::string("Could not convert record"))?);
return Ok(String::from_utf8(wtr.into_inner().map_err(|_| {
ShellError::labeled_error(
"Could not convert record",
"original value",
&tagged_value.tag,
)
})?)
.map_err(|_| {
ShellError::labeled_error(
"Could not convert record",
"original value",
&tagged_value.tag,
)
})?);
}
_ => return to_string_helper(&v),
_ => return to_string_helper(tagged_value),
}
}
@ -138,7 +168,7 @@ fn to_tsv(
let input: Vec<Tagged<Value>> = input.values.collect().await;
let to_process_input = if input.len() > 1 {
let tag = input[0].tag;
let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 {
input
@ -147,20 +177,20 @@ fn to_tsv(
};
for value in to_process_input {
match to_string(&value_to_tsv_value(&value.item)) {
match to_string(&value_to_tsv_value(&value)) {
Ok(x) => {
let converted = if headerless {
x.lines().skip(1).collect()
} else {
x
};
yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(name_tag))
yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(&name_tag))
}
_ => {
yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with TSV-compatible structure.tag() from pipeline",
"requires TSV-compatible input",
name_tag,
&name_tag,
"originates from here".to_string(),
value.tag(),
))

View File

@ -47,7 +47,7 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
yield Err(ShellError::labeled_error_with_secondary(
"Expected table with string values",
"requires table with strings",
tag,
&tag,
"value originates from here",
v.tag,
))
@ -57,13 +57,13 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
match serde_urlencoded::to_string(row_vec) {
Ok(s) => {
yield ReturnSuccess::value(Value::string(s).tagged(tag));
yield ReturnSuccess::value(Value::string(s).tagged(&tag));
}
_ => {
yield Err(ShellError::labeled_error(
"Failed to convert to url-encoded",
"cannot url-encode",
tag,
&tag,
))
}
}
@ -72,7 +72,7 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
yield Err(ShellError::labeled_error_with_secondary(
"Expected a table from pipeline",
"requires table input",
tag,
&tag,
"value originates from here",
value_tag,
))

View File

@ -39,7 +39,7 @@ pub fn value_to_yaml_value(v: &Tagged<Value>) -> Result<serde_yaml::Value, Shell
serde_yaml::Value::Number(serde_yaml::Number::from(f.to_f64().unwrap()))
}
Value::Primitive(Primitive::Int(i)) => serde_yaml::Value::Number(serde_yaml::Number::from(
CoerceInto::<i64>::coerce_into(i.tagged(v.tag), "converting to YAML number")?,
CoerceInto::<i64>::coerce_into(i.tagged(&v.tag), "converting to YAML number")?,
)),
Value::Primitive(Primitive::Nothing) => serde_yaml::Value::Null,
Value::Primitive(Primitive::Pattern(s)) => serde_yaml::Value::String(s.clone()),
@ -55,6 +55,7 @@ pub fn value_to_yaml_value(v: &Tagged<Value>) -> Result<serde_yaml::Value, Shell
serde_yaml::Value::Sequence(out)
}
Value::Error(e) => return Err(e.clone()),
Value::Block(_) => serde_yaml::Value::Null,
Value::Primitive(Primitive::Binary(b)) => serde_yaml::Value::Sequence(
b.iter()
@ -81,7 +82,7 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
let input: Vec<Tagged<Value>> = args.input.values.collect().await;
let to_process_input = if input.len() > 1 {
let tag = input[0].tag;
let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 {
input
@ -94,12 +95,12 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
Ok(yaml_value) => {
match serde_yaml::to_string(&yaml_value) {
Ok(x) => yield ReturnSuccess::value(
Value::Primitive(Primitive::String(x)).tagged(name_tag),
Value::Primitive(Primitive::String(x)).tagged(&name_tag),
),
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with YAML-compatible structure.tag() from pipeline",
"requires YAML-compatible input",
name_tag,
&name_tag,
"originates from here".to_string(),
value.tag(),
)),
@ -108,7 +109,7 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
_ => yield Err(ShellError::labeled_error(
"Expected a table with YAML-compatible structure from pipeline",
"requires YAML-compatible input",
name_tag))
&name_tag))
}
}
};

View File

@ -31,14 +31,14 @@ impl WholeStreamCommand for Version {
pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;
let tag = args.call_info.name_tag;
let tag = args.call_info.name_tag.clone();
let mut indexmap = IndexMap::new();
indexmap.insert(
"version".to_string(),
Value::string(clap::crate_version!()).tagged(tag),
Value::string(clap::crate_version!()).tagged(&tag),
);
let value = Value::Row(Dictionary::from(indexmap)).tagged(tag);
let value = Value::Row(Dictionary::from(indexmap)).tagged(&tag);
Ok(OutputStream::one(value))
}

View File

@ -49,7 +49,7 @@ impl PerItemCommand for Where {
return Err(ShellError::labeled_error(
"Expected a condition",
"where needs a condition",
*tag,
tag,
))
}
};

View File

@ -33,7 +33,7 @@ pub fn which(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
let args = args.evaluate_once(registry)?;
let mut which_out = VecDeque::new();
let tag = args.call_info.name_tag;
let tag = args.call_info.name_tag.clone();
if let Some(v) = &args.call_info.args.positional {
if v.len() > 0 {
@ -52,7 +52,7 @@ pub fn which(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
return Err(ShellError::labeled_error(
"Expected a filename to find",
"needs a filename",
*tag,
tag,
));
}
}

View File

@ -1,39 +1,20 @@
use crate::commands::{Command, UnevaluatedCallInfo};
use crate::parser::hir;
use crate::parser::{hir, hir::syntax_shape::ExpandContext};
use crate::prelude::*;
use derive_new::new;
use indexmap::IndexMap;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::error::Error;
use std::sync::Arc;
use uuid::Uuid;
use std::sync::atomic::AtomicBool;
use std::sync::{Arc, Mutex};
#[derive(Clone, Debug, Serialize, Deserialize)]
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum AnchorLocation {
Url(String),
File(String),
Source(Text),
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct SourceMap(HashMap<Uuid, AnchorLocation>);
impl SourceMap {
pub fn insert(&mut self, uuid: Uuid, anchor_location: AnchorLocation) {
self.0.insert(uuid, anchor_location);
}
pub fn get(&self, uuid: &Uuid) -> Option<&AnchorLocation> {
self.0.get(uuid)
}
pub fn new() -> SourceMap {
SourceMap(HashMap::new())
}
}
#[derive(Clone, new)]
pub struct CommandRegistry {
#[new(value = "Arc::new(Mutex::new(IndexMap::default()))")]
@ -53,13 +34,17 @@ impl CommandRegistry {
registry.get(name).map(|c| c.clone())
}
pub(crate) fn expect_command(&self, name: &str) -> Arc<Command> {
self.get_command(name).unwrap()
}
pub(crate) fn has(&self, name: &str) -> bool {
let registry = self.registry.lock().unwrap();
registry.contains_key(name)
}
fn insert(&mut self, name: impl Into<String>, command: Arc<Command>) {
pub(crate) fn insert(&mut self, name: impl Into<String>, command: Arc<Command>) {
let mut registry = self.registry.lock().unwrap();
registry.insert(name.into(), command);
}
@ -73,8 +58,8 @@ impl CommandRegistry {
#[derive(Clone)]
pub struct Context {
registry: CommandRegistry,
pub(crate) source_map: SourceMap,
host: Arc<Mutex<dyn Host + Send>>,
pub ctrl_c: Arc<AtomicBool>,
pub(crate) shell_manager: ShellManager,
}
@ -83,12 +68,20 @@ impl Context {
&self.registry
}
pub(crate) fn expand_context<'context>(
&'context self,
source: &'context Text,
span: Span,
) -> ExpandContext<'context> {
ExpandContext::new(&self.registry, span, source, self.shell_manager.homedir())
}
pub(crate) fn basic() -> Result<Context, Box<dyn Error>> {
let registry = CommandRegistry::new();
Ok(Context {
registry: registry.clone(),
source_map: SourceMap::new(),
host: Arc::new(Mutex::new(crate::env::host::BasicHost)),
ctrl_c: Arc::new(AtomicBool::new(false)),
shell_manager: ShellManager::basic(registry)?,
})
}
@ -105,43 +98,31 @@ impl Context {
}
}
pub fn add_anchor_location(&mut self, uuid: Uuid, anchor_location: AnchorLocation) {
self.source_map.insert(uuid, anchor_location);
pub(crate) fn get_command(&self, name: &str) -> Option<Arc<Command>> {
self.registry.get_command(name)
}
pub(crate) fn has_command(&self, name: &str) -> bool {
self.registry.has(name)
}
pub(crate) fn get_command(&self, name: &str) -> Arc<Command> {
self.registry.get_command(name).unwrap()
pub(crate) fn expect_command(&self, name: &str) -> Arc<Command> {
self.registry.expect_command(name)
}
pub(crate) fn run_command<'a>(
&mut self,
command: Arc<Command>,
name_tag: Tag,
source_map: SourceMap,
args: hir::Call,
source: &Text,
input: InputStream,
is_first_command: bool,
) -> OutputStream {
let command_args = self.command_args(args, input, source, source_map, name_tag);
let command_args = self.command_args(args, input, source, name_tag);
command.run(command_args, self.registry(), is_first_command)
}
fn call_info(
&self,
args: hir::Call,
source: &Text,
source_map: SourceMap,
name_tag: Tag,
) -> UnevaluatedCallInfo {
fn call_info(&self, args: hir::Call, source: &Text, name_tag: Tag) -> UnevaluatedCallInfo {
UnevaluatedCallInfo {
args,
source: source.clone(),
source_map,
name_tag,
}
}
@ -151,13 +132,13 @@ impl Context {
args: hir::Call,
input: InputStream,
source: &Text,
source_map: SourceMap,
name_tag: Tag,
) -> CommandArgs {
CommandArgs {
host: self.host.clone(),
ctrl_c: self.ctrl_c.clone(),
shell_manager: self.shell_manager.clone(),
call_info: self.call_info(args, source, source_map, name_tag),
call_info: self.call_info(args, source, name_tag),
input,
}
}

View File

@ -8,6 +8,7 @@ use crate::Text;
use chrono::{DateTime, Utc};
use chrono_humanize::Humanize;
use derive_new::new;
use log::trace;
use serde::{Deserialize, Serialize};
use std::fmt;
use std::path::PathBuf;
@ -212,11 +213,19 @@ impl Block {
let scope = Scope::new(value.clone());
if self.expressions.len() == 0 {
return Ok(Value::nothing().tagged(self.tag));
return Ok(Value::nothing().tagged(&self.tag));
}
let mut last = None;
trace!(
"EXPRS = {:?}",
self.expressions
.iter()
.map(|e| format!("{}", e))
.collect::<Vec<_>>()
);
for expr in self.expressions.iter() {
last = Some(evaluate_baseline_expr(
&expr,
@ -236,6 +245,9 @@ pub enum Value {
Row(crate::data::Dictionary),
Table(Vec<Tagged<Value>>),
// Errors are a type of value too
Error(ShellError),
Block(Block),
}
@ -284,14 +296,15 @@ impl fmt::Debug for ValueDebug<'_> {
Value::Row(o) => o.debug(f),
Value::Table(l) => debug_list(l).fmt(f),
Value::Block(_) => write!(f, "[[block]]"),
Value::Error(_) => write!(f, "[[error]]"),
}
}
}
impl Tagged<Value> {
pub(crate) fn tagged_type_name(&self) -> Tagged<String> {
pub fn tagged_type_name(&self) -> Tagged<String> {
let name = self.type_name();
Tagged::from_item(name, self.tag())
name.tagged(self.tag())
}
}
@ -303,7 +316,7 @@ impl std::convert::TryFrom<&Tagged<Value>> for Block {
Value::Block(block) => Ok(block.clone()),
v => Err(ShellError::type_error(
"Block",
value.copy_tag(v.type_name()),
v.type_name().tagged(value.tag()),
)),
}
}
@ -315,11 +328,11 @@ impl std::convert::TryFrom<&Tagged<Value>> for i64 {
fn try_from(value: &Tagged<Value>) -> Result<i64, ShellError> {
match value.item() {
Value::Primitive(Primitive::Int(int)) => {
int.tagged(value.tag).coerce_into("converting to i64")
int.tagged(&value.tag).coerce_into("converting to i64")
}
v => Err(ShellError::type_error(
"Integer",
value.copy_tag(v.type_name()),
v.type_name().tagged(value.tag()),
)),
}
}
@ -333,7 +346,7 @@ impl std::convert::TryFrom<&Tagged<Value>> for String {
Value::Primitive(Primitive::String(s)) => Ok(s.clone()),
v => Err(ShellError::type_error(
"String",
value.copy_tag(v.type_name()),
v.type_name().tagged(value.tag()),
)),
}
}
@ -347,7 +360,7 @@ impl std::convert::TryFrom<&Tagged<Value>> for Vec<u8> {
Value::Primitive(Primitive::Binary(b)) => Ok(b.clone()),
v => Err(ShellError::type_error(
"Binary",
value.copy_tag(v.type_name()),
v.type_name().tagged(value.tag()),
)),
}
}
@ -361,7 +374,7 @@ impl<'a> std::convert::TryFrom<&'a Tagged<Value>> for &'a crate::data::Dictionar
Value::Row(d) => Ok(d),
v => Err(ShellError::type_error(
"Dictionary",
value.copy_tag(v.type_name()),
v.type_name().tagged(value.tag()),
)),
}
}
@ -383,7 +396,7 @@ impl std::convert::TryFrom<Option<&Tagged<Value>>> for Switch {
Value::Primitive(Primitive::Boolean(true)) => Ok(Switch::Present),
v => Err(ShellError::type_error(
"Boolean",
value.copy_tag(v.type_name()),
v.type_name().tagged(value.tag()),
)),
},
}
@ -394,15 +407,54 @@ impl Tagged<Value> {
pub(crate) fn debug(&self) -> ValueDebug<'_> {
ValueDebug { value: self }
}
pub fn as_column_path(&self) -> Result<Tagged<Vec<Tagged<String>>>, ShellError> {
let mut out: Vec<Tagged<String>> = vec![];
match &self.item {
Value::Table(table) => {
for item in table {
out.push(item.as_string()?.tagged(&item.tag));
}
}
other => {
return Err(ShellError::type_error(
"column name",
other.type_name().tagged(&self.tag),
))
}
}
Ok(out.tagged(&self.tag))
}
pub(crate) fn as_string(&self) -> Result<String, ShellError> {
match &self.item {
Value::Primitive(Primitive::String(s)) => Ok(s.clone()),
Value::Primitive(Primitive::Boolean(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Decimal(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Int(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Path(x)) => Ok(format!("{}", x.display())),
// TODO: this should definitely be more general with better errors
other => Err(ShellError::labeled_error(
"Expected string",
other.type_name(),
&self.tag,
)),
}
}
}
impl Value {
pub(crate) fn type_name(&self) -> String {
pub fn type_name(&self) -> String {
match self {
Value::Primitive(p) => p.type_name(),
Value::Row(_) => format!("object"),
Value::Row(_) => format!("row"),
Value::Table(_) => format!("list"),
Value::Block(_) => format!("block"),
Value::Error(_) => format!("error"),
}
}
@ -418,6 +470,7 @@ impl Value {
.collect(),
Value::Block(_) => vec![],
Value::Table(_) => vec![],
Value::Error(_) => vec![],
}
}
@ -443,6 +496,22 @@ impl Value {
}
}
pub fn get_data_by_column_path(
&self,
tag: Tag,
path: &Vec<Tagged<String>>,
) -> Option<Tagged<&Value>> {
let mut current = self;
for p in path {
match current.get_data_by_key(p) {
Some(v) => current = v,
None => return None,
}
}
Some(current.tagged(tag))
}
pub fn get_data_by_path(&self, tag: Tag, path: &str) -> Option<Tagged<&Value>> {
let mut current = self;
for p in path.split(".") {
@ -452,7 +521,7 @@ impl Value {
}
}
Some(Tagged::from_item(current, tag))
Some(current.tagged(tag))
}
pub fn insert_data_at_path(
@ -472,8 +541,8 @@ impl Value {
// Special case for inserting at the top level
current
.entries
.insert(path.to_string(), Tagged::from_item(new_value, tag));
return Some(Tagged::from_item(new_obj, tag));
.insert(path.to_string(), new_value.tagged(&tag));
return Some(new_obj.tagged(&tag));
}
for idx in 0..split_path.len() {
@ -484,13 +553,64 @@ impl Value {
Value::Row(o) => {
o.entries.insert(
split_path[idx + 1].to_string(),
Tagged::from_item(new_value, tag),
new_value.tagged(&tag),
);
}
_ => {}
}
return Some(Tagged::from_item(new_obj, tag));
return Some(new_obj.tagged(&tag));
} else {
match next.item {
Value::Row(ref mut o) => {
current = o;
}
_ => return None,
}
}
}
_ => return None,
}
}
}
None
}
pub fn insert_data_at_column_path(
&self,
tag: Tag,
split_path: &Vec<Tagged<String>>,
new_value: Value,
) -> Option<Tagged<Value>> {
let mut new_obj = self.clone();
if let Value::Row(ref mut o) = new_obj {
let mut current = o;
if split_path.len() == 1 {
// Special case for inserting at the top level
current
.entries
.insert(split_path[0].item.clone(), new_value.tagged(&tag));
return Some(new_obj.tagged(&tag));
}
for idx in 0..split_path.len() {
match current.entries.get_mut(&split_path[idx].item) {
Some(next) => {
if idx == (split_path.len() - 2) {
match &mut next.item {
Value::Row(o) => {
o.entries.insert(
split_path[idx + 1].to_string(),
new_value.tagged(&tag),
);
}
_ => {}
}
return Some(new_obj.tagged(&tag));
} else {
match next.item {
Value::Row(ref mut o) => {
@ -524,8 +644,41 @@ impl Value {
match current.entries.get_mut(split_path[idx]) {
Some(next) => {
if idx == (split_path.len() - 1) {
*next = Tagged::from_item(replaced_value, tag);
return Some(Tagged::from_item(new_obj, tag));
*next = replaced_value.tagged(&tag);
return Some(new_obj.tagged(&tag));
} else {
match next.item {
Value::Row(ref mut o) => {
current = o;
}
_ => return None,
}
}
}
_ => return None,
}
}
}
None
}
pub fn replace_data_at_column_path(
&self,
tag: Tag,
split_path: &Vec<Tagged<String>>,
replaced_value: Value,
) -> Option<Tagged<Value>> {
let mut new_obj = self.clone();
if let Value::Row(ref mut o) = new_obj {
let mut current = o;
for idx in 0..split_path.len() {
match current.entries.get_mut(&split_path[idx].item) {
Some(next) => {
if idx == (split_path.len() - 1) {
*next = replaced_value.tagged(&tag);
return Some(new_obj.tagged(&tag));
} else {
match next.item {
Value::Row(ref mut o) => {
@ -549,6 +702,7 @@ impl Value {
Value::Row(o) => o.get_data(desc),
Value::Block(_) => MaybeOwned::Owned(Value::nothing()),
Value::Table(_) => MaybeOwned::Owned(Value::nothing()),
Value::Error(_) => MaybeOwned::Owned(Value::nothing()),
}
}
@ -558,7 +712,7 @@ impl Value {
Value::Block(b) => itertools::join(
b.expressions
.iter()
.map(|e| e.source(&b.source).to_string()),
.map(|e| e.span.slice(&b.source).to_string()),
"; ",
),
Value::Row(_) => format!("[table: 1 row]"),
@ -567,6 +721,7 @@ impl Value {
l.len(),
if l.len() == 1 { "row" } else { "rows" }
),
Value::Error(_) => format!("[error]"),
}
}
@ -607,22 +762,6 @@ impl Value {
}
}
pub(crate) fn as_string(&self) -> Result<String, ShellError> {
match self {
Value::Primitive(Primitive::String(s)) => Ok(s.clone()),
Value::Primitive(Primitive::Boolean(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Decimal(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Int(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Path(x)) => Ok(format!("{}", x.display())),
// TODO: this should definitely be more general with better errors
other => Err(ShellError::string(format!(
"Expected string, got {:?}",
other
))),
}
}
pub(crate) fn is_true(&self) -> bool {
match self {
Value::Primitive(Primitive::Boolean(true)) => true,
@ -675,9 +814,14 @@ impl Value {
Value::Primitive(Primitive::Date(s.into()))
}
pub fn date_from_str(s: &str) -> Result<Value, ShellError> {
let date = DateTime::parse_from_rfc3339(s)
.map_err(|err| ShellError::string(&format!("Date parse error: {}", err)))?;
pub fn date_from_str(s: Tagged<&str>) -> Result<Value, ShellError> {
let date = DateTime::parse_from_rfc3339(s.item).map_err(|err| {
ShellError::labeled_error(
&format!("Date parse error: {}", err),
"original value",
s.tag,
)
})?;
let date = date.with_timezone(&chrono::offset::Utc);

View File

@ -7,7 +7,7 @@ use std::ops::Deref;
pub(crate) fn command_dict(command: Arc<Command>, tag: impl Into<Tag>) -> Tagged<Value> {
let tag = tag.into();
let mut cmd_dict = TaggedDictBuilder::new(tag);
let mut cmd_dict = TaggedDictBuilder::new(&tag);
cmd_dict.insert("name", Value::string(command.name()));
@ -42,7 +42,7 @@ fn for_spec(name: &str, ty: &str, required: bool, tag: impl Into<Tag>) -> Tagged
fn signature_dict(signature: Signature, tag: impl Into<Tag>) -> Tagged<Value> {
let tag = tag.into();
let mut sig = TaggedListBuilder::new(tag);
let mut sig = TaggedListBuilder::new(&tag);
for arg in signature.positional.iter() {
let is_required = match arg {
@ -50,19 +50,19 @@ fn signature_dict(signature: Signature, tag: impl Into<Tag>) -> Tagged<Value> {
PositionalType::Optional(_, _) => false,
};
sig.insert_tagged(for_spec(arg.name(), "argument", is_required, tag));
sig.insert_tagged(for_spec(arg.name(), "argument", is_required, &tag));
}
if let Some(_) = signature.rest_positional {
let is_required = false;
sig.insert_tagged(for_spec("rest", "argument", is_required, tag));
sig.insert_tagged(for_spec("rest", "argument", is_required, &tag));
}
for (name, ty) in signature.named.iter() {
match ty {
NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, tag)),
NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, tag)),
NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, tag)),
NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, &tag)),
NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, &tag)),
NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, &tag)),
}
}

View File

@ -51,8 +51,9 @@ pub fn user_data() -> Result<PathBuf, ShellError> {
}
pub fn app_path(app_data_type: AppDataType, display: &str) -> Result<PathBuf, ShellError> {
let path = app_root(app_data_type, &APP_INFO)
.map_err(|err| ShellError::string(&format!("Couldn't open {} path:\n{}", display, err)))?;
let path = app_root(app_data_type, &APP_INFO).map_err(|err| {
ShellError::untagged_runtime_error(&format!("Couldn't open {} path:\n{}", display, err))
})?;
Ok(path)
}
@ -74,11 +75,22 @@ pub fn read(
let tag = tag.into();
let contents = fs::read_to_string(filename)
.map(|v| v.tagged(tag))
.map_err(|err| ShellError::string(&format!("Couldn't read config file:\n{}", err)))?;
.map(|v| v.tagged(&tag))
.map_err(|err| {
ShellError::labeled_error(
&format!("Couldn't read config file:\n{}", err),
"file name",
&tag,
)
})?;
let parsed: toml::Value = toml::from_str(&contents)
.map_err(|err| ShellError::string(&format!("Couldn't parse config file:\n{}", err)))?;
let parsed: toml::Value = toml::from_str(&contents).map_err(|err| {
ShellError::labeled_error(
&format!("Couldn't parse config file:\n{}", err),
"file name",
&tag,
)
})?;
let value = convert_toml_value_to_nu_value(&parsed, tag);
let tag = value.tag();
@ -86,7 +98,7 @@ pub fn read(
Value::Row(Dictionary { entries }) => Ok(entries),
other => Err(ShellError::type_error(
"Dictionary",
other.type_name().tagged(tag),
other.type_name().tagged(&tag),
)),
}
}

View File

@ -115,7 +115,7 @@ impl TaggedListBuilder {
}
pub fn push(&mut self, value: impl Into<Value>) {
self.list.push(value.into().tagged(self.tag));
self.list.push(value.into().tagged(&self.tag));
}
pub fn insert_tagged(&mut self, value: impl Into<Tagged<Value>>) {
@ -155,7 +155,7 @@ impl TaggedDictBuilder {
}
pub fn insert(&mut self, key: impl Into<String>, value: impl Into<Value>) {
self.dict.insert(key.into(), value.into().tagged(self.tag));
self.dict.insert(key.into(), value.into().tagged(&self.tag));
}
pub fn insert_tagged(&mut self, key: impl Into<String>, value: impl Into<Tagged<Value>>) {

View File

@ -1,14 +1,52 @@
use crate::context::{AnchorLocation, SourceMap};
use crate::context::AnchorLocation;
use crate::parser::parse::parser::TracableContext;
use crate::prelude::*;
use crate::Text;
use derive_new::new;
use getset::Getters;
use serde::Deserialize;
use serde::Serialize;
use std::path::{Path, PathBuf};
use uuid::Uuid;
#[derive(new, Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub struct Spanned<T> {
pub span: Span,
pub item: T,
}
impl<T> Spanned<T> {
pub fn map<U>(self, input: impl FnOnce(T) -> U) -> Spanned<U> {
let span = self.span;
let mapped = input(self.item);
mapped.spanned(span)
}
}
pub trait SpannedItem: Sized {
fn spanned(self, span: impl Into<Span>) -> Spanned<Self> {
Spanned {
item: self,
span: span.into(),
}
}
fn spanned_unknown(self) -> Spanned<Self> {
Spanned {
item: self,
span: Span::unknown(),
}
}
}
impl<T> SpannedItem for T {}
impl<T> std::ops::Deref for Spanned<T> {
type Target = T;
fn deref(&self) -> &T {
&self.item
}
}
#[derive(new, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub struct Tagged<T> {
pub tag: Tag,
pub item: T,
@ -16,7 +54,7 @@ pub struct Tagged<T> {
impl<T> HasTag for Tagged<T> {
fn tag(&self) -> Tag {
self.tag
self.tag.clone()
}
}
@ -28,20 +66,23 @@ impl AsRef<Path> for Tagged<PathBuf> {
pub trait TaggedItem: Sized {
fn tagged(self, tag: impl Into<Tag>) -> Tagged<Self> {
Tagged::from_item(self, tag.into())
Tagged {
item: self,
tag: tag.into(),
}
}
// For now, this is a temporary facility. In many cases, there are other useful spans that we
// could be using, such as the original source spans of JSON or Toml files, but we don't yet
// have the infrastructure to make that work.
fn tagged_unknown(self) -> Tagged<Self> {
Tagged::from_item(
self,
Tag {
Tagged {
item: self,
tag: Tag {
span: Span::unknown(),
anchor: uuid::Uuid::nil(),
anchor: None,
},
)
}
}
}
@ -56,48 +97,29 @@ impl<T> std::ops::Deref for Tagged<T> {
}
impl<T> Tagged<T> {
pub fn with_tag(self, tag: impl Into<Tag>) -> Tagged<T> {
Tagged::from_item(self.item, tag)
}
pub fn from_item(item: T, tag: impl Into<Tag>) -> Tagged<T> {
Tagged {
item,
tag: tag.into(),
}
}
pub fn map<U>(self, input: impl FnOnce(T) -> U) -> Tagged<U> {
let tag = self.tag();
let mapped = input(self.item);
Tagged::from_item(mapped, tag)
}
pub(crate) fn copy_tag<U>(&self, output: U) -> Tagged<U> {
Tagged::from_item(output, self.tag())
}
pub fn source(&self, source: &Text) -> Text {
Text::from(self.tag().slice(source))
mapped.tagged(tag)
}
pub fn tag(&self) -> Tag {
self.tag
self.tag.clone()
}
pub fn span(&self) -> Span {
self.tag.span
}
pub fn anchor(&self) -> uuid::Uuid {
self.tag.anchor
pub fn anchor(&self) -> Option<AnchorLocation> {
self.tag.anchor.clone()
}
pub fn anchor_name(&self, source_map: &SourceMap) -> Option<String> {
match source_map.get(&self.tag.anchor) {
Some(AnchorLocation::File(file)) => Some(file.clone()),
Some(AnchorLocation::Url(url)) => Some(url.clone()),
pub fn anchor_name(&self) -> Option<String> {
match self.tag.anchor {
Some(AnchorLocation::File(ref file)) => Some(file.clone()),
Some(AnchorLocation::Url(ref url)) => Some(url.clone()),
_ => None,
}
}
@ -113,29 +135,32 @@ impl<T> Tagged<T> {
impl From<&Tag> for Tag {
fn from(input: &Tag) -> Tag {
*input
input.clone()
}
}
impl From<nom_locate::LocatedSpanEx<&str, Uuid>> for Span {
fn from(input: nom_locate::LocatedSpanEx<&str, Uuid>) -> Span {
Span {
start: input.offset,
end: input.offset + input.fragment.len(),
}
impl From<nom_locate::LocatedSpanEx<&str, TracableContext>> for Span {
fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Span {
Span::new(input.offset, input.offset + input.fragment.len())
}
}
impl From<nom_locate::LocatedSpanEx<&str, u64>> for Span {
fn from(input: nom_locate::LocatedSpanEx<&str, u64>) -> Span {
Span::new(input.offset, input.offset + input.fragment.len())
}
}
impl<T>
From<(
nom_locate::LocatedSpanEx<T, Uuid>,
nom_locate::LocatedSpanEx<T, Uuid>,
nom_locate::LocatedSpanEx<T, u64>,
nom_locate::LocatedSpanEx<T, u64>,
)> for Span
{
fn from(
input: (
nom_locate::LocatedSpanEx<T, Uuid>,
nom_locate::LocatedSpanEx<T, Uuid>,
nom_locate::LocatedSpanEx<T, u64>,
nom_locate::LocatedSpanEx<T, u64>,
),
) -> Span {
Span {
@ -147,10 +172,7 @@ impl<T>
impl From<(usize, usize)> for Span {
fn from(input: (usize, usize)) -> Span {
Span {
start: input.0,
end: input.1,
}
Span::new(input.0, input.1)
}
}
@ -164,61 +186,60 @@ impl From<&std::ops::Range<usize>> for Span {
}
#[derive(
Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters,
Debug, Clone, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters, new,
)]
pub struct Tag {
pub anchor: Uuid,
pub anchor: Option<AnchorLocation>,
pub span: Span,
}
impl From<Span> for Tag {
fn from(span: Span) -> Self {
Tag {
anchor: uuid::Uuid::nil(),
span,
}
Tag { anchor: None, span }
}
}
impl From<&Span> for Tag {
fn from(span: &Span) -> Self {
Tag {
anchor: uuid::Uuid::nil(),
anchor: None,
span: *span,
}
}
}
impl From<(usize, usize, Uuid)> for Tag {
fn from((start, end, anchor): (usize, usize, Uuid)) -> Self {
impl From<(usize, usize, TracableContext)> for Tag {
fn from((start, end, _context): (usize, usize, TracableContext)) -> Self {
Tag {
anchor: None,
span: Span::new(start, end),
}
}
}
impl From<(usize, usize, AnchorLocation)> for Tag {
fn from((start, end, anchor): (usize, usize, AnchorLocation)) -> Self {
Tag {
anchor: Some(anchor),
span: Span::new(start, end),
}
}
}
impl From<(usize, usize, Option<AnchorLocation>)> for Tag {
fn from((start, end, anchor): (usize, usize, Option<AnchorLocation>)) -> Self {
Tag {
anchor,
span: Span { start, end },
span: Span::new(start, end),
}
}
}
impl From<(usize, usize, Option<Uuid>)> for Tag {
fn from((start, end, anchor): (usize, usize, Option<Uuid>)) -> Self {
impl From<nom_locate::LocatedSpanEx<&str, TracableContext>> for Tag {
fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Tag {
Tag {
anchor: if let Some(uuid) = anchor {
uuid
} else {
uuid::Uuid::nil()
},
span: Span { start, end },
}
}
}
impl From<nom_locate::LocatedSpanEx<&str, Uuid>> for Tag {
fn from(input: nom_locate::LocatedSpanEx<&str, Uuid>) -> Tag {
Tag {
anchor: input.extra,
span: Span {
start: input.offset,
end: input.offset + input.fragment.len(),
},
anchor: None,
span: Span::new(input.offset, input.offset + input.fragment.len()),
}
}
}
@ -237,22 +258,29 @@ impl From<&Tag> for Span {
impl Tag {
pub fn unknown_anchor(span: Span) -> Tag {
Tag { anchor: None, span }
}
pub fn for_char(pos: usize, anchor: AnchorLocation) -> Tag {
Tag {
anchor: uuid::Uuid::nil(),
span,
anchor: Some(anchor),
span: Span {
start: pos,
end: pos + 1,
},
}
}
pub fn unknown_span(anchor: Uuid) -> Tag {
pub fn unknown_span(anchor: AnchorLocation) -> Tag {
Tag {
anchor,
anchor: Some(anchor),
span: Span::unknown(),
}
}
pub fn unknown() -> Tag {
Tag {
anchor: uuid::Uuid::nil(),
anchor: None,
span: Span::unknown(),
}
}
@ -265,29 +293,73 @@ impl Tag {
);
Tag {
span: Span {
start: self.span.start,
end: other.span.end,
},
anchor: self.anchor,
span: Span::new(self.span.start, other.span.end),
anchor: self.anchor.clone(),
}
}
pub fn until_option(&self, other: Option<impl Into<Tag>>) -> Tag {
match other {
Some(other) => {
let other = other.into();
debug_assert!(
self.anchor == other.anchor,
"Can only merge two tags with the same anchor"
);
Tag {
span: Span::new(self.span.start, other.span.end),
anchor: self.anchor.clone(),
}
}
None => self.clone(),
}
}
pub fn slice<'a>(&self, source: &'a str) -> &'a str {
self.span.slice(source)
}
pub fn string<'a>(&self, source: &'a str) -> String {
self.span.slice(source).to_string()
}
pub fn tagged_slice<'a>(&self, source: &'a str) -> Tagged<&'a str> {
self.span.slice(source).tagged(self)
}
pub fn tagged_string<'a>(&self, source: &'a str) -> Tagged<String> {
self.span.slice(source).to_string().tagged(self)
}
}
#[allow(unused)]
pub fn tag_for_tagged_list(mut iter: impl Iterator<Item = Tag>) -> Tag {
let first = iter.next();
let first = match first {
None => return Tag::unknown(),
Some(first) => first,
};
let last = iter.last();
match last {
None => first,
Some(last) => first.until(last),
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub struct Span {
pub(crate) start: usize,
pub(crate) end: usize,
start: usize,
end: usize,
}
impl From<Option<Span>> for Span {
fn from(input: Option<Span>) -> Span {
match input {
None => Span { start: 0, end: 0 },
None => Span::new(0, 0),
Some(span) => span,
}
}
@ -295,7 +367,54 @@ impl From<Option<Span>> for Span {
impl Span {
pub fn unknown() -> Span {
Span { start: 0, end: 0 }
Span::new(0, 0)
}
pub fn new(start: usize, end: usize) -> Span {
assert!(
end >= start,
"Can't create a Span whose end < start, start={}, end={}",
start,
end
);
Span { start, end }
}
pub fn for_char(pos: usize) -> Span {
Span {
start: pos,
end: pos + 1,
}
}
pub fn until(&self, other: impl Into<Span>) -> Span {
let other = other.into();
Span::new(self.start, other.end)
}
pub fn until_option(&self, other: Option<impl Into<Span>>) -> Span {
match other {
Some(other) => {
let other = other.into();
Span::new(self.start, other.end)
}
None => *self,
}
}
pub fn string<'a>(&self, source: &'a str) -> String {
self.slice(source).to_string()
}
pub fn spanned_slice<'a>(&self, source: &'a str) -> Spanned<&'a str> {
self.slice(source).spanned(*self)
}
pub fn spanned_string<'a>(&self, source: &'a str) -> Spanned<String> {
self.slice(source).to_string().spanned(*self)
}
/*
@ -308,6 +427,14 @@ impl Span {
}
*/
pub fn start(&self) -> usize {
self.start
}
pub fn end(&self) -> usize {
self.end
}
pub fn is_unknown(&self) -> bool {
self.start == 0 && self.end == 0
}
@ -319,17 +446,11 @@ impl Span {
impl language_reporting::ReportingSpan for Span {
fn with_start(&self, start: usize) -> Self {
Span {
start,
end: self.end,
}
Span::new(start, self.end)
}
fn with_end(&self, end: usize) -> Self {
Span {
start: self.start,
end,
}
Span::new(self.start, end)
}
fn start(&self) -> usize {
@ -340,33 +461,3 @@ impl language_reporting::ReportingSpan for Span {
self.end
}
}
impl language_reporting::ReportingSpan for Tag {
fn with_start(&self, start: usize) -> Self {
Tag {
span: Span {
start,
end: self.span.end,
},
anchor: self.anchor,
}
}
fn with_end(&self, end: usize) -> Self {
Tag {
span: Span {
start: self.span.start,
end,
},
anchor: self.anchor,
}
}
fn start(&self) -> usize {
self.span.start
}
fn end(&self) -> usize {
self.span.end
}
}

View File

@ -54,7 +54,7 @@ impl ExtractType for i64 {
&Tagged {
item: Value::Primitive(Primitive::Int(int)),
..
} => Ok(int.tagged(value.tag).coerce_into("converting to i64")?),
} => Ok(int.tagged(&value.tag).coerce_into("converting to i64")?),
other => Err(ShellError::type_error("Integer", other.tagged_type_name())),
}
}
@ -68,7 +68,7 @@ impl ExtractType for u64 {
&Tagged {
item: Value::Primitive(Primitive::Int(int)),
..
} => Ok(int.tagged(value.tag).coerce_into("converting to u64")?),
} => Ok(int.tagged(&value.tag).coerce_into("converting to u64")?),
other => Err(ShellError::type_error("Integer", other.tagged_type_name())),
}
}

View File

@ -1,5 +1,6 @@
use crate::prelude::*;
use crate::parser::parse::parser::TracableContext;
use ansi_term::Color;
use derive_new::new;
use language_reporting::{Diagnostic, Label, Severity};
@ -13,12 +14,20 @@ pub enum Description {
}
impl Description {
fn into_label(self) -> Result<Label<Tag>, String> {
fn into_label(self) -> Result<Label<Span>, String> {
match self {
Description::Source(s) => Ok(Label::new_primary(s.tag()).with_message(s.item)),
Description::Source(s) => Ok(Label::new_primary(s.span()).with_message(s.item)),
Description::Synthetic(s) => Err(s),
}
}
#[allow(unused)]
fn tag(&self) -> Tag {
match self {
Description::Source(tagged) => tagged.tag.clone(),
Description::Synthetic(_) => Tag::unknown(),
}
}
}
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)]
@ -35,6 +44,13 @@ pub struct ShellError {
cause: Option<Box<ProximateShellError>>,
}
impl ShellError {
#[allow(unused)]
pub(crate) fn tag(&self) -> Option<Tag> {
self.error.tag()
}
}
impl ToDebug for ShellError {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
self.error.fmt_debug(f, source)
@ -46,12 +62,12 @@ impl serde::de::Error for ShellError {
where
T: std::fmt::Display,
{
ShellError::string(msg.to_string())
ShellError::untagged_runtime_error(msg.to_string())
}
}
impl ShellError {
pub(crate) fn type_error(
pub fn type_error(
expected: impl Into<String>,
actual: Tagged<impl Into<String>>,
) -> ShellError {
@ -62,6 +78,21 @@ impl ShellError {
.start()
}
pub fn untagged_runtime_error(error: impl Into<String>) -> ShellError {
ProximateShellError::UntaggedRuntimeError {
reason: error.into(),
}
.start()
}
pub(crate) fn unexpected_eof(expected: impl Into<String>, tag: impl Into<Tag>) -> ShellError {
ProximateShellError::UnexpectedEof {
expected: expected.into(),
tag: tag.into(),
}
.start()
}
pub(crate) fn range_error(
expected: impl Into<ExpectedRange>,
actual: &Tagged<impl fmt::Debug>,
@ -69,7 +100,7 @@ impl ShellError {
) -> ShellError {
ProximateShellError::RangeError {
kind: expected.into(),
actual_kind: actual.copy_tag(format!("{:?}", actual.item)),
actual_kind: format!("{:?}", actual.item).tagged(actual.tag()),
operation,
}
.start()
@ -82,6 +113,7 @@ impl ShellError {
.start()
}
#[allow(unused)]
pub(crate) fn invalid_command(problem: impl Into<Tag>) -> ShellError {
ProximateShellError::InvalidCommand {
command: problem.into(),
@ -111,29 +143,29 @@ impl ShellError {
pub(crate) fn argument_error(
command: impl Into<String>,
kind: ArgumentError,
tag: Tag,
tag: impl Into<Tag>,
) -> ShellError {
ProximateShellError::ArgumentError {
command: command.into(),
error: kind,
tag,
tag: tag.into(),
}
.start()
}
pub(crate) fn invalid_external_word(tag: Tag) -> ShellError {
pub(crate) fn invalid_external_word(tag: impl Into<Tag>) -> ShellError {
ProximateShellError::ArgumentError {
command: "Invalid argument to Nu command (did you mean to call an external command?)"
.into(),
error: ArgumentError::InvalidExternalWord,
tag,
tag: tag.into(),
}
.start()
}
pub(crate) fn parse_error(
error: nom::Err<(
nom_locate::LocatedSpanEx<&str, uuid::Uuid>,
nom_locate::LocatedSpanEx<&str, TracableContext>,
nom::error::ErrorKind,
)>,
) -> ShellError {
@ -151,25 +183,22 @@ impl ShellError {
}
nom::Err::Failure(span) | nom::Err::Error(span) => {
let diagnostic = Diagnostic::new(Severity::Error, format!("Parse Error"))
.with_label(Label::new_primary(Tag::from(span.0)));
.with_label(Label::new_primary(Span::from(span.0)));
ShellError::diagnostic(diagnostic)
}
}
}
pub(crate) fn diagnostic(diagnostic: Diagnostic<Tag>) -> ShellError {
pub(crate) fn diagnostic(diagnostic: Diagnostic<Span>) -> ShellError {
ProximateShellError::Diagnostic(ShellDiagnostic { diagnostic }).start()
}
pub(crate) fn to_diagnostic(self) -> Diagnostic<Tag> {
pub(crate) fn to_diagnostic(self) -> Diagnostic<Span> {
match self.error {
ProximateShellError::String(StringError { title, .. }) => {
Diagnostic::new(Severity::Error, title)
}
ProximateShellError::InvalidCommand { command } => {
Diagnostic::new(Severity::Error, "Invalid command")
.with_label(Label::new_primary(command))
.with_label(Label::new_primary(command.span))
}
ProximateShellError::MissingValue { tag, reason } => {
let mut d = Diagnostic::new(
@ -178,7 +207,7 @@ impl ShellError {
);
if let Some(tag) = tag {
d = d.with_label(Label::new_primary(tag));
d = d.with_label(Label::new_primary(tag.span));
}
d
@ -191,7 +220,7 @@ impl ShellError {
ArgumentError::InvalidExternalWord => Diagnostic::new(
Severity::Error,
format!("Invalid bare word for Nu command (did you intend to invoke an external command?)"))
.with_label(Label::new_primary(tag)),
.with_label(Label::new_primary(tag.span)),
ArgumentError::MissingMandatoryFlag(name) => Diagnostic::new(
Severity::Error,
format!(
@ -201,7 +230,7 @@ impl ShellError {
Color::Black.bold().paint(name)
),
)
.with_label(Label::new_primary(tag)),
.with_label(Label::new_primary(tag.span)),
ArgumentError::MissingMandatoryPositional(name) => Diagnostic::new(
Severity::Error,
format!(
@ -211,7 +240,7 @@ impl ShellError {
),
)
.with_label(
Label::new_primary(tag).with_message(format!("requires {} parameter", name)),
Label::new_primary(tag.span).with_message(format!("requires {} parameter", name)),
),
ArgumentError::MissingValueForName(name) => Diagnostic::new(
Severity::Error,
@ -222,7 +251,7 @@ impl ShellError {
Color::Black.bold().paint(name)
),
)
.with_label(Label::new_primary(tag)),
.with_label(Label::new_primary(tag.span)),
},
ProximateShellError::TypeError {
expected,
@ -232,10 +261,9 @@ impl ShellError {
tag,
},
} => Diagnostic::new(Severity::Error, "Type Error").with_label(
Label::new_primary(tag)
Label::new_primary(tag.span)
.with_message(format!("Expected {}, found {}", expected, actual)),
),
ProximateShellError::TypeError {
expected,
actual:
@ -244,7 +272,12 @@ impl ShellError {
tag
},
} => Diagnostic::new(Severity::Error, "Type Error")
.with_label(Label::new_primary(tag).with_message(expected)),
.with_label(Label::new_primary(tag.span).with_message(expected)),
ProximateShellError::UnexpectedEof {
expected, tag
} => Diagnostic::new(Severity::Error, format!("Unexpected end of input"))
.with_label(Label::new_primary(tag.span).with_message(format!("Expected {}", expected))),
ProximateShellError::RangeError {
kind,
@ -255,7 +288,7 @@ impl ShellError {
tag
},
} => Diagnostic::new(Severity::Error, "Range Error").with_label(
Label::new_primary(tag).with_message(format!(
Label::new_primary(tag.span).with_message(format!(
"Expected to convert {} to {} while {}, but it was out of range",
item,
kind.desc(),
@ -267,12 +300,12 @@ impl ShellError {
problem:
Tagged {
tag,
..
item
},
} => Diagnostic::new(Severity::Error, "Syntax Error")
.with_label(Label::new_primary(tag).with_message("Unexpected external command")),
.with_label(Label::new_primary(tag.span).with_message(item)),
ProximateShellError::MissingProperty { subpath, expr } => {
ProximateShellError::MissingProperty { subpath, expr, .. } => {
let subpath = subpath.into_label();
let expr = expr.into_label();
@ -293,9 +326,11 @@ impl ShellError {
ProximateShellError::Diagnostic(diag) => diag.diagnostic,
ProximateShellError::CoerceError { left, right } => {
Diagnostic::new(Severity::Error, "Coercion error")
.with_label(Label::new_primary(left.tag()).with_message(left.item))
.with_label(Label::new_secondary(right.tag()).with_message(right.item))
.with_label(Label::new_primary(left.tag().span).with_message(left.item))
.with_label(Label::new_secondary(right.tag().span).with_message(right.item))
}
ProximateShellError::UntaggedRuntimeError { reason } => Diagnostic::new(Severity::Error, format!("Error: {}", reason))
}
}
@ -306,7 +341,7 @@ impl ShellError {
) -> ShellError {
ShellError::diagnostic(
Diagnostic::new(Severity::Error, msg.into())
.with_label(Label::new_primary(tag.into()).with_message(label.into())),
.with_label(Label::new_primary(tag.into().span).with_message(label.into())),
)
}
@ -320,25 +355,29 @@ impl ShellError {
ShellError::diagnostic(
Diagnostic::new_error(msg.into())
.with_label(
Label::new_primary(primary_span.into()).with_message(primary_label.into()),
Label::new_primary(primary_span.into().span).with_message(primary_label.into()),
)
.with_label(
Label::new_secondary(secondary_span.into())
Label::new_secondary(secondary_span.into().span)
.with_message(secondary_label.into()),
),
)
}
pub fn string(title: impl Into<String>) -> ShellError {
ProximateShellError::String(StringError::new(title.into(), Value::nothing())).start()
}
// pub fn string(title: impl Into<String>) -> ShellError {
// ProximateShellError::String(StringError::new(title.into(), String::new())).start()
// }
pub(crate) fn unimplemented(title: impl Into<String>) -> ShellError {
ShellError::string(&format!("Unimplemented: {}", title.into()))
ShellError::untagged_runtime_error(&format!("Unimplemented: {}", title.into()))
}
pub(crate) fn unexpected(title: impl Into<String>) -> ShellError {
ShellError::string(&format!("Unexpected: {}", title.into()))
ShellError::untagged_runtime_error(&format!("Unexpected: {}", title.into()))
}
pub(crate) fn unreachable(title: impl Into<String>) -> ShellError {
ShellError::untagged_runtime_error(&format!("BUG: Unreachable: {}", title.into()))
}
}
@ -383,10 +422,13 @@ impl ExpectedRange {
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)]
pub enum ProximateShellError {
String(StringError),
SyntaxError {
problem: Tagged<String>,
},
UnexpectedEof {
expected: String,
tag: Tag,
},
InvalidCommand {
command: Tag,
},
@ -397,6 +439,7 @@ pub enum ProximateShellError {
MissingProperty {
subpath: Description,
expr: Description,
tag: Tag,
},
MissingValue {
tag: Option<Tag>,
@ -417,6 +460,9 @@ pub enum ProximateShellError {
left: Tagged<String>,
right: Tagged<String>,
},
UntaggedRuntimeError {
reason: String,
},
}
impl ProximateShellError {
@ -426,6 +472,22 @@ impl ProximateShellError {
error: self,
}
}
pub(crate) fn tag(&self) -> Option<Tag> {
Some(match self {
ProximateShellError::SyntaxError { problem } => problem.tag(),
ProximateShellError::UnexpectedEof { tag, .. } => tag.clone(),
ProximateShellError::InvalidCommand { command } => command.clone(),
ProximateShellError::TypeError { actual, .. } => actual.tag.clone(),
ProximateShellError::MissingProperty { tag, .. } => tag.clone(),
ProximateShellError::MissingValue { tag, .. } => return tag.clone(),
ProximateShellError::ArgumentError { tag, .. } => tag.clone(),
ProximateShellError::RangeError { actual_kind, .. } => actual_kind.tag.clone(),
ProximateShellError::Diagnostic(..) => return None,
ProximateShellError::UntaggedRuntimeError { .. } => return None,
ProximateShellError::CoerceError { left, right } => left.tag.until(&right.tag),
})
}
}
impl ToDebug for ProximateShellError {
@ -437,7 +499,7 @@ impl ToDebug for ProximateShellError {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ShellDiagnostic {
pub(crate) diagnostic: Diagnostic<Tag>,
pub(crate) diagnostic: Diagnostic<Span>,
}
impl PartialEq for ShellDiagnostic {
@ -463,22 +525,23 @@ impl std::cmp::Ord for ShellDiagnostic {
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, new, Clone, Serialize, Deserialize)]
pub struct StringError {
title: String,
error: Value,
error: String,
}
impl std::fmt::Display for ShellError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match &self.error {
ProximateShellError::String(s) => write!(f, "{}", &s.title),
ProximateShellError::MissingValue { .. } => write!(f, "MissingValue"),
ProximateShellError::InvalidCommand { .. } => write!(f, "InvalidCommand"),
ProximateShellError::TypeError { .. } => write!(f, "TypeError"),
ProximateShellError::UnexpectedEof { .. } => write!(f, "UnexpectedEof"),
ProximateShellError::RangeError { .. } => write!(f, "RangeError"),
ProximateShellError::SyntaxError { .. } => write!(f, "SyntaxError"),
ProximateShellError::MissingProperty { .. } => write!(f, "MissingProperty"),
ProximateShellError::ArgumentError { .. } => write!(f, "ArgumentError"),
ProximateShellError::Diagnostic(_) => write!(f, "<diagnostic>"),
ProximateShellError::CoerceError { .. } => write!(f, "CoerceError"),
ProximateShellError::UntaggedRuntimeError { .. } => write!(f, "UntaggedRuntimeError"),
}
}
}
@ -487,71 +550,43 @@ impl std::error::Error for ShellError {}
impl std::convert::From<Box<dyn std::error::Error>> for ShellError {
fn from(input: Box<dyn std::error::Error>) -> ShellError {
ProximateShellError::String(StringError {
title: format!("{}", input),
error: Value::nothing(),
})
.start()
ShellError::untagged_runtime_error(format!("{}", input))
}
}
impl std::convert::From<std::io::Error> for ShellError {
fn from(input: std::io::Error) -> ShellError {
ProximateShellError::String(StringError {
title: format!("{}", input),
error: Value::nothing(),
})
.start()
ShellError::untagged_runtime_error(format!("{}", input))
}
}
impl std::convert::From<subprocess::PopenError> for ShellError {
fn from(input: subprocess::PopenError) -> ShellError {
ProximateShellError::String(StringError {
title: format!("{}", input),
error: Value::nothing(),
})
.start()
ShellError::untagged_runtime_error(format!("{}", input))
}
}
impl std::convert::From<serde_yaml::Error> for ShellError {
fn from(input: serde_yaml::Error) -> ShellError {
ProximateShellError::String(StringError {
title: format!("{:?}", input),
error: Value::nothing(),
})
.start()
ShellError::untagged_runtime_error(format!("{:?}", input))
}
}
impl std::convert::From<toml::ser::Error> for ShellError {
fn from(input: toml::ser::Error) -> ShellError {
ProximateShellError::String(StringError {
title: format!("{:?}", input),
error: Value::nothing(),
})
.start()
ShellError::untagged_runtime_error(format!("{:?}", input))
}
}
impl std::convert::From<serde_json::Error> for ShellError {
fn from(input: serde_json::Error) -> ShellError {
ProximateShellError::String(StringError {
title: format!("{:?}", input),
error: Value::nothing(),
})
.start()
ShellError::untagged_runtime_error(format!("{:?}", input))
}
}
impl std::convert::From<Box<dyn std::error::Error + Send + Sync>> for ShellError {
fn from(input: Box<dyn std::error::Error + Send + Sync>) -> ShellError {
ProximateShellError::String(StringError {
title: format!("{:?}", input),
error: Value::nothing(),
})
.start()
ShellError::untagged_runtime_error(format!("{:?}", input))
}
}
@ -567,7 +602,6 @@ impl<T> ShellErrorUtils<Tagged<T>> for Option<Tagged<T>> {
}
}
}
pub trait CoerceInto<U> {
fn coerce_into(self, operation: impl Into<String>) -> Result<U, ShellError>;
}

View File

@ -7,6 +7,8 @@ use crate::parser::{
use crate::prelude::*;
use derive_new::new;
use indexmap::IndexMap;
use log::trace;
use std::fmt;
#[derive(new)]
pub struct Scope {
@ -15,6 +17,15 @@ pub struct Scope {
vars: IndexMap<String, Tagged<Value>>,
}
impl fmt::Display for Scope {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_map()
.entry(&"$it", &format!("{:?}", self.it.item))
.entries(self.vars.iter().map(|(k, v)| (k, &v.item)))
.finish()
}
}
impl Scope {
pub(crate) fn empty() -> Scope {
Scope {
@ -37,28 +48,41 @@ pub(crate) fn evaluate_baseline_expr(
scope: &Scope,
source: &Text,
) -> Result<Tagged<Value>, ShellError> {
let tag = Tag {
span: expr.span,
anchor: None,
};
match &expr.item {
RawExpression::Literal(literal) => Ok(evaluate_literal(expr.copy_tag(literal), source)),
RawExpression::Literal(literal) => Ok(evaluate_literal(literal.tagged(tag), source)),
RawExpression::ExternalWord => Err(ShellError::argument_error(
"Invalid external word",
ArgumentError::InvalidExternalWord,
expr.tag(),
tag,
)),
RawExpression::FilePath(path) => Ok(Value::path(path.clone()).tagged(expr.tag())),
RawExpression::FilePath(path) => Ok(Value::path(path.clone()).tagged(tag)),
RawExpression::Synthetic(hir::Synthetic::String(s)) => {
Ok(Value::string(s).tagged_unknown())
}
RawExpression::Variable(var) => evaluate_reference(var, scope, source),
RawExpression::Variable(var) => evaluate_reference(var, scope, source, tag),
RawExpression::Command(_) => evaluate_command(tag, scope, source),
RawExpression::ExternalCommand(external) => evaluate_external(external, scope, source),
RawExpression::Binary(binary) => {
let left = evaluate_baseline_expr(binary.left(), registry, scope, source)?;
let right = evaluate_baseline_expr(binary.right(), registry, scope, source)?;
trace!("left={:?} right={:?}", left.item, right.item);
match left.compare(binary.op(), &*right) {
Ok(result) => Ok(Value::boolean(result).tagged(expr.tag())),
Ok(result) => Ok(Value::boolean(result).tagged(tag)),
Err((left_type, right_type)) => Err(ShellError::coerce_error(
binary.left().copy_tag(left_type),
binary.right().copy_tag(right_type),
left_type.tagged(Tag {
span: binary.left().span,
anchor: None,
}),
right_type.tagged(Tag {
span: binary.right().span,
anchor: None,
}),
)),
}
}
@ -70,13 +94,10 @@ pub(crate) fn evaluate_baseline_expr(
exprs.push(expr);
}
Ok(Value::Table(exprs).tagged(expr.tag()))
Ok(Value::Table(exprs).tagged(tag))
}
RawExpression::Block(block) => {
Ok(
Value::Block(Block::new(block.clone(), source.clone(), expr.tag()))
.tagged(expr.tag()),
)
Ok(Value::Block(Block::new(block.clone(), source.clone(), tag.clone())).tagged(&tag))
}
RawExpression::Path(path) => {
let value = evaluate_baseline_expr(path.head(), registry, scope, source)?;
@ -96,19 +117,27 @@ pub(crate) fn evaluate_baseline_expr(
possible_matches.sort();
return Err(ShellError::labeled_error(
"Unknown column",
format!("did you mean '{}'?", possible_matches[0].1),
expr.tag(),
));
if possible_matches.len() > 0 {
return Err(ShellError::labeled_error(
"Unknown column",
format!("did you mean '{}'?", possible_matches[0].1),
&tag,
));
} else {
return Err(ShellError::labeled_error(
"Unknown column",
"row does not have this column",
&tag,
));
}
}
Some(next) => {
item = next.clone().item.tagged(expr.tag());
item = next.clone().item.tagged(&tag);
}
};
}
Ok(item.item().clone().tagged(expr.tag()))
Ok(item.item().clone().tagged(tag))
}
RawExpression::Boolean(_boolean) => unimplemented!(),
}
@ -130,14 +159,16 @@ fn evaluate_reference(
name: &hir::Variable,
scope: &Scope,
source: &Text,
tag: Tag,
) -> Result<Tagged<Value>, ShellError> {
trace!("Evaluating {} with Scope {}", name, scope);
match name {
hir::Variable::It(tag) => Ok(scope.it.item.clone().tagged(*tag)),
hir::Variable::Other(tag) => Ok(scope
hir::Variable::It(_) => Ok(scope.it.item.clone().tagged(tag)),
hir::Variable::Other(inner) => Ok(scope
.vars
.get(tag.slice(source))
.get(inner.slice(source))
.map(|v| v.clone())
.unwrap_or_else(|| Value::nothing().tagged(*tag))),
.unwrap_or_else(|| Value::nothing().tagged(tag))),
}
}
@ -150,3 +181,7 @@ fn evaluate_external(
"Unexpected external command".tagged(*external.name()),
))
}
fn evaluate_command(tag: Tag, _scope: &Scope, _source: &Text) -> Result<Tagged<Value>, ShellError> {
Err(ShellError::syntax_error("Unexpected command".tagged(tag)))
}

View File

@ -14,7 +14,7 @@ impl RenderView for GenericView<'_> {
match self.value {
Value::Primitive(p) => Ok(host.stdout(&p.format(None))),
Value::Table(l) => {
let view = TableView::from_list(l);
let view = TableView::from_list(l, 0);
if let Some(view) = view {
view.render_view(host)?;
@ -35,6 +35,8 @@ impl RenderView for GenericView<'_> {
view.render_view(host)?;
Ok(())
}
Value::Error(e) => Err(e.clone()),
}
}
}

View File

@ -34,7 +34,7 @@ impl TableView {
ret
}
pub fn from_list(values: &[Tagged<Value>]) -> Option<TableView> {
pub fn from_list(values: &[Tagged<Value>], starting_idx: usize) -> Option<TableView> {
if values.len() == 0 {
return None;
}
@ -42,7 +42,7 @@ impl TableView {
let mut headers = TableView::merge_descriptors(values);
if headers.len() == 0 {
headers.push("value".to_string());
headers.push("<unknown>".to_string());
}
let mut entries = vec![];
@ -68,7 +68,7 @@ impl TableView {
if values.len() > 1 {
// Indices are black, bold, right-aligned:
row.insert(0, (format!("{}", idx.to_string()), "Fdbr"));
row.insert(0, (format!("{}", (starting_idx + idx).to_string()), "Fdbr"));
}
entries.push(row);

View File

@ -1,4 +1,4 @@
#![recursion_limit = "512"]
#![recursion_limit = "1024"]
#[macro_use]
mod prelude;
@ -21,7 +21,7 @@ mod traits;
mod utils;
pub use crate::commands::command::{CallInfo, ReturnSuccess, ReturnValue};
pub use crate::context::{AnchorLocation, SourceMap};
pub use crate::context::AnchorLocation;
pub use crate::env::host::BasicHost;
pub use crate::parser::hir::SyntaxShape;
pub use crate::parser::parse::token_tree_builder::TokenTreeBuilder;
@ -31,7 +31,7 @@ pub use cli::cli;
pub use data::base::{Primitive, Value};
pub use data::config::{config_path, APP_INFO};
pub use data::dict::{Dictionary, TaggedDictBuilder};
pub use data::meta::{Tag, Tagged, TaggedItem};
pub use data::meta::{Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem};
pub use errors::{CoerceInto, ShellError};
pub use num_traits::cast::ToPrimitive;
pub use parser::parse::text::Text;

View File

@ -3,6 +3,9 @@ use log::LevelFilter;
use std::error::Error;
fn main() -> Result<(), Box<dyn Error>> {
#[cfg(feature1)]
println!("feature1 is enabled");
let matches = App::new("nushell")
.version(clap::crate_version!())
.arg(

View File

@ -7,24 +7,24 @@ pub(crate) mod registry;
use crate::errors::ShellError;
pub(crate) use deserializer::ConfigDeserializer;
pub(crate) use hir::baseline_parse_tokens::baseline_parse_tokens;
pub(crate) use hir::syntax_shape::flat_shape::FlatShape;
pub(crate) use hir::TokensIterator;
pub(crate) use parse::call_node::CallNode;
pub(crate) use parse::files::Files;
pub(crate) use parse::flag::Flag;
pub(crate) use parse::flag::{Flag, FlagKind};
pub(crate) use parse::operator::Operator;
pub(crate) use parse::parser::{nom_input, pipeline};
pub(crate) use parse::pipeline::{Pipeline, PipelineElement};
pub(crate) use parse::text::Text;
pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, PathNode, TokenNode};
pub(crate) use parse::tokens::{RawToken, Token};
pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
pub(crate) use parse::tokens::{RawNumber, RawToken};
pub(crate) use parse::unit::Unit;
pub(crate) use parse_command::parse_command;
pub(crate) use registry::CommandRegistry;
pub fn parse(input: &str, anchor: uuid::Uuid) -> Result<TokenNode, ShellError> {
pub fn parse(input: &str) -> Result<TokenNode, ShellError> {
let _ = pretty_env_logger::try_init();
match pipeline(nom_input(input, anchor)) {
match pipeline(nom_input(input)) {
Ok((_rest, val)) => Ok(val),
Err(err) => Err(ShellError::parse_error(err)),
}

View File

@ -52,7 +52,7 @@ impl<'de> ConfigDeserializer<'de> {
self.stack.push(DeserializerItem {
key_struct_field: Some((name.to_string(), name)),
val: value.unwrap_or_else(|| Value::nothing().tagged(self.call.name_tag)),
val: value.unwrap_or_else(|| Value::nothing().tagged(&self.call.name_tag)),
});
Ok(())
@ -310,9 +310,10 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> {
return Ok(r);
}
trace!(
"deserializing struct {:?} {:?} (stack={:?})",
"deserializing struct {:?} {:?} (saw_root={} stack={:?})",
name,
fields,
self.saw_root,
self.stack
);
@ -326,6 +327,12 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> {
let type_name = std::any::type_name::<V::Value>();
let tagged_val_name = std::any::type_name::<Tagged<Value>>();
trace!(
"type_name={} tagged_val_name={}",
type_name,
tagged_val_name
);
if type_name == tagged_val_name {
return visit::<Tagged<Value>, _>(value.val, name, fields, visitor);
}

View File

@ -1,11 +1,13 @@
pub(crate) mod baseline_parse;
pub(crate) mod baseline_parse_tokens;
pub(crate) mod binary;
pub(crate) mod expand_external_tokens;
pub(crate) mod external_command;
pub(crate) mod named;
pub(crate) mod path;
pub(crate) mod syntax_shape;
pub(crate) mod tokens_iterator;
use crate::parser::{registry, Unit};
use crate::parser::{registry, Operator, Unit};
use crate::prelude::*;
use derive_new::new;
use getset::Getters;
@ -14,27 +16,18 @@ use std::fmt;
use std::path::PathBuf;
use crate::evaluate::Scope;
use crate::parser::parse::tokens::RawNumber;
use crate::traits::ToDebug;
pub(crate) use self::baseline_parse::{
baseline_parse_single_token, baseline_parse_token_as_number, baseline_parse_token_as_path,
baseline_parse_token_as_pattern, baseline_parse_token_as_string,
};
pub(crate) use self::baseline_parse_tokens::{baseline_parse_next_expr, TokensIterator};
pub(crate) use self::binary::Binary;
pub(crate) use self::external_command::ExternalCommand;
pub(crate) use self::named::NamedArguments;
pub(crate) use self::path::Path;
pub(crate) use self::syntax_shape::ExpandContext;
pub(crate) use self::tokens_iterator::debug::debug_tokens;
pub(crate) use self::tokens_iterator::TokensIterator;
pub use self::baseline_parse_tokens::SyntaxShape;
pub fn path(head: impl Into<Expression>, tail: Vec<Tagged<impl Into<String>>>) -> Path {
Path::new(
head.into(),
tail.into_iter()
.map(|item| item.map(|string| string.into()))
.collect(),
)
}
pub use self::syntax_shape::SyntaxShape;
#[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)]
pub struct Call {
@ -93,6 +86,7 @@ pub enum RawExpression {
FilePath(PathBuf),
ExternalCommand(ExternalCommand),
Command(Span),
Boolean(bool),
}
@ -115,73 +109,148 @@ impl RawExpression {
match self {
RawExpression::Literal(literal) => literal.type_name(),
RawExpression::Synthetic(synthetic) => synthetic.type_name(),
RawExpression::ExternalWord => "externalword",
RawExpression::FilePath(..) => "filepath",
RawExpression::Command(..) => "command",
RawExpression::ExternalWord => "external word",
RawExpression::FilePath(..) => "file path",
RawExpression::Variable(..) => "variable",
RawExpression::List(..) => "list",
RawExpression::Binary(..) => "binary",
RawExpression::Block(..) => "block",
RawExpression::Path(..) => "path",
RawExpression::Path(..) => "variable path",
RawExpression::Boolean(..) => "boolean",
RawExpression::ExternalCommand(..) => "external",
}
}
}
pub type Expression = Tagged<RawExpression>;
pub type Expression = Spanned<RawExpression>;
impl std::fmt::Display for Expression {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let span = self.span;
match &self.item {
RawExpression::Literal(literal) => write!(f, "{}", literal.tagged(self.span)),
RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{}", s),
RawExpression::Command(_) => write!(f, "Command{{ {}..{} }}", span.start(), span.end()),
RawExpression::ExternalWord => {
write!(f, "ExternalWord{{ {}..{} }}", span.start(), span.end())
}
RawExpression::FilePath(file) => write!(f, "Path{{ {} }}", file.display()),
RawExpression::Variable(variable) => write!(f, "{}", variable),
RawExpression::List(list) => f
.debug_list()
.entries(list.iter().map(|e| format!("{}", e)))
.finish(),
RawExpression::Binary(binary) => write!(f, "{}", binary),
RawExpression::Block(items) => {
write!(f, "Block")?;
f.debug_set()
.entries(items.iter().map(|i| format!("{}", i)))
.finish()
}
RawExpression::Path(path) => write!(f, "{}", path),
RawExpression::Boolean(b) => write!(f, "${}", b),
RawExpression::ExternalCommand(..) => {
write!(f, "ExternalComment{{ {}..{} }}", span.start(), span.end())
}
}
}
}
impl Expression {
pub(crate) fn number(i: impl Into<Number>, tag: impl Into<Tag>) -> Expression {
RawExpression::Literal(Literal::Number(i.into())).tagged(tag.into())
pub(crate) fn number(i: impl Into<Number>, span: impl Into<Span>) -> Expression {
RawExpression::Literal(Literal::Number(i.into())).spanned(span.into())
}
pub(crate) fn size(
i: impl Into<Number>,
unit: impl Into<Unit>,
tag: impl Into<Tag>,
span: impl Into<Span>,
) -> Expression {
RawExpression::Literal(Literal::Size(i.into(), unit.into())).tagged(tag.into())
RawExpression::Literal(Literal::Size(i.into(), unit.into())).spanned(span.into())
}
pub(crate) fn synthetic_string(s: impl Into<String>) -> Expression {
RawExpression::Synthetic(Synthetic::String(s.into())).tagged_unknown()
RawExpression::Synthetic(Synthetic::String(s.into())).spanned_unknown()
}
pub(crate) fn string(inner: impl Into<Tag>, outer: impl Into<Tag>) -> Expression {
RawExpression::Literal(Literal::String(inner.into())).tagged(outer.into())
pub(crate) fn string(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
RawExpression::Literal(Literal::String(inner.into())).spanned(outer.into())
}
pub(crate) fn file_path(path: impl Into<PathBuf>, outer: impl Into<Tag>) -> Expression {
RawExpression::FilePath(path.into()).tagged(outer)
pub(crate) fn path(
head: Expression,
tail: Vec<Spanned<impl Into<String>>>,
span: impl Into<Span>,
) -> Expression {
let tail = tail.into_iter().map(|t| t.map(|s| s.into())).collect();
RawExpression::Path(Box::new(Path::new(head, tail))).spanned(span.into())
}
pub(crate) fn bare(tag: impl Into<Tag>) -> Expression {
RawExpression::Literal(Literal::Bare).tagged(tag)
pub(crate) fn dot_member(head: Expression, next: Spanned<impl Into<String>>) -> Expression {
let Spanned { item, span } = head;
let new_span = head.span.until(next.span);
match item {
RawExpression::Path(path) => {
let (head, mut tail) = path.parts();
tail.push(next.map(|i| i.into()));
Expression::path(head, tail, new_span)
}
other => Expression::path(other.spanned(span), vec![next], new_span),
}
}
pub(crate) fn pattern(tag: impl Into<Tag>) -> Expression {
RawExpression::Literal(Literal::GlobPattern).tagged(tag.into())
pub(crate) fn infix(
left: Expression,
op: Spanned<impl Into<Operator>>,
right: Expression,
) -> Expression {
let new_span = left.span.until(right.span);
RawExpression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right)))
.spanned(new_span)
}
pub(crate) fn variable(inner: impl Into<Tag>, outer: impl Into<Tag>) -> Expression {
RawExpression::Variable(Variable::Other(inner.into())).tagged(outer)
pub(crate) fn file_path(path: impl Into<PathBuf>, outer: impl Into<Span>) -> Expression {
RawExpression::FilePath(path.into()).spanned(outer)
}
pub(crate) fn external_command(inner: impl Into<Tag>, outer: impl Into<Tag>) -> Expression {
RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).tagged(outer)
pub(crate) fn list(list: Vec<Expression>, span: impl Into<Span>) -> Expression {
RawExpression::List(list).spanned(span)
}
pub(crate) fn it_variable(inner: impl Into<Tag>, outer: impl Into<Tag>) -> Expression {
RawExpression::Variable(Variable::It(inner.into())).tagged(outer)
pub(crate) fn bare(span: impl Into<Span>) -> Expression {
RawExpression::Literal(Literal::Bare).spanned(span)
}
pub(crate) fn pattern(span: impl Into<Span>) -> Expression {
RawExpression::Literal(Literal::GlobPattern).spanned(span.into())
}
pub(crate) fn variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
RawExpression::Variable(Variable::Other(inner.into())).spanned(outer)
}
pub(crate) fn external_command(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).spanned(outer)
}
pub(crate) fn it_variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
RawExpression::Variable(Variable::It(inner.into())).spanned(outer)
}
}
impl ToDebug for Expression {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
match self.item() {
RawExpression::Literal(l) => l.tagged(self.tag()).fmt_debug(f, source),
match &self.item {
RawExpression::Literal(l) => l.spanned(self.span).fmt_debug(f, source),
RawExpression::FilePath(p) => write!(f, "{}", p.display()),
RawExpression::ExternalWord => write!(f, "{}", self.tag().slice(source)),
RawExpression::ExternalWord => write!(f, "{}", self.span.slice(source)),
RawExpression::Command(tag) => write!(f, "{}", tag.slice(source)),
RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{:?}", s),
RawExpression::Variable(Variable::It(_)) => write!(f, "$it"),
RawExpression::Variable(Variable::Other(s)) => write!(f, "${}", s.slice(source)),
@ -212,8 +281,8 @@ impl ToDebug for Expression {
}
}
impl From<Tagged<Path>> for Expression {
fn from(path: Tagged<Path>) -> Expression {
impl From<Spanned<Path>> for Expression {
fn from(path: Spanned<Path>) -> Expression {
path.map(|p| RawExpression::Path(Box::new(p)))
}
}
@ -227,19 +296,39 @@ impl From<Tagged<Path>> for Expression {
pub enum Literal {
Number(Number),
Size(Number, Unit),
String(Tag),
String(Span),
GlobPattern,
Bare,
}
impl ToDebug for Tagged<&Literal> {
impl std::fmt::Display for Tagged<Literal> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", Tagged::new(self.tag.clone(), &self.item))
}
}
impl std::fmt::Display for Tagged<&Literal> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let span = self.tag.span;
match &self.item {
Literal::Number(number) => write!(f, "{}", number),
Literal::Size(number, unit) => write!(f, "{}{}", number, unit.as_str()),
Literal::String(_) => write!(f, "String{{ {}..{} }}", span.start(), span.end()),
Literal::GlobPattern => write!(f, "Glob{{ {}..{} }}", span.start(), span.end()),
Literal::Bare => write!(f, "Bare{{ {}..{} }}", span.start(), span.end()),
}
}
}
impl ToDebug for Spanned<&Literal> {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
match self.item() {
Literal::Number(number) => write!(f, "{:?}", *number),
match self.item {
Literal::Number(number) => write!(f, "{:?}", number),
Literal::Size(number, unit) => write!(f, "{:?}{:?}", *number, unit),
Literal::String(tag) => write!(f, "{}", tag.slice(source)),
Literal::GlobPattern => write!(f, "{}", self.tag().slice(source)),
Literal::Bare => write!(f, "{}", self.tag().slice(source)),
Literal::GlobPattern => write!(f, "{}", self.span.slice(source)),
Literal::Bare => write!(f, "{}", self.span.slice(source)),
}
}
}
@ -258,6 +347,15 @@ impl Literal {
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum Variable {
It(Tag),
Other(Tag),
It(Span),
Other(Span),
}
impl std::fmt::Display for Variable {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Variable::It(_) => write!(f, "$it"),
Variable::Other(span) => write!(f, "${{ {}..{} }}", span.start(), span.end()),
}
}
}

View File

@ -1,140 +1,2 @@
use crate::context::Context;
use crate::errors::ShellError;
use crate::parser::{hir, RawToken, Token};
use crate::TaggedItem;
use crate::Text;
use std::path::PathBuf;
pub fn baseline_parse_single_token(
token: &Token,
source: &Text,
) -> Result<hir::Expression, ShellError> {
Ok(match *token.item() {
RawToken::Number(number) => hir::Expression::number(number.to_number(source), token.tag()),
RawToken::Size(int, unit) => {
hir::Expression::size(int.to_number(source), unit, token.tag())
}
RawToken::String(tag) => hir::Expression::string(tag, token.tag()),
RawToken::Variable(tag) if tag.slice(source) == "it" => {
hir::Expression::it_variable(tag, token.tag())
}
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()),
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
RawToken::GlobPattern => hir::Expression::pattern(token.tag()),
RawToken::Bare => hir::Expression::bare(token.tag()),
})
}
pub fn baseline_parse_token_as_number(
token: &Token,
source: &Text,
) -> Result<hir::Expression, ShellError> {
Ok(match *token.item() {
RawToken::Variable(tag) if tag.slice(source) == "it" => {
hir::Expression::it_variable(tag, token.tag())
}
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()),
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
RawToken::Number(number) => hir::Expression::number(number.to_number(source), token.tag()),
RawToken::Size(number, unit) => {
hir::Expression::size(number.to_number(source), unit, token.tag())
}
RawToken::Bare => hir::Expression::bare(token.tag()),
RawToken::GlobPattern => {
return Err(ShellError::type_error(
"Number",
"glob pattern".to_string().tagged(token.tag()),
))
}
RawToken::String(tag) => hir::Expression::string(tag, token.tag()),
})
}
pub fn baseline_parse_token_as_string(
token: &Token,
source: &Text,
) -> Result<hir::Expression, ShellError> {
Ok(match *token.item() {
RawToken::Variable(tag) if tag.slice(source) == "it" => {
hir::Expression::it_variable(tag, token.tag())
}
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()),
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
RawToken::Number(_) => hir::Expression::bare(token.tag()),
RawToken::Size(_, _) => hir::Expression::bare(token.tag()),
RawToken::Bare => hir::Expression::bare(token.tag()),
RawToken::GlobPattern => {
return Err(ShellError::type_error(
"String",
"glob pattern".tagged(token.tag()),
))
}
RawToken::String(tag) => hir::Expression::string(tag, token.tag()),
})
}
pub fn baseline_parse_token_as_path(
token: &Token,
context: &Context,
source: &Text,
) -> Result<hir::Expression, ShellError> {
Ok(match *token.item() {
RawToken::Variable(tag) if tag.slice(source) == "it" => {
hir::Expression::it_variable(tag, token.tag())
}
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()),
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
RawToken::Number(_) => hir::Expression::bare(token.tag()),
RawToken::Size(_, _) => hir::Expression::bare(token.tag()),
RawToken::Bare => {
hir::Expression::file_path(expand_path(token.tag().slice(source), context), token.tag())
}
RawToken::GlobPattern => {
return Err(ShellError::type_error(
"Path",
"glob pattern".tagged(token.tag()),
))
}
RawToken::String(tag) => {
hir::Expression::file_path(expand_path(tag.slice(source), context), token.tag())
}
})
}
pub fn baseline_parse_token_as_pattern(
token: &Token,
context: &Context,
source: &Text,
) -> Result<hir::Expression, ShellError> {
Ok(match *token.item() {
RawToken::Variable(tag) if tag.slice(source) == "it" => {
hir::Expression::it_variable(tag, token.tag())
}
RawToken::ExternalCommand(_) => {
return Err(ShellError::syntax_error(
"Invalid external command".to_string().tagged(token.tag()),
))
}
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
RawToken::Number(_) => hir::Expression::bare(token.tag()),
RawToken::Size(_, _) => hir::Expression::bare(token.tag()),
RawToken::GlobPattern => hir::Expression::pattern(token.tag()),
RawToken::Bare => {
hir::Expression::file_path(expand_path(token.tag().slice(source), context), token.tag())
}
RawToken::String(tag) => {
hir::Expression::file_path(expand_path(tag.slice(source), context), token.tag())
}
})
}
pub fn expand_path(string: &str, context: &Context) -> PathBuf {
let expanded = shellexpand::tilde_with_context(string, || context.shell_manager.homedir());
PathBuf::from(expanded.as_ref())
}
#[cfg(test)]
mod tests;

View File

@ -0,0 +1,139 @@
use crate::commands::classified::InternalCommand;
use crate::commands::ClassifiedCommand;
use crate::env::host::BasicHost;
use crate::parser::hir;
use crate::parser::hir::syntax_shape::*;
use crate::parser::hir::TokensIterator;
use crate::parser::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b};
use crate::parser::TokenNode;
use crate::{Span, SpannedItem, Tag, Tagged, Text};
use pretty_assertions::assert_eq;
use std::fmt::Debug;
#[test]
fn test_parse_string() {
parse_tokens(StringShape, vec![b::string("hello")], |tokens| {
hir::Expression::string(inner_string_span(tokens[0].span()), tokens[0].span())
});
}
#[test]
fn test_parse_path() {
parse_tokens(
VariablePathShape,
vec![b::var("it"), b::op("."), b::bare("cpu")],
|tokens| {
let (outer_var, inner_var) = tokens[0].expect_var();
let bare = tokens[2].expect_bare();
hir::Expression::path(
hir::Expression::it_variable(inner_var, outer_var),
vec!["cpu".spanned(bare)],
outer_var.until(bare),
)
},
);
parse_tokens(
VariablePathShape,
vec![
b::var("cpu"),
b::op("."),
b::bare("amount"),
b::op("."),
b::string("max ghz"),
],
|tokens| {
let (outer_var, inner_var) = tokens[0].expect_var();
let amount = tokens[2].expect_bare();
let (outer_max_ghz, _) = tokens[4].expect_string();
hir::Expression::path(
hir::Expression::variable(inner_var, outer_var),
vec!["amount".spanned(amount), "max ghz".spanned(outer_max_ghz)],
outer_var.until(outer_max_ghz),
)
},
);
}
#[test]
fn test_parse_command() {
parse_tokens(
ClassifiedCommandShape,
vec![b::bare("ls"), b::sp(), b::pattern("*.txt")],
|tokens| {
let bare = tokens[0].expect_bare();
let pat = tokens[2].span();
ClassifiedCommand::Internal(InternalCommand::new(
"ls".to_string(),
Tag {
span: bare,
anchor: None,
},
hir::Call {
head: Box::new(hir::RawExpression::Command(bare).spanned(bare)),
positional: Some(vec![hir::Expression::pattern(pat)]),
named: None,
},
))
// hir::Expression::path(
// hir::Expression::variable(inner_var, outer_var),
// vec!["cpu".tagged(bare)],
// outer_var.until(bare),
// )
},
);
parse_tokens(
VariablePathShape,
vec![
b::var("cpu"),
b::op("."),
b::bare("amount"),
b::op("."),
b::string("max ghz"),
],
|tokens| {
let (outer_var, inner_var) = tokens[0].expect_var();
let amount = tokens[2].expect_bare();
let (outer_max_ghz, _) = tokens[4].expect_string();
hir::Expression::path(
hir::Expression::variable(inner_var, outer_var),
vec!["amount".spanned(amount), "max ghz".spanned(outer_max_ghz)],
outer_var.until(outer_max_ghz),
)
},
);
}
fn parse_tokens<T: Eq + Debug>(
shape: impl ExpandSyntax<Output = T>,
tokens: Vec<CurriedToken>,
expected: impl FnOnce(Tagged<&[TokenNode]>) -> T,
) {
let tokens = b::token_list(tokens);
let (tokens, source) = b::build(tokens);
ExpandContext::with_empty(&Text::from(source), |context| {
let tokens = tokens.expect_list();
let mut iterator = TokensIterator::all(tokens.item, *context.span());
let expr = expand_syntax(&shape, &mut iterator, &context);
let expr = match expr {
Ok(expr) => expr,
Err(err) => {
crate::cli::print_err(err, &BasicHost, context.source().clone());
panic!("Parse failed");
}
};
assert_eq!(expr, expected(tokens));
})
}
fn inner_string_span(span: Span) -> Span {
Span::new(span.start() + 1, span.end() - 1)
}

View File

@ -1,459 +0,0 @@
use crate::context::Context;
use crate::errors::ShellError;
use crate::parser::{
hir,
hir::{
baseline_parse_single_token, baseline_parse_token_as_number, baseline_parse_token_as_path,
baseline_parse_token_as_pattern, baseline_parse_token_as_string,
},
DelimitedNode, Delimiter, PathNode, RawToken, TokenNode,
};
use crate::{Tag, Tagged, TaggedItem, Text};
use derive_new::new;
use log::trace;
use serde::{Deserialize, Serialize};
pub fn baseline_parse_tokens(
token_nodes: &mut TokensIterator<'_>,
context: &Context,
source: &Text,
syntax_type: SyntaxShape,
) -> Result<Vec<hir::Expression>, ShellError> {
let mut exprs: Vec<hir::Expression> = vec![];
loop {
if token_nodes.at_end() {
break;
}
let expr = baseline_parse_next_expr(token_nodes, context, source, syntax_type)?;
exprs.push(expr);
}
Ok(exprs)
}
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub enum SyntaxShape {
Any,
List,
Literal,
String,
Member,
Variable,
Number,
Path,
Pattern,
Binary,
Block,
Boolean,
}
impl std::fmt::Display for SyntaxShape {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
SyntaxShape::Any => write!(f, "Any"),
SyntaxShape::List => write!(f, "List"),
SyntaxShape::Literal => write!(f, "Literal"),
SyntaxShape::String => write!(f, "String"),
SyntaxShape::Member => write!(f, "Member"),
SyntaxShape::Variable => write!(f, "Variable"),
SyntaxShape::Number => write!(f, "Number"),
SyntaxShape::Path => write!(f, "Path"),
SyntaxShape::Pattern => write!(f, "Pattern"),
SyntaxShape::Binary => write!(f, "Binary"),
SyntaxShape::Block => write!(f, "Block"),
SyntaxShape::Boolean => write!(f, "Boolean"),
}
}
}
pub fn baseline_parse_next_expr(
tokens: &mut TokensIterator,
context: &Context,
source: &Text,
syntax_type: SyntaxShape,
) -> Result<hir::Expression, ShellError> {
let next = tokens
.next()
.ok_or_else(|| ShellError::string("Expected token, found none"))?;
trace!(target: "nu::parser::parse_one_expr", "syntax_type={:?}, token={:?}", syntax_type, next);
match (syntax_type, next) {
(SyntaxShape::Path, TokenNode::Token(token)) => {
return baseline_parse_token_as_path(token, context, source)
}
(SyntaxShape::Path, token) => {
return Err(ShellError::type_error(
"Path",
token.type_name().tagged(token.tag()),
))
}
(SyntaxShape::Pattern, TokenNode::Token(token)) => {
return baseline_parse_token_as_pattern(token, context, source)
}
(SyntaxShape::Pattern, token) => {
return Err(ShellError::type_error(
"Path",
token.type_name().tagged(token.tag()),
))
}
(SyntaxShape::String, TokenNode::Token(token)) => {
return baseline_parse_token_as_string(token, source);
}
(SyntaxShape::String, token) => {
return Err(ShellError::type_error(
"String",
token.type_name().tagged(token.tag()),
))
}
(SyntaxShape::Number, TokenNode::Token(token)) => {
return Ok(baseline_parse_token_as_number(token, source)?);
}
(SyntaxShape::Number, token) => {
return Err(ShellError::type_error(
"Numeric",
token.type_name().tagged(token.tag()),
))
}
// TODO: More legit member processing
(SyntaxShape::Member, TokenNode::Token(token)) => {
return baseline_parse_token_as_string(token, source);
}
(SyntaxShape::Member, token) => {
return Err(ShellError::type_error(
"member",
token.type_name().tagged(token.tag()),
))
}
(SyntaxShape::Any, _) => {}
(SyntaxShape::List, _) => {}
(SyntaxShape::Literal, _) => {}
(SyntaxShape::Variable, _) => {}
(SyntaxShape::Binary, _) => {}
(SyntaxShape::Block, _) => {}
(SyntaxShape::Boolean, _) => {}
};
let first = baseline_parse_semantic_token(next, context, source)?;
let possible_op = tokens.peek();
let op = match possible_op {
Some(TokenNode::Operator(op)) => op.clone(),
_ => return Ok(first),
};
tokens.next();
let second = match tokens.next() {
None => {
return Err(ShellError::labeled_error(
"Expected something after an operator",
"operator",
op.tag(),
))
}
Some(token) => baseline_parse_semantic_token(token, context, source)?,
};
// We definitely have a binary expression here -- let's see if we should coerce it into a block
match syntax_type {
SyntaxShape::Any => {
let tag = first.tag().until(second.tag());
let binary = hir::Binary::new(first, op, second);
let binary = hir::RawExpression::Binary(Box::new(binary));
let binary = binary.tagged(tag);
Ok(binary)
}
SyntaxShape::Block => {
let tag = first.tag().until(second.tag());
let path: Tagged<hir::RawExpression> = match first {
Tagged {
item: hir::RawExpression::Literal(hir::Literal::Bare),
tag,
} => {
let string = tag.slice(source).to_string().tagged(tag);
let path = hir::Path::new(
// TODO: Deal with synthetic nodes that have no representation at all in source
hir::RawExpression::Variable(hir::Variable::It(Tag::unknown()))
.tagged(Tag::unknown()),
vec![string],
);
let path = hir::RawExpression::Path(Box::new(path));
path.tagged(first.tag())
}
Tagged {
item: hir::RawExpression::Literal(hir::Literal::String(inner)),
tag,
} => {
let string = inner.slice(source).to_string().tagged(tag);
let path = hir::Path::new(
// TODO: Deal with synthetic nodes that have no representation at all in source
hir::RawExpression::Variable(hir::Variable::It(Tag::unknown()))
.tagged_unknown(),
vec![string],
);
let path = hir::RawExpression::Path(Box::new(path));
path.tagged(first.tag())
}
Tagged {
item: hir::RawExpression::Variable(..),
..
} => first,
Tagged { tag, item } => {
return Err(ShellError::labeled_error(
"The first part of an un-braced block must be a column name",
item.type_name(),
tag,
))
}
};
let binary = hir::Binary::new(path, op, second);
let binary = hir::RawExpression::Binary(Box::new(binary));
let binary = binary.tagged(tag);
let block = hir::RawExpression::Block(vec![binary]);
let block = block.tagged(tag);
Ok(block)
}
other => Err(ShellError::unimplemented(format!(
"coerce hint {:?}",
other
))),
}
}
pub fn baseline_parse_semantic_token(
token: &TokenNode,
context: &Context,
source: &Text,
) -> Result<hir::Expression, ShellError> {
match token {
TokenNode::Token(token) => baseline_parse_single_token(token, source),
TokenNode::Call(_call) => unimplemented!(),
TokenNode::Delimited(delimited) => baseline_parse_delimited(delimited, context, source),
TokenNode::Pipeline(_pipeline) => unimplemented!(),
TokenNode::Operator(op) => Err(ShellError::syntax_error(
"Unexpected operator".tagged(op.tag),
)),
TokenNode::Flag(flag) => Err(ShellError::syntax_error("Unexpected flag".tagged(flag.tag))),
TokenNode::Member(tag) => Err(ShellError::syntax_error(
"BUG: Top-level member".tagged(*tag),
)),
TokenNode::Whitespace(tag) => Err(ShellError::syntax_error(
"BUG: Whitespace found during parse".tagged(*tag),
)),
TokenNode::Error(error) => Err(*error.item.clone()),
TokenNode::Path(path) => baseline_parse_path(path, context, source),
}
}
pub fn baseline_parse_delimited(
token: &Tagged<DelimitedNode>,
context: &Context,
source: &Text,
) -> Result<hir::Expression, ShellError> {
match token.delimiter() {
Delimiter::Brace => {
let children = token.children();
let exprs = baseline_parse_tokens(
&mut TokensIterator::new(children),
context,
source,
SyntaxShape::Any,
)?;
let expr = hir::RawExpression::Block(exprs);
Ok(expr.tagged(token.tag()))
}
Delimiter::Paren => unimplemented!(),
Delimiter::Square => {
let children = token.children();
let exprs = baseline_parse_tokens(
&mut TokensIterator::new(children),
context,
source,
SyntaxShape::Any,
)?;
let expr = hir::RawExpression::List(exprs);
Ok(expr.tagged(token.tag()))
}
}
}
pub fn baseline_parse_path(
token: &Tagged<PathNode>,
context: &Context,
source: &Text,
) -> Result<hir::Expression, ShellError> {
let head = baseline_parse_semantic_token(token.head(), context, source)?;
let mut tail = vec![];
for part in token.tail() {
let string = match part {
TokenNode::Token(token) => match token.item() {
RawToken::Bare => token.tag().slice(source),
RawToken::String(tag) => tag.slice(source),
RawToken::Number(_)
| RawToken::Size(..)
| RawToken::Variable(_)
| RawToken::ExternalCommand(_)
| RawToken::GlobPattern
| RawToken::ExternalWord => {
return Err(ShellError::type_error(
"String",
token.type_name().tagged(part.tag()),
))
}
},
TokenNode::Member(tag) => tag.slice(source),
// TODO: Make this impossible
other => {
return Err(ShellError::syntax_error(
format!("{} in path", other.type_name()).tagged(other.tag()),
))
}
}
.to_string();
tail.push(string.tagged(part.tag()));
}
Ok(hir::path(head, tail).tagged(token.tag()).into())
}
#[derive(Debug, new)]
pub struct TokensIterator<'a> {
tokens: &'a [TokenNode],
#[new(default)]
index: usize,
#[new(default)]
seen: indexmap::IndexSet<usize>,
}
impl TokensIterator<'_> {
    /// Mark the token at `position` as consumed without moving the cursor.
    pub fn remove(&mut self, position: usize) {
        self.seen.insert(position);
    }

    /// Total number of underlying tokens, consumed or not.
    pub fn len(&self) -> usize {
        self.tokens.len()
    }

    /// True when every token from the cursor onward has been consumed.
    pub fn at_end(&self) -> bool {
        (self.index..self.tokens.len()).all(|position| self.seen.contains(&position))
    }

    /// Consume the token at the cursor and step forward.
    pub fn advance(&mut self) {
        self.seen.insert(self.index);
        self.index += 1;
    }

    /// Find the first unconsumed token (anywhere in the stream) for which `f`
    /// produces a value; consume it and return its position alongside the value.
    pub fn extract<T>(&mut self, f: impl Fn(&TokenNode) -> Option<T>) -> Option<(usize, T)> {
        let found = self
            .tokens
            .iter()
            .enumerate()
            .filter(|(position, _)| !self.seen.contains(position))
            .find_map(|(position, item)| f(item).map(|value| (position, value)));

        if let Some((position, _)) = &found {
            self.seen.insert(*position);
        }

        found
    }

    /// Reposition the cursor at `pos`.
    pub fn move_to(&mut self, pos: usize) {
        self.index = pos;
    }

    /// Reposition the cursor at the beginning of the stream.
    pub fn restart(&mut self) {
        self.index = 0;
    }

    /// Duplicate this iterator, cursor position and consumed set included.
    pub fn clone(&self) -> TokensIterator {
        TokensIterator {
            tokens: self.tokens,
            index: self.index,
            seen: self.seen.clone(),
        }
    }

    /// Look at the next significant token without consuming anything.
    pub fn peek(&self) -> Option<&TokenNode> {
        self.clone().next()
    }

    /// Collect every unconsumed token from the start of the stream; debugging aid.
    pub fn debug_remaining(&self) -> Vec<TokenNode> {
        let mut remaining = self.clone();
        remaining.restart();
        remaining.cloned().collect()
    }
}
impl<'a> Iterator for TokensIterator<'a> {
    type Item = &'a TokenNode;

    /// Yield the next unconsumed, non-whitespace token, silently consuming
    /// any already-seen positions and whitespace that precede it.
    fn next(&mut self) -> Option<&'a TokenNode> {
        loop {
            if self.index >= self.tokens.len() {
                return None;
            }

            // Skip positions that were consumed out of order via `remove`/`extract`.
            if self.seen.contains(&self.index) {
                self.advance();
                continue;
            }

            // `self.index` is still in bounds here: the only path that advances
            // past the check above immediately `continue`s, so the original
            // second bounds check at this point was dead code and is removed.
            match &self.tokens[self.index] {
                // Whitespace is consumed but never yielded to callers.
                TokenNode::Whitespace(_) => {
                    self.advance();
                }
                other => {
                    self.advance();
                    return Some(other);
                }
            }
        }
    }
}

View File

@ -1,6 +1,6 @@
use crate::parser::{hir::Expression, Operator};
use crate::prelude::*;
use crate::Tagged;
use derive_new::new;
use getset::Getters;
use serde::{Deserialize, Serialize};
@ -12,10 +12,16 @@ use std::fmt;
#[get = "pub(crate)"]
pub struct Binary {
left: Expression,
op: Tagged<Operator>,
op: Spanned<Operator>,
right: Expression,
}
impl fmt::Display for Binary {
    /// Render the binary expression in prefix form: `(<op> <left> <right>)`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "({} {} {})", self.op.as_str(), self.left, self.right)
    }
}
impl ToDebug for Binary {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
write!(f, "{}", self.left.debug(source))?;

View File

@ -0,0 +1,159 @@
use crate::errors::ShellError;
use crate::parser::{
hir::syntax_shape::{
color_syntax, expand_atom, AtomicToken, ColorSyntax, ExpandContext, ExpansionRule,
MaybeSpaceShape,
},
FlatShape, TokenNode, TokensIterator,
};
use crate::{Span, Spanned, Text};
/// Consume the remaining stream as a sequence of external-command arguments,
/// returning each expanded span as an owned, spanned string.
pub fn expand_external_tokens(
    token_nodes: &mut TokensIterator<'_>,
    source: &Text,
) -> Result<Vec<Spanned<String>>, ShellError> {
    let mut out: Vec<Spanned<String>> = vec![];

    // Keep expanding expressions until the stream is exhausted; errors
    // propagate immediately.
    while let Some(span) = expand_next_expression(token_nodes)? {
        out.push(span.spanned_string(source));
    }

    Ok(out)
}
/// Colors a stream of external-command tokens until EOF.
#[derive(Debug, Copy, Clone)]
pub struct ExternalTokensShape;

impl ColorSyntax for ExternalTokensShape {
    type Info = ();
    type Input = ();

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Self::Info {
        loop {
            // Optional whitespace between external expressions.
            color_syntax(&MaybeSpaceShape, token_nodes, context, shapes);

            // Process an external expression. External expressions are mostly
            // words, with a few exceptions (like $variables and path expansion
            // rules). EOF ends the stream; anything processed loops again.
            if let ExternalExpressionResult::Eof =
                color_syntax(&ExternalExpression, token_nodes, context, shapes).1
            {
                break;
            }
        }
    }
}
/// Expand a single external expression starting at the cursor: its head token
/// plus every directly-adjacent continuation. Returns the covering span, or
/// `None` at end of stream.
pub fn expand_next_expression(
    token_nodes: &mut TokensIterator<'_>,
) -> Result<Option<Span>, ShellError> {
    let first = match token_nodes.next_non_ws() {
        None => return Ok(None),
        Some(node) => triage_external_head(node)?,
    };

    // Extend the expression over every continuation token that follows.
    let mut last = first;
    while let Some(continuation) = triage_continuation(token_nodes)? {
        last = continuation;
    }

    Ok(Some(first.until(last)))
}
/// Extract the span for the head node of an external expression.
///
/// Only ordinary tokens and flags are currently handled; the other node
/// kinds are explicit `unimplemented!` placeholders.
fn triage_external_head(node: &TokenNode) -> Result<Span, ShellError> {
    Ok(match node {
        TokenNode::Token(token) => token.span,
        TokenNode::Call(_call) => unimplemented!("TODO: OMG"),
        TokenNode::Nodes(_nodes) => unimplemented!("TODO: OMG"),
        TokenNode::Delimited(_delimited) => unimplemented!("TODO: OMG"),
        TokenNode::Pipeline(_pipeline) => unimplemented!("TODO: OMG"),
        TokenNode::Flag(flag) => flag.span,
        TokenNode::Whitespace(_whitespace) => {
            unreachable!("This function should be called after next_non_ws()")
        }
        TokenNode::Error(_error) => unimplemented!("TODO: OMG"),
    })
}
/// Peek at the next node: if it directly continues the current external
/// expression (a token or a flag), consume it and return its span.
/// Whitespace or end-of-stream ends the expression and leaves the cursor
/// untouched.
fn triage_continuation<'a, 'b>(
    nodes: &'a mut TokensIterator<'b>,
) -> Result<Option<Span>, ShellError> {
    let mut peeked = nodes.peek_any();

    let node = match peeked.node {
        None => return Ok(None),
        Some(node) => node,
    };

    match &node {
        // Whitespace terminates the expression without being consumed.
        node if node.is_whitespace() => return Ok(None),
        TokenNode::Token(..) | TokenNode::Flag(..) => {}
        TokenNode::Call(..) => unimplemented!("call"),
        TokenNode::Nodes(..) => unimplemented!("nodes"),
        TokenNode::Delimited(..) => unimplemented!("delimited"),
        TokenNode::Pipeline(..) => unimplemented!("pipeline"),
        TokenNode::Whitespace(..) => unimplemented!("whitespace"),
        TokenNode::Error(..) => unimplemented!("error"),
    }

    // Only consume the peeked node once we know it continues the expression.
    peeked.commit();
    Ok(Some(node.span()))
}
/// Result of coloring a single external expression.
#[must_use]
enum ExternalExpressionResult {
    /// The token stream was exhausted; nothing was processed.
    Eof,
    /// One external expression was colored; more may follow.
    Processed,
}

/// Colors one external expression (usually a word; occasionally $variables,
/// globs, and similar).
#[derive(Debug, Copy, Clone)]
struct ExternalExpression;

impl ColorSyntax for ExternalExpression {
    type Info = ExternalExpressionResult;
    type Input = ();

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> ExternalExpressionResult {
        // Permissive expansion yields an atom for any input, so `Err` is
        // unreachable here by construction.
        let atom = match expand_atom(
            token_nodes,
            "external word",
            context,
            ExpansionRule::permissive(),
        ) {
            Err(_) => unreachable!("TODO: separate infallible expand_atom"),
            Ok(Spanned {
                item: AtomicToken::Eof { .. },
                ..
            }) => return ExternalExpressionResult::Eof,
            Ok(atom) => atom,
        };

        atom.color_tokens(shapes);
        // Idiom fix: tail expression instead of a trailing `return` statement.
        ExternalExpressionResult::Processed
    }
}

View File

@ -9,7 +9,7 @@ use std::fmt;
)]
#[get = "pub(crate)"]
pub struct ExternalCommand {
name: Tag,
pub(crate) name: Span,
}
impl ToDebug for ExternalCommand {

View File

@ -43,9 +43,13 @@ impl NamedArguments {
match switch {
None => self.named.insert(name.into(), NamedValue::AbsentSwitch),
Some(flag) => self
.named
.insert(name, NamedValue::PresentSwitch(*flag.name())),
Some(flag) => self.named.insert(
name,
NamedValue::PresentSwitch(Tag {
span: *flag.name(),
anchor: None,
}),
),
};
}

View File

@ -1,18 +1,47 @@
use crate::parser::hir::Expression;
use crate::prelude::*;
use crate::Tagged;
use derive_new::new;
use getset::Getters;
use getset::{Getters, MutGetters};
use serde::{Deserialize, Serialize};
use std::fmt;
#[derive(
Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new,
Debug,
Clone,
Eq,
PartialEq,
Ord,
PartialOrd,
Hash,
Getters,
MutGetters,
Serialize,
Deserialize,
new,
)]
#[get = "pub(crate)"]
pub struct Path {
head: Expression,
tail: Vec<Tagged<String>>,
#[get_mut = "pub(crate)"]
tail: Vec<Spanned<String>>,
}
impl fmt::Display for Path {
    /// Render the path as `<head>.<member>.<member>…`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.head)?;
        for entry in &self.tail {
            write!(f, ".{}", entry.item)?;
        }
        Ok(())
    }
}
impl Path {
    /// Decompose the path into its head expression and tail members,
    /// consuming `self`.
    pub(crate) fn parts(self) -> (Expression, Vec<Spanned<String>>) {
        (self.head, self.tail)
    }
}
impl ToDebug for Path {
@ -20,7 +49,7 @@ impl ToDebug for Path {
write!(f, "{}", self.head.debug(source))?;
for part in &self.tail {
write!(f, ".{}", part.item())?;
write!(f, ".{}", part.item)?;
}
Ok(())

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,330 @@
use crate::errors::ShellError;
use crate::parser::{
hir,
hir::syntax_shape::{
color_fallible_syntax, color_syntax_with, continue_expression, expand_expr, expand_syntax,
DelimitedShape, ExpandContext, ExpandExpression, ExpressionContinuationShape,
ExpressionListShape, FallibleColorSyntax, FlatShape, MemberShape, PathTailShape,
VariablePathShape,
},
hir::tokens_iterator::TokensIterator,
parse::token_tree::Delimiter,
RawToken, TokenNode,
};
use crate::{Span, Spanned, SpannedItem};
/// A block in any syntactic form: either a `{ ... }` delimited block or a
/// shorthand block.
#[derive(Debug, Copy, Clone)]
pub struct AnyBlockShape;

impl FallibleColorSyntax for AnyBlockShape {
    type Info = ();
    type Input = ();

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        // EOF is not an error for coloring; there's simply nothing to color.
        let block = match token_nodes.peek_non_ws().not_eof("block") {
            Err(_) => return Ok(()),
            Ok(block) => block,
        };

        // A `{ ... }` delimited block colors as a brace-delimited region.
        if let Some((children, spans)) = block.node.as_block() {
            let mut token_nodes = TokensIterator::new(children.item, context.span, false);
            color_syntax_with(
                &DelimitedShape,
                &(Delimiter::Brace, spans.0, spans.1),
                &mut token_nodes,
                context,
                shapes,
            );
            return Ok(());
        }

        // Otherwise, look for a shorthand block. If none found, fail.
        color_fallible_syntax(&ShorthandBlock, token_nodes, context, shapes)
    }
}

impl ExpandExpression for AnyBlockShape {
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        let block = token_nodes.peek_non_ws().not_eof("block")?;

        // A `{ ... }` delimited block expands as an expression list.
        if let Some((block, _tags)) = block.node.as_block() {
            let mut iterator = TokensIterator::new(&block.item, context.span, false);
            let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?;
            return Ok(hir::RawExpression::Block(exprs).spanned(block.span));
        }

        // Otherwise, fall back to a shorthand block.
        expand_syntax(&ShorthandBlock, token_nodes, context)
    }
}
/// A shorthand block: a head path/member followed by continuations,
/// synthesized into a one-expression block.
#[derive(Debug, Copy, Clone)]
pub struct ShorthandBlock;

impl FallibleColorSyntax for ShorthandBlock {
    type Info = ();
    type Input = ();

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        // Try to find a shorthand head. If none found, fail.
        color_fallible_syntax(&ShorthandPath, token_nodes, context, shapes)?;

        // Color as many expression continuations as are present; running out
        // of continuations is not an error.
        while color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes)
            .is_ok()
        {}

        Ok(())
    }
}

impl ExpandExpression for ShorthandBlock {
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        // Expand the head path, fold every continuation onto it, then wrap
        // the result in a synthetic one-element block spanning the whole run.
        let path = expand_expr(&ShorthandPath, token_nodes, context)?;
        let start = path.span;
        let expr = continue_expression(path, token_nodes, context)?;
        let end = expr.span;

        Ok(hir::RawExpression::Block(vec![expr]).spanned(start.until(end)))
    }
}
/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block
#[derive(Debug, Copy, Clone)]
pub struct ShorthandPath;

impl FallibleColorSyntax for ShorthandPath {
    type Info = ();
    type Input = ();

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        token_nodes.atomic(|token_nodes| {
            // A full variable path (`$var.a.b`) is the whole head by itself.
            if color_fallible_syntax(&VariablePathShape, token_nodes, context, shapes).is_ok() {
                return Ok(());
            }

            // Otherwise, look for a member (`<member>` -> `$it.<member>`).
            color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;

            // Now that the head is synthesized, expand the tail of the path
            // like any other path. A missing tail is fine; a single member is
            // sufficient.
            let _ = color_fallible_syntax(&PathTailShape, token_nodes, context, shapes);

            Ok(())
        })
    }
}

impl ExpandExpression for ShorthandPath {
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        // A full variable path is the whole expression by itself.
        if let Ok(path) = expand_expr(&VariablePathShape, token_nodes, context) {
            return Ok(path);
        }

        // Synthesize the head of the shorthand path (`<member>` -> `$it.<member>`).
        let mut head = expand_expr(&ShorthandHeadShape, token_nodes, context)?;

        // Now that the head is synthesized, expand the tail of the path like
        // any other path.
        match expand_syntax(&PathTailShape, token_nodes, context) {
            // A missing tail is fine; the bare head is the whole expression.
            Err(_) => Ok(head),
            Ok((tail, _)) => {
                // Join each member expanded by `PathTailShape` onto the head
                // to form the full path expression.
                for member in tail {
                    head = hir::Expression::dot_member(head, member);
                }

                Ok(head)
            }
        }
    }
}
/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block
#[derive(Debug, Copy, Clone)]
pub struct ShorthandHeadShape;

impl FallibleColorSyntax for ShorthandHeadShape {
    type Info = ();
    type Input = ();

    /// Color the head of a shorthand path: a bare token or a string, both of
    /// which stand in for a member of `$it`.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        // A shorthand path must not be at EOF
        let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?;
        match peeked.node {
            // If the head of a shorthand path is a bare token, it expands to `$it.bare`
            TokenNode::Token(Spanned {
                item: RawToken::Bare,
                span,
            }) => {
                peeked.commit();
                shapes.push(FlatShape::BareMember.spanned(*span));
                Ok(())
            }
            // If the head of a shorthand path is a string, it expands to `$it."some string"`
            TokenNode::Token(Spanned {
                item: RawToken::String(_),
                span: outer,
            }) => {
                peeked.commit();
                shapes.push(FlatShape::StringMember.spanned(*outer));
                Ok(())
            }
            // Anything else is not a valid shorthand head; nothing is consumed.
            other => Err(ShellError::type_error(
                "shorthand head",
                other.tagged_type_name(),
            )),
        }
    }
}

impl ExpandExpression for ShorthandHeadShape {
    /// Expand the head of a shorthand path into a `$it.<member>` path
    /// expression.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        // A shorthand path must not be at EOF
        let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?;
        match peeked.node {
            // If the head of a shorthand path is a bare token, it expands to `$it.bare`
            TokenNode::Token(Spanned {
                item: RawToken::Bare,
                span,
            }) => {
                // Commit the peeked token
                peeked.commit();
                // Synthesize an `$it` expression
                let it = synthetic_it();
                // Make a path out of `$it` and the bare token as a member
                Ok(hir::Expression::path(
                    it,
                    vec![span.spanned_string(context.source)],
                    *span,
                ))
            }
            // If the head of a shorthand path is a string, it expands to `$it."some string"`
            TokenNode::Token(Spanned {
                item: RawToken::String(inner),
                span: outer,
            }) => {
                // Commit the peeked token
                peeked.commit();
                // Synthesize an `$it` expression
                let it = synthetic_it();
                // Make a path out of `$it` and the bare token as a member
                Ok(hir::Expression::path(
                    it,
                    vec![inner.string(context.source).spanned(*outer)],
                    *outer,
                ))
            }
            // Any other token is not a valid bare head
            // NOTE(review): the error label here is "shorthand path" while the
            // coloring impl above uses "shorthand head" — confirm which is
            // intended.
            other => {
                return Err(ShellError::type_error(
                    "shorthand path",
                    other.tagged_type_name(),
                ))
            }
        }
    }
}
/// Build a synthetic `$it` variable expression with no source location.
fn synthetic_it() -> hir::Expression {
    hir::Expression::it_variable(Span::unknown(), Span::unknown())
}

View File

@ -0,0 +1,308 @@
pub(crate) mod atom;
pub(crate) mod delimited;
pub(crate) mod file_path;
pub(crate) mod list;
pub(crate) mod number;
pub(crate) mod pattern;
pub(crate) mod string;
pub(crate) mod unit;
pub(crate) mod variable_path;
use crate::parser::hir::syntax_shape::{
color_delimited_square, color_fallible_syntax, color_fallible_syntax_with, expand_atom,
expand_delimited_square, expand_expr, expand_syntax, AtomicToken, BareShape, ColorableDotShape,
DotShape, ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, ExpressionContinuation,
ExpressionContinuationShape, FallibleColorSyntax, FlatShape,
};
use crate::parser::{
hir,
hir::{Expression, TokensIterator},
};
use crate::prelude::*;
use std::path::PathBuf;
/// Any complete expression: a start expression followed by any number of
/// continuations (`.member` or infix operators).
#[derive(Debug, Copy, Clone)]
pub struct AnyExpressionShape;

impl ExpandExpression for AnyExpressionShape {
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        // Expand the expression at the cursor, then fold continuations onto it.
        let head = expand_expr(&AnyExpressionStartShape, token_nodes, context)?;
        continue_expression(head, token_nodes, context)
    }
}

impl FallibleColorSyntax for AnyExpressionShape {
    type Info = ();
    type Input = ();

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        // Color the expression at the cursor.
        color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context, shapes)?;

        // It's fine for there to be no continuation after the head expression,
        // so the result is deliberately ignored.
        let _ = continue_coloring_expression(token_nodes, context, shapes);

        Ok(())
    }
}
/// Fold expression continuations (`.member` and infix operators) onto `head`
/// until no further continuation parses, returning the combined expression.
pub(crate) fn continue_expression(
    mut head: hir::Expression,
    token_nodes: &mut TokensIterator<'_>,
    context: &ExpandContext,
) -> Result<hir::Expression, ShellError> {
    // Each successful continuation is merged into `head`; the first failure
    // to parse a continuation ends the expression.
    while let Ok(continuation) = expand_syntax(&ExpressionContinuationShape, token_nodes, context)
    {
        head = match continuation {
            // A `.member` suffix extends the path with a new member.
            ExpressionContinuation::DotSuffix(_dot, member) => {
                Expression::dot_member(head, member)
            }
            // An infix suffix combines the head with the right-hand expression.
            ExpressionContinuation::InfixSuffix(op, expr) => Expression::infix(head, op, expr),
        };
    }

    Ok(head)
}
/// Color at least one expression continuation, failing if none is present;
/// additional continuations are colored until one fails to parse.
pub(crate) fn continue_coloring_expression(
    token_nodes: &mut TokensIterator<'_>,
    context: &ExpandContext,
    shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
    // if there's not even one expression continuation, fail
    color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes)?;

    // We already saw one continuation; keep coloring until none remain.
    while color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes)
        .is_ok()
    {}

    Ok(())
}
/// The leading portion of an expression, before any continuations.
#[derive(Debug, Copy, Clone)]
pub struct AnyExpressionStartShape;

impl ExpandExpression for AnyExpressionStartShape {
    /// Expand one atom into an expression; sizes, square-delimited lists, and
    /// bare words get special handling, everything else goes through
    /// `into_hir`.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        let atom = expand_atom(token_nodes, "expression", context, ExpansionRule::new())?;
        match atom.item {
            AtomicToken::Size { number, unit } => {
                return Ok(hir::Expression::size(
                    number.to_number(context.source),
                    unit.item,
                    Tag {
                        span: atom.span,
                        anchor: None,
                    },
                ))
            }
            AtomicToken::SquareDelimited { nodes, .. } => {
                expand_delimited_square(&nodes, atom.span.into(), context)
            }
            AtomicToken::Word { .. } | AtomicToken::Dot { .. } => {
                // A word (or leading dot) may start a longer bare path; absorb
                // the rest of the bare tail into one bare expression.
                let end = expand_syntax(&BareTailShape, token_nodes, context)?;
                Ok(hir::Expression::bare(atom.span.until_option(end)))
            }
            other => return other.spanned(atom.span).into_hir(context, "expression"),
        }
    }
}

impl FallibleColorSyntax for AnyExpressionStartShape {
    type Info = ();
    type Input = ();

    /// Color one atom. Uses permissive expansion so even malformed input
    /// produces a shape (colored as an error) instead of failing.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        let atom = token_nodes.spanned(|token_nodes| {
            expand_atom(
                token_nodes,
                "expression",
                context,
                ExpansionRule::permissive(),
            )
        });
        let atom = match atom {
            Spanned {
                item: Err(_err),
                span,
            } => {
                // Expansion failed entirely: color the whole span as an error
                // and report success so coloring can proceed past it.
                shapes.push(FlatShape::Error.spanned(span));
                return Ok(());
            }
            Spanned {
                item: Ok(value), ..
            } => value,
        };
        match atom.item {
            AtomicToken::Size { number, unit } => shapes.push(
                FlatShape::Size {
                    number: number.span.into(),
                    unit: unit.span.into(),
                }
                .spanned(atom.span),
            ),
            AtomicToken::SquareDelimited { nodes, spans } => {
                color_delimited_square(spans, &nodes, atom.span.into(), context, shapes)
            }
            AtomicToken::Word { .. } | AtomicToken::Dot { .. } => {
                shapes.push(FlatShape::Word.spanned(atom.span));
            }
            // Every other atom kind knows how to color itself.
            _ => atom.color_tokens(shapes),
        }
        Ok(())
    }
}
/// The continuation of a bare word: additional words and dots glued together.
#[derive(Debug, Copy, Clone)]
pub struct BareTailShape;

impl FallibleColorSyntax for BareTailShape {
    type Info = ();
    type Input = ();

    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        let len = shapes.len();

        // Alternate between words and dots, coloring both as `Word`, until
        // neither matches.
        loop {
            let word = color_fallible_syntax_with(
                &BareShape,
                &FlatShape::Word,
                token_nodes,
                context,
                shapes,
            );

            if word.is_ok() {
                // A word was found; look for the next piece of the tail.
                continue;
            }

            // No word here; try a dot instead.
            let dot = color_fallible_syntax_with(
                &ColorableDotShape,
                &FlatShape::Word,
                token_nodes,
                context,
                shapes,
            );

            if dot.is_err() {
                // Neither a word nor a dot: the tail is done.
                break;
            }
        }

        // Succeed only if at least one shape was actually added.
        if shapes.len() > len {
            Ok(())
        } else {
            Err(ShellError::syntax_error(
                "No tokens matched BareTailShape".tagged_unknown(),
            ))
        }
    }
}

impl ExpandSyntax for BareTailShape {
    type Output = Option<Span>;

    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Option<Span>, ShellError> {
        // Track the end of the last word or dot consumed; `None` means the
        // tail was empty.
        let mut end: Option<Span> = None;

        loop {
            if let Ok(bare) = expand_syntax(&BareShape, token_nodes, context) {
                end = Some(bare.span);
                continue;
            }

            match expand_syntax(&DotShape, token_nodes, context) {
                Ok(dot) => end = Some(dot),
                Err(_) => break,
            }
        }

        Ok(end)
    }
}
/// Expand a leading `~` in `string` against the context's home directory and
/// return the result as a `PathBuf`.
pub fn expand_file_path(string: &str, context: &ExpandContext) -> PathBuf {
    let expanded = shellexpand::tilde_with_context(string, || context.homedir());
    PathBuf::from(expanded.as_ref())
}

View File

@ -0,0 +1,580 @@
use crate::parser::hir::syntax_shape::{
expand_syntax, expression::expand_file_path, parse_single_node, BarePathShape,
BarePatternShape, ExpandContext, UnitShape,
};
use crate::parser::{
hir,
hir::{Expression, RawNumber, TokensIterator},
parse::flag::{Flag, FlagKind},
DelimitedNode, Delimiter, FlatShape, RawToken, TokenNode, Unit,
};
use crate::prelude::*;
use crate::{Span, Spanned};
/// The smallest semantic unit the expansion machinery works with: a single
/// classified token (or delimited group) with spans pointing back into the
/// source. The lifetime borrows delimited children from the token tree.
#[derive(Debug)]
pub enum AtomicToken<'tokens> {
    Eof {
        span: Span,
    },
    Error {
        error: Spanned<ShellError>,
    },
    Number {
        number: RawNumber,
    },
    // A number immediately followed by a unit, e.g. `10kb`.
    Size {
        number: Spanned<RawNumber>,
        unit: Spanned<Unit>,
    },
    String {
        body: Span,
    },
    // The special `$it` variable.
    ItVariable {
        name: Span,
    },
    Variable {
        name: Span,
    },
    ExternalCommand {
        command: Span,
    },
    ExternalWord {
        text: Span,
    },
    GlobPattern {
        pattern: Span,
    },
    FilePath {
        path: Span,
    },
    Word {
        text: Span,
    },
    // The three delimited forms carry the spans of their opening and closing
    // delimiters plus the child nodes between them.
    SquareDelimited {
        spans: (Span, Span),
        nodes: &'tokens Vec<TokenNode>,
    },
    ParenDelimited {
        span: (Span, Span),
        nodes: &'tokens Vec<TokenNode>,
    },
    BraceDelimited {
        spans: (Span, Span),
        nodes: &'tokens Vec<TokenNode>,
    },
    Pipeline {
        pipe: Option<Span>,
        elements: Spanned<&'tokens Vec<TokenNode>>,
    },
    ShorthandFlag {
        name: Span,
    },
    LonghandFlag {
        name: Span,
    },
    Dot {
        text: Span,
    },
    Operator {
        text: Span,
    },
    Whitespace {
        text: Span,
    },
}

// An atomic token together with the span it covers in the source.
pub type SpannedAtomicToken<'tokens> = Spanned<AtomicToken<'tokens>>;
impl<'tokens> SpannedAtomicToken<'tokens> {
    /// Convert this atom into an HIR expression; atoms with no expression
    /// form produce a type error naming `expected`.
    pub fn into_hir(
        &self,
        context: &ExpandContext,
        expected: &'static str,
    ) -> Result<hir::Expression, ShellError> {
        Ok(match &self.item {
            AtomicToken::Eof { .. } => {
                return Err(ShellError::type_error(
                    expected,
                    "eof atomic token".tagged(self.span),
                ))
            }
            AtomicToken::Error { .. } => {
                // NOTE(review): this arm reuses the "eof atomic token" label;
                // looks like a copy/paste from the arm above — confirm intent.
                return Err(ShellError::type_error(
                    expected,
                    "eof atomic token".tagged(self.span),
                ))
            }
            AtomicToken::Operator { .. } => {
                return Err(ShellError::type_error(
                    expected,
                    "operator".tagged(self.span),
                ))
            }
            AtomicToken::ShorthandFlag { .. } => {
                return Err(ShellError::type_error(
                    expected,
                    "shorthand flag".tagged(self.span),
                ))
            }
            AtomicToken::LonghandFlag { .. } => {
                return Err(ShellError::type_error(expected, "flag".tagged(self.span)))
            }
            AtomicToken::Whitespace { .. } => {
                return Err(ShellError::unimplemented("whitespace in AtomicToken"))
            }
            AtomicToken::Dot { .. } => {
                return Err(ShellError::type_error(expected, "dot".tagged(self.span)))
            }
            AtomicToken::Number { number } => {
                Expression::number(number.to_number(context.source), self.span)
            }
            // File paths get tilde expansion before becoming expressions.
            AtomicToken::FilePath { path } => Expression::file_path(
                expand_file_path(path.slice(context.source), context),
                self.span,
            ),
            AtomicToken::Size { number, unit } => {
                Expression::size(number.to_number(context.source), **unit, self.span)
            }
            AtomicToken::String { body } => Expression::string(*body, self.span),
            AtomicToken::ItVariable { name } => Expression::it_variable(*name, self.span),
            AtomicToken::Variable { name } => Expression::variable(*name, self.span),
            AtomicToken::ExternalCommand { command } => {
                Expression::external_command(*command, self.span)
            }
            AtomicToken::ExternalWord { text } => Expression::string(*text, self.span),
            AtomicToken::GlobPattern { pattern } => Expression::pattern(*pattern),
            AtomicToken::Word { text } => Expression::string(*text, *text),
            AtomicToken::SquareDelimited { .. } => unimplemented!("into_hir"),
            AtomicToken::ParenDelimited { .. } => unimplemented!("into_hir"),
            AtomicToken::BraceDelimited { .. } => unimplemented!("into_hir"),
            AtomicToken::Pipeline { .. } => unimplemented!("into_hir"),
        })
    }

    /// A short human-readable name for this atom's kind, carrying its span.
    pub fn spanned_type_name(&self) -> Spanned<&'static str> {
        match &self.item {
            AtomicToken::Eof { .. } => "eof",
            AtomicToken::Error { .. } => "error",
            AtomicToken::Operator { .. } => "operator",
            AtomicToken::ShorthandFlag { .. } => "shorthand flag",
            AtomicToken::LonghandFlag { .. } => "flag",
            AtomicToken::Whitespace { .. } => "whitespace",
            AtomicToken::Dot { .. } => "dot",
            AtomicToken::Number { .. } => "number",
            AtomicToken::FilePath { .. } => "file path",
            AtomicToken::Size { .. } => "size",
            AtomicToken::String { .. } => "string",
            AtomicToken::ItVariable { .. } => "$it",
            AtomicToken::Variable { .. } => "variable",
            AtomicToken::ExternalCommand { .. } => "external command",
            AtomicToken::ExternalWord { .. } => "external word",
            AtomicToken::GlobPattern { .. } => "file pattern",
            AtomicToken::Word { .. } => "word",
            AtomicToken::SquareDelimited { .. } => "array literal",
            AtomicToken::ParenDelimited { .. } => "parenthesized expression",
            AtomicToken::BraceDelimited { .. } => "block",
            AtomicToken::Pipeline { .. } => "pipeline",
        }
        .spanned(self.span)
    }

    /// Same names as `spanned_type_name`, but tagged rather than spanned.
    pub fn tagged_type_name(&self) -> Tagged<&'static str> {
        match &self.item {
            AtomicToken::Eof { .. } => "eof",
            AtomicToken::Error { .. } => "error",
            AtomicToken::Operator { .. } => "operator",
            AtomicToken::ShorthandFlag { .. } => "shorthand flag",
            AtomicToken::LonghandFlag { .. } => "flag",
            AtomicToken::Whitespace { .. } => "whitespace",
            AtomicToken::Dot { .. } => "dot",
            AtomicToken::Number { .. } => "number",
            AtomicToken::FilePath { .. } => "file path",
            AtomicToken::Size { .. } => "size",
            AtomicToken::String { .. } => "string",
            AtomicToken::ItVariable { .. } => "$it",
            AtomicToken::Variable { .. } => "variable",
            AtomicToken::ExternalCommand { .. } => "external command",
            AtomicToken::ExternalWord { .. } => "external word",
            AtomicToken::GlobPattern { .. } => "file pattern",
            AtomicToken::Word { .. } => "word",
            AtomicToken::SquareDelimited { .. } => "array literal",
            AtomicToken::ParenDelimited { .. } => "parenthesized expression",
            AtomicToken::BraceDelimited { .. } => "block",
            AtomicToken::Pipeline { .. } => "pipeline",
        }
        .tagged(self.span)
    }

    /// Push the flat shape(s) for this atom onto `shapes` for syntax
    /// highlighting. `Eof` contributes nothing; unhandled kinds (the `_` arm)
    /// are colored as errors.
    pub(crate) fn color_tokens(&self, shapes: &mut Vec<Spanned<FlatShape>>) {
        match &self.item {
            AtomicToken::Eof { .. } => {}
            AtomicToken::Error { .. } => return shapes.push(FlatShape::Error.spanned(self.span)),
            AtomicToken::Operator { .. } => {
                return shapes.push(FlatShape::Operator.spanned(self.span));
            }
            AtomicToken::ShorthandFlag { .. } => {
                return shapes.push(FlatShape::ShorthandFlag.spanned(self.span));
            }
            AtomicToken::LonghandFlag { .. } => {
                return shapes.push(FlatShape::Flag.spanned(self.span));
            }
            AtomicToken::Whitespace { .. } => {
                return shapes.push(FlatShape::Whitespace.spanned(self.span));
            }
            AtomicToken::FilePath { .. } => return shapes.push(FlatShape::Path.spanned(self.span)),
            AtomicToken::Dot { .. } => return shapes.push(FlatShape::Dot.spanned(self.span)),
            AtomicToken::Number {
                number: RawNumber::Decimal(_),
            } => {
                return shapes.push(FlatShape::Decimal.spanned(self.span));
            }
            AtomicToken::Number {
                number: RawNumber::Int(_),
            } => {
                return shapes.push(FlatShape::Int.spanned(self.span));
            }
            AtomicToken::Size { number, unit } => {
                return shapes.push(
                    FlatShape::Size {
                        number: number.span,
                        unit: unit.span,
                    }
                    .spanned(self.span),
                );
            }
            AtomicToken::String { .. } => return shapes.push(FlatShape::String.spanned(self.span)),
            AtomicToken::ItVariable { .. } => {
                return shapes.push(FlatShape::ItVariable.spanned(self.span))
            }
            AtomicToken::Variable { .. } => {
                return shapes.push(FlatShape::Variable.spanned(self.span))
            }
            AtomicToken::ExternalCommand { .. } => {
                return shapes.push(FlatShape::ExternalCommand.spanned(self.span));
            }
            AtomicToken::ExternalWord { .. } => {
                return shapes.push(FlatShape::ExternalWord.spanned(self.span))
            }
            AtomicToken::GlobPattern { .. } => {
                return shapes.push(FlatShape::GlobPattern.spanned(self.span))
            }
            AtomicToken::Word { .. } => return shapes.push(FlatShape::Word.spanned(self.span)),
            _ => return shapes.push(FlatShape::Error.spanned(self.span)),
        }
    }
}
/// Whether `expand_atom` may yield a `Whitespace` atom or must stop at
/// whitespace.
#[derive(Debug)]
pub enum WhitespaceHandling {
    #[allow(unused)]
    AllowWhitespace,
    RejectWhitespace,
}

/// Configuration controlling which token kinds `expand_atom` accepts and how
/// leniently it behaves. Built via `new()`/`permissive()` plus the chainable
/// toggles below.
#[derive(Debug)]
pub struct ExpansionRule {
    pub(crate) allow_external_command: bool,
    pub(crate) allow_external_word: bool,
    pub(crate) allow_operator: bool,
    pub(crate) allow_eof: bool,
    // When set, a size-looking token (e.g. `10kb`) is treated as a plain word.
    pub(crate) treat_size_as_word: bool,
    pub(crate) commit_errors: bool,
    pub(crate) whitespace: WhitespaceHandling,
}
impl ExpansionRule {
    /// The strict default: external commands/words, operators, EOF and
    /// whitespace are all rejected.
    pub fn new() -> ExpansionRule {
        ExpansionRule {
            allow_external_command: false,
            allow_external_word: false,
            allow_operator: false,
            allow_eof: false,
            treat_size_as_word: false,
            commit_errors: false,
            whitespace: WhitespaceHandling::RejectWhitespace,
        }
    }

    /// The intent of permissive mode is to return an atomic token for every possible
    /// input token. This is important for error-correcting parsing, such as the
    /// syntax highlighter.
    pub fn permissive() -> ExpansionRule {
        ExpansionRule {
            allow_external_command: true,
            allow_external_word: true,
            allow_operator: true,
            allow_eof: true,
            treat_size_as_word: false,
            commit_errors: true,
            whitespace: WhitespaceHandling::AllowWhitespace,
        }
    }

    // The remaining methods are chainable builder-style toggles for the
    // individual flags above; each consumes and returns the rule.

    #[allow(unused)]
    pub fn allow_external_command(mut self) -> ExpansionRule {
        self.allow_external_command = true;
        self
    }

    #[allow(unused)]
    pub fn allow_operator(mut self) -> ExpansionRule {
        self.allow_operator = true;
        self
    }

    #[allow(unused)]
    pub fn no_operator(mut self) -> ExpansionRule {
        self.allow_operator = false;
        self
    }

    #[allow(unused)]
    pub fn no_external_command(mut self) -> ExpansionRule {
        self.allow_external_command = false;
        self
    }

    #[allow(unused)]
    pub fn allow_external_word(mut self) -> ExpansionRule {
        self.allow_external_word = true;
        self
    }

    #[allow(unused)]
    pub fn no_external_word(mut self) -> ExpansionRule {
        self.allow_external_word = false;
        self
    }

    #[allow(unused)]
    pub fn treat_size_as_word(mut self) -> ExpansionRule {
        self.treat_size_as_word = true;
        self
    }

    #[allow(unused)]
    pub fn commit_errors(mut self) -> ExpansionRule {
        self.commit_errors = true;
        self
    }

    #[allow(unused)]
    pub fn allow_whitespace(mut self) -> ExpansionRule {
        self.whitespace = WhitespaceHandling::AllowWhitespace;
        self
    }

    #[allow(unused)]
    pub fn reject_whitespace(mut self) -> ExpansionRule {
        self.whitespace = WhitespaceHandling::RejectWhitespace;
        self
    }
}
/// Expand the head of the token stream into a single atomic token, according
/// to the given `ExpansionRule`.
///
/// If the caller of expand_atom throws away the returned atomic token, it
/// must use a checkpoint to roll it back.
pub fn expand_atom<'me, 'content>(
    token_nodes: &'me mut TokensIterator<'content>,
    expected: &'static str,
    context: &ExpandContext,
    rule: ExpansionRule,
) -> Result<SpannedAtomicToken<'content>, ShellError> {
    // At end-of-input, either produce an Eof atom or fail, depending on the rule.
    if token_nodes.at_end() {
        match rule.allow_eof {
            true => {
                return Ok(AtomicToken::Eof {
                    span: Span::unknown(),
                }
                .spanned(Span::unknown()))
            }
            false => return Err(ShellError::unexpected_eof("anything", Tag::unknown())),
        }
    }

    // First, we'll need to handle the situation where more than one token corresponds
    // to a single atomic token

    // If treat_size_as_word, don't try to parse the head of the token stream
    // as a size.
    match rule.treat_size_as_word {
        true => {}
        false => match expand_syntax(&UnitShape, token_nodes, context) {
            // If the head of the stream isn't a valid unit, we'll try to parse
            // it again next as a word
            Err(_) => {}
            // But if it was a valid unit, we're done here
            Ok(Spanned {
                item: (number, unit),
                span,
            }) => return Ok(AtomicToken::Size { number, unit }.spanned(span)),
        },
    }

    // Try to parse the head of the stream as a bare path. A bare path includes
    // words as well as `.`s, connected together without whitespace.
    match expand_syntax(&BarePathShape, token_nodes, context) {
        // If we didn't find a bare path
        Err(_) => {}
        Ok(span) => {
            let next = token_nodes.peek_any();

            match next.node {
                Some(token) if token.is_pattern() => {
                    // if the very next token is a pattern, we're looking at a glob, not a
                    // word, and we should try to parse it as a glob next
                }

                _ => return Ok(AtomicToken::Word { text: span }.spanned(span)),
            }
        }
    }

    // Try to parse the head of the stream as a pattern. A pattern includes
    // words, words with `*` as well as `.`s, connected together without whitespace.
    match expand_syntax(&BarePatternShape, token_nodes, context) {
        // If we didn't find a bare path
        Err(_) => {}
        Ok(span) => return Ok(AtomicToken::GlobPattern { pattern: span }.spanned(span)),
    }

    // The next token corresponds to at most one atomic token

    // We need to `peek` because `parse_single_node` doesn't cover all of the
    // cases that `expand_atom` covers. We should probably collapse the two
    // if possible.
    let peeked = token_nodes.peek_any().not_eof(expected)?;

    match peeked.node {
        TokenNode::Token(_) => {
            // handle this next
        }

        // Error nodes in the stream are passed through as error atoms.
        TokenNode::Error(error) => {
            peeked.commit();
            return Ok(AtomicToken::Error {
                error: error.clone(),
            }
            .spanned(error.span));
        }

        // [ ... ]
        TokenNode::Delimited(Spanned {
            item:
                DelimitedNode {
                    delimiter: Delimiter::Square,
                    spans,
                    children,
                },
            span,
        }) => {
            peeked.commit();
            let span = *span;
            return Ok(AtomicToken::SquareDelimited {
                nodes: children,
                spans: *spans,
            }
            .spanned(span));
        }

        TokenNode::Flag(Spanned {
            item:
                Flag {
                    kind: FlagKind::Shorthand,
                    name,
                },
            span,
        }) => {
            peeked.commit();
            return Ok(AtomicToken::ShorthandFlag { name: *name }.spanned(*span));
        }

        TokenNode::Flag(Spanned {
            item:
                Flag {
                    kind: FlagKind::Longhand,
                    name,
                },
            span,
        }) => {
            peeked.commit();
            // NOTE(review): longhand flags are also mapped to `ShorthandFlag`
            // here — confirm whether a distinct longhand atom is intended.
            return Ok(AtomicToken::ShorthandFlag { name: *name }.spanned(*span));
        }

        // If we see whitespace, process the whitespace according to the whitespace
        // handling rules
        TokenNode::Whitespace(span) => match rule.whitespace {
            // if whitespace is allowed, return a whitespace token
            WhitespaceHandling::AllowWhitespace => {
                peeked.commit();
                return Ok(AtomicToken::Whitespace { text: *span }.spanned(*span));
            }

            // if whitespace is disallowed, return an error
            WhitespaceHandling::RejectWhitespace => {
                return Err(ShellError::syntax_error("Unexpected whitespace".tagged(
                    Tag {
                        span: *span,
                        anchor: None,
                    },
                )))
            }
        },

        // Any other token node is surfaced as a type-error atom.
        other => {
            let span = peeked.node.span();

            peeked.commit();
            return Ok(AtomicToken::Error {
                error: ShellError::type_error("token", other.tagged_type_name()).spanned(span),
            }
            .spanned(span));
        }
    }

    parse_single_node(token_nodes, expected, |token, token_span, err| {
        Ok(match token {
            // First, the error cases. Each error case corresponds to a expansion rule
            // flag that can be used to allow the case

            // rule.allow_operator
            RawToken::Operator(_) if !rule.allow_operator => return Err(err.error()),
            // rule.allow_external_command
            RawToken::ExternalCommand(_) if !rule.allow_external_command => {
                return Err(ShellError::type_error(
                    expected,
                    token.type_name().tagged(Tag {
                        span: token_span,
                        anchor: None,
                    }),
                ))
            }
            // rule.allow_external_word
            RawToken::ExternalWord if !rule.allow_external_word => {
                return Err(ShellError::invalid_external_word(Tag {
                    span: token_span,
                    anchor: None,
                }))
            }

            RawToken::Number(number) => AtomicToken::Number { number }.spanned(token_span),
            RawToken::Operator(_) => AtomicToken::Operator { text: token_span }.spanned(token_span),
            RawToken::String(body) => AtomicToken::String { body }.spanned(token_span),
            // `$it` gets its own atom so downstream shapes can special-case it.
            RawToken::Variable(name) if name.slice(context.source) == "it" => {
                AtomicToken::ItVariable { name }.spanned(token_span)
            }
            RawToken::Variable(name) => AtomicToken::Variable { name }.spanned(token_span),
            RawToken::ExternalCommand(command) => {
                AtomicToken::ExternalCommand { command }.spanned(token_span)
            }
            RawToken::ExternalWord => {
                AtomicToken::ExternalWord { text: token_span }.spanned(token_span)
            }
            RawToken::GlobPattern => AtomicToken::GlobPattern {
                pattern: token_span,
            }
            .spanned(token_span),
            RawToken::Bare => AtomicToken::Word { text: token_span }.spanned(token_span),
        })
    })
}

View File

@ -0,0 +1,49 @@
use crate::parser::hir::syntax_shape::{
color_syntax, expand_syntax, ColorSyntax, ExpandContext, ExpressionListShape, TokenNode,
};
use crate::parser::{hir, hir::TokensIterator, Delimiter, FlatShape};
use crate::prelude::*;
/// Expand the contents of a `[ ... ]` node into a list expression.
pub fn expand_delimited_square(
    children: &Vec<TokenNode>,
    span: Span,
    context: &ExpandContext,
) -> Result<hir::Expression, ShellError> {
    // Walk the delimited children with their own iterator.
    let mut inner = TokensIterator::new(&children, span, false);

    let exprs = expand_syntax(&ExpressionListShape, &mut inner, context)?;

    Ok(hir::Expression::list(exprs, Tag { span, anchor: None }))
}
/// Color a `[ ... ]` node: both delimiters plus the expression list between them.
pub fn color_delimited_square(
    (open, close): (Span, Span),
    children: &Vec<TokenNode>,
    span: Span,
    context: &ExpandContext,
    shapes: &mut Vec<Spanned<FlatShape>>,
) {
    shapes.push(FlatShape::OpenDelimiter(Delimiter::Square).spanned(open));

    let mut inner = TokensIterator::new(&children, span, false);
    color_syntax(&ExpressionListShape, &mut inner, context, shapes);

    shapes.push(FlatShape::CloseDelimiter(Delimiter::Square).spanned(close));
}
/// Colors any delimited node: opening delimiter, inner expression list,
/// closing delimiter.
#[derive(Debug, Copy, Clone)]
pub struct DelimitedShape;

impl ColorSyntax for DelimitedShape {
    type Info = ();
    type Input = (Delimiter, Span, Span);

    fn color_syntax<'a, 'b>(
        &self,
        (delim, start, end): &(Delimiter, Span, Span),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Self::Info {
        shapes.push(FlatShape::OpenDelimiter(*delim).spanned(*start));

        // The interior is always colored as an expression list.
        color_syntax(&ExpressionListShape, token_nodes, context, shapes);

        shapes.push(FlatShape::CloseDelimiter(*delim).spanned(*end));
    }
}

View File

@ -0,0 +1,71 @@
use crate::parser::hir::syntax_shape::expression::atom::{expand_atom, AtomicToken, ExpansionRule};
use crate::parser::hir::syntax_shape::{
expression::expand_file_path, ExpandContext, ExpandExpression, FallibleColorSyntax, FlatShape,
};
use crate::parser::{hir, hir::TokensIterator};
use crate::prelude::*;
#[derive(Debug, Copy, Clone)]
pub struct FilePathShape;

impl FallibleColorSyntax for FilePathShape {
    type Info = ();
    type Input = ();

    /// Color the next atom as a path when it is path-like (word, string,
    /// number or size); otherwise fall back to the atom's own coloring.
    /// A failed expansion is a silent no-op.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        let atom = match expand_atom(token_nodes, "file path", context, ExpansionRule::permissive())
        {
            Ok(atom) => atom,
            Err(_) => return Ok(()),
        };

        match atom.item {
            AtomicToken::Word { .. }
            | AtomicToken::String { .. }
            | AtomicToken::Number { .. }
            | AtomicToken::Size { .. } => shapes.push(FlatShape::Path.spanned(atom.span)),
            _ => atom.color_tokens(shapes),
        }

        Ok(())
    }
}
impl ExpandExpression for FilePathShape {
    /// Expand the next atom into a file-path expression. Words and strings are
    /// run through `expand_file_path`; bare numbers and sizes are taken
    /// verbatim from the source text.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        let atom = expand_atom(token_nodes, "file path", context, ExpansionRule::new())?;

        match atom.item {
            AtomicToken::Word { text: body } | AtomicToken::String { body } => {
                let path = expand_file_path(body.slice(context.source), context);
                Ok(hir::Expression::file_path(path, atom.span))
            }
            AtomicToken::Number { .. } | AtomicToken::Size { .. } => Ok(hir::Expression::file_path(
                atom.span.slice(context.source),
                atom.span,
            )),
            // Everything else is converted through the atom's generic lowering.
            _ => atom.into_hir(context, "file path"),
        }
    }
}

View File

@ -0,0 +1,176 @@
use crate::errors::ShellError;
use crate::parser::{
hir,
hir::syntax_shape::{
color_fallible_syntax, color_syntax, expand_atom, expand_expr, maybe_spaced, spaced,
AnyExpressionShape, ColorSyntax, ExpandContext, ExpandSyntax, ExpansionRule,
MaybeSpaceShape, SpaceShape,
},
hir::TokensIterator,
FlatShape,
};
use crate::Spanned;
#[derive(Debug, Copy, Clone)]
pub struct ExpressionListShape;

impl ExpandSyntax for ExpressionListShape {
    type Output = Vec<hir::Expression>;

    /// Expand a whitespace-separated list of expressions until the end of the
    /// token stream. The first expression may omit leading whitespace; every
    /// subsequent one requires it.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<Vec<hir::Expression>, ShellError> {
        let mut exprs = vec![];
        let mut first = true;

        loop {
            if token_nodes.at_end_possible_ws() {
                return Ok(exprs);
            }

            let expr = if first {
                first = false;
                expand_expr(&maybe_spaced(AnyExpressionShape), token_nodes, context)?
            } else {
                expand_expr(&spaced(AnyExpressionShape), token_nodes, context)?
            };

            exprs.push(expr);
        }
    }
}
impl ColorSyntax for ExpressionListShape {
    type Info = ();
    type Input = ();

    /// The intent of this method is to fully color an expression list shape infallibly.
    /// This means that if we can't expand a token into an expression, we fall back to
    /// a simpler coloring strategy.
    ///
    /// This would apply to something like `where x >`, which includes an incomplete
    /// binary operator. Since we will fail to process it as a binary operator, we'll
    /// fall back to a simpler coloring and move on.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) {
        // We encountered a parsing error and will continue with simpler coloring ("backoff
        // coloring mode")
        let mut backoff = false;

        // Consume any leading whitespace
        color_syntax(&MaybeSpaceShape, token_nodes, context, shapes);

        loop {
            // If we reached the very end of the token stream, we're done
            if token_nodes.at_end() {
                return;
            }

            if backoff {
                // Remember how many shapes we had before this pass so we can
                // detect a pass that makes no progress.
                let len = shapes.len();

                // If we previously encountered a parsing error, use backoff coloring mode
                color_syntax(&SimplestExpression, token_nodes, context, shapes);

                if len == shapes.len() && !token_nodes.at_end() {
                    // This should never happen, but if it does, a panic is better than an infinite loop
                    panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression")
                }
            } else {
                // Try to color the head of the stream as an expression
                match color_fallible_syntax(&AnyExpressionShape, token_nodes, context, shapes) {
                    // If no expression was found, switch to backoff coloring mode
                    Err(_) => {
                        backoff = true;
                        continue;
                    }
                    Ok(_) => {}
                }

                // If an expression was found, consume a space
                match color_fallible_syntax(&SpaceShape, token_nodes, context, shapes) {
                    Err(_) => {
                        // If no space was found, we're either at the end or there's an error.
                        // Either way, switch to backoff coloring mode. If we're at the end
                        // it won't have any consequences.
                        backoff = true;
                    }
                    Ok(_) => {
                        // Otherwise, move on to the next expression
                    }
                }
            }
        }
    }
}
/// BackoffColoringMode consumes all of the remaining tokens in an infallible way
#[derive(Debug, Copy, Clone)]
pub struct BackoffColoringMode;

impl ColorSyntax for BackoffColoringMode {
    type Info = ();
    type Input = ();

    /// Repeatedly color the head of the stream with `SimplestExpression` until
    /// the stream is exhausted. Panics if a pass makes no progress, since that
    /// would otherwise be an infinite loop.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &Self::Input,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Self::Info {
        while !token_nodes.at_end() {
            let before = shapes.len();
            color_syntax(&SimplestExpression, token_nodes, context, shapes);

            if before == shapes.len() && !token_nodes.at_end() {
                // This shouldn't happen, but if it does, a panic is better than an infinite loop
                panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, shapes);
            }
        }
    }
}
/// The point of `SimplestExpression` is to serve as an infallible base case for coloring.
/// As a last ditch effort, if we can't find any way to parse the head of the stream as an
/// expression, fall back to simple coloring.
#[derive(Debug, Copy, Clone)]
pub struct SimplestExpression;

impl ColorSyntax for SimplestExpression {
    type Info = ();
    type Input = ();

    /// Expand one atom permissively and color it with its intrinsic shapes.
    /// Failure to expand anything is silently ignored.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) {
        if let Ok(atom) = expand_atom(token_nodes, "any token", context, ExpansionRule::permissive())
        {
            atom.color_tokens(shapes)
        }
    }
}

View File

@ -0,0 +1,136 @@
use crate::parser::hir::syntax_shape::{
expand_atom, parse_single_node, ExpandContext, ExpandExpression, ExpansionRule,
FallibleColorSyntax, FlatShape,
};
use crate::parser::{
hir,
hir::{RawNumber, TokensIterator},
RawToken,
};
use crate::prelude::*;
#[derive(Debug, Copy, Clone)]
pub struct NumberShape;

impl ExpandExpression for NumberShape {
    /// Expand a single token into a number-flavored expression. Most token
    /// kinds are accepted and mapped to their natural expression; only glob
    /// patterns, operators and external words are rejected.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        parse_single_node(token_nodes, "Number", |token, token_span, err| {
            Ok(match token {
                // Globs and operators can never be numbers.
                RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()),
                // `$it` is special-cased; this guard must stay before the
                // generic Variable arm.
                RawToken::Variable(tag) if tag.slice(context.source) == "it" => {
                    hir::Expression::it_variable(tag, token_span)
                }
                RawToken::ExternalCommand(tag) => {
                    hir::Expression::external_command(tag, token_span)
                }
                RawToken::ExternalWord => {
                    return Err(ShellError::invalid_external_word(Tag {
                        span: token_span,
                        anchor: None,
                    }))
                }
                RawToken::Variable(tag) => hir::Expression::variable(tag, token_span),
                RawToken::Number(number) => {
                    hir::Expression::number(number.to_number(context.source), token_span)
                }
                RawToken::Bare => hir::Expression::bare(token_span),
                RawToken::String(tag) => hir::Expression::string(tag, token_span),
            })
        })
    }
}
impl FallibleColorSyntax for NumberShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
let atom = token_nodes.spanned(|token_nodes| {
expand_atom(token_nodes, "number", context, ExpansionRule::permissive())
});
let atom = match atom {
Spanned { item: Err(_), span } => {
shapes.push(FlatShape::Error.spanned(span));
return Ok(());
}
Spanned { item: Ok(atom), .. } => atom,
};
atom.color_tokens(shapes);
Ok(())
}
}
#[derive(Debug, Copy, Clone)]
pub struct IntShape;

impl ExpandExpression for IntShape {
    /// Like `NumberShape`, but only integer literals qualify as numbers;
    /// decimal literals fall through to the error case.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        parse_single_node(token_nodes, "Integer", |token, token_span, err| {
            Ok(match token {
                RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()),
                RawToken::ExternalWord => {
                    return Err(ShellError::invalid_external_word(token_span))
                }
                // `$it` is special-cased; this guard must stay before the
                // generic Variable arm.
                RawToken::Variable(span) if span.slice(context.source) == "it" => {
                    hir::Expression::it_variable(span, token_span)
                }
                RawToken::ExternalCommand(span) => {
                    hir::Expression::external_command(span, token_span)
                }
                RawToken::Variable(span) => hir::Expression::variable(span, token_span),
                // Only integer raw numbers are accepted...
                RawToken::Number(number @ RawNumber::Int(_)) => {
                    hir::Expression::number(number.to_number(context.source), token_span)
                }
                // ...any other numeric literal (i.e. a decimal) is an error.
                RawToken::Number(_) => return Err(err.error()),
                RawToken::Bare => hir::Expression::bare(token_span),
                RawToken::String(span) => hir::Expression::string(span, token_span),
            })
        })
    }
}
impl FallibleColorSyntax for IntShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
let atom = token_nodes.spanned(|token_nodes| {
expand_atom(token_nodes, "integer", context, ExpansionRule::permissive())
});
let atom = match atom {
Spanned { item: Err(_), span } => {
shapes.push(FlatShape::Error.spanned(span));
return Ok(());
}
Spanned { item: Ok(atom), .. } => atom,
};
atom.color_tokens(shapes);
Ok(())
}
}

View File

@ -0,0 +1,112 @@
use crate::parser::hir::syntax_shape::{
expand_atom, expand_bare, expand_syntax, expression::expand_file_path, parse_single_node,
AtomicToken, ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, FallibleColorSyntax,
FlatShape,
};
use crate::parser::{hir, hir::TokensIterator, Operator, RawToken, TokenNode};
use crate::prelude::*;
#[derive(Debug, Copy, Clone)]
pub struct PatternShape;

impl FallibleColorSyntax for PatternShape {
    type Info = ();
    type Input = ();

    /// Color the next atom as a glob pattern. Glob atoms and plain words
    /// qualify; anything else is a type error. Runs inside
    /// `token_nodes.atomic`.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        token_nodes.atomic(|token_nodes| {
            let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?;

            match &atom.item {
                AtomicToken::GlobPattern { .. } | AtomicToken::Word { .. } => {
                    shapes.push(FlatShape::GlobPattern.spanned(atom.span));
                    Ok(())
                }

                _ => Err(ShellError::type_error("pattern", atom.tagged_type_name())),
            }
        })
    }
}
impl ExpandExpression for PatternShape {
    /// Expand the head of the stream into a pattern expression. A bare
    /// pattern (a run of word/dot/glob tokens) is tried first; otherwise a
    /// single token is consumed and mapped.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        let pattern = expand_syntax(&BarePatternShape, token_nodes, context);

        match pattern {
            Ok(tag) => {
                return Ok(hir::Expression::pattern(tag));
            }
            Err(_) => {}
        }

        parse_single_node(token_nodes, "Pattern", |token, token_tag, _| {
            Ok(match token {
                // The next three arms are treated as unreachable on the
                // assumption that BarePatternShape above already consumed them.
                RawToken::GlobPattern => {
                    return Err(ShellError::unreachable(
                        "glob pattern after glob already returned",
                    ))
                }
                // NOTE(review): this arm matches *any* operator, but
                // BarePatternShape only consumes dots — confirm that non-dot
                // operators are genuinely unreachable here.
                RawToken::Operator(..) => {
                    return Err(ShellError::unreachable("dot after glob already returned"))
                }
                RawToken::Bare => {
                    return Err(ShellError::unreachable("bare after glob already returned"))
                }

                // `$it` is special-cased before generic variables.
                RawToken::Variable(tag) if tag.slice(context.source) == "it" => {
                    hir::Expression::it_variable(tag, token_tag)
                }
                RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag),
                RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag),
                RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)),
                RawToken::Number(_) => hir::Expression::bare(token_tag),
                // Quoted strings become file paths (after path expansion).
                RawToken::String(tag) => hir::Expression::file_path(
                    expand_file_path(tag.slice(context.source), context),
                    token_tag,
                ),
            })
        })
    }
}
#[derive(Debug, Copy, Clone)]
pub struct BarePatternShape;

impl ExpandSyntax for BarePatternShape {
    type Output = Span;

    /// Expand a run of adjacent tokens that can form a bare glob pattern:
    /// bare words, dots and glob tokens.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Span, ShellError> {
        expand_bare(token_nodes, context, |token| match token {
            TokenNode::Token(inner) => match inner.item {
                RawToken::Bare
                | RawToken::Operator(Operator::Dot)
                | RawToken::GlobPattern => true,
                _ => false,
            },
            _ => false,
        })
    }
}

View File

@ -0,0 +1,94 @@
use crate::parser::hir::syntax_shape::{
expand_atom, expand_variable, parse_single_node, AtomicToken, ExpandContext, ExpandExpression,
ExpansionRule, FallibleColorSyntax, FlatShape, TestSyntax,
};
use crate::parser::hir::tokens_iterator::Peeked;
use crate::parser::{hir, hir::TokensIterator, RawToken, TokenNode};
use crate::prelude::*;
#[derive(Debug, Copy, Clone)]
pub struct StringShape;

impl FallibleColorSyntax for StringShape {
    type Info = ();
    type Input = FlatShape;

    /// Color the next atom with the provided `input` shape when it is a
    /// string; other atoms keep their intrinsic coloring. A failed expansion
    /// is a silent no-op.
    fn color_syntax<'a, 'b>(
        &self,
        input: &FlatShape,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        let atom = match expand_atom(token_nodes, "string", context, ExpansionRule::permissive()) {
            Ok(atom) => atom,
            Err(_) => return Ok(()),
        };

        if let AtomicToken::String { .. } = atom.item {
            shapes.push((*input).spanned(atom.span));
        } else {
            atom.color_tokens(shapes);
        }

        Ok(())
    }
}
impl ExpandExpression for StringShape {
    /// Expand a single token into a string-flavored expression. Strings,
    /// bares, numbers, variables and external commands are accepted; glob
    /// patterns, operators and external words are type errors.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        parse_single_node(token_nodes, "String", |token, token_span, _| {
            Ok(match token {
                RawToken::GlobPattern => {
                    return Err(ShellError::type_error(
                        "String",
                        "glob pattern".tagged(token_span),
                    ))
                }
                RawToken::Operator(..) => {
                    return Err(ShellError::type_error(
                        "String",
                        "operator".tagged(token_span),
                    ))
                }
                // Variables are routed through expand_variable.
                RawToken::Variable(span) => expand_variable(span, token_span, &context.source),
                RawToken::ExternalCommand(span) => {
                    hir::Expression::external_command(span, token_span)
                }
                RawToken::ExternalWord => {
                    return Err(ShellError::invalid_external_word(token_span))
                }
                // Numbers in string position are treated as bare words.
                RawToken::Number(_) => hir::Expression::bare(token_span),
                RawToken::Bare => hir::Expression::bare(token_span),
                RawToken::String(span) => hir::Expression::string(span, token_span),
            })
        })
    }
}
impl TestSyntax for StringShape {
    /// Peek at the head of the stream and report whether it is a string
    /// token, returning the peek handle so the caller can commit it.
    fn test<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
    ) -> Option<Peeked<'a, 'b>> {
        let peeked = token_nodes.peek_any();

        if let Some(TokenNode::Token(token)) = peeked.node {
            if let RawToken::String(_) = token.item {
                return Some(peeked);
            }
        }

        None
    }
}

View File

@ -0,0 +1,92 @@
use crate::data::meta::Span;
use crate::parser::hir::syntax_shape::{ExpandContext, ExpandSyntax};
use crate::parser::parse::tokens::RawNumber;
use crate::parser::parse::unit::Unit;
use crate::parser::{hir::TokensIterator, RawToken, TokenNode};
use crate::prelude::*;
use nom::branch::alt;
use nom::bytes::complete::tag;
use nom::character::complete::digit1;
use nom::combinator::{all_consuming, opt, value};
use nom::IResult;
#[derive(Debug, Copy, Clone)]
pub struct UnitShape;

impl ExpandSyntax for UnitShape {
    type Output = Spanned<(Spanned<RawNumber>, Spanned<Unit>)>;

    /// Expand a bare token like `10GB` into a (number, unit) pair by running
    /// the nom-based `unit_size` parser over the token's source text.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Spanned<(Spanned<RawNumber>, Spanned<Unit>)>, ShellError> {
        let peeked = token_nodes.peek_any().not_eof("unit")?;

        // Only bare tokens can be sizes.
        let span = match peeked.node {
            TokenNode::Token(Spanned {
                item: RawToken::Bare,
                span,
            }) => span,
            _ => return Err(peeked.type_error("unit")),
        };

        let unit = unit_size(span.slice(context.source), *span);

        let (_, (number, unit)) = match unit {
            Err(_) => {
                return Err(ShellError::type_error(
                    "unit",
                    "word".tagged(Tag::unknown()),
                ))
            }
            Ok((number, unit)) => (number, unit),
        };

        // Consume the token only after the parse succeeded.
        peeked.commit();
        Ok((number, unit).spanned(*span))
    }
}
/// Parse a bare word like `10GB` into a raw number and a unit.
///
/// `input` is the sliced source text of the bare token and `bare_span` its
/// span in the original source; the returned spans are computed relative to
/// `bare_span` so they point back into the real source text.
fn unit_size(input: &str, bare_span: Span) -> IResult<&str, (Spanned<RawNumber>, Spanned<Unit>)> {
    // The numeric part: digits, optionally followed by `.` and more digits.
    let (input, digits) = digit1(input)?;
    let (input, dot) = opt(tag("."))(input)?;

    let (input, number) = match dot {
        Some(dot) => {
            // Decimal: the span covers the integer part, the dot and the fraction.
            let (input, rest) = digit1(input)?;
            (
                input,
                RawNumber::decimal(Span::new(
                    bare_span.start(),
                    bare_span.start() + digits.len() + dot.len() + rest.len(),
                )),
            )
        }

        None => (
            input,
            RawNumber::int(Span::new(
                bare_span.start(),
                bare_span.start() + digits.len(),
            )),
        ),
    };

    // The unit suffix must consume the rest of the word.
    // BUG FIX: GB/TB/PB previously all mapped to Unit::MB, so `1GB` parsed as
    // one megabyte; they now map to their own variants.
    let (input, unit) = all_consuming(alt((
        value(Unit::B, alt((tag("B"), tag("b")))),
        value(Unit::KB, alt((tag("KB"), tag("kb"), tag("Kb")))),
        value(Unit::MB, alt((tag("MB"), tag("mb"), tag("Mb")))),
        value(Unit::GB, alt((tag("GB"), tag("gb"), tag("Gb")))),
        value(Unit::TB, alt((tag("TB"), tag("tb"), tag("Tb")))),
        value(Unit::PB, alt((tag("PB"), tag("pb"), tag("Pb")))),
    )))(input)?;

    // The unit starts right where the number ends and runs to the end of the word.
    let unit_start = number.span.end();
    Ok((
        input,
        (number, unit.spanned(Span::new(unit_start, bare_span.end()))),
    ))
}

View File

@ -0,0 +1,735 @@
use crate::parser::hir::syntax_shape::{
color_fallible_syntax, color_fallible_syntax_with, expand_atom, expand_expr, expand_syntax,
parse_single_node, AnyExpressionShape, AtomicToken, BareShape, ExpandContext, ExpandExpression,
ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, Peeked, SkipSyntax, StringShape,
TestSyntax, WhitespaceShape,
};
use crate::parser::{hir, hir::Expression, hir::TokensIterator, Operator, RawToken};
use crate::prelude::*;
#[derive(Debug, Copy, Clone)]
pub struct VariablePathShape;

impl ExpandExpression for VariablePathShape {
    /// Expand `$var.a.b` into a path expression: a variable head followed by
    /// zero or more dot-separated members.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        // The head must be a variable.
        let head = expand_expr(&VariableShape, token_nodes, context)?;
        let start = head.span;
        let mut end = start;
        let mut tail: Vec<Spanned<String>> = vec![];

        // Each `.member` extends the tail and the overall span.
        while DotShape.skip(token_nodes, context).is_ok() {
            let member = expand_syntax(&MemberShape, token_nodes, context)?
                .to_spanned_string(context.source);
            end = member.span;
            tail.push(member);
        }

        Ok(hir::Expression::path(head, tail, start.until(end)))
    }
}
impl FallibleColorSyntax for VariablePathShape {
    type Info = ();
    type Input = ();

    /// Color `$var.a.b`: a variable head, then alternating dots and members.
    /// A dot that is not followed by a member fails the whole attempt.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        token_nodes.atomic(|token_nodes| {
            // If the head of the token stream is not a variable, fail
            color_fallible_syntax(&VariableShape, token_nodes, context, shapes)?;

            loop {
                // look for a dot at the head of the stream; if there's none,
                // the path is complete
                let dot = color_fallible_syntax_with(
                    &ColorableDotShape,
                    &FlatShape::Dot,
                    token_nodes,
                    context,
                    shapes,
                );

                if dot.is_err() {
                    break;
                }

                // otherwise, a member must follow the dot
                color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
            }

            Ok(())
        })
    }
}
#[derive(Debug, Copy, Clone)]
pub struct PathTailShape;

/// The failure mode of `PathTailShape` is a dot followed by a non-member
impl FallibleColorSyntax for PathTailShape {
    type Info = ();
    type Input = ();

    /// Color `.member` pairs until no further dot is found; a dot without a
    /// trailing member is an error. Runs inside `token_nodes.atomic`.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        token_nodes.atomic(|token_nodes| loop {
            let dot = color_fallible_syntax_with(
                &ColorableDotShape,
                &FlatShape::Dot,
                token_nodes,
                context,
                shapes,
            );

            if dot.is_err() {
                // No more dots: the tail is complete.
                return Ok(());
            }

            // If we've seen a dot but not a member, fail
            color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
        })
    }
}
impl ExpandSyntax for PathTailShape {
    type Output = (Vec<Spanned<String>>, Span);

    /// Expand a sequence of `.member` pairs, returning the members plus the
    /// span of the last one. Fails if not even one `.member` pair is present.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ShellError> {
        let mut end: Option<Span> = None;
        let mut tail = vec![];

        while DotShape.skip(token_nodes, context).is_ok() {
            let member = expand_syntax(&MemberShape, token_nodes, context)?
                .to_spanned_string(context.source);
            end = Some(member.span);
            tail.push(member);
        }

        match end {
            // An empty tail is a type error reported at the cursor position.
            None => Err(ShellError::type_error("path tail", {
                let typed_span = token_nodes.typed_span_at_cursor();

                Tagged {
                    tag: typed_span.span.into(),
                    item: typed_span.item,
                }
            })),
            Some(end) => Ok((tail, end)),
        }
    }
}
// The parsed continuation of an expression: either a `.member` path suffix or
// an infix operator followed by a right-hand expression.
#[derive(Debug)]
pub enum ExpressionContinuation {
    // `.member` — the dot's span plus the member as a spanned string.
    DotSuffix(Span, Spanned<String>),
    // `<op> <expr>` — the operator and the right-hand side expression.
    InfixSuffix(Spanned<Operator>, Expression),
}
/// An expression continuation
#[derive(Debug, Copy, Clone)]
pub struct ExpressionContinuationShape;

impl ExpandSyntax for ExpressionContinuationShape {
    type Output = ExpressionContinuation;

    /// Expand whatever follows a completed expression: either `.member`
    /// (a path suffix) or an infix operator plus its right-hand expression.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<ExpressionContinuation, ShellError> {
        match expand_syntax(&DotShape, token_nodes, context) {
            // If a `.` was matched, it's a `Path`, and we expect a `Member` next
            Ok(dot) => {
                let member = expand_syntax(&MemberShape, token_nodes, context)?
                    .to_spanned_string(context.source);

                Ok(ExpressionContinuation::DotSuffix(dot, member))
            }

            // Otherwise, we expect an infix operator and an expression next
            Err(_) => {
                let (_, op, _) = expand_syntax(&InfixShape, token_nodes, context)?;
                let next = expand_expr(&AnyExpressionShape, token_nodes, context)?;

                Ok(ExpressionContinuation::InfixSuffix(op, next))
            }
        }
    }
}
// Reports which kind of continuation was colored.
pub enum ContinuationInfo {
    // A `.member` suffix was colored.
    Dot,
    // An infix operator plus expression was colored.
    Infix,
}
impl FallibleColorSyntax for ExpressionContinuationShape {
    type Info = ContinuationInfo;
    type Input = ();

    /// Color a continuation: either `.member` or `<infix-op> <expr>`. The
    /// infix branch buffers its shapes in `new_shapes` and only flushes them
    /// into `shapes` once the whole branch succeeds.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<ContinuationInfo, ShellError> {
        token_nodes.atomic(|token_nodes| {
            // Try to expand a `.`
            let dot = color_fallible_syntax_with(
                &ColorableDotShape,
                &FlatShape::Dot,
                token_nodes,
                context,
                shapes,
            );

            match dot {
                Ok(_) => {
                    // we found a dot, so let's keep looking for a member; if no member was found, fail
                    color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;

                    Ok(ContinuationInfo::Dot)
                }
                Err(_) => {
                    let mut new_shapes = vec![];
                    let result = token_nodes.atomic(|token_nodes| {
                        // we didn't find a dot, so let's see if we're looking at an infix. If not found, fail
                        color_fallible_syntax(&InfixShape, token_nodes, context, &mut new_shapes)?;

                        // now that we've seen an infix shape, look for any expression. If not found, fail
                        color_fallible_syntax(
                            &AnyExpressionShape,
                            token_nodes,
                            context,
                            &mut new_shapes,
                        )?;

                        Ok(ContinuationInfo::Infix)
                    })?;
                    // Both inner steps succeeded: publish the buffered shapes.
                    shapes.extend(new_shapes);
                    Ok(result)
                }
            }
        })
    }
}
#[derive(Debug, Copy, Clone)]
pub struct VariableShape;

impl ExpandExpression for VariableShape {
    /// Expand a single variable token. `$it` becomes the special it-variable
    /// expression; every other variable becomes an ordinary variable
    /// expression. Non-variable tokens are type errors.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        parse_single_node(token_nodes, "variable", |token, token_tag, _| match token {
            RawToken::Variable(tag) if tag.slice(context.source) == "it" => {
                Ok(hir::Expression::it_variable(tag, token_tag))
            }
            RawToken::Variable(tag) => Ok(hir::Expression::variable(tag, token_tag)),
            _ => Err(ShellError::type_error(
                "variable",
                token.type_name().tagged(token_tag),
            )),
        })
    }
}
impl FallibleColorSyntax for VariableShape {
    type Info = ();
    type Input = ();

    /// Color the next atom as a variable (`$name`) or it-variable (`$it`).
    /// Any other atom — or a failed expansion — is an error.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        let atom = expand_atom(
            token_nodes,
            "variable",
            context,
            ExpansionRule::permissive(),
        )?;

        match &atom.item {
            AtomicToken::Variable { .. } => {
                shapes.push(FlatShape::Variable.spanned(atom.span));
                Ok(())
            }
            AtomicToken::ItVariable { .. } => {
                shapes.push(FlatShape::ItVariable.spanned(atom.span));
                Ok(())
            }
            _ => Err(ShellError::type_error("variable", atom.tagged_type_name())),
        }
    }
}
// A column-path member: either a quoted string (with outer/inner spans) or a
// bare word.
#[derive(Debug, Clone, Copy)]
pub enum Member {
    String(/* outer */ Span, /* inner */ Span),
    Bare(Span),
}
impl Member {
    /// Convert this member into a string expression. For quoted members the
    /// inner span is the content and the outer span the whole literal; bare
    /// members use the same span for both.
    pub(crate) fn to_expr(&self) -> hir::Expression {
        match self {
            Member::String(outer, inner) => hir::Expression::string(*inner, *outer),
            Member::Bare(word) => hir::Expression::string(*word, *word),
        }
    }

    /// The span covering the whole member as written in the source.
    pub(crate) fn span(&self) -> Span {
        match self {
            Member::String(outer, _) => *outer,
            Member::Bare(word) => *word,
        }
    }

    /// The member's textual content as an owned, spanned string.
    pub(crate) fn to_spanned_string(&self, source: &str) -> Spanned<String> {
        match self {
            Member::String(outer, inner) => inner.string(source).spanned(*outer),
            Member::Bare(word) => word.spanned_string(source),
        }
    }

    /// A type name ("string" or "word") tagged with the member's location,
    /// for use in error messages.
    pub(crate) fn tagged_type_name(&self) -> Tagged<&'static str> {
        match self {
            Member::String(outer, _) => "string".tagged(outer),
            Member::Bare(word) => "word".tagged(Tag {
                span: *word,
                anchor: None,
            }),
        }
    }
}
// State machine used while parsing a column path such as `a.b.c`.
enum ColumnPathState {
    // Nothing consumed yet.
    Initial,
    // A dot with no member before it.
    LeadingDot(Span),
    // A dot after one or more members: (span so far, members, dot span).
    Dot(Span, Vec<Member>, Span),
    // Members consumed so far: (span so far, members).
    Member(Span, Vec<Member>),
    // A transition error, carried until the path is finalized.
    Error(ShellError),
}
impl ColumnPathState {
    /// State transition for a `.` token.
    pub fn dot(self, dot: Span) -> ColumnPathState {
        match self {
            // A dot after a member continues the path.
            ColumnPathState::Member(tag, members) => ColumnPathState::Dot(tag, members, dot),
            // A dot at the very start may still be followed by a member.
            ColumnPathState::Initial => ColumnPathState::LeadingDot(dot),
            // Two dots in a row is malformed.
            ColumnPathState::LeadingDot(_) | ColumnPathState::Dot(..) => {
                ColumnPathState::Error(ShellError::type_error("column", "dot".tagged(dot)))
            }
            ColumnPathState::Error(err) => ColumnPathState::Error(err),
        }
    }

    /// State transition for a member (bare word or string).
    pub fn member(self, member: Member) -> ColumnPathState {
        match self {
            ColumnPathState::Initial => ColumnPathState::Member(member.span(), vec![member]),
            ColumnPathState::LeadingDot(tag) => {
                ColumnPathState::Member(tag.until(member.span()), vec![member])
            }
            ColumnPathState::Dot(tag, mut members, _) => {
                let span = tag.until(member.span());
                members.push(member);
                ColumnPathState::Member(span, members)
            }
            // Two members without a separating dot is malformed.
            ColumnPathState::Member(..) => {
                ColumnPathState::Error(ShellError::type_error("column", member.tagged_type_name()))
            }
            ColumnPathState::Error(err) => ColumnPathState::Error(err),
        }
    }

    /// Finishes parsing: yields the accumulated members, or the appropriate
    /// error for an empty or dot-terminated path.
    pub fn into_path(self, next: Peeked) -> Result<Tagged<Vec<Member>>, ShellError> {
        match self {
            ColumnPathState::Member(tag, members) => Ok(members.tagged(tag)),
            ColumnPathState::Initial => Err(next.type_error("column path")),
            // A path may not end with a dot.
            ColumnPathState::LeadingDot(dot) | ColumnPathState::Dot(_, _, dot) => {
                Err(ShellError::type_error("column", "dot".tagged(dot)))
            }
            ColumnPathState::Error(err) => Err(err),
        }
    }
}
/// Expands a dot-separated column path by alternating member and dot shapes
/// until either fails, then converts the accumulated state into a result.
pub fn expand_column_path<'a, 'b>(
    token_nodes: &'b mut TokensIterator<'a>,
    context: &ExpandContext,
) -> Result<Tagged<Vec<Member>>, ShellError> {
    let mut state = ColumnPathState::Initial;

    loop {
        state = match MemberShape.expand_syntax(token_nodes, context) {
            Ok(member) => state.member(member),
            Err(_) => break,
        };

        state = match DotShape.expand_syntax(token_nodes, context) {
            Ok(dot) => state.dot(dot),
            Err(_) => break,
        };
    }

    state.into_path(token_nodes.peek_non_ws())
}
/// Shape for a dot-separated column path (`a.b."c"`).
#[derive(Debug, Copy, Clone)]
pub struct ColumnPathShape;
impl FallibleColorSyntax for ColumnPathShape {
    type Info = ();
    type Input = ();

    /// Colors `member (dot member)*`. At least one member is required; after
    /// that, a trailing dot with no member is rolled back (by dropping the
    /// uncommitted checkpoint) and the whole shape still succeeds.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        // If there's not even one member shape, fail
        color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;

        loop {
            let checkpoint = token_nodes.checkpoint();

            let dot = color_fallible_syntax_with(
                &ColorableDotShape,
                &FlatShape::Dot,
                checkpoint.iterator,
                context,
                shapes,
            );

            // No dot: we already saw at least one member, so succeed.
            if dot.is_err() {
                return Ok(());
            }

            let member = color_fallible_syntax(&MemberShape, checkpoint.iterator, context, shapes);

            // A dot with no member after it: don't commit the dot, but
            // succeed anyway since at least one member was seen.
            if member.is_err() {
                return Ok(());
            }

            // Saw a dot and a member: commit and keep scanning.
            checkpoint.commit();
        }
    }
}
impl ExpandSyntax for ColumnPathShape {
    type Output = Tagged<Vec<Member>>;

    /// Expanding a column path yields its members, tagged with the overall
    /// span. Delegates to the free `expand_column_path` function.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ShellError> {
        expand_column_path(token_nodes, context)
    }
}
/// Shape for a single column-path member (a bare word or a quoted string).
#[derive(Debug, Copy, Clone)]
pub struct MemberShape;
impl FallibleColorSyntax for MemberShape {
    type Info = ();
    type Input = ();

    /// Colors a path member: tries a bare word first (`BareMember`), then a
    /// string literal (`StringMember`). Fails if neither is present.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        let bare_matched = color_fallible_syntax_with(
            &BareShape,
            &FlatShape::BareMember,
            token_nodes,
            context,
            shapes,
        )
        .is_ok();

        if bare_matched {
            return Ok(());
        }

        // No bare word; a string token is the only other valid member form.
        // Its error propagates if it is also absent.
        color_fallible_syntax_with(
            &StringShape,
            &FlatShape::StringMember,
            token_nodes,
            context,
            shapes,
        )
    }
}
impl ExpandSyntax for MemberShape {
    type Output = Member;

    /// Expands one member: a bare word becomes `Member::Bare`, a string
    /// literal becomes `Member::String`; anything else is a type error.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<Member, ShellError> {
        if let Some(peeked) = BareShape.test(token_nodes, context) {
            let node = peeked.not_eof("column")?.commit();
            return Ok(Member::Bare(node.span()));
        }

        if let Some(peeked) = StringShape.test(token_nodes, context) {
            let node = peeked.not_eof("column")?.commit();
            // Strings carry both outer (with quotes) and inner spans.
            let (outer, inner) = node.expect_string();
            return Ok(Member::String(outer, inner));
        }

        Err(token_nodes.peek_any().type_error("column"))
    }
}
/// Shape for a literal `.` token when expanding syntax.
#[derive(Debug, Copy, Clone)]
pub struct DotShape;

/// Shape for a `.` token when coloring; the caller chooses which
/// `FlatShape` to record for it (see its `FallibleColorSyntax` impl).
#[derive(Debug, Copy, Clone)]
pub struct ColorableDotShape;
impl FallibleColorSyntax for ColorableDotShape {
    type Info = ();
    type Input = FlatShape;

    /// If the next token is a dot, consumes it and records the
    /// caller-supplied `input` shape over its span; otherwise fails with a
    /// type error (EOF is also an error).
    fn color_syntax<'a, 'b>(
        &self,
        input: &FlatShape,
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        let peeked = token_nodes.peek_any().not_eof("dot")?;
        let node = peeked.node;

        if node.is_dot() {
            let span = node.span();
            peeked.commit();
            shapes.push((*input).spanned(span));
            Ok(())
        } else {
            Err(ShellError::type_error("dot", node.tagged_type_name()))
        }
    }
}
impl SkipSyntax for DotShape {
    /// Skipping a dot is simply expanding it and discarding the resulting
    /// span.
    fn skip<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        expand_syntax(self, token_nodes, context)?;
        Ok(())
    }
}
impl ExpandSyntax for DotShape {
    type Output = Span;

    /// Expands a single `.` operator token to its span; any other token is a
    /// type error.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
    ) -> Result<Self::Output, ShellError> {
        parse_single_node(token_nodes, "dot", |token, token_span, _| {
            if let RawToken::Operator(Operator::Dot) = token {
                Ok(token_span)
            } else {
                Err(ShellError::type_error(
                    "dot",
                    token.type_name().tagged(token_span),
                ))
            }
        })
    }
}
/// Shape for an infix operator surrounded by whitespace (e.g. ` == `).
#[derive(Debug, Copy, Clone)]
pub struct InfixShape;
impl FallibleColorSyntax for InfixShape {
    type Info = ();
    type Input = ();

    /// Colors `<ws> <operator> <ws>` as one unit. Shapes are buffered in a
    /// local vector and only flushed to `outer_shapes` — and the checkpoint
    /// only committed — after all three parts succeed, so a partial match
    /// leaves both the iterator and the caller's shape list untouched.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        outer_shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        let checkpoint = token_nodes.checkpoint();
        let mut shapes = vec![];
        // An infix operator must be prefixed by whitespace. If no whitespace was found, fail
        color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context, &mut shapes)?;
        // Parse the next TokenNode after the whitespace
        parse_single_node(
            checkpoint.iterator,
            "infix operator",
            |token, token_span, _| {
                match token {
                    // If it's an operator (and not `.`), it's a match
                    RawToken::Operator(operator) if operator != Operator::Dot => {
                        shapes.push(FlatShape::Operator.spanned(token_span));
                        Ok(())
                    }
                    // Otherwise, it's not a match
                    _ => Err(ShellError::type_error(
                        "infix operator",
                        token.type_name().tagged(token_span),
                    )),
                }
            },
        )?;
        // An infix operator must be followed by whitespace. If no whitespace was found, fail
        color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context, &mut shapes)?;
        outer_shapes.extend(shapes);
        checkpoint.commit();
        Ok(())
    }
}
impl ExpandSyntax for InfixShape {
    type Output = (Span, Spanned<Operator>, Span);

    /// Expands `<ws> <operator> <ws>`, returning the leading-whitespace
    /// span, the operator, and the trailing-whitespace span. Nothing is
    /// consumed unless all three parts match (checkpoint rollback).
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ShellError> {
        let checkpoint = token_nodes.checkpoint();

        // Leading whitespace is mandatory.
        let start = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?;

        // The token after the whitespace must be an operator other than `.`.
        let operator = parse_single_node(
            checkpoint.iterator,
            "infix operator",
            |token, token_span, _| match token {
                RawToken::Operator(operator) if operator != Operator::Dot => {
                    Ok(operator.spanned(token_span))
                }
                _ => Err(ShellError::type_error(
                    "infix operator",
                    token.type_name().tagged(token_span),
                )),
            },
        )?;

        // Trailing whitespace is mandatory as well.
        let end = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?;

        checkpoint.commit();
        Ok((start, operator, end))
    }
}

View File

@ -0,0 +1,97 @@
use crate::parser::{Delimiter, Flag, FlagKind, Operator, RawNumber, RawToken, TokenNode};
use crate::{Span, Spanned, SpannedItem, Text};
/// A flat (non-nested) classification of a region of source text, used for
/// syntax highlighting. Each variant names the kind of token that occupies
/// a span.
#[derive(Debug, Copy, Clone)]
pub enum FlatShape {
    OpenDelimiter(Delimiter),
    CloseDelimiter(Delimiter),
    ItVariable,
    Variable,
    Operator,
    Dot,
    InternalCommand,
    ExternalCommand,
    ExternalWord,
    BareMember,
    StringMember,
    String,
    Path,
    Word,
    Pipe,
    GlobPattern,
    Flag,
    ShorthandFlag,
    Int,
    Decimal,
    Whitespace,
    Error,
    // A size literal, split into its numeric part and its unit part.
    Size { number: Span, unit: Span },
}
impl FlatShape {
pub fn from(token: &TokenNode, source: &Text, shapes: &mut Vec<Spanned<FlatShape>>) -> () {
match token {
TokenNode::Token(token) => match token.item {
RawToken::Number(RawNumber::Int(_)) => {
shapes.push(FlatShape::Int.spanned(token.span))
}
RawToken::Number(RawNumber::Decimal(_)) => {
shapes.push(FlatShape::Decimal.spanned(token.span))
}
RawToken::Operator(Operator::Dot) => {
shapes.push(FlatShape::Dot.spanned(token.span))
}
RawToken::Operator(_) => shapes.push(FlatShape::Operator.spanned(token.span)),
RawToken::String(_) => shapes.push(FlatShape::String.spanned(token.span)),
RawToken::Variable(v) if v.slice(source) == "it" => {
shapes.push(FlatShape::ItVariable.spanned(token.span))
}
RawToken::Variable(_) => shapes.push(FlatShape::Variable.spanned(token.span)),
RawToken::ExternalCommand(_) => {
shapes.push(FlatShape::ExternalCommand.spanned(token.span))
}
RawToken::ExternalWord => shapes.push(FlatShape::ExternalWord.spanned(token.span)),
RawToken::GlobPattern => shapes.push(FlatShape::GlobPattern.spanned(token.span)),
RawToken::Bare => shapes.push(FlatShape::Word.spanned(token.span)),
},
TokenNode::Call(_) => unimplemented!(),
TokenNode::Nodes(nodes) => {
for node in &nodes.item {
FlatShape::from(node, source, shapes);
}
}
TokenNode::Delimited(v) => {
shapes.push(FlatShape::OpenDelimiter(v.item.delimiter).spanned(v.item.spans.0));
for token in &v.item.children {
FlatShape::from(token, source, shapes);
}
shapes.push(FlatShape::CloseDelimiter(v.item.delimiter).spanned(v.item.spans.1));
}
TokenNode::Pipeline(pipeline) => {
for part in &pipeline.parts {
if let Some(_) = part.pipe {
shapes.push(FlatShape::Pipe.spanned(part.span));
}
}
}
TokenNode::Flag(Spanned {
item:
Flag {
kind: FlagKind::Longhand,
..
},
span,
}) => shapes.push(FlatShape::Flag.spanned(*span)),
TokenNode::Flag(Spanned {
item:
Flag {
kind: FlagKind::Shorthand,
..
},
span,
}) => shapes.push(FlatShape::ShorthandFlag.spanned(*span)),
TokenNode::Whitespace(_) => shapes.push(FlatShape::Whitespace.spanned(token.span())),
TokenNode::Error(v) => shapes.push(FlatShape::Error.spanned(v.span)),
}
}
}

View File

@ -0,0 +1,477 @@
pub(crate) mod debug;
use crate::errors::ShellError;
use crate::parser::TokenNode;
use crate::{Span, Spanned, SpannedItem};
/// A cursor over a slice of `TokenNode`s. Consumed positions are recorded in
/// `seen` rather than being removed, so the underlying slice is never
/// mutated and positions can be revisited or rolled back.
#[derive(Debug)]
pub struct TokensIterator<'content> {
    /// The underlying token slice (borrowed, never mutated).
    tokens: &'content [TokenNode],
    /// The span of the whole stream; used to synthesize an EOF span.
    span: Span,
    /// Whether plain iteration skips whitespace tokens.
    skip_ws: bool,
    /// The current cursor position into `tokens`.
    index: usize,
    /// Indices already consumed; skipped on all future passes.
    seen: indexmap::IndexSet<usize>,
}
/// A saved snapshot of a `TokensIterator`'s position. Unless `commit` is
/// called, dropping the checkpoint restores the saved `index` and `seen`
/// set (see the `Drop` impl).
#[derive(Debug)]
pub struct Checkpoint<'content, 'me> {
    pub(crate) iterator: &'me mut TokensIterator<'content>,
    /// The iterator's cursor at the time the checkpoint was taken.
    index: usize,
    /// The iterator's seen-set at the time the checkpoint was taken.
    seen: indexmap::IndexSet<usize>,
    /// Set by `commit`; suppresses rollback on drop.
    committed: bool,
}
impl<'content, 'me> Checkpoint<'content, 'me> {
    /// Accepts everything consumed since the checkpoint was taken; the
    /// `Drop` impl will then leave the iterator where it is instead of
    /// rolling it back.
    pub(crate) fn commit(mut self) {
        self.committed = true;
    }
}
impl<'content, 'me> std::ops::Drop for Checkpoint<'content, 'me> {
    fn drop(&mut self) {
        // If the checkpoint was never committed, restore the iterator's
        // cursor and seen-set to their state at checkpoint creation.
        if !self.committed {
            self.iterator.index = self.index;
            self.iterator.seen = self.seen.clone();
        }
    }
}
/// The result of peeking at the next token: the node (if any), plus the
/// index range `[from, to)` that `commit` will mark as consumed.
#[derive(Debug)]
pub struct Peeked<'content, 'me> {
    pub(crate) node: Option<&'content TokenNode>,
    iterator: &'me mut TokensIterator<'content>,
    from: usize,
    to: usize,
}
impl<'content, 'me> Peeked<'content, 'me> {
    /// If a node was peeked, marks its range as consumed (advancing the
    /// iterator) and returns it; otherwise returns `None`.
    pub fn commit(&mut self) -> Option<&'content TokenNode> {
        let node = self.node?;
        let (from, to) = (self.from, self.to);
        self.iterator.commit(from, to);
        Some(node)
    }

    /// Upgrades to a `PeekedNode`, or reports an unexpected-EOF error
    /// describing what was `expected`.
    pub fn not_eof(
        self,
        expected: impl Into<String>,
    ) -> Result<PeekedNode<'content, 'me>, ShellError> {
        match self.node {
            Some(node) => Ok(PeekedNode {
                node,
                iterator: self.iterator,
                from: self.from,
                to: self.to,
            }),
            None => Err(ShellError::unexpected_eof(
                expected,
                self.iterator.eof_span(),
            )),
        }
    }

    /// Builds a type (or EOF) error describing what was `expected` here.
    pub fn type_error(&self, expected: impl Into<String>) -> ShellError {
        peek_error(&self.node, self.iterator.eof_span(), expected)
    }
}
/// Like `Peeked`, but the node is guaranteed to be present (produced by
/// `Peeked::not_eof`).
#[derive(Debug)]
pub struct PeekedNode<'content, 'me> {
    pub(crate) node: &'content TokenNode,
    iterator: &'me mut TokensIterator<'content>,
    from: usize,
    to: usize,
}
impl<'content, 'me> PeekedNode<'content, 'me> {
    /// Consumes the peeked node, marking its range as seen and advancing the
    /// iterator; returns the node.
    pub fn commit(self) -> &'content TokenNode {
        self.iterator.commit(self.from, self.to);
        self.node
    }

    /// Abandons the peek without consuming anything.
    pub fn rollback(self) {}

    /// Builds a type error describing what was `expected` here.
    pub fn type_error(&self, expected: impl Into<String>) -> ShellError {
        peek_error(&Some(self.node), self.iterator.eof_span(), expected)
    }
}
/// Produces the appropriate error for a failed peek: a type error naming the
/// actual node's type when one was present, otherwise an unexpected-EOF
/// error at `eof_span`.
pub fn peek_error(
    node: &Option<&TokenNode>,
    eof_span: Span,
    expected: impl Into<String>,
) -> ShellError {
    if let Some(node) = node {
        ShellError::type_error(expected, node.tagged_type_name())
    } else {
        ShellError::unexpected_eof(expected, eof_span)
    }
}
impl<'content> TokensIterator<'content> {
    /// Creates an iterator over `items`, covering `span`, optionally
    /// skipping whitespace during plain iteration.
    pub fn new(
        items: &'content [TokenNode],
        span: Span,
        skip_ws: bool,
    ) -> TokensIterator<'content> {
        TokensIterator {
            tokens: items,
            span,
            skip_ws,
            index: 0,
            seen: indexmap::IndexSet::new(),
        }
    }
    /// Convenience constructor that does NOT skip whitespace.
    pub fn all(tokens: &'content [TokenNode], span: Span) -> TokensIterator<'content> {
        TokensIterator::new(tokens, span, false)
    }
    /// Total number of tokens, including already-seen ones.
    pub fn len(&self) -> usize {
        self.tokens.len()
    }
    /// Runs `block` and spans its result from the cursor position before the
    /// block to the cursor position after it.
    pub fn spanned<T>(
        &mut self,
        block: impl FnOnce(&mut TokensIterator<'content>) -> T,
    ) -> Spanned<T> {
        let start = self.span_at_cursor();
        let result = block(self);
        let end = self.span_at_cursor();
        result.spanned(start.until(end))
    }
    /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
    /// that you'll succeed.
    pub fn checkpoint<'me>(&'me mut self) -> Checkpoint<'content, 'me> {
        let index = self.index;
        let seen = self.seen.clone();
        Checkpoint {
            iterator: self,
            index,
            seen,
            committed: false,
        }
    }
    /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
    /// that you'll succeed.
    ///
    /// Like `checkpoint`, but scoped: the checkpoint commits only if `block`
    /// returns `Ok`; an `Err` rolls the iterator back via the checkpoint's
    /// `Drop`.
    pub fn atomic<'me, T>(
        &'me mut self,
        block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, ShellError>,
    ) -> Result<T, ShellError> {
        let index = self.index;
        let seen = self.seen.clone();
        let checkpoint = Checkpoint {
            iterator: self,
            index,
            seen,
            committed: false,
        };
        let value = block(checkpoint.iterator)?;
        checkpoint.commit();
        return Ok(value);
    }
    /// A zero-width span at the very end of the stream.
    fn eof_span(&self) -> Span {
        Span::new(self.span.end(), self.span.end())
    }
    /// The type name of the token at the cursor ("end" at EOF), spanned.
    pub fn typed_span_at_cursor(&mut self) -> Spanned<&'static str> {
        let next = self.peek_any();
        match next.node {
            None => "end".spanned(self.eof_span()),
            Some(node) => node.spanned_type_name(),
        }
    }
    /// The span of the token at the cursor, or the EOF span.
    pub fn span_at_cursor(&mut self) -> Span {
        let next = self.peek_any();
        match next.node {
            None => self.eof_span(),
            Some(node) => node.span(),
        }
    }
    /// Marks `position` as consumed without moving the cursor.
    pub fn remove(&mut self, position: usize) {
        self.seen.insert(position);
    }
    /// True when no unseen token remains (whitespace per `skip_ws`).
    pub fn at_end(&self) -> bool {
        peek(self, self.skip_ws).is_none()
    }
    /// True when no unseen non-whitespace token remains.
    pub fn at_end_possible_ws(&self) -> bool {
        peek(self, true).is_none()
    }
    /// Consumes the token at the cursor and moves the cursor forward.
    pub fn advance(&mut self) {
        self.seen.insert(self.index);
        self.index += 1;
    }
    /// Finds the first unseen token for which `f` returns `Some`, marks it
    /// seen, and returns its index and the extracted value.
    pub fn extract<T>(&mut self, f: impl Fn(&TokenNode) -> Option<T>) -> Option<(usize, T)> {
        for (i, item) in self.tokens.iter().enumerate() {
            if self.seen.contains(&i) {
                continue;
            }
            match f(item) {
                None => {
                    continue;
                }
                Some(value) => {
                    self.seen.insert(i);
                    return Some((i, value));
                }
            }
        }
        None
    }
    /// Moves the cursor to `pos` (does not alter the seen-set).
    pub fn move_to(&mut self, pos: usize) {
        self.index = pos;
    }
    /// Resets the cursor to the beginning (seen tokens remain seen).
    pub fn restart(&mut self) {
        self.index = 0;
    }
    // NOTE: inherent `clone`, not a `Clone` impl — takes `&self` despite the
    // `&mut` field by re-borrowing the shared token slice.
    pub fn clone(&self) -> TokensIterator<'content> {
        TokensIterator {
            tokens: self.tokens,
            span: self.span,
            index: self.index,
            seen: self.seen.clone(),
            skip_ws: self.skip_ws,
        }
    }
    // Get the next token, not including whitespace
    pub fn next_non_ws(&mut self) -> Option<&TokenNode> {
        let mut peeked = start_next(self, true);
        peeked.commit()
    }
    // Peek the next token, not including whitespace
    pub fn peek_non_ws<'me>(&'me mut self) -> Peeked<'content, 'me> {
        start_next(self, true)
    }
    // Peek the next token, including whitespace
    pub fn peek_any<'me>(&'me mut self) -> Peeked<'content, 'me> {
        start_next(self, false)
    }
    // Peek the next token, including whitespace, but not EOF
    pub fn peek_any_token<'me, T>(
        &'me mut self,
        block: impl FnOnce(&'content TokenNode) -> Result<T, ShellError>,
    ) -> Result<T, ShellError> {
        let peeked = start_next(self, false);
        let peeked = peeked.not_eof("invariant");
        match peeked {
            Err(err) => return Err(err),
            Ok(peeked) => match block(peeked.node) {
                Err(err) => return Err(err),
                Ok(val) => {
                    // Only consume the token once `block` has accepted it.
                    peeked.commit();
                    return Ok(val);
                }
            },
        }
    }
    /// Marks `[from, to)` as seen and moves the cursor to `to`.
    fn commit(&mut self, from: usize, to: usize) {
        for index in from..to {
            self.seen.insert(index);
        }
        self.index = to;
    }
    /// The index of the next unseen token, if any.
    pub fn pos(&self, skip_ws: bool) -> Option<usize> {
        peek_pos(self, skip_ws)
    }
    /// Clones the iterator, rewinds it, and collects the unseen tokens —
    /// for debugging only.
    pub fn debug_remaining(&self) -> Vec<TokenNode> {
        let mut tokens = self.clone();
        tokens.restart();
        tokens.cloned().collect()
    }
}
impl<'content> Iterator for TokensIterator<'content> {
    type Item = &'content TokenNode;

    fn next(&mut self) -> Option<&'content TokenNode> {
        // Delegates to the free `next`, honoring the iterator's configured
        // whitespace-skipping behavior.
        next(self, self.skip_ws)
    }
}
/// Peeks at the next unseen token without mutating the iterator, optionally
/// skipping whitespace. Returns `None` when the end of the stream is
/// reached.
fn peek<'content, 'me>(
    iterator: &'me TokensIterator<'content>,
    skip_ws: bool,
) -> Option<&'me TokenNode> {
    let mut to = iterator.index;

    loop {
        // Single bounds check per pass; the original's second, identical
        // check was unreachable (`to` never changed between the two).
        if to >= iterator.tokens.len() {
            return None;
        }

        // Already-consumed positions are skipped entirely.
        if iterator.seen.contains(&to) {
            to += 1;
            continue;
        }

        let node = &iterator.tokens[to];

        match node {
            TokenNode::Whitespace(_) if skip_ws => {
                to += 1;
            }
            _ => {
                return Some(node);
            }
        }
    }
}
/// Like `peek`, but returns the index of the next unseen token instead of
/// the token itself. Returns `None` at the end of the stream.
fn peek_pos<'content, 'me>(
    iterator: &'me TokensIterator<'content>,
    skip_ws: bool,
) -> Option<usize> {
    let mut to = iterator.index;

    loop {
        // Single bounds check per pass; the original's second, identical
        // check was unreachable (`to` never changed between the two).
        if to >= iterator.tokens.len() {
            return None;
        }

        if iterator.seen.contains(&to) {
            to += 1;
            continue;
        }

        let node = &iterator.tokens[to];

        match node {
            TokenNode::Whitespace(_) if skip_ws => {
                to += 1;
            }
            _ => return Some(to),
        }
    }
}
/// Begins a peek: scans forward from the cursor past seen positions (and
/// whitespace, when `skip_ws`) and returns a `Peeked` describing the node
/// found (or `None` at EOF) together with the `[from, to)` range that a
/// later `commit` will mark as consumed.
fn start_next<'content, 'me>(
    iterator: &'me mut TokensIterator<'content>,
    skip_ws: bool,
) -> Peeked<'content, 'me> {
    let from = iterator.index;
    let mut to = iterator.index;

    loop {
        // Single bounds check per pass; the original repeated this exact
        // check (and the EOF `Peeked` construction) unreachably below it.
        if to >= iterator.tokens.len() {
            return Peeked {
                node: None,
                iterator,
                from,
                to,
            };
        }

        if iterator.seen.contains(&to) {
            to += 1;
            continue;
        }

        let node = &iterator.tokens[to];

        match node {
            TokenNode::Whitespace(_) if skip_ws => {
                to += 1;
            }
            _ => {
                to += 1;
                return Peeked {
                    node: Some(node),
                    iterator,
                    from,
                    to,
                };
            }
        }
    }
}
/// Advances the iterator to the next unseen token (skipping whitespace when
/// `skip_ws`), consuming everything it passes over. Returns `None` at the
/// end of the stream.
fn next<'me, 'content>(
    iterator: &'me mut TokensIterator<'content>,
    skip_ws: bool,
) -> Option<&'content TokenNode> {
    loop {
        // Single bounds check per pass; the original's second, identical
        // check was unreachable (`index` only changes on the `continue`
        // path, which re-enters the loop at this check).
        if iterator.index >= iterator.tokens.len() {
            return None;
        }

        if iterator.seen.contains(&iterator.index) {
            iterator.advance();
            continue;
        }

        match &iterator.tokens[iterator.index] {
            TokenNode::Whitespace(_) if skip_ws => {
                iterator.advance();
            }
            other => {
                // Consume the token we are about to return.
                iterator.advance();
                return Some(other);
            }
        }
    }
}

View File

@ -0,0 +1,30 @@
use crate::parser::hir::tokens_iterator::TokensIterator;
use crate::traits::ToDebug;
/// One entry in a debug rendering of a `TokensIterator`: a token already
/// consumed (`Seen`), one not yet consumed (`Unseen`), or the marker for
/// the iterator's current cursor position (`Cursor`).
#[derive(Debug)]
pub(crate) enum DebugIteratorToken {
    Seen(String),
    Unseen(String),
    Cursor,
}
/// Renders the iterator's token stream for debugging: each token appears as
/// `Seen` or `Unseen`, with a `Cursor` marker inserted immediately before
/// the token at the current index.
pub(crate) fn debug_tokens(iterator: &TokensIterator, source: &str) -> Vec<DebugIteratorToken> {
    let mut out = vec![];

    for (i, token) in iterator.tokens.iter().enumerate() {
        if iterator.index == i {
            out.push(DebugIteratorToken::Cursor);
        }

        // `.to_string()` instead of `format!("{}", ...)` (clippy
        // `useless_format`), and the rendering is computed once for both
        // branches instead of being duplicated.
        let rendered = token.debug(source).to_string();

        if iterator.seen.contains(&i) {
            out.push(DebugIteratorToken::Seen(rendered));
        } else {
            out.push(DebugIteratorToken::Unseen(rendered));
        }
    }

    out
}

View File

@ -1,7 +1,7 @@
use crate::Tag;
use crate::Span;
use derive_new::new;
use language_reporting::{FileName, Location};
use uuid::Uuid;
use log::trace;
#[derive(new, Debug, Clone)]
pub struct Files {
@ -9,20 +9,20 @@ pub struct Files {
}
impl language_reporting::ReportingFiles for Files {
type Span = Tag;
type FileId = Uuid;
type Span = Span;
type FileId = usize;
fn byte_span(
&self,
file: Self::FileId,
_file: Self::FileId,
from_index: usize,
to_index: usize,
) -> Option<Self::Span> {
Some(Tag::from((from_index, to_index, file)))
Some(Span::new(from_index, to_index))
}
fn file_id(&self, tag: Self::Span) -> Self::FileId {
tag.anchor
fn file_id(&self, _tag: Self::Span) -> Self::FileId {
0
}
fn file_name(&self, _file: Self::FileId) -> FileName {
@ -38,8 +38,18 @@ impl language_reporting::ReportingFiles for Files {
let mut seen_lines = 0;
let mut seen_bytes = 0;
for (pos, _) in source.match_indices('\n') {
if pos > byte_index {
for (pos, slice) in source.match_indices('\n') {
trace!(
"SEARCH={} SEEN={} POS={} SLICE={:?} LEN={} ALL={:?}",
byte_index,
seen_bytes,
pos,
slice,
source.len(),
source
);
if pos >= byte_index {
return Some(language_reporting::Location::new(
seen_lines,
byte_index - seen_bytes,
@ -53,18 +63,18 @@ impl language_reporting::ReportingFiles for Files {
if seen_lines == 0 {
Some(language_reporting::Location::new(0, byte_index))
} else {
None
panic!("byte index {} wasn't valid", byte_index);
}
}
fn line_span(&self, file: Self::FileId, lineno: usize) -> Option<Self::Span> {
fn line_span(&self, _file: Self::FileId, lineno: usize) -> Option<Self::Span> {
let source = &self.snippet;
let mut seen_lines = 0;
let mut seen_bytes = 0;
for (pos, _) in source.match_indices('\n') {
if seen_lines == lineno {
return Some(Tag::from((seen_bytes, pos, file)));
return Some(Span::new(seen_bytes, pos + 1));
} else {
seen_lines += 1;
seen_bytes = pos + 1;
@ -72,18 +82,20 @@ impl language_reporting::ReportingFiles for Files {
}
if seen_lines == 0 {
Some(Tag::from((0, self.snippet.len() - 1, file)))
Some(Span::new(0, self.snippet.len() - 1))
} else {
None
}
}
fn source(&self, tag: Self::Span) -> Option<String> {
if tag.span.start > tag.span.end {
fn source(&self, span: Self::Span) -> Option<String> {
trace!("source(tag={:?}) snippet={:?}", span, self.snippet);
if span.start() > span.end() {
return None;
} else if tag.span.end >= self.snippet.len() {
} else if span.end() > self.snippet.len() {
return None;
}
Some(tag.slice(&self.snippet).to_string())
Some(span.slice(&self.snippet).to_string())
}
}

View File

@ -1,4 +1,5 @@
use crate::Tag;
use crate::parser::hir::syntax_shape::flat_shape::FlatShape;
use crate::{Span, Spanned, SpannedItem};
use derive_new::new;
use getset::Getters;
use serde::{Deserialize, Serialize};
@ -12,6 +13,15 @@ pub enum FlagKind {
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Getters, new)]
#[get = "pub(crate)"]
pub struct Flag {
kind: FlagKind,
name: Tag,
pub(crate) kind: FlagKind,
pub(crate) name: Span,
}
impl Spanned<Flag> {
pub fn color(&self) -> Spanned<FlatShape> {
match self.item.kind {
FlagKind::Longhand => FlatShape::Flag.spanned(self.span),
FlagKind::Shorthand => FlatShape::ShorthandFlag.spanned(self.span),
}
}
}

View File

@ -11,6 +11,7 @@ pub enum Operator {
GreaterThan,
LessThanOrEqual,
GreaterThanOrEqual,
Dot,
}
impl ToDebug for Operator {
@ -32,6 +33,7 @@ impl Operator {
Operator::GreaterThan => ">",
Operator::LessThanOrEqual => "<=",
Operator::GreaterThanOrEqual => ">=",
Operator::Dot => ".",
}
}
}
@ -52,6 +54,7 @@ impl FromStr for Operator {
">" => Ok(Operator::GreaterThan),
"<=" => Ok(Operator::LessThanOrEqual),
">=" => Ok(Operator::GreaterThanOrEqual),
"." => Ok(Operator::Dot),
_ => Err(()),
}
}

File diff suppressed because it is too large Load Diff

View File

@ -1,37 +1,30 @@
use crate::parser::CallNode;
use crate::parser::TokenNode;
use crate::traits::ToDebug;
use crate::{Tag, Tagged};
use crate::{Span, Spanned};
use derive_new::new;
use getset::Getters;
use std::fmt;
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, new)]
pub struct Pipeline {
pub(crate) parts: Vec<PipelineElement>,
pub(crate) post_ws: Option<Tag>,
pub(crate) parts: Vec<Spanned<PipelineElement>>,
// pub(crate) post_ws: Option<Tag>,
}
impl ToDebug for Pipeline {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
for part in &self.parts {
for part in self.parts.iter() {
write!(f, "{}", part.debug(source))?;
}
if let Some(post_ws) = self.post_ws {
write!(f, "{}", post_ws.slice(source))?
}
Ok(())
}
}
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
pub struct PipelineElement {
pub pipe: Option<Tag>,
pub pre_ws: Option<Tag>,
#[get = "pub(crate)"]
call: Tagged<CallNode>,
pub post_ws: Option<Tag>,
pub pipe: Option<Span>,
pub tokens: Spanned<Vec<TokenNode>>,
}
impl ToDebug for PipelineElement {
@ -40,14 +33,8 @@ impl ToDebug for PipelineElement {
write!(f, "{}", pipe.slice(source))?;
}
if let Some(pre_ws) = self.pre_ws {
write!(f, "{}", pre_ws.slice(source))?;
}
write!(f, "{}", self.call.debug(source))?;
if let Some(post_ws) = self.post_ws {
write!(f, "{}", post_ws.slice(source))?;
for token in &self.tokens.item {
write!(f, "{}", token.debug(source))?;
}
Ok(())

View File

@ -1,9 +1,9 @@
use crate::errors::ShellError;
use crate::parser::parse::{call_node::*, flag::*, operator::*, pipeline::*, tokens::*};
use crate::prelude::*;
use crate::traits::ToDebug;
use crate::{Tag, Tagged, Text};
use crate::{Tagged, Text};
use derive_new::new;
use enum_utils::FromStr;
use getset::Getters;
use std::fmt;
@ -11,16 +11,14 @@ use std::fmt;
pub enum TokenNode {
Token(Token),
Call(Tagged<CallNode>),
Delimited(Tagged<DelimitedNode>),
Pipeline(Tagged<Pipeline>),
Operator(Tagged<Operator>),
Flag(Tagged<Flag>),
Member(Tag),
Whitespace(Tag),
Call(Spanned<CallNode>),
Nodes(Spanned<Vec<TokenNode>>),
Delimited(Spanned<DelimitedNode>),
Pipeline(Spanned<Pipeline>),
Flag(Spanned<Flag>),
Whitespace(Span),
Error(Tagged<Box<ShellError>>),
Path(Tagged<PathNode>),
Error(Spanned<ShellError>),
}
impl ToDebug for TokenNode {
@ -78,48 +76,51 @@ impl fmt::Debug for DebugTokenNode<'_> {
)
}
TokenNode::Pipeline(pipeline) => write!(f, "{}", pipeline.debug(self.source)),
TokenNode::Error(s) => write!(f, "<error> for {:?}", s.tag().slice(self.source)),
rest => write!(f, "{}", rest.tag().slice(self.source)),
TokenNode::Error(_) => write!(f, "<error>"),
rest => write!(f, "{}", rest.span().slice(self.source)),
}
}
}
impl From<&TokenNode> for Tag {
fn from(token: &TokenNode) -> Tag {
token.tag()
impl From<&TokenNode> for Span {
fn from(token: &TokenNode) -> Span {
token.span()
}
}
impl TokenNode {
pub fn tag(&self) -> Tag {
pub fn span(&self) -> Span {
match self {
TokenNode::Token(t) => t.tag(),
TokenNode::Call(s) => s.tag(),
TokenNode::Delimited(s) => s.tag(),
TokenNode::Pipeline(s) => s.tag(),
TokenNode::Operator(s) => s.tag(),
TokenNode::Flag(s) => s.tag(),
TokenNode::Member(s) => *s,
TokenNode::Token(t) => t.span,
TokenNode::Nodes(t) => t.span,
TokenNode::Call(s) => s.span,
TokenNode::Delimited(s) => s.span,
TokenNode::Pipeline(s) => s.span,
TokenNode::Flag(s) => s.span,
TokenNode::Whitespace(s) => *s,
TokenNode::Error(s) => s.tag(),
TokenNode::Path(s) => s.tag(),
TokenNode::Error(s) => s.span,
}
}
pub fn type_name(&self) -> String {
pub fn type_name(&self) -> &'static str {
match self {
TokenNode::Token(t) => t.type_name(),
TokenNode::Nodes(_) => "nodes",
TokenNode::Call(_) => "command",
TokenNode::Delimited(d) => d.type_name(),
TokenNode::Pipeline(_) => "pipeline",
TokenNode::Operator(_) => "operator",
TokenNode::Flag(_) => "flag",
TokenNode::Member(_) => "member",
TokenNode::Whitespace(_) => "whitespace",
TokenNode::Error(_) => "error",
TokenNode::Path(_) => "path",
}
.to_string()
}
pub fn spanned_type_name(&self) -> Spanned<&'static str> {
self.type_name().spanned(self.span())
}
pub fn tagged_type_name(&self) -> Tagged<&'static str> {
self.type_name().tagged(self.span())
}
pub fn old_debug<'a>(&'a self, source: &'a Text) -> DebugTokenNode<'a> {
@ -127,16 +128,26 @@ impl TokenNode {
}
pub fn as_external_arg(&self, source: &Text) -> String {
self.tag().slice(source).to_string()
self.span().slice(source).to_string()
}
pub fn source<'a>(&self, source: &'a Text) -> &'a str {
self.tag().slice(source)
self.span().slice(source)
}
pub fn get_variable(&self) -> Result<(Span, Span), ShellError> {
match self {
TokenNode::Token(Spanned {
item: RawToken::Variable(inner_span),
span: outer_span,
}) => Ok((*outer_span, *inner_span)),
_ => Err(ShellError::type_error("variable", self.tagged_type_name())),
}
}
pub fn is_bare(&self) -> bool {
match self {
TokenNode::Token(Tagged {
TokenNode::Token(Spanned {
item: RawToken::Bare,
..
}) => true,
@ -144,9 +155,44 @@ impl TokenNode {
}
}
pub fn is_pattern(&self) -> bool {
match self {
TokenNode::Token(Spanned {
item: RawToken::GlobPattern,
..
}) => true,
_ => false,
}
}
pub fn is_dot(&self) -> bool {
match self {
TokenNode::Token(Spanned {
item: RawToken::Operator(Operator::Dot),
..
}) => true,
_ => false,
}
}
pub fn as_block(&self) -> Option<(Spanned<&[TokenNode]>, (Span, Span))> {
match self {
TokenNode::Delimited(Spanned {
item:
DelimitedNode {
delimiter,
children,
spans,
},
span,
}) if *delimiter == Delimiter::Brace => Some(((&children[..]).spanned(*span), *spans)),
_ => None,
}
}
pub fn is_external(&self) -> bool {
match self {
TokenNode::Token(Tagged {
TokenNode::Token(Spanned {
item: RawToken::ExternalCommand(..),
..
}) => true,
@ -154,20 +200,20 @@ impl TokenNode {
}
}
pub fn expect_external(&self) -> Tag {
pub fn expect_external(&self) -> Span {
match self {
TokenNode::Token(Tagged {
item: RawToken::ExternalCommand(tag),
TokenNode::Token(Spanned {
item: RawToken::ExternalCommand(span),
..
}) => *tag,
}) => *span,
_ => panic!("Only call expect_external if you checked is_external first"),
}
}
pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option<Tagged<Flag>> {
pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option<Spanned<Flag>> {
match self {
TokenNode::Flag(
flag @ Tagged {
flag @ Spanned {
item: Flag { .. }, ..
},
) if value == flag.name().slice(source) => Some(*flag),
@ -177,8 +223,58 @@ impl TokenNode {
pub fn as_pipeline(&self) -> Result<Pipeline, ShellError> {
match self {
TokenNode::Pipeline(Tagged { item, .. }) => Ok(item.clone()),
_ => Err(ShellError::string("unimplemented")),
TokenNode::Pipeline(Spanned { item, .. }) => Ok(item.clone()),
_ => Err(ShellError::unimplemented("unimplemented")),
}
}
pub fn is_whitespace(&self) -> bool {
match self {
TokenNode::Whitespace(_) => true,
_ => false,
}
}
pub fn expect_string(&self) -> (Span, Span) {
match self {
TokenNode::Token(Spanned {
item: RawToken::String(inner_span),
span: outer_span,
}) => (*outer_span, *inner_span),
other => panic!("Expected string, found {:?}", other),
}
}
}
#[cfg(test)]
impl TokenNode {
pub fn expect_list(&self) -> Tagged<&[TokenNode]> {
match self {
TokenNode::Nodes(Spanned { item, span }) => (&item[..]).tagged(Tag {
span: *span,
anchor: None,
}),
other => panic!("Expected list, found {:?}", other),
}
}
pub fn expect_var(&self) -> (Span, Span) {
match self {
TokenNode::Token(Spanned {
item: RawToken::Variable(inner_span),
span: outer_span,
}) => (*outer_span, *inner_span),
other => panic!("Expected var, found {:?}", other),
}
}
pub fn expect_bare(&self) -> Span {
match self {
TokenNode::Token(Spanned {
item: RawToken::Bare,
span,
}) => *span,
other => panic!("Expected var, found {:?}", other),
}
}
}
@ -186,8 +282,9 @@ impl TokenNode {
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
#[get = "pub(crate)"]
pub struct DelimitedNode {
delimiter: Delimiter,
children: Vec<TokenNode>,
pub(crate) delimiter: Delimiter,
pub(crate) spans: (Span, Span),
pub(crate) children: Vec<TokenNode>,
}
impl DelimitedNode {
@ -200,13 +297,31 @@ impl DelimitedNode {
}
}
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, FromStr)]
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub enum Delimiter {
Paren,
Brace,
Square,
}
impl Delimiter {
pub(crate) fn open(&self) -> &'static str {
match self {
Delimiter::Paren => "(",
Delimiter::Brace => "{",
Delimiter::Square => "[",
}
}
pub(crate) fn close(&self) -> &'static str {
match self {
Delimiter::Paren => ")",
Delimiter::Brace => "}",
Delimiter::Square => "]",
}
}
}
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
#[get = "pub(crate)"]
pub struct PathNode {

View File

@ -3,12 +3,10 @@ use crate::prelude::*;
use crate::parser::parse::flag::{Flag, FlagKind};
use crate::parser::parse::operator::Operator;
use crate::parser::parse::pipeline::{Pipeline, PipelineElement};
use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, PathNode, TokenNode};
use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
use crate::parser::parse::tokens::{RawNumber, RawToken};
use crate::parser::parse::unit::Unit;
use crate::parser::CallNode;
use derive_new::new;
use uuid::Uuid;
#[derive(new)]
pub struct TokenTreeBuilder {
@ -17,74 +15,86 @@ pub struct TokenTreeBuilder {
#[new(default)]
output: String,
anchor: Uuid,
}
pub type CurriedToken = Box<dyn FnOnce(&mut TokenTreeBuilder) -> TokenNode + 'static>;
pub type CurriedCall = Box<dyn FnOnce(&mut TokenTreeBuilder) -> Tagged<CallNode> + 'static>;
impl TokenTreeBuilder {
pub fn build(anchor: Uuid, block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) {
let mut builder = TokenTreeBuilder::new(anchor);
pub fn build(block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) {
let mut builder = TokenTreeBuilder::new();
let node = block(&mut builder);
(node, builder.output)
}
pub fn pipeline(input: Vec<(Option<&str>, CurriedCall, Option<&str>)>) -> CurriedToken {
let input: Vec<(Option<String>, CurriedCall, Option<String>)> = input
.into_iter()
.map(|(pre, call, post)| {
(
pre.map(|s| s.to_string()),
call,
post.map(|s| s.to_string()),
)
})
.collect();
fn build_spanned<T>(
&mut self,
callback: impl FnOnce(&mut TokenTreeBuilder) -> T,
) -> Spanned<T> {
let start = self.pos;
let ret = callback(self);
let end = self.pos;
ret.spanned(Span::new(start, end))
}
pub fn pipeline(input: Vec<Vec<CurriedToken>>) -> CurriedToken {
Box::new(move |b| {
let start = b.pos;
let mut out: Vec<PipelineElement> = vec![];
let mut out: Vec<Spanned<PipelineElement>> = vec![];
let mut input = input.into_iter().peekable();
let (pre, call, post) = input
let head = input
.next()
.expect("A pipeline must contain at least one element");
let pipe = None;
let pre_tag = pre.map(|pre| b.consume_tag(&pre));
let call = call(b);
let post_tag = post.map(|post| b.consume_tag(&post));
let head = b.build_spanned(|b| head.into_iter().map(|node| node(b)).collect());
out.push(PipelineElement::new(pipe, pre_tag, call, post_tag));
let head_span: Span = head.span;
out.push(PipelineElement::new(pipe, head).spanned(head_span));
loop {
match input.next() {
None => break,
Some((pre, call, post)) => {
let pipe = Some(b.consume_tag("|"));
let pre_span = pre.map(|pre| b.consume_tag(&pre));
let call = call(b);
let post_span = post.map(|post| b.consume_tag(&post));
Some(node) => {
let start = b.pos;
let pipe = Some(b.consume_span("|"));
let node =
b.build_spanned(|b| node.into_iter().map(|node| node(b)).collect());
let end = b.pos;
out.push(PipelineElement::new(pipe, pre_span, call, post_span));
out.push(PipelineElement::new(pipe, node).spanned(Span::new(start, end)));
}
}
}
let end = b.pos;
TokenTreeBuilder::tagged_pipeline((out, None), (start, end, b.anchor))
TokenTreeBuilder::spanned_pipeline(out, Span::new(start, end))
})
}
pub fn tagged_pipeline(
input: (Vec<PipelineElement>, Option<Tag>),
tag: impl Into<Tag>,
pub fn spanned_pipeline(
input: Vec<Spanned<PipelineElement>>,
span: impl Into<Span>,
) -> TokenNode {
TokenNode::Pipeline(Pipeline::new(input.0, input.1.into()).tagged(tag.into()))
TokenNode::Pipeline(Pipeline::new(input).spanned(span))
}
pub fn token_list(input: Vec<CurriedToken>) -> CurriedToken {
Box::new(move |b| {
let start = b.pos;
let tokens = input.into_iter().map(|i| i(b)).collect();
let end = b.pos;
TokenTreeBuilder::tagged_token_list(tokens, (start, end, None))
})
}
pub fn tagged_token_list(input: Vec<TokenNode>, tag: impl Into<Tag>) -> TokenNode {
TokenNode::Nodes(input.spanned(tag.into().span))
}
pub fn op(input: impl Into<Operator>) -> CurriedToken {
@ -95,12 +105,12 @@ impl TokenTreeBuilder {
b.pos = end;
TokenTreeBuilder::tagged_op(input, (start, end, b.anchor))
TokenTreeBuilder::spanned_op(input, Span::new(start, end))
})
}
pub fn tagged_op(input: impl Into<Operator>, tag: impl Into<Tag>) -> TokenNode {
TokenNode::Operator(input.into().tagged(tag.into()))
pub fn spanned_op(input: impl Into<Operator>, span: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::Operator(input.into()).spanned(span.into()))
}
pub fn string(input: impl Into<String>) -> CurriedToken {
@ -112,15 +122,15 @@ impl TokenTreeBuilder {
let (_, end) = b.consume("\"");
b.pos = end;
TokenTreeBuilder::tagged_string(
(inner_start, inner_end, b.anchor),
(start, end, b.anchor),
TokenTreeBuilder::spanned_string(
Span::new(inner_start, inner_end),
Span::new(start, end),
)
})
}
pub fn tagged_string(input: impl Into<Tag>, tag: impl Into<Tag>) -> TokenNode {
TokenNode::Token(RawToken::String(input.into()).tagged(tag.into()))
pub fn spanned_string(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::String(input.into()).spanned(span.into()))
}
pub fn bare(input: impl Into<String>) -> CurriedToken {
@ -130,12 +140,12 @@ impl TokenTreeBuilder {
let (start, end) = b.consume(&input);
b.pos = end;
TokenTreeBuilder::tagged_bare((start, end, b.anchor))
TokenTreeBuilder::spanned_bare(Span::new(start, end))
})
}
pub fn tagged_bare(tag: impl Into<Tag>) -> TokenNode {
TokenNode::Token(RawToken::Bare.tagged(tag.into()))
pub fn spanned_bare(span: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::Bare.spanned(span))
}
pub fn pattern(input: impl Into<String>) -> CurriedToken {
@ -145,12 +155,12 @@ impl TokenTreeBuilder {
let (start, end) = b.consume(&input);
b.pos = end;
TokenTreeBuilder::tagged_pattern((start, end, b.anchor))
TokenTreeBuilder::spanned_pattern(Span::new(start, end))
})
}
pub fn tagged_pattern(input: impl Into<Tag>) -> TokenNode {
TokenNode::Token(RawToken::GlobPattern.tagged(input.into()))
pub fn spanned_pattern(input: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::GlobPattern.spanned(input.into()))
}
pub fn external_word(input: impl Into<String>) -> CurriedToken {
@ -160,16 +170,31 @@ impl TokenTreeBuilder {
let (start, end) = b.consume(&input);
b.pos = end;
TokenTreeBuilder::tagged_external_word((start, end, b.anchor))
TokenTreeBuilder::spanned_external_word(Span::new(start, end))
})
}
pub fn tagged_external_word(input: impl Into<Tag>) -> TokenNode {
TokenNode::Token(RawToken::ExternalWord.tagged(input.into()))
pub fn spanned_external_word(input: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::ExternalWord.spanned(input.into()))
}
pub fn tagged_external(input: impl Into<Tag>, tag: impl Into<Tag>) -> TokenNode {
TokenNode::Token(RawToken::ExternalCommand(input.into()).tagged(tag.into()))
pub fn external_command(input: impl Into<String>) -> CurriedToken {
let input = input.into();
Box::new(move |b| {
let (outer_start, _) = b.consume("^");
let (inner_start, end) = b.consume(&input);
b.pos = end;
TokenTreeBuilder::spanned_external_command(
Span::new(inner_start, end),
Span::new(outer_start, end),
)
})
}
pub fn spanned_external_command(inner: impl Into<Span>, outer: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::ExternalCommand(inner.into()).spanned(outer.into()))
}
pub fn int(input: impl Into<BigInt>) -> CurriedToken {
@ -179,9 +204,9 @@ impl TokenTreeBuilder {
let (start, end) = b.consume(&int.to_string());
b.pos = end;
TokenTreeBuilder::tagged_number(
RawNumber::Int((start, end, b.anchor).into()),
(start, end, b.anchor),
TokenTreeBuilder::spanned_number(
RawNumber::Int(Span::new(start, end)),
Span::new(start, end),
)
})
}
@ -193,63 +218,15 @@ impl TokenTreeBuilder {
let (start, end) = b.consume(&decimal.to_string());
b.pos = end;
TokenTreeBuilder::tagged_number(
RawNumber::Decimal((start, end, b.anchor).into()),
(start, end, b.anchor),
TokenTreeBuilder::spanned_number(
RawNumber::Decimal(Span::new(start, end)),
Span::new(start, end),
)
})
}
pub fn tagged_number(input: impl Into<RawNumber>, tag: impl Into<Tag>) -> TokenNode {
TokenNode::Token(RawToken::Number(input.into()).tagged(tag.into()))
}
pub fn size(int: impl Into<i64>, unit: impl Into<Unit>) -> CurriedToken {
let int = int.into();
let unit = unit.into();
Box::new(move |b| {
let (start_int, end_int) = b.consume(&int.to_string());
let (_, end_unit) = b.consume(unit.as_str());
b.pos = end_unit;
TokenTreeBuilder::tagged_size(
(RawNumber::Int((start_int, end_int, b.anchor).into()), unit),
(start_int, end_unit, b.anchor),
)
})
}
pub fn tagged_size(
input: (impl Into<RawNumber>, impl Into<Unit>),
tag: impl Into<Tag>,
) -> TokenNode {
let (int, unit) = (input.0.into(), input.1.into());
TokenNode::Token(RawToken::Size(int, unit).tagged(tag.into()))
}
pub fn path(head: CurriedToken, tail: Vec<CurriedToken>) -> CurriedToken {
Box::new(move |b| {
let start = b.pos;
let head = head(b);
let mut output = vec![];
for item in tail {
b.consume(".");
output.push(item(b));
}
let end = b.pos;
TokenTreeBuilder::tagged_path((head, output), (start, end, b.anchor))
})
}
pub fn tagged_path(input: (TokenNode, Vec<TokenNode>), tag: impl Into<Tag>) -> TokenNode {
TokenNode::Path(PathNode::new(Box::new(input.0), input.1).tagged(tag.into()))
pub fn spanned_number(input: impl Into<RawNumber>, span: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::Number(input.into()).spanned(span.into()))
}
pub fn var(input: impl Into<String>) -> CurriedToken {
@ -259,12 +236,12 @@ impl TokenTreeBuilder {
let (start, _) = b.consume("$");
let (inner_start, end) = b.consume(&input);
TokenTreeBuilder::tagged_var((inner_start, end, b.anchor), (start, end, b.anchor))
TokenTreeBuilder::spanned_var(Span::new(inner_start, end), Span::new(start, end))
})
}
pub fn tagged_var(input: impl Into<Tag>, tag: impl Into<Tag>) -> TokenNode {
TokenNode::Token(RawToken::Variable(input.into()).tagged(tag.into()))
pub fn spanned_var(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::Variable(input.into()).spanned(span.into()))
}
pub fn flag(input: impl Into<String>) -> CurriedToken {
@ -274,12 +251,12 @@ impl TokenTreeBuilder {
let (start, _) = b.consume("--");
let (inner_start, end) = b.consume(&input);
TokenTreeBuilder::tagged_flag((inner_start, end, b.anchor), (start, end, b.anchor))
TokenTreeBuilder::spanned_flag(Span::new(inner_start, end), Span::new(start, end))
})
}
pub fn tagged_flag(input: impl Into<Tag>, tag: impl Into<Tag>) -> TokenNode {
TokenNode::Flag(Flag::new(FlagKind::Longhand, input.into()).tagged(tag.into()))
pub fn spanned_flag(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
TokenNode::Flag(Flag::new(FlagKind::Longhand, input.into()).spanned(span.into()))
}
pub fn shorthand(input: impl Into<String>) -> CurriedToken {
@ -289,25 +266,12 @@ impl TokenTreeBuilder {
let (start, _) = b.consume("-");
let (inner_start, end) = b.consume(&input);
TokenTreeBuilder::tagged_shorthand((inner_start, end, b.anchor), (start, end, b.anchor))
TokenTreeBuilder::spanned_shorthand((inner_start, end), (start, end))
})
}
pub fn tagged_shorthand(input: impl Into<Tag>, tag: impl Into<Tag>) -> TokenNode {
TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into()).tagged(tag.into()))
}
pub fn member(input: impl Into<String>) -> CurriedToken {
let input = input.into();
Box::new(move |b| {
let (start, end) = b.consume(&input);
TokenTreeBuilder::tagged_member((start, end, b.anchor))
})
}
pub fn tagged_member(tag: impl Into<Tag>) -> TokenNode {
TokenNode::Member(tag.into())
pub fn spanned_shorthand(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into()).spanned(span.into()))
}
pub fn call(head: CurriedToken, input: Vec<CurriedToken>) -> CurriedCall {
@ -323,7 +287,7 @@ impl TokenTreeBuilder {
let end = b.pos;
TokenTreeBuilder::tagged_call(nodes, (start, end, b.anchor))
TokenTreeBuilder::tagged_call(nodes, (start, end, None))
})
}
@ -340,64 +304,85 @@ impl TokenTreeBuilder {
CallNode::new(Box::new(head), tail).tagged(tag.into())
}
fn consume_delimiter(
&mut self,
input: Vec<CurriedToken>,
_open: &str,
_close: &str,
) -> (Span, Span, Span, Vec<TokenNode>) {
let (start_open_paren, end_open_paren) = self.consume("(");
let mut output = vec![];
for item in input {
output.push(item(self));
}
let (start_close_paren, end_close_paren) = self.consume(")");
let open = Span::new(start_open_paren, end_open_paren);
let close = Span::new(start_close_paren, end_close_paren);
let whole = Span::new(start_open_paren, end_close_paren);
(open, close, whole, output)
}
pub fn parens(input: Vec<CurriedToken>) -> CurriedToken {
Box::new(move |b| {
let (start, _) = b.consume("(");
let mut output = vec![];
for item in input {
output.push(item(b));
}
let (open, close, whole, output) = b.consume_delimiter(input, "(", ")");
let (_, end) = b.consume(")");
TokenTreeBuilder::tagged_parens(output, (start, end, b.anchor))
TokenTreeBuilder::spanned_parens(output, (open, close), whole)
})
}
pub fn tagged_parens(input: impl Into<Vec<TokenNode>>, tag: impl Into<Tag>) -> TokenNode {
TokenNode::Delimited(DelimitedNode::new(Delimiter::Paren, input.into()).tagged(tag.into()))
pub fn spanned_parens(
input: impl Into<Vec<TokenNode>>,
spans: (Span, Span),
span: impl Into<Span>,
) -> TokenNode {
TokenNode::Delimited(
DelimitedNode::new(Delimiter::Paren, spans, input.into()).spanned(span.into()),
)
}
pub fn square(input: Vec<CurriedToken>) -> CurriedToken {
Box::new(move |b| {
let (start, _) = b.consume("[");
let mut output = vec![];
for item in input {
output.push(item(b));
}
let (open, close, whole, tokens) = b.consume_delimiter(input, "[", "]");
let (_, end) = b.consume("]");
TokenTreeBuilder::tagged_square(output, (start, end, b.anchor))
TokenTreeBuilder::spanned_square(tokens, (open, close), whole)
})
}
pub fn tagged_square(input: impl Into<Vec<TokenNode>>, tag: impl Into<Tag>) -> TokenNode {
TokenNode::Delimited(DelimitedNode::new(Delimiter::Square, input.into()).tagged(tag.into()))
pub fn spanned_square(
input: impl Into<Vec<TokenNode>>,
spans: (Span, Span),
span: impl Into<Span>,
) -> TokenNode {
TokenNode::Delimited(
DelimitedNode::new(Delimiter::Square, spans, input.into()).spanned(span.into()),
)
}
pub fn braced(input: Vec<CurriedToken>) -> CurriedToken {
Box::new(move |b| {
let (start, _) = b.consume("{ ");
let mut output = vec![];
for item in input {
output.push(item(b));
}
let (open, close, whole, tokens) = b.consume_delimiter(input, "{", "}");
let (_, end) = b.consume(" }");
TokenTreeBuilder::tagged_brace(output, (start, end, b.anchor))
TokenTreeBuilder::spanned_brace(tokens, (open, close), whole)
})
}
pub fn tagged_brace(input: impl Into<Vec<TokenNode>>, tag: impl Into<Tag>) -> TokenNode {
TokenNode::Delimited(DelimitedNode::new(Delimiter::Brace, input.into()).tagged(tag.into()))
pub fn spanned_brace(
input: impl Into<Vec<TokenNode>>,
spans: (Span, Span),
span: impl Into<Span>,
) -> TokenNode {
TokenNode::Delimited(
DelimitedNode::new(Delimiter::Brace, spans, input.into()).spanned(span.into()),
)
}
pub fn sp() -> CurriedToken {
Box::new(|b| {
let (start, end) = b.consume(" ");
TokenNode::Whitespace(Tag::from((start, end, b.anchor)))
TokenNode::Whitespace(Span::new(start, end))
})
}
@ -406,12 +391,12 @@ impl TokenTreeBuilder {
Box::new(move |b| {
let (start, end) = b.consume(&input);
TokenTreeBuilder::tagged_ws((start, end, b.anchor))
TokenTreeBuilder::spanned_ws(Span::new(start, end))
})
}
pub fn tagged_ws(tag: impl Into<Tag>) -> TokenNode {
TokenNode::Whitespace(tag.into())
pub fn spanned_ws(span: impl Into<Span>) -> TokenNode {
TokenNode::Whitespace(span.into())
}
fn consume(&mut self, input: &str) -> (usize, usize) {
@ -421,10 +406,10 @@ impl TokenTreeBuilder {
(start, self.pos)
}
fn consume_tag(&mut self, input: &str) -> Tag {
fn consume_span(&mut self, input: &str) -> Span {
let start = self.pos;
self.pos += input.len();
self.output.push_str(input);
(start, self.pos, self.anchor).into()
Span::new(start, self.pos)
}
}

View File

@ -1,38 +1,53 @@
use crate::parser::parse::unit::*;
use crate::parser::Operator;
use crate::prelude::*;
use crate::{Tagged, Text};
use crate::Text;
use std::fmt;
use std::str::FromStr;
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum RawToken {
Number(RawNumber),
Size(RawNumber, Unit),
String(Tag),
Variable(Tag),
ExternalCommand(Tag),
Operator(Operator),
String(Span),
Variable(Span),
ExternalCommand(Span),
ExternalWord,
GlobPattern,
Bare,
}
impl RawToken {
pub fn type_name(&self) -> &'static str {
match self {
RawToken::Number(_) => "Number",
RawToken::Operator(..) => "operator",
RawToken::String(_) => "String",
RawToken::Variable(_) => "variable",
RawToken::ExternalCommand(_) => "external command",
RawToken::ExternalWord => "external word",
RawToken::GlobPattern => "glob pattern",
RawToken::Bare => "String",
}
}
}
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum RawNumber {
Int(Tag),
Decimal(Tag),
Int(Span),
Decimal(Span),
}
impl RawNumber {
pub fn int(tag: impl Into<Tag>) -> Tagged<RawNumber> {
let tag = tag.into();
pub fn int(span: impl Into<Span>) -> Spanned<RawNumber> {
let span = span.into();
RawNumber::Int(tag).tagged(tag)
RawNumber::Int(span).spanned(span)
}
pub fn decimal(tag: impl Into<Tag>) -> Tagged<RawNumber> {
let tag = tag.into();
pub fn decimal(span: impl Into<Span>) -> Spanned<RawNumber> {
let span = span.into();
RawNumber::Decimal(tag).tagged(tag)
RawNumber::Decimal(span).spanned(span)
}
pub(crate) fn to_number(self, source: &Text) -> Number {
@ -45,22 +60,7 @@ impl RawNumber {
}
}
impl RawToken {
pub fn type_name(&self) -> &'static str {
match self {
RawToken::Number(_) => "Number",
RawToken::Size(..) => "Size",
RawToken::String(_) => "String",
RawToken::Variable(_) => "Variable",
RawToken::ExternalCommand(_) => "ExternalCommand",
RawToken::ExternalWord => "ExternalWord",
RawToken::GlobPattern => "GlobPattern",
RawToken::Bare => "String",
}
}
}
pub type Token = Tagged<RawToken>;
pub type Token = Spanned<RawToken>;
impl Token {
pub fn debug<'a>(&self, source: &'a Text) -> DebugToken<'a> {
@ -69,6 +69,76 @@ impl Token {
source,
}
}
pub fn extract_number(&self) -> Option<Spanned<RawNumber>> {
match self.item {
RawToken::Number(number) => Some((number).spanned(self.span)),
_ => None,
}
}
pub fn extract_int(&self) -> Option<(Span, Span)> {
match self.item {
RawToken::Number(RawNumber::Int(int)) => Some((int, self.span)),
_ => None,
}
}
pub fn extract_decimal(&self) -> Option<(Span, Span)> {
match self.item {
RawToken::Number(RawNumber::Decimal(decimal)) => Some((decimal, self.span)),
_ => None,
}
}
pub fn extract_operator(&self) -> Option<Spanned<Operator>> {
match self.item {
RawToken::Operator(operator) => Some(operator.spanned(self.span)),
_ => None,
}
}
pub fn extract_string(&self) -> Option<(Span, Span)> {
match self.item {
RawToken::String(span) => Some((span, self.span)),
_ => None,
}
}
pub fn extract_variable(&self) -> Option<(Span, Span)> {
match self.item {
RawToken::Variable(span) => Some((span, self.span)),
_ => None,
}
}
pub fn extract_external_command(&self) -> Option<(Span, Span)> {
match self.item {
RawToken::ExternalCommand(span) => Some((span, self.span)),
_ => None,
}
}
pub fn extract_external_word(&self) -> Option<Span> {
match self.item {
RawToken::ExternalWord => Some(self.span),
_ => None,
}
}
pub fn extract_glob_pattern(&self) -> Option<Span> {
match self.item {
RawToken::GlobPattern => Some(self.span),
_ => None,
}
}
pub fn extract_bare(&self) -> Option<Span> {
match self.item {
RawToken::Bare => Some(self.span),
_ => None,
}
}
}
pub struct DebugToken<'a> {
@ -78,6 +148,6 @@ pub struct DebugToken<'a> {
impl fmt::Debug for DebugToken<'_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.node.tag().slice(self.source))
write!(f, "{}", self.node.span.slice(self.source))
}
}

Some files were not shown because too many files have changed in this diff Show More