Pull in upstream changes.

This commit is contained in:
Thomas Hartmann 2019-10-14 23:05:52 +02:00
commit 65546646a7
125 changed files with 9271 additions and 4140 deletions

View File

@ -5,10 +5,25 @@ strategy:
matrix: matrix:
linux-nightly: linux-nightly:
image: ubuntu-16.04 image: ubuntu-16.04
style: 'unflagged'
macos-nightly: macos-nightly:
image: macos-10.14 image: macos-10.14
style: 'unflagged'
windows-nightly: windows-nightly:
image: vs2017-win2016 image: vs2017-win2016
style: 'unflagged'
linux-nightly-canary:
image: ubuntu-16.04
style: 'canary'
macos-nightly-canary:
image: macos-10.14
style: 'canary'
windows-nightly-canary:
image: vs2017-win2016
style: 'canary'
fmt:
image: ubuntu-16.04
style: 'fmt'
pool: pool:
vmImage: $(image) vmImage: $(image)
@ -27,6 +42,11 @@ steps:
rustup component add rustfmt --toolchain `cat rust-toolchain` rustup component add rustfmt --toolchain `cat rust-toolchain`
displayName: Install Rust displayName: Install Rust
- bash: RUSTFLAGS="-D warnings" cargo test --all-features - bash: RUSTFLAGS="-D warnings" cargo test --all-features
condition: eq(variables['style'], 'unflagged')
displayName: Run tests
- bash: NUSHELL_ENABLE_ALL_FLAGS=1 RUSTFLAGS="-D warnings" cargo test --all-features
condition: eq(variables['style'], 'canary')
displayName: Run tests displayName: Run tests
- bash: cargo fmt --all -- --check - bash: cargo fmt --all -- --check
condition: eq(variables['style'], 'fmt')
displayName: Lint displayName: Lint

1206
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -55,7 +55,7 @@ surf = "1.0.2"
url = "2.1.0" url = "2.1.0"
roxmltree = "0.7.0" roxmltree = "0.7.0"
nom_locate = "1.0.0" nom_locate = "1.0.0"
enum-utils = "0.1.1" nom-tracable = "0.4.0"
unicode-xid = "0.2.0" unicode-xid = "0.2.0"
serde_ini = "0.2.0" serde_ini = "0.2.0"
subprocess = "0.1.18" subprocess = "0.1.18"
@ -65,7 +65,6 @@ hex = "0.3.2"
tempfile = "3.1.0" tempfile = "3.1.0"
semver = "0.9.0" semver = "0.9.0"
which = "2.0.1" which = "2.0.1"
uuid = {version = "0.7.4", features = [ "v4", "serde" ]}
textwrap = {version = "0.11.0", features = ["term_size"]} textwrap = {version = "0.11.0", features = ["term_size"]}
shellexpand = "1.0.0" shellexpand = "1.0.0"
futures-timer = "0.4.0" futures-timer = "0.4.0"
@ -75,13 +74,13 @@ bigdecimal = { version = "0.1.0", features = ["serde"] }
natural = "0.3.0" natural = "0.3.0"
serde_urlencoded = "0.6.1" serde_urlencoded = "0.6.1"
sublime_fuzzy = "0.5" sublime_fuzzy = "0.5"
regex = "1"
regex = {version = "1", optional = true }
neso = { version = "0.5.0", optional = true } neso = { version = "0.5.0", optional = true }
crossterm = { version = "0.10.2", optional = true } crossterm = { version = "0.10.2", optional = true }
syntect = {version = "3.2.0", optional = true } syntect = {version = "3.2.0", optional = true }
onig_sys = {version = "=69.1.0", optional = true } onig_sys = {version = "=69.1.0", optional = true }
heim = {version = "0.0.8-alpha.1", optional = true } heim = {version = "0.0.8", optional = true }
battery = {version = "0.7.4", optional = true } battery = {version = "0.7.4", optional = true }
rawkey = {version = "0.1.2", optional = true } rawkey = {version = "0.1.2", optional = true }
clipboard = {version = "0.5", optional = true } clipboard = {version = "0.5", optional = true }
@ -95,6 +94,8 @@ textview = ["syntect", "onig_sys", "crossterm"]
binaryview = ["image", "crossterm"] binaryview = ["image", "crossterm"]
sys = ["heim", "battery"] sys = ["heim", "battery"]
ps = ["heim"] ps = ["heim"]
# trace = ["nom-tracable/trace"]
all = ["raw-key", "textview", "binaryview", "sys", "ps", "clipboard", "ptree"]
[dependencies.rusqlite] [dependencies.rusqlite]
version = "0.20.0" version = "0.20.0"
@ -103,6 +104,10 @@ features = ["bundled", "blob"]
[dev-dependencies] [dev-dependencies]
pretty_assertions = "0.6.1" pretty_assertions = "0.6.1"
[build-dependencies]
toml = "0.5.3"
serde = { version = "1.0.101", features = ["derive"] }
[lib] [lib]
name = "nu" name = "nu"
path = "src/lib.rs" path = "src/lib.rs"
@ -138,6 +143,7 @@ path = "src/plugins/skip.rs"
[[bin]] [[bin]]
name = "nu_plugin_match" name = "nu_plugin_match"
path = "src/plugins/match.rs" path = "src/plugins/match.rs"
required-features = ["regex"]
[[bin]] [[bin]]
name = "nu_plugin_sys" name = "nu_plugin_sys"

39
build.rs Normal file
View File

@ -0,0 +1,39 @@
use serde::Deserialize;
use std::collections::HashMap;
use std::collections::HashSet;
use std::env;
use std::path::Path;
#[derive(Deserialize)]
struct Feature {
#[allow(unused)]
description: String,
enabled: bool,
}
fn main() -> Result<(), Box<dyn std::error::Error>> {
let input = env::var("CARGO_MANIFEST_DIR").unwrap();
let all_on = env::var("NUSHELL_ENABLE_ALL_FLAGS").is_ok();
let flags: HashSet<String> = env::var("NUSHELL_ENABLE_FLAGS")
.map(|s| s.split(",").map(|s| s.to_string()).collect())
.unwrap_or_else(|_| HashSet::new());
if all_on && !flags.is_empty() {
println!(
"cargo:warning={}",
"Both NUSHELL_ENABLE_ALL_FLAGS and NUSHELL_ENABLE_FLAGS were set. You don't need both."
);
}
let path = Path::new(&input).join("features.toml");
let toml: HashMap<String, Feature> = toml::from_str(&std::fs::read_to_string(path)?)?;
for (key, value) in toml.iter() {
if value.enabled == true || all_on || flags.contains(key) {
println!("cargo:rustc-cfg={}", key);
}
}
Ok(())
}

4
features.toml Normal file
View File

@ -0,0 +1,4 @@
[hintsv1]
description = "Adding hints based upon error states in the syntax highlighter"
enabled = false

View File

@ -1,4 +1,3 @@
use crate::commands::autoview;
use crate::commands::classified::{ use crate::commands::classified::{
ClassifiedCommand, ClassifiedInputStream, ClassifiedPipeline, ExternalCommand, InternalCommand, ClassifiedCommand, ClassifiedInputStream, ClassifiedPipeline, ExternalCommand, InternalCommand,
StreamNext, StreamNext,
@ -13,7 +12,12 @@ pub(crate) use crate::errors::ShellError;
use crate::fuzzysearch::{interactive_fuzzy_search, SelectionResult}; use crate::fuzzysearch::{interactive_fuzzy_search, SelectionResult};
use crate::git::current_branch; use crate::git::current_branch;
use crate::parser::registry::Signature; use crate::parser::registry::Signature;
use crate::parser::{hir, CallNode, Pipeline, PipelineElement, TokenNode}; use crate::parser::{
hir,
hir::syntax_shape::{expand_syntax, PipelineShape},
hir::{expand_external_tokens::expand_external_tokens, tokens_iterator::TokensIterator},
TokenNode,
};
use crate::prelude::*; use crate::prelude::*;
use log::{debug, trace}; use log::{debug, trace};
@ -24,7 +28,7 @@ use std::error::Error;
use std::io::{BufRead, BufReader, Write}; use std::io::{BufRead, BufReader, Write};
use std::iter::Iterator; use std::iter::Iterator;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::Ordering;
#[derive(Debug)] #[derive(Debug)]
pub enum MaybeOwned<'a, T> { pub enum MaybeOwned<'a, T> {
@ -75,7 +79,7 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel
let name = params.name.clone(); let name = params.name.clone();
let fname = fname.to_string(); let fname = fname.to_string();
if context.has_command(&name) { if let Some(_) = context.get_command(&name) {
trace!("plugin {:?} already loaded.", &name); trace!("plugin {:?} already loaded.", &name);
} else { } else {
if params.is_filter { if params.is_filter {
@ -94,11 +98,17 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel
}, },
Err(e) => { Err(e) => {
trace!("incompatible plugin {:?}", input); trace!("incompatible plugin {:?}", input);
Err(ShellError::string(format!("Error: {:?}", e))) Err(ShellError::untagged_runtime_error(format!(
"Error: {:?}",
e
)))
} }
} }
} }
Err(e) => Err(ShellError::string(format!("Error: {:?}", e))), Err(e) => Err(ShellError::untagged_runtime_error(format!(
"Error: {:?}",
e
))),
}; };
let _ = child.wait(); let _ = child.wait();
@ -315,6 +325,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
)]); )]);
} }
} }
let _ = load_plugins(&mut context); let _ = load_plugins(&mut context);
let config = Config::builder().color_mode(ColorMode::Forced).build(); let config = Config::builder().color_mode(ColorMode::Forced).build();
@ -328,24 +339,21 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
// we are ok if history does not exist // we are ok if history does not exist
let _ = rl.load_history(&History::path()); let _ = rl.load_history(&History::path());
let ctrl_c = Arc::new(AtomicBool::new(false)); let cc = context.ctrl_c.clone();
let cc = ctrl_c.clone();
ctrlc::set_handler(move || { ctrlc::set_handler(move || {
cc.store(true, Ordering::SeqCst); cc.store(true, Ordering::SeqCst);
}) })
.expect("Error setting Ctrl-C handler"); .expect("Error setting Ctrl-C handler");
let mut ctrlcbreak = false; let mut ctrlcbreak = false;
loop { loop {
if ctrl_c.load(Ordering::SeqCst) { if context.ctrl_c.load(Ordering::SeqCst) {
ctrl_c.store(false, Ordering::SeqCst); context.ctrl_c.store(false, Ordering::SeqCst);
continue; continue;
} }
let cwd = context.shell_manager.path(); let cwd = context.shell_manager.path();
rl.set_helper(Some(crate::shell::Helper::new( rl.set_helper(Some(crate::shell::Helper::new(context.clone())));
context.shell_manager.clone(),
)));
let edit_mode = config::config(Tag::unknown())? let edit_mode = config::config(Tag::unknown())?
.get("edit_mode") .get("edit_mode")
@ -429,21 +437,11 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
} }
} }
LineResult::Error(mut line, err) => { LineResult::Error(line, err) => {
rl.add_history_entry(line.clone()); rl.add_history_entry(line.clone());
let diag = err.to_diagnostic();
context.with_host(|host| { context.with_host(|host| {
let writer = host.err_termcolor(); print_err(err, host, &Text::from(line));
line.push_str(" ");
let files = crate::parser::Files::new(line);
let _ = std::panic::catch_unwind(move || {
let _ = language_reporting::emit(
&mut writer.lock(),
&files,
&diag,
&language_reporting::DefaultConfig,
);
});
}) })
} }
@ -460,6 +458,14 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
Ok(()) Ok(())
} }
fn chomp_newline(s: &str) -> &str {
if s.ends_with('\n') {
&s[..s.len() - 1]
} else {
s
}
}
enum LineResult { enum LineResult {
Success(String), Success(String),
Error(String, ShellError), Error(String, ShellError),
@ -472,9 +478,11 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
Ok(line) if line.trim() == "" => LineResult::Success(line.clone()), Ok(line) if line.trim() == "" => LineResult::Success(line.clone()),
Ok(line) => { Ok(line) => {
let result = match crate::parser::parse(&line, uuid::Uuid::nil()) { let line = chomp_newline(line);
let result = match crate::parser::parse(&line) {
Err(err) => { Err(err) => {
return LineResult::Error(line.clone(), err); return LineResult::Error(line.to_string(), err);
} }
Ok(val) => val, Ok(val) => val,
@ -485,7 +493,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
let mut pipeline = match classify_pipeline(&result, ctx, &Text::from(line)) { let mut pipeline = match classify_pipeline(&result, ctx, &Text::from(line)) {
Ok(pipeline) => pipeline, Ok(pipeline) => pipeline,
Err(err) => return LineResult::Error(line.clone(), err), Err(err) => return LineResult::Error(line.to_string(), err),
}; };
match pipeline.commands.last() { match pipeline.commands.last() {
@ -493,7 +501,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
_ => pipeline _ => pipeline
.commands .commands
.push(ClassifiedCommand::Internal(InternalCommand { .push(ClassifiedCommand::Internal(InternalCommand {
command: whole_stream_command(autoview::Autoview), name: "autoview".to_string(),
name_tag: Tag::unknown(), name_tag: Tag::unknown(),
args: hir::Call::new( args: hir::Call::new(
Box::new(hir::Expression::synthetic_string("autoview")), Box::new(hir::Expression::synthetic_string("autoview")),
@ -515,16 +523,24 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
input = match (item, next) { input = match (item, next) {
(None, _) => break, (None, _) => break,
(Some(ClassifiedCommand::Dynamic(_)), _)
| (_, Some(ClassifiedCommand::Dynamic(_))) => {
return LineResult::Error(
line.to_string(),
ShellError::unimplemented("Dynamic commands"),
)
}
(Some(ClassifiedCommand::Expr(_)), _) => { (Some(ClassifiedCommand::Expr(_)), _) => {
return LineResult::Error( return LineResult::Error(
line.clone(), line.to_string(),
ShellError::unimplemented("Expression-only commands"), ShellError::unimplemented("Expression-only commands"),
) )
} }
(_, Some(ClassifiedCommand::Expr(_))) => { (_, Some(ClassifiedCommand::Expr(_))) => {
return LineResult::Error( return LineResult::Error(
line.clone(), line.to_string(),
ShellError::unimplemented("Expression-only commands"), ShellError::unimplemented("Expression-only commands"),
) )
} }
@ -532,31 +548,46 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
( (
Some(ClassifiedCommand::Internal(left)), Some(ClassifiedCommand::Internal(left)),
Some(ClassifiedCommand::External(_)), Some(ClassifiedCommand::External(_)),
) => match left ) => match left.run(ctx, input, Text::from(line), is_first_command) {
.run(ctx, input, Text::from(line), is_first_command)
.await
{
Ok(val) => ClassifiedInputStream::from_input_stream(val), Ok(val) => ClassifiedInputStream::from_input_stream(val),
Err(err) => return LineResult::Error(line.clone(), err), Err(err) => return LineResult::Error(line.to_string(), err),
}, },
(Some(ClassifiedCommand::Internal(left)), Some(_)) => { (Some(ClassifiedCommand::Internal(left)), Some(_)) => {
match left match left.run(ctx, input, Text::from(line), is_first_command) {
.run(ctx, input, Text::from(line), is_first_command)
.await
{
Ok(val) => ClassifiedInputStream::from_input_stream(val), Ok(val) => ClassifiedInputStream::from_input_stream(val),
Err(err) => return LineResult::Error(line.clone(), err), Err(err) => return LineResult::Error(line.to_string(), err),
} }
} }
(Some(ClassifiedCommand::Internal(left)), None) => { (Some(ClassifiedCommand::Internal(left)), None) => {
match left match left.run(ctx, input, Text::from(line), is_first_command) {
.run(ctx, input, Text::from(line), is_first_command) Ok(val) => {
.await use futures::stream::TryStreamExt;
{
Ok(val) => ClassifiedInputStream::from_input_stream(val), let mut output_stream: OutputStream = val.into();
Err(err) => return LineResult::Error(line.clone(), err), loop {
match output_stream.try_next().await {
Ok(Some(ReturnSuccess::Value(Tagged {
item: Value::Error(e),
..
}))) => {
return LineResult::Error(line.to_string(), e);
}
Ok(Some(_item)) => {
if ctx.ctrl_c.load(Ordering::SeqCst) {
break;
}
}
_ => {
break;
}
}
}
return LineResult::Success(line.to_string());
}
Err(err) => return LineResult::Error(line.to_string(), err),
} }
} }
@ -565,20 +596,20 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
Some(ClassifiedCommand::External(_)), Some(ClassifiedCommand::External(_)),
) => match left.run(ctx, input, StreamNext::External).await { ) => match left.run(ctx, input, StreamNext::External).await {
Ok(val) => val, Ok(val) => val,
Err(err) => return LineResult::Error(line.clone(), err), Err(err) => return LineResult::Error(line.to_string(), err),
}, },
(Some(ClassifiedCommand::External(left)), Some(_)) => { (Some(ClassifiedCommand::External(left)), Some(_)) => {
match left.run(ctx, input, StreamNext::Internal).await { match left.run(ctx, input, StreamNext::Internal).await {
Ok(val) => val, Ok(val) => val,
Err(err) => return LineResult::Error(line.clone(), err), Err(err) => return LineResult::Error(line.to_string(), err),
} }
} }
(Some(ClassifiedCommand::External(left)), None) => { (Some(ClassifiedCommand::External(left)), None) => {
match left.run(ctx, input, StreamNext::Last).await { match left.run(ctx, input, StreamNext::Last).await {
Ok(val) => val, Ok(val) => val,
Err(err) => return LineResult::Error(line.clone(), err), Err(err) => return LineResult::Error(line.to_string(), err),
} }
} }
}; };
@ -586,7 +617,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
is_first_command = false; is_first_command = false;
} }
LineResult::Success(line.clone()) LineResult::Success(line.to_string())
} }
Err(ReadlineError::Interrupted) => LineResult::CtrlC, Err(ReadlineError::Interrupted) => LineResult::CtrlC,
Err(ReadlineError::Eof) => LineResult::Break, Err(ReadlineError::Eof) => LineResult::Break,
@ -602,95 +633,52 @@ fn classify_pipeline(
context: &Context, context: &Context,
source: &Text, source: &Text,
) -> Result<ClassifiedPipeline, ShellError> { ) -> Result<ClassifiedPipeline, ShellError> {
let pipeline = pipeline.as_pipeline()?; let mut pipeline_list = vec![pipeline.clone()];
let mut iterator = TokensIterator::all(&mut pipeline_list, pipeline.span());
let Pipeline { parts, .. } = pipeline; expand_syntax(
&PipelineShape,
let commands: Result<Vec<_>, ShellError> = parts &mut iterator,
.iter() &context.expand_context(source, pipeline.span()),
.map(|item| classify_command(&item, context, &source)) )
.collect();
Ok(ClassifiedPipeline {
commands: commands?,
})
}
fn classify_command(
command: &PipelineElement,
context: &Context,
source: &Text,
) -> Result<ClassifiedCommand, ShellError> {
let call = command.call();
match call {
// If the command starts with `^`, treat it as an external command no matter what
call if call.head().is_external() => {
let name_tag = call.head().expect_external();
let name = name_tag.slice(source);
Ok(external_command(call, source, name.tagged(name_tag)))
}
// Otherwise, if the command is a bare word, we'll need to triage it
call if call.head().is_bare() => {
let head = call.head();
let name = head.source(source);
match context.has_command(name) {
// if the command is in the registry, it's an internal command
true => {
let command = context.get_command(name);
let config = command.signature();
trace!(target: "nu::build_pipeline", "classifying {:?}", config);
let args: hir::Call = config.parse_args(call, &context, source)?;
trace!(target: "nu::build_pipeline", "args :: {}", args.debug(source));
Ok(ClassifiedCommand::Internal(InternalCommand {
command,
name_tag: head.tag(),
args,
}))
}
// otherwise, it's an external command
false => Ok(external_command(call, source, name.tagged(head.tag()))),
}
}
// If the command is something else (like a number or a variable), that is currently unsupported.
// We might support `$somevar` as a curried command in the future.
call => Err(ShellError::invalid_command(call.head().tag())),
}
} }
// Classify this command as an external command, which doesn't give special meaning // Classify this command as an external command, which doesn't give special meaning
// to nu syntactic constructs, and passes all arguments to the external command as // to nu syntactic constructs, and passes all arguments to the external command as
// strings. // strings.
fn external_command( pub(crate) fn external_command(
call: &Tagged<CallNode>, tokens: &mut TokensIterator,
source: &Text, source: &Text,
name: Tagged<&str>, name: Tagged<&str>,
) -> ClassifiedCommand { ) -> Result<ClassifiedCommand, ShellError> {
let arg_list_strings: Vec<Tagged<String>> = match call.children() { let arg_list_strings = expand_external_tokens(tokens, source)?;
Some(args) => args
Ok(ClassifiedCommand::External(ExternalCommand {
name: name.to_string(),
name_tag: name.tag(),
args: arg_list_strings
.iter() .iter()
.filter_map(|i| match i { .map(|x| Tagged {
TokenNode::Whitespace(_) => None, tag: x.span.into(),
other => Some(other.as_external_arg(source).tagged(other.tag())), item: x.item.clone(),
}) })
.collect(), .collect(),
None => vec![], }))
}; }
let (name, tag) = name.into_parts(); pub fn print_err(err: ShellError, host: &dyn Host, source: &Text) {
let diag = err.to_diagnostic();
ClassifiedCommand::External(ExternalCommand {
name: name.to_string(), let writer = host.err_termcolor();
name_tag: tag, let mut source = source.to_string();
args: arg_list_strings, source.push_str(" ");
}) let files = crate::parser::Files::new(source);
let _ = std::panic::catch_unwind(move || {
let _ = language_reporting::emit(
&mut writer.lock(),
&files,
&diag,
&language_reporting::DefaultConfig,
);
});
} }

View File

@ -76,6 +76,7 @@ pub(crate) use command::{
UnevaluatedCallInfo, WholeStreamCommand, UnevaluatedCallInfo, WholeStreamCommand,
}; };
pub(crate) use classified::ClassifiedCommand;
pub(crate) use config::Config; pub(crate) use config::Config;
pub(crate) use cp::Cpy; pub(crate) use cp::Cpy;
pub(crate) use date::Date; pub(crate) use date::Date;

View File

@ -1,9 +1,14 @@
use crate::commands::{RawCommandArgs, WholeStreamCommand}; use crate::commands::{RawCommandArgs, WholeStreamCommand};
use crate::errors::ShellError; use crate::errors::ShellError;
use crate::parser::hir::{Expression, NamedArguments};
use crate::prelude::*; use crate::prelude::*;
use futures::stream::TryStreamExt;
use std::sync::atomic::Ordering;
pub struct Autoview; pub struct Autoview;
const STREAM_PAGE_SIZE: u64 = 50;
#[derive(Deserialize)] #[derive(Deserialize)]
pub struct AutoviewArgs {} pub struct AutoviewArgs {}
@ -31,61 +36,132 @@ impl WholeStreamCommand for Autoview {
pub fn autoview( pub fn autoview(
AutoviewArgs {}: AutoviewArgs, AutoviewArgs {}: AutoviewArgs,
mut context: RunnableContext, context: RunnableContext,
raw: RawCommandArgs, raw: RawCommandArgs,
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
Ok(OutputStream::new(async_stream! { let binary = context.get_command("binaryview");
let input = context.input.drain_vec().await; let text = context.get_command("textview");
let table = context.get_command("table");
if input.len() > 0 { Ok(OutputStream::new(async_stream! {
if let Tagged { let mut output_stream: OutputStream = context.input.into();
item: Value::Primitive(Primitive::Binary(_)),
.. match output_stream.try_next().await {
} = input[0usize] Ok(Some(x)) => {
{ match output_stream.try_next().await {
let binary = context.get_command("binaryview"); Ok(Some(y)) => {
if let Some(binary) = binary { let ctrl_c = context.ctrl_c.clone();
let result = binary.run(raw.with_input(input), &context.commands, false); let stream = async_stream! {
result.collect::<Vec<_>>().await; yield Ok(x);
} else { yield Ok(y);
for i in input {
match i.item { loop {
Value::Primitive(Primitive::Binary(b)) => { match output_stream.try_next().await {
use pretty_hex::*; Ok(Some(z)) => {
println!("{:?}", b.hex_dump()); if ctrl_c.load(Ordering::SeqCst) {
break;
}
yield Ok(z);
}
_ => break,
}
}
};
if let Some(table) = table {
let mut new_output_stream: OutputStream = stream.to_output_stream();
let mut finished = false;
let mut current_idx = 0;
loop {
let mut new_input = VecDeque::new();
for _ in 0..STREAM_PAGE_SIZE {
match new_output_stream.try_next().await {
Ok(Some(a)) => {
if let ReturnSuccess::Value(v) = a {
new_input.push_back(v);
}
}
_ => {
finished = true;
break;
}
}
}
let raw = raw.clone();
let mut command_args = raw.with_input(new_input.into());
let mut named_args = NamedArguments::new();
named_args.insert_optional("start_number", Some(Expression::number(current_idx, Tag::unknown())));
command_args.call_info.args.named = Some(named_args);
let result = table.run(command_args, &context.commands, false);
result.collect::<Vec<_>>().await;
if finished {
break;
} else {
current_idx += STREAM_PAGE_SIZE;
}
} }
_ => {}
} }
} }
}; _ => {
} else if is_single_anchored_text_value(&input) { if let ReturnSuccess::Value(x) = x {
let text = context.get_command("textview"); match x {
if let Some(text) = text { Tagged {
let result = text.run(raw.with_input(input), &context.commands, false); item: Value::Primitive(Primitive::String(ref s)),
result.collect::<Vec<_>>().await; tag: Tag { anchor, span },
} else { } if anchor.is_some() => {
for i in input { if let Some(text) = text {
match i.item { let mut stream = VecDeque::new();
Value::Primitive(Primitive::String(s)) => { stream.push_back(Value::string(s).tagged(Tag { anchor, span }));
println!("{}", s); let result = text.run(raw.with_input(stream.into()), &context.commands, false);
result.collect::<Vec<_>>().await;
} else {
println!("{}", s);
}
}
Tagged {
item: Value::Primitive(Primitive::String(s)),
..
} => {
println!("{}", s);
}
Tagged { item: Value::Primitive(Primitive::Binary(ref b)), .. } => {
if let Some(binary) = binary {
let mut stream = VecDeque::new();
stream.push_back(x.clone());
let result = binary.run(raw.with_input(stream.into()), &context.commands, false);
result.collect::<Vec<_>>().await;
} else {
use pretty_hex::*;
println!("{:?}", b.hex_dump());
}
}
Tagged { item: Value::Error(e), .. } => {
yield Err(e);
}
Tagged { item: ref item, .. } => {
if let Some(table) = table {
let mut stream = VecDeque::new();
stream.push_back(x.clone());
let result = table.run(raw.with_input(stream.into()), &context.commands, false);
result.collect::<Vec<_>>().await;
} else {
println!("{:?}", item);
}
}
} }
_ => {}
} }
} }
} }
} else if is_single_text_value(&input) { }
for i in input { _ => {
match i.item { //println!("<no results>");
Value::Primitive(Primitive::String(s)) => {
println!("{}", s);
}
_ => {}
}
}
} else {
let table = context.expect_command("table");
let result = table.run(raw.with_input(input), &context.commands, false);
result.collect::<Vec<_>>().await;
} }
} }
@ -95,34 +171,3 @@ pub fn autoview(
} }
})) }))
} }
fn is_single_text_value(input: &Vec<Tagged<Value>>) -> bool {
if input.len() != 1 {
return false;
}
if let Tagged {
item: Value::Primitive(Primitive::String(_)),
..
} = input[0]
{
true
} else {
false
}
}
fn is_single_anchored_text_value(input: &Vec<Tagged<Value>>) -> bool {
if input.len() != 1 {
return false;
}
if let Tagged {
item: Value::Primitive(Primitive::String(_)),
tag: Tag { anchor, .. },
} = input[0]
{
anchor != uuid::Uuid::nil()
} else {
false
}
}

View File

@ -1,12 +1,11 @@
use crate::commands::Command;
use crate::parser::{hir, TokenNode}; use crate::parser::{hir, TokenNode};
use crate::prelude::*; use crate::prelude::*;
use bytes::{BufMut, BytesMut}; use bytes::{BufMut, BytesMut};
use derive_new::new;
use futures::stream::StreamExt; use futures::stream::StreamExt;
use futures_codec::{Decoder, Encoder, Framed}; use futures_codec::{Decoder, Encoder, Framed};
use log::{log_enabled, trace}; use log::{log_enabled, trace};
use std::io::{Error, ErrorKind}; use std::io::{Error, ErrorKind};
use std::sync::Arc;
use subprocess::Exec; use subprocess::Exec;
/// A simple `Codec` implementation that splits up data into lines. /// A simple `Codec` implementation that splits up data into lines.
@ -73,25 +72,35 @@ impl ClassifiedInputStream {
} }
} }
#[derive(Debug)]
pub(crate) struct ClassifiedPipeline { pub(crate) struct ClassifiedPipeline {
pub(crate) commands: Vec<ClassifiedCommand>, pub(crate) commands: Vec<ClassifiedCommand>,
} }
#[derive(Debug, Eq, PartialEq)]
pub(crate) enum ClassifiedCommand { pub(crate) enum ClassifiedCommand {
#[allow(unused)] #[allow(unused)]
Expr(TokenNode), Expr(TokenNode),
Internal(InternalCommand), Internal(InternalCommand),
#[allow(unused)]
Dynamic(hir::Call),
External(ExternalCommand), External(ExternalCommand),
} }
#[derive(new, Debug, Eq, PartialEq)]
pub(crate) struct InternalCommand { pub(crate) struct InternalCommand {
pub(crate) command: Arc<Command>, pub(crate) name: String,
pub(crate) name_tag: Tag, pub(crate) name_tag: Tag,
pub(crate) args: hir::Call, pub(crate) args: hir::Call,
} }
#[derive(new, Debug, Eq, PartialEq)]
pub(crate) struct DynamicCommand {
pub(crate) args: hir::Call,
}
impl InternalCommand { impl InternalCommand {
pub(crate) async fn run( pub(crate) fn run(
self, self,
context: &mut Context, context: &mut Context,
input: ClassifiedInputStream, input: ClassifiedInputStream,
@ -100,91 +109,99 @@ impl InternalCommand {
) -> Result<InputStream, ShellError> { ) -> Result<InputStream, ShellError> {
if log_enabled!(log::Level::Trace) { if log_enabled!(log::Level::Trace) {
trace!(target: "nu::run::internal", "->"); trace!(target: "nu::run::internal", "->");
trace!(target: "nu::run::internal", "{}", self.command.name()); trace!(target: "nu::run::internal", "{}", self.name);
trace!(target: "nu::run::internal", "{}", self.args.debug(&source)); trace!(target: "nu::run::internal", "{}", self.args.debug(&source));
} }
let objects: InputStream = let objects: InputStream =
trace_stream!(target: "nu::trace_stream::internal", "input" = input.objects); trace_stream!(target: "nu::trace_stream::internal", "input" = input.objects);
let result = context.run_command( let command = context.expect_command(&self.name);
self.command,
self.name_tag.clone(), let result = {
context.source_map.clone(), context.run_command(
self.args, command,
&source, self.name_tag.clone(),
objects, self.args,
is_first_command, &source,
); objects,
is_first_command,
)
};
let result = trace_out_stream!(target: "nu::trace_stream::internal", source: &source, "output" = result); let result = trace_out_stream!(target: "nu::trace_stream::internal", source: &source, "output" = result);
let mut result = result.values; let mut result = result.values;
let mut context = context.clone();
let mut stream = VecDeque::new(); let stream = async_stream! {
while let Some(item) = result.next().await { while let Some(item) = result.next().await {
match item? { match item {
ReturnSuccess::Action(action) => match action { Ok(ReturnSuccess::Action(action)) => match action {
CommandAction::ChangePath(path) => { CommandAction::ChangePath(path) => {
context.shell_manager.set_path(path); context.shell_manager.set_path(path);
} }
CommandAction::AddAnchorLocation(uuid, anchor_location) => { CommandAction::Exit => std::process::exit(0), // TODO: save history.txt
context.add_anchor_location(uuid, anchor_location); CommandAction::EnterHelpShell(value) => {
} match value {
CommandAction::Exit => std::process::exit(0), // TODO: save history.txt Tagged {
CommandAction::EnterHelpShell(value) => { item: Value::Primitive(Primitive::String(cmd)),
match value { tag,
Tagged { } => {
item: Value::Primitive(Primitive::String(cmd)), context.shell_manager.insert_at_current(Box::new(
tag, HelpShell::for_command(
} => { Value::string(cmd).tagged(tag),
context.shell_manager.insert_at_current(Box::new( &context.registry(),
HelpShell::for_command( ).unwrap(),
Value::string(cmd).tagged(tag), ));
&context.registry(), }
)?, _ => {
)); context.shell_manager.insert_at_current(Box::new(
} HelpShell::index(&context.registry()).unwrap(),
_ => { ));
context.shell_manager.insert_at_current(Box::new( }
HelpShell::index(&context.registry())?,
));
} }
} }
} CommandAction::EnterValueShell(value) => {
CommandAction::EnterValueShell(value) => { context
context .shell_manager
.shell_manager .insert_at_current(Box::new(ValueShell::new(value)));
.insert_at_current(Box::new(ValueShell::new(value)));
}
CommandAction::EnterShell(location) => {
context.shell_manager.insert_at_current(Box::new(
FilesystemShell::with_location(location, context.registry().clone())?,
));
}
CommandAction::PreviousShell => {
context.shell_manager.prev();
}
CommandAction::NextShell => {
context.shell_manager.next();
}
CommandAction::LeaveShell => {
context.shell_manager.remove_at_current();
if context.shell_manager.is_empty() {
std::process::exit(0); // TODO: save history.txt
} }
} CommandAction::EnterShell(location) => {
}, context.shell_manager.insert_at_current(Box::new(
FilesystemShell::with_location(location, context.registry().clone()).unwrap(),
));
}
CommandAction::PreviousShell => {
context.shell_manager.prev();
}
CommandAction::NextShell => {
context.shell_manager.next();
}
CommandAction::LeaveShell => {
context.shell_manager.remove_at_current();
if context.shell_manager.is_empty() {
std::process::exit(0); // TODO: save history.txt
}
}
},
ReturnSuccess::Value(v) => { Ok(ReturnSuccess::Value(v)) => {
stream.push_back(v); yield Ok(v);
}
Err(x) => {
yield Ok(Value::Error(x).tagged_unknown());
break;
}
} }
} }
} };
Ok(stream.into()) Ok(stream.to_input_stream())
} }
} }
#[derive(Debug, Eq, PartialEq)]
pub(crate) struct ExternalCommand { pub(crate) struct ExternalCommand {
pub(crate) name: String, pub(crate) name: String,
@ -192,6 +209,7 @@ pub(crate) struct ExternalCommand {
pub(crate) args: Vec<Tagged<String>>, pub(crate) args: Vec<Tagged<String>>,
} }
#[derive(Debug)]
pub(crate) enum StreamNext { pub(crate) enum StreamNext {
Last, Last,
External, External,
@ -221,6 +239,8 @@ impl ExternalCommand {
process = Exec::cmd(&self.name); process = Exec::cmd(&self.name);
trace!(target: "nu::run::external", "command = {:?}", process);
if arg_string.contains("$it") { if arg_string.contains("$it") {
let mut first = true; let mut first = true;
@ -239,7 +259,11 @@ impl ExternalCommand {
tag, tag,
)); ));
} else { } else {
return Err(ShellError::string("Error: $it needs string data")); return Err(ShellError::labeled_error(
"Error: $it needs string data",
"given something else",
name_tag,
));
} }
} }
if !first { if !first {
@ -275,6 +299,8 @@ impl ExternalCommand {
process = process.cwd(context.shell_manager.path()); process = process.cwd(context.shell_manager.path());
trace!(target: "nu::run::external", "cwd = {:?}", context.shell_manager.path());
let mut process = match stream_next { let mut process = match stream_next {
StreamNext::Last => process, StreamNext::Last => process,
StreamNext::External | StreamNext::Internal => { StreamNext::External | StreamNext::Internal => {
@ -282,43 +308,59 @@ impl ExternalCommand {
} }
}; };
trace!(target: "nu::run::external", "set up stdout pipe");
if let Some(stdin) = stdin { if let Some(stdin) = stdin {
process = process.stdin(stdin); process = process.stdin(stdin);
} }
let mut popen = process.popen()?; trace!(target: "nu::run::external", "set up stdin pipe");
trace!(target: "nu::run::external", "built process {:?}", process);
match stream_next { let popen = process.popen();
StreamNext::Last => {
let _ = popen.detach(); trace!(target: "nu::run::external", "next = {:?}", stream_next);
loop {
match popen.poll() { if let Ok(mut popen) = popen {
None => { match stream_next {
let _ = std::thread::sleep(std::time::Duration::new(0, 100000000)); StreamNext::Last => {
} let _ = popen.detach();
_ => { loop {
let _ = popen.terminate(); match popen.poll() {
break; None => {
let _ = std::thread::sleep(std::time::Duration::new(0, 100000000));
}
_ => {
let _ = popen.terminate();
break;
}
} }
} }
Ok(ClassifiedInputStream::new())
}
StreamNext::External => {
let _ = popen.detach();
let stdout = popen.stdout.take().unwrap();
Ok(ClassifiedInputStream::from_stdout(stdout))
}
StreamNext::Internal => {
let _ = popen.detach();
let stdout = popen.stdout.take().unwrap();
let file = futures::io::AllowStdIo::new(stdout);
let stream = Framed::new(file, LinesCodec {});
let stream =
stream.map(move |line| Value::string(line.unwrap()).tagged(&name_tag));
Ok(ClassifiedInputStream::from_input_stream(
stream.boxed() as BoxStream<'static, Tagged<Value>>
))
} }
Ok(ClassifiedInputStream::new())
}
StreamNext::External => {
let _ = popen.detach();
let stdout = popen.stdout.take().unwrap();
Ok(ClassifiedInputStream::from_stdout(stdout))
}
StreamNext::Internal => {
let _ = popen.detach();
let stdout = popen.stdout.take().unwrap();
let file = futures::io::AllowStdIo::new(stdout);
let stream = Framed::new(file, LinesCodec {});
let stream = stream.map(move |line| Value::string(line.unwrap()).tagged(name_tag));
Ok(ClassifiedInputStream::from_input_stream(
stream.boxed() as BoxStream<'static, Tagged<Value>>
))
} }
} else {
return Err(ShellError::labeled_error(
"Command not found",
"command not found",
name_tag,
));
} }
} }
} }

View File

@ -1,4 +1,3 @@
use crate::context::{AnchorLocation, SourceMap};
use crate::data::Value; use crate::data::Value;
use crate::errors::ShellError; use crate::errors::ShellError;
use crate::evaluate::Scope; use crate::evaluate::Scope;
@ -11,13 +10,12 @@ use serde::{Deserialize, Serialize};
use std::fmt; use std::fmt;
use std::ops::Deref; use std::ops::Deref;
use std::path::PathBuf; use std::path::PathBuf;
use uuid::Uuid; use std::sync::atomic::AtomicBool;
#[derive(Deserialize, Serialize, Debug, Clone)] #[derive(Deserialize, Serialize, Debug, Clone)]
pub struct UnevaluatedCallInfo { pub struct UnevaluatedCallInfo {
pub args: hir::Call, pub args: hir::Call,
pub source: Text, pub source: Text,
pub source_map: SourceMap,
pub name_tag: Tag, pub name_tag: Tag,
} }
@ -37,7 +35,6 @@ impl UnevaluatedCallInfo {
Ok(CallInfo { Ok(CallInfo {
args, args,
source_map: self.source_map,
name_tag: self.name_tag, name_tag: self.name_tag,
}) })
} }
@ -46,7 +43,6 @@ impl UnevaluatedCallInfo {
#[derive(Deserialize, Serialize, Debug, Clone)] #[derive(Deserialize, Serialize, Debug, Clone)]
pub struct CallInfo { pub struct CallInfo {
pub args: registry::EvaluatedArgs, pub args: registry::EvaluatedArgs,
pub source_map: SourceMap,
pub name_tag: Tag, pub name_tag: Tag,
} }
@ -62,7 +58,7 @@ impl CallInfo {
args: T::deserialize(&mut deserializer)?, args: T::deserialize(&mut deserializer)?,
context: RunnablePerItemContext { context: RunnablePerItemContext {
shell_manager: shell_manager.clone(), shell_manager: shell_manager.clone(),
name: self.name_tag, name: self.name_tag.clone(),
}, },
callback, callback,
}) })
@ -73,6 +69,7 @@ impl CallInfo {
#[get = "pub(crate)"] #[get = "pub(crate)"]
pub struct CommandArgs { pub struct CommandArgs {
pub host: Arc<Mutex<dyn Host>>, pub host: Arc<Mutex<dyn Host>>,
pub ctrl_c: Arc<AtomicBool>,
pub shell_manager: ShellManager, pub shell_manager: ShellManager,
pub call_info: UnevaluatedCallInfo, pub call_info: UnevaluatedCallInfo,
pub input: InputStream, pub input: InputStream,
@ -82,6 +79,7 @@ pub struct CommandArgs {
#[get = "pub(crate)"] #[get = "pub(crate)"]
pub struct RawCommandArgs { pub struct RawCommandArgs {
pub host: Arc<Mutex<dyn Host>>, pub host: Arc<Mutex<dyn Host>>,
pub ctrl_c: Arc<AtomicBool>,
pub shell_manager: ShellManager, pub shell_manager: ShellManager,
pub call_info: UnevaluatedCallInfo, pub call_info: UnevaluatedCallInfo,
} }
@ -90,6 +88,7 @@ impl RawCommandArgs {
pub fn with_input(self, input: Vec<Tagged<Value>>) -> CommandArgs { pub fn with_input(self, input: Vec<Tagged<Value>>) -> CommandArgs {
CommandArgs { CommandArgs {
host: self.host, host: self.host,
ctrl_c: self.ctrl_c,
shell_manager: self.shell_manager, shell_manager: self.shell_manager,
call_info: self.call_info, call_info: self.call_info,
input: input.into(), input: input.into(),
@ -109,12 +108,14 @@ impl CommandArgs {
registry: &registry::CommandRegistry, registry: &registry::CommandRegistry,
) -> Result<EvaluatedWholeStreamCommandArgs, ShellError> { ) -> Result<EvaluatedWholeStreamCommandArgs, ShellError> {
let host = self.host.clone(); let host = self.host.clone();
let ctrl_c = self.ctrl_c.clone();
let shell_manager = self.shell_manager.clone(); let shell_manager = self.shell_manager.clone();
let input = self.input; let input = self.input;
let call_info = self.call_info.evaluate(registry, &Scope::empty())?; let call_info = self.call_info.evaluate(registry, &Scope::empty())?;
Ok(EvaluatedWholeStreamCommandArgs::new( Ok(EvaluatedWholeStreamCommandArgs::new(
host, host,
ctrl_c,
shell_manager, shell_manager,
call_info, call_info,
input, input,
@ -127,12 +128,13 @@ impl CommandArgs {
callback: fn(T, RunnableContext) -> Result<OutputStream, ShellError>, callback: fn(T, RunnableContext) -> Result<OutputStream, ShellError>,
) -> Result<RunnableArgs<T>, ShellError> { ) -> Result<RunnableArgs<T>, ShellError> {
let shell_manager = self.shell_manager.clone(); let shell_manager = self.shell_manager.clone();
let source_map = self.call_info.source_map.clone();
let host = self.host.clone(); let host = self.host.clone();
let ctrl_c = self.ctrl_c.clone();
let args = self.evaluate_once(registry)?; let args = self.evaluate_once(registry)?;
let call_info = args.call_info.clone();
let (input, args) = args.split(); let (input, args) = args.split();
let name_tag = args.call_info.name_tag; let name_tag = args.call_info.name_tag;
let mut deserializer = ConfigDeserializer::from_call_info(args.call_info); let mut deserializer = ConfigDeserializer::from_call_info(call_info);
Ok(RunnableArgs { Ok(RunnableArgs {
args: T::deserialize(&mut deserializer)?, args: T::deserialize(&mut deserializer)?,
@ -141,8 +143,8 @@ impl CommandArgs {
commands: registry.clone(), commands: registry.clone(),
shell_manager, shell_manager,
name: name_tag, name: name_tag,
source_map,
host, host,
ctrl_c,
}, },
callback, callback,
}) })
@ -155,17 +157,20 @@ impl CommandArgs {
) -> Result<RunnableRawArgs<T>, ShellError> { ) -> Result<RunnableRawArgs<T>, ShellError> {
let raw_args = RawCommandArgs { let raw_args = RawCommandArgs {
host: self.host.clone(), host: self.host.clone(),
ctrl_c: self.ctrl_c.clone(),
shell_manager: self.shell_manager.clone(), shell_manager: self.shell_manager.clone(),
call_info: self.call_info.clone(), call_info: self.call_info.clone(),
}; };
let shell_manager = self.shell_manager.clone(); let shell_manager = self.shell_manager.clone();
let source_map = self.call_info.source_map.clone();
let host = self.host.clone(); let host = self.host.clone();
let ctrl_c = self.ctrl_c.clone();
let args = self.evaluate_once(registry)?; let args = self.evaluate_once(registry)?;
let call_info = args.call_info.clone();
let (input, args) = args.split(); let (input, args) = args.split();
let name_tag = args.call_info.name_tag; let name_tag = args.call_info.name_tag;
let mut deserializer = ConfigDeserializer::from_call_info(args.call_info); let mut deserializer = ConfigDeserializer::from_call_info(call_info.clone());
Ok(RunnableRawArgs { Ok(RunnableRawArgs {
args: T::deserialize(&mut deserializer)?, args: T::deserialize(&mut deserializer)?,
@ -174,8 +179,8 @@ impl CommandArgs {
commands: registry.clone(), commands: registry.clone(),
shell_manager, shell_manager,
name: name_tag, name: name_tag,
source_map,
host, host,
ctrl_c,
}, },
raw_args, raw_args,
callback, callback,
@ -198,18 +203,12 @@ pub struct RunnableContext {
pub input: InputStream, pub input: InputStream,
pub shell_manager: ShellManager, pub shell_manager: ShellManager,
pub host: Arc<Mutex<dyn Host>>, pub host: Arc<Mutex<dyn Host>>,
pub ctrl_c: Arc<AtomicBool>,
pub commands: CommandRegistry, pub commands: CommandRegistry,
pub source_map: SourceMap,
pub name: Tag, pub name: Tag,
} }
impl RunnableContext { impl RunnableContext {
pub fn expect_command(&self, name: &str) -> Arc<Command> {
self.commands
.get_command(name)
.expect(&format!("Expected command {}", name))
}
pub fn get_command(&self, name: &str) -> Option<Arc<Command>> { pub fn get_command(&self, name: &str) -> Option<Arc<Command>> {
self.commands.get_command(name) self.commands.get_command(name)
} }
@ -270,6 +269,7 @@ impl Deref for EvaluatedWholeStreamCommandArgs {
impl EvaluatedWholeStreamCommandArgs { impl EvaluatedWholeStreamCommandArgs {
pub fn new( pub fn new(
host: Arc<Mutex<dyn Host>>, host: Arc<Mutex<dyn Host>>,
ctrl_c: Arc<AtomicBool>,
shell_manager: ShellManager, shell_manager: ShellManager,
call_info: CallInfo, call_info: CallInfo,
input: impl Into<InputStream>, input: impl Into<InputStream>,
@ -277,6 +277,7 @@ impl EvaluatedWholeStreamCommandArgs {
EvaluatedWholeStreamCommandArgs { EvaluatedWholeStreamCommandArgs {
args: EvaluatedCommandArgs { args: EvaluatedCommandArgs {
host, host,
ctrl_c,
shell_manager, shell_manager,
call_info, call_info,
}, },
@ -285,7 +286,7 @@ impl EvaluatedWholeStreamCommandArgs {
} }
pub fn name_tag(&self) -> Tag { pub fn name_tag(&self) -> Tag {
self.args.call_info.name_tag self.args.call_info.name_tag.clone()
} }
pub fn parts(self) -> (InputStream, registry::EvaluatedArgs) { pub fn parts(self) -> (InputStream, registry::EvaluatedArgs) {
@ -317,12 +318,14 @@ impl Deref for EvaluatedFilterCommandArgs {
impl EvaluatedFilterCommandArgs { impl EvaluatedFilterCommandArgs {
pub fn new( pub fn new(
host: Arc<Mutex<dyn Host>>, host: Arc<Mutex<dyn Host>>,
ctrl_c: Arc<AtomicBool>,
shell_manager: ShellManager, shell_manager: ShellManager,
call_info: CallInfo, call_info: CallInfo,
) -> EvaluatedFilterCommandArgs { ) -> EvaluatedFilterCommandArgs {
EvaluatedFilterCommandArgs { EvaluatedFilterCommandArgs {
args: EvaluatedCommandArgs { args: EvaluatedCommandArgs {
host, host,
ctrl_c,
shell_manager, shell_manager,
call_info, call_info,
}, },
@ -334,6 +337,7 @@ impl EvaluatedFilterCommandArgs {
#[get = "pub(crate)"] #[get = "pub(crate)"]
pub struct EvaluatedCommandArgs { pub struct EvaluatedCommandArgs {
pub host: Arc<Mutex<dyn Host>>, pub host: Arc<Mutex<dyn Host>>,
pub ctrl_c: Arc<AtomicBool>,
pub shell_manager: ShellManager, pub shell_manager: ShellManager,
pub call_info: CallInfo, pub call_info: CallInfo,
} }
@ -376,7 +380,6 @@ impl EvaluatedCommandArgs {
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
pub enum CommandAction { pub enum CommandAction {
ChangePath(String), ChangePath(String),
AddAnchorLocation(Uuid, AnchorLocation),
Exit, Exit,
EnterShell(String), EnterShell(String),
EnterValueShell(Tagged<Value>), EnterValueShell(Tagged<Value>),
@ -390,9 +393,6 @@ impl ToDebug for CommandAction {
fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result { fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result {
match self { match self {
CommandAction::ChangePath(s) => write!(f, "action:change-path={}", s), CommandAction::ChangePath(s) => write!(f, "action:change-path={}", s),
CommandAction::AddAnchorLocation(u, source) => {
write!(f, "action:add-span-source={}@{:?}", u, source)
}
CommandAction::Exit => write!(f, "action:exit"), CommandAction::Exit => write!(f, "action:exit"),
CommandAction::EnterShell(s) => write!(f, "action:enter-shell={}", s), CommandAction::EnterShell(s) => write!(f, "action:enter-shell={}", s),
CommandAction::EnterValueShell(t) => { CommandAction::EnterValueShell(t) => {
@ -507,6 +507,15 @@ pub enum Command {
PerItem(Arc<dyn PerItemCommand>), PerItem(Arc<dyn PerItemCommand>),
} }
impl std::fmt::Debug for Command {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Command::WholeStream(command) => write!(f, "WholeStream({})", command.name()),
Command::PerItem(command) => write!(f, "PerItem({})", command.name()),
}
}
}
impl Command { impl Command {
pub fn name(&self) -> &str { pub fn name(&self) -> &str {
match self { match self {
@ -555,6 +564,7 @@ impl Command {
) -> OutputStream { ) -> OutputStream {
let raw_args = RawCommandArgs { let raw_args = RawCommandArgs {
host: args.host, host: args.host,
ctrl_c: args.ctrl_c,
shell_manager: args.shell_manager, shell_manager: args.shell_manager,
call_info: args.call_info, call_info: args.call_info,
}; };
@ -624,6 +634,7 @@ impl WholeStreamCommand for FnFilterCommand {
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
let CommandArgs { let CommandArgs {
host, host,
ctrl_c,
shell_manager, shell_manager,
call_info, call_info,
input, input,
@ -641,8 +652,12 @@ impl WholeStreamCommand for FnFilterCommand {
Ok(args) => args, Ok(args) => args,
}; };
let args = let args = EvaluatedFilterCommandArgs::new(
EvaluatedFilterCommandArgs::new(host.clone(), shell_manager.clone(), call_info); host.clone(),
ctrl_c.clone(),
shell_manager.clone(),
call_info,
);
match func(args) { match func(args) {
Err(err) => return OutputStream::from(vec![Err(err)]).values, Err(err) => return OutputStream::from(vec![Err(err)]).values,

View File

@ -58,7 +58,7 @@ pub fn config(
}: ConfigArgs, }: ConfigArgs,
RunnableContext { name, .. }: RunnableContext, RunnableContext { name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
let name_span = name; let name_span = name.clone();
let configuration = if let Some(supplied) = load { let configuration = if let Some(supplied) = load {
Some(supplied.item().clone()) Some(supplied.item().clone())
@ -70,9 +70,9 @@ pub fn config(
if let Some(v) = get { if let Some(v) = get {
let key = v.to_string(); let key = v.to_string();
let value = result let value = result.get(&key).ok_or_else(|| {
.get(&key) ShellError::labeled_error(&format!("Missing key in config"), "key", v.tag())
.ok_or_else(|| ShellError::string(&format!("Missing key {} in config", key)))?; })?;
let mut results = VecDeque::new(); let mut results = VecDeque::new();
@ -120,10 +120,11 @@ pub fn config(
result.swap_remove(&key); result.swap_remove(&key);
config::write(&result, &configuration)?; config::write(&result, &configuration)?;
} else { } else {
return Err(ShellError::string(&format!( return Err(ShellError::labeled_error(
"{} does not exist in config", "{} does not exist in config",
key "key",
))); v.tag(),
));
} }
let obj = VecDeque::from_iter(vec![Value::Row(result.into()).tagged(v.tag())]); let obj = VecDeque::from_iter(vec![Value::Row(result.into()).tagged(v.tag())]);

View File

@ -39,27 +39,27 @@ where
{ {
let mut indexmap = IndexMap::new(); let mut indexmap = IndexMap::new();
indexmap.insert("year".to_string(), Value::int(dt.year()).tagged(tag)); indexmap.insert("year".to_string(), Value::int(dt.year()).tagged(&tag));
indexmap.insert("month".to_string(), Value::int(dt.month()).tagged(tag)); indexmap.insert("month".to_string(), Value::int(dt.month()).tagged(&tag));
indexmap.insert("day".to_string(), Value::int(dt.day()).tagged(tag)); indexmap.insert("day".to_string(), Value::int(dt.day()).tagged(&tag));
indexmap.insert("hour".to_string(), Value::int(dt.hour()).tagged(tag)); indexmap.insert("hour".to_string(), Value::int(dt.hour()).tagged(&tag));
indexmap.insert("minute".to_string(), Value::int(dt.minute()).tagged(tag)); indexmap.insert("minute".to_string(), Value::int(dt.minute()).tagged(&tag));
indexmap.insert("second".to_string(), Value::int(dt.second()).tagged(tag)); indexmap.insert("second".to_string(), Value::int(dt.second()).tagged(&tag));
let tz = dt.offset(); let tz = dt.offset();
indexmap.insert( indexmap.insert(
"timezone".to_string(), "timezone".to_string(),
Value::string(format!("{}", tz)).tagged(tag), Value::string(format!("{}", tz)).tagged(&tag),
); );
Value::Row(Dictionary::from(indexmap)).tagged(tag) Value::Row(Dictionary::from(indexmap)).tagged(&tag)
} }
pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> { pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?; let args = args.evaluate_once(registry)?;
let mut date_out = VecDeque::new(); let mut date_out = VecDeque::new();
let tag = args.call_info.name_tag; let tag = args.call_info.name_tag.clone();
let value = if args.has("utc") { let value = if args.has("utc") {
let utc: DateTime<Utc> = Utc::now(); let utc: DateTime<Utc> = Utc::now();

View File

@ -35,7 +35,7 @@ fn run(
_registry: &CommandRegistry, _registry: &CommandRegistry,
_raw_args: &RawCommandArgs, _raw_args: &RawCommandArgs,
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
let name = call_info.name_tag; let name = call_info.name_tag.clone();
let mut output = String::new(); let mut output = String::new();
@ -54,11 +54,10 @@ fn run(
output.push_str(&s); output.push_str(&s);
} }
_ => { _ => {
return Err(ShellError::labeled_error( return Err(ShellError::type_error(
"Expect a string from pipeline", "a string-compatible value",
"not a string-compatible value", i.tagged_type_name(),
i.tag(), ))
));
} }
} }
} }

View File

@ -15,7 +15,7 @@ impl PerItemCommand for Enter {
} }
fn signature(&self) -> registry::Signature { fn signature(&self) -> registry::Signature {
Signature::build("enter").required("location", SyntaxShape::Block) Signature::build("enter").required("location", SyntaxShape::Path)
} }
fn usage(&self) -> &str { fn usage(&self) -> &str {
@ -33,14 +33,14 @@ impl PerItemCommand for Enter {
let raw_args = raw_args.clone(); let raw_args = raw_args.clone();
match call_info.args.expect_nth(0)? { match call_info.args.expect_nth(0)? {
Tagged { Tagged {
item: Value::Primitive(Primitive::String(location)), item: Value::Primitive(Primitive::Path(location)),
.. ..
} => { } => {
let location = location.to_string(); let location_string = location.display().to_string();
let location_clone = location.to_string(); let location_clone = location_string.clone();
if location.starts_with("help") { if location.starts_with("help") {
let spec = location.split(":").collect::<Vec<&str>>(); let spec = location_string.split(":").collect::<Vec<&str>>();
let (_, command) = (spec[0], spec[1]); let (_, command) = (spec[0], spec[1]);
@ -67,7 +67,7 @@ impl PerItemCommand for Enter {
let full_path = std::path::PathBuf::from(cwd); let full_path = std::path::PathBuf::from(cwd);
let (file_extension, contents, contents_tag, anchor_location) = let (file_extension, contents, contents_tag) =
crate::commands::open::fetch( crate::commands::open::fetch(
&full_path, &full_path,
&location_clone, &location_clone,
@ -75,18 +75,9 @@ impl PerItemCommand for Enter {
) )
.await.unwrap(); .await.unwrap();
if contents_tag.anchor != uuid::Uuid::nil() {
// If we have loaded something, track its source
yield ReturnSuccess::action(CommandAction::AddAnchorLocation(
contents_tag.anchor,
anchor_location,
));
}
match contents { match contents {
Value::Primitive(Primitive::String(_)) => { Value::Primitive(Primitive::String(_)) => {
let tagged_contents = contents.tagged(contents_tag); let tagged_contents = contents.tagged(&contents_tag);
if let Some(extension) = file_extension { if let Some(extension) = file_extension {
let command_name = format!("from-{}", extension); let command_name = format!("from-{}", extension);
@ -95,6 +86,7 @@ impl PerItemCommand for Enter {
{ {
let new_args = RawCommandArgs { let new_args = RawCommandArgs {
host: raw_args.host, host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager, shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo { call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call { args: crate::parser::hir::Call {
@ -103,7 +95,6 @@ impl PerItemCommand for Enter {
named: None, named: None,
}, },
source: raw_args.call_info.source, source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag, name_tag: raw_args.call_info.name_tag,
}, },
}; };
@ -123,7 +114,7 @@ impl PerItemCommand for Enter {
yield Ok(ReturnSuccess::Action(CommandAction::EnterValueShell( yield Ok(ReturnSuccess::Action(CommandAction::EnterValueShell(
Tagged { Tagged {
item, item,
tag: contents_tag, tag: contents_tag.clone(),
}))); })));
} }
x => yield x, x => yield x,

View File

@ -37,22 +37,22 @@ pub fn get_environment(tag: Tag) -> Result<Tagged<Value>, Box<dyn std::error::Er
let mut indexmap = IndexMap::new(); let mut indexmap = IndexMap::new();
let path = std::env::current_dir()?; let path = std::env::current_dir()?;
indexmap.insert("cwd".to_string(), Value::path(path).tagged(tag)); indexmap.insert("cwd".to_string(), Value::path(path).tagged(&tag));
if let Some(home) = dirs::home_dir() { if let Some(home) = dirs::home_dir() {
indexmap.insert("home".to_string(), Value::path(home).tagged(tag)); indexmap.insert("home".to_string(), Value::path(home).tagged(&tag));
} }
let config = config::default_path()?; let config = config::default_path()?;
indexmap.insert("config".to_string(), Value::path(config).tagged(tag)); indexmap.insert("config".to_string(), Value::path(config).tagged(&tag));
let history = History::path(); let history = History::path();
indexmap.insert("history".to_string(), Value::path(history).tagged(tag)); indexmap.insert("history".to_string(), Value::path(history).tagged(&tag));
let temp = std::env::temp_dir(); let temp = std::env::temp_dir();
indexmap.insert("temp".to_string(), Value::path(temp).tagged(tag)); indexmap.insert("temp".to_string(), Value::path(temp).tagged(&tag));
let mut dict = TaggedDictBuilder::new(tag); let mut dict = TaggedDictBuilder::new(&tag);
for v in std::env::vars() { for v in std::env::vars() {
dict.insert(v.0, Value::string(v.1)); dict.insert(v.0, Value::string(v.1));
} }
@ -60,14 +60,14 @@ pub fn get_environment(tag: Tag) -> Result<Tagged<Value>, Box<dyn std::error::Er
indexmap.insert("vars".to_string(), dict.into_tagged_value()); indexmap.insert("vars".to_string(), dict.into_tagged_value());
} }
Ok(Value::Row(Dictionary::from(indexmap)).tagged(tag)) Ok(Value::Row(Dictionary::from(indexmap)).tagged(&tag))
} }
pub fn env(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> { pub fn env(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?; let args = args.evaluate_once(registry)?;
let mut env_out = VecDeque::new(); let mut env_out = VecDeque::new();
let tag = args.call_info.name_tag; let tag = args.call_info.name_tag.clone();
let value = get_environment(tag)?; let value = get_environment(tag)?;
env_out.push_back(value); env_out.push_back(value);

View File

@ -10,7 +10,6 @@ use mime::Mime;
use std::path::PathBuf; use std::path::PathBuf;
use std::str::FromStr; use std::str::FromStr;
use surf::mime; use surf::mime;
use uuid::Uuid;
pub struct Fetch; pub struct Fetch;
impl PerItemCommand for Fetch { impl PerItemCommand for Fetch {
@ -44,16 +43,18 @@ fn run(
registry: &CommandRegistry, registry: &CommandRegistry,
raw_args: &RawCommandArgs, raw_args: &RawCommandArgs,
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
let path = match call_info let path = match call_info.args.nth(0).ok_or_else(|| {
.args ShellError::labeled_error(
.nth(0) "No file or directory specified",
.ok_or_else(|| ShellError::string(&format!("No file or directory specified")))? "for command",
{ &call_info.name_tag,
)
})? {
file => file, file => file,
}; };
let path_buf = path.as_path()?; let path_buf = path.as_path()?;
let path_str = path_buf.display().to_string(); let path_str = path_buf.display().to_string();
let path_span = path.span(); let path_span = path.tag.span;
let has_raw = call_info.args.has("raw"); let has_raw = call_info.args.has("raw");
let registry = registry.clone(); let registry = registry.clone();
let raw_args = raw_args.clone(); let raw_args = raw_args.clone();
@ -66,7 +67,7 @@ fn run(
yield Err(e); yield Err(e);
return; return;
} }
let (file_extension, contents, contents_tag, anchor_location) = result.unwrap(); let (file_extension, contents, contents_tag) = result.unwrap();
let file_extension = if has_raw { let file_extension = if has_raw {
None None
@ -76,21 +77,14 @@ fn run(
file_extension.or(path_str.split('.').last().map(String::from)) file_extension.or(path_str.split('.').last().map(String::from))
}; };
if contents_tag.anchor != uuid::Uuid::nil() { let tagged_contents = contents.tagged(&contents_tag);
// If we have loaded something, track its source
yield ReturnSuccess::action(CommandAction::AddAnchorLocation(
contents_tag.anchor,
anchor_location,
));
}
let tagged_contents = contents.tagged(contents_tag);
if let Some(extension) = file_extension { if let Some(extension) = file_extension {
let command_name = format!("from-{}", extension); let command_name = format!("from-{}", extension);
if let Some(converter) = registry.get_command(&command_name) { if let Some(converter) = registry.get_command(&command_name) {
let new_args = RawCommandArgs { let new_args = RawCommandArgs {
host: raw_args.host, host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager, shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo { call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call { args: crate::parser::hir::Call {
@ -99,7 +93,6 @@ fn run(
named: None named: None
}, },
source: raw_args.call_info.source, source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag, name_tag: raw_args.call_info.name_tag,
} }
}; };
@ -113,7 +106,7 @@ fn run(
} }
} }
Ok(ReturnSuccess::Value(Tagged { item, .. })) => { Ok(ReturnSuccess::Value(Tagged { item, .. })) => {
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag })); yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() }));
} }
x => yield x, x => yield x,
} }
@ -129,10 +122,7 @@ fn run(
Ok(stream.to_output_stream()) Ok(stream.to_output_stream())
} }
pub async fn fetch( pub async fn fetch(location: &str, span: Span) -> Result<(Option<String>, Value, Tag), ShellError> {
location: &str,
span: Span,
) -> Result<(Option<String>, Value, Tag, AnchorLocation), ShellError> {
if let Err(_) = url::Url::parse(location) { if let Err(_) = url::Url::parse(location) {
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
"Incomplete or incorrect url", "Incomplete or incorrect url",
@ -158,9 +148,8 @@ pub async fn fetch(
})?), })?),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::Url(location.to_string())),
}, },
AnchorLocation::Url(location.to_string()),
)), )),
(mime::APPLICATION, mime::JSON) => Ok(( (mime::APPLICATION, mime::JSON) => Ok((
Some("json".to_string()), Some("json".to_string()),
@ -173,9 +162,8 @@ pub async fn fetch(
})?), })?),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::Url(location.to_string())),
}, },
AnchorLocation::Url(location.to_string()),
)), )),
(mime::APPLICATION, mime::OCTET_STREAM) => { (mime::APPLICATION, mime::OCTET_STREAM) => {
let buf: Vec<u8> = r.body_bytes().await.map_err(|_| { let buf: Vec<u8> = r.body_bytes().await.map_err(|_| {
@ -190,9 +178,8 @@ pub async fn fetch(
Value::binary(buf), Value::binary(buf),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::Url(location.to_string())),
}, },
AnchorLocation::Url(location.to_string()),
)) ))
} }
(mime::IMAGE, mime::SVG) => Ok(( (mime::IMAGE, mime::SVG) => Ok((
@ -206,9 +193,8 @@ pub async fn fetch(
})?), })?),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::Url(location.to_string())),
}, },
AnchorLocation::Url(location.to_string()),
)), )),
(mime::IMAGE, image_ty) => { (mime::IMAGE, image_ty) => {
let buf: Vec<u8> = r.body_bytes().await.map_err(|_| { let buf: Vec<u8> = r.body_bytes().await.map_err(|_| {
@ -223,9 +209,8 @@ pub async fn fetch(
Value::binary(buf), Value::binary(buf),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::Url(location.to_string())),
}, },
AnchorLocation::Url(location.to_string()),
)) ))
} }
(mime::TEXT, mime::HTML) => Ok(( (mime::TEXT, mime::HTML) => Ok((
@ -239,9 +224,8 @@ pub async fn fetch(
})?), })?),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::Url(location.to_string())),
}, },
AnchorLocation::Url(location.to_string()),
)), )),
(mime::TEXT, mime::PLAIN) => { (mime::TEXT, mime::PLAIN) => {
let path_extension = url::Url::parse(location) let path_extension = url::Url::parse(location)
@ -266,9 +250,8 @@ pub async fn fetch(
})?), })?),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::Url(location.to_string())),
}, },
AnchorLocation::Url(location.to_string()),
)) ))
} }
(ty, sub_ty) => Ok(( (ty, sub_ty) => Ok((
@ -276,9 +259,8 @@ pub async fn fetch(
Value::string(format!("Not yet supported MIME type: {} {}", ty, sub_ty)), Value::string(format!("Not yet supported MIME type: {} {}", ty, sub_ty)),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::Url(location.to_string())),
}, },
AnchorLocation::Url(location.to_string()),
)), )),
} }
} }
@ -287,9 +269,8 @@ pub async fn fetch(
Value::string(format!("No content type found")), Value::string(format!("No content type found")),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::Url(location.to_string())),
}, },
AnchorLocation::Url(location.to_string()),
)), )),
}, },
Err(_) => { Err(_) => {

View File

@ -16,7 +16,7 @@ impl WholeStreamCommand for First {
} }
fn signature(&self) -> Signature { fn signature(&self) -> Signature {
Signature::build("first").required("amount", SyntaxShape::Literal) Signature::build("first").required("amount", SyntaxShape::Int)
} }
fn usage(&self) -> &str { fn usage(&self) -> &str {

View File

@ -33,7 +33,7 @@ fn bson_array(input: &Vec<Bson>, tag: Tag) -> Result<Vec<Tagged<Value>>, ShellEr
let mut out = vec![]; let mut out = vec![];
for value in input { for value in input {
out.push(convert_bson_value_to_nu_value(value, tag)?); out.push(convert_bson_value_to_nu_value(value, &tag)?);
} }
Ok(out) Ok(out)
@ -46,100 +46,100 @@ fn convert_bson_value_to_nu_value(
let tag = tag.into(); let tag = tag.into();
Ok(match v { Ok(match v {
Bson::FloatingPoint(n) => Value::Primitive(Primitive::from(*n)).tagged(tag), Bson::FloatingPoint(n) => Value::Primitive(Primitive::from(*n)).tagged(&tag),
Bson::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag), Bson::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(&tag),
Bson::Array(a) => Value::Table(bson_array(a, tag)?).tagged(tag), Bson::Array(a) => Value::Table(bson_array(a, tag.clone())?).tagged(&tag),
Bson::Document(doc) => { Bson::Document(doc) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(tag.clone());
for (k, v) in doc.iter() { for (k, v) in doc.iter() {
collected.insert_tagged(k.clone(), convert_bson_value_to_nu_value(v, tag)?); collected.insert_tagged(k.clone(), convert_bson_value_to_nu_value(v, &tag)?);
} }
collected.into_tagged_value() collected.into_tagged_value()
} }
Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(tag), Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(&tag),
Bson::Null => Value::Primitive(Primitive::Nothing).tagged(tag), Bson::Null => Value::Primitive(Primitive::Nothing).tagged(&tag),
Bson::RegExp(r, opts) => { Bson::RegExp(r, opts) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged( collected.insert_tagged(
"$regex".to_string(), "$regex".to_string(),
Value::Primitive(Primitive::String(String::from(r))).tagged(tag), Value::Primitive(Primitive::String(String::from(r))).tagged(&tag),
); );
collected.insert_tagged( collected.insert_tagged(
"$options".to_string(), "$options".to_string(),
Value::Primitive(Primitive::String(String::from(opts))).tagged(tag), Value::Primitive(Primitive::String(String::from(opts))).tagged(&tag),
); );
collected.into_tagged_value() collected.into_tagged_value()
} }
Bson::I32(n) => Value::number(n).tagged(tag), Bson::I32(n) => Value::number(n).tagged(&tag),
Bson::I64(n) => Value::number(n).tagged(tag), Bson::I64(n) => Value::number(n).tagged(&tag),
Bson::Decimal128(n) => { Bson::Decimal128(n) => {
// TODO: this really isn't great, and we should update this to do a higher // TODO: this really isn't great, and we should update this to do a higher
// fidelity translation // fidelity translation
let decimal = BigDecimal::from_str(&format!("{}", n)).map_err(|_| { let decimal = BigDecimal::from_str(&format!("{}", n)).map_err(|_| {
ShellError::range_error( ShellError::range_error(
ExpectedRange::BigDecimal, ExpectedRange::BigDecimal,
&n.tagged(tag), &n.tagged(&tag),
format!("converting BSON Decimal128 to BigDecimal"), format!("converting BSON Decimal128 to BigDecimal"),
) )
})?; })?;
Value::Primitive(Primitive::Decimal(decimal)).tagged(tag) Value::Primitive(Primitive::Decimal(decimal)).tagged(&tag)
} }
Bson::JavaScriptCode(js) => { Bson::JavaScriptCode(js) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged( collected.insert_tagged(
"$javascript".to_string(), "$javascript".to_string(),
Value::Primitive(Primitive::String(String::from(js))).tagged(tag), Value::Primitive(Primitive::String(String::from(js))).tagged(&tag),
); );
collected.into_tagged_value() collected.into_tagged_value()
} }
Bson::JavaScriptCodeWithScope(js, doc) => { Bson::JavaScriptCodeWithScope(js, doc) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged( collected.insert_tagged(
"$javascript".to_string(), "$javascript".to_string(),
Value::Primitive(Primitive::String(String::from(js))).tagged(tag), Value::Primitive(Primitive::String(String::from(js))).tagged(&tag),
); );
collected.insert_tagged( collected.insert_tagged(
"$scope".to_string(), "$scope".to_string(),
convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag)?, convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag.clone())?,
); );
collected.into_tagged_value() collected.into_tagged_value()
} }
Bson::TimeStamp(ts) => { Bson::TimeStamp(ts) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged("$timestamp".to_string(), Value::number(ts).tagged(tag)); collected.insert_tagged("$timestamp".to_string(), Value::number(ts).tagged(&tag));
collected.into_tagged_value() collected.into_tagged_value()
} }
Bson::Binary(bst, bytes) => { Bson::Binary(bst, bytes) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged( collected.insert_tagged(
"$binary_subtype".to_string(), "$binary_subtype".to_string(),
match bst { match bst {
BinarySubtype::UserDefined(u) => Value::number(u), BinarySubtype::UserDefined(u) => Value::number(u),
_ => Value::Primitive(Primitive::String(binary_subtype_to_string(*bst))), _ => Value::Primitive(Primitive::String(binary_subtype_to_string(*bst))),
} }
.tagged(tag), .tagged(&tag),
); );
collected.insert_tagged( collected.insert_tagged(
"$binary".to_string(), "$binary".to_string(),
Value::Primitive(Primitive::Binary(bytes.to_owned())).tagged(tag), Value::Primitive(Primitive::Binary(bytes.to_owned())).tagged(&tag),
); );
collected.into_tagged_value() collected.into_tagged_value()
} }
Bson::ObjectId(obj_id) => { Bson::ObjectId(obj_id) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged( collected.insert_tagged(
"$object_id".to_string(), "$object_id".to_string(),
Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(tag), Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(&tag),
); );
collected.into_tagged_value() collected.into_tagged_value()
} }
Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(tag), Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(&tag),
Bson::Symbol(s) => { Bson::Symbol(s) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged( collected.insert_tagged(
"$symbol".to_string(), "$symbol".to_string(),
Value::Primitive(Primitive::String(String::from(s))).tagged(tag), Value::Primitive(Primitive::String(String::from(s))).tagged(&tag),
); );
collected.into_tagged_value() collected.into_tagged_value()
} }
@ -208,13 +208,13 @@ fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
let value_tag = value.tag(); let value_tag = value.tag();
match value.item { match value.item {
Value::Primitive(Primitive::Binary(vb)) => Value::Primitive(Primitive::Binary(vb)) =>
match from_bson_bytes_to_value(vb, tag) { match from_bson_bytes_to_value(vb, tag.clone()) {
Ok(x) => yield ReturnSuccess::value(x), Ok(x) => yield ReturnSuccess::value(x),
Err(_) => { Err(_) => {
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as BSON", "Could not parse as BSON",
"input cannot be parsed as BSON", "input cannot be parsed as BSON",
tag, tag.clone(),
"value originates from here", "value originates from here",
value_tag, value_tag,
)) ))
@ -223,7 +223,7 @@ fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
tag, tag.clone(),
"value originates from here", "value originates from here",
value_tag, value_tag,
)), )),

View File

@ -62,12 +62,12 @@ pub fn from_csv_string_to_value(
if let Some(row_values) = iter.next() { if let Some(row_values) = iter.next() {
let row_values = row_values?; let row_values = row_values?;
let mut row = TaggedDictBuilder::new(tag); let mut row = TaggedDictBuilder::new(tag.clone());
for (idx, entry) in row_values.iter().enumerate() { for (idx, entry) in row_values.iter().enumerate() {
row.insert_tagged( row.insert_tagged(
fields.get(idx).unwrap(), fields.get(idx).unwrap(),
Value::Primitive(Primitive::String(String::from(entry))).tagged(tag), Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag),
); );
} }
@ -77,7 +77,7 @@ pub fn from_csv_string_to_value(
} }
} }
Ok(Tagged::from_item(Value::Table(rows), tag)) Ok(Value::Table(rows).tagged(&tag))
} }
fn from_csv( fn from_csv(
@ -96,7 +96,7 @@ fn from_csv(
for value in values { for value in values {
let value_tag = value.tag(); let value_tag = value.tag();
latest_tag = Some(value_tag); latest_tag = Some(value_tag.clone());
match value.item { match value.item {
Value::Primitive(Primitive::String(s)) => { Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s); concat_string.push_str(&s);
@ -105,15 +105,15 @@ fn from_csv(
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
name_tag, name_tag.clone(),
"value originates from here", "value originates from here",
value_tag, value_tag.clone(),
)), )),
} }
} }
match from_csv_string_to_value(concat_string, skip_headers, name_tag) { match from_csv_string_to_value(concat_string, skip_headers, name_tag.clone()) {
Ok(x) => match x { Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => { Tagged { item: Value::Table(list), .. } => {
for l in list { for l in list {
@ -126,9 +126,9 @@ fn from_csv(
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as CSV", "Could not parse as CSV",
"input cannot be parsed as CSV", "input cannot be parsed as CSV",
name_tag, name_tag.clone(),
"value originates from here", "value originates from here",
last_tag, last_tag.clone(),
)) ))
} , } ,
} }

View File

@ -45,10 +45,13 @@ fn convert_ini_top_to_nu_value(
tag: impl Into<Tag>, tag: impl Into<Tag>,
) -> Tagged<Value> { ) -> Tagged<Value> {
let tag = tag.into(); let tag = tag.into();
let mut top_level = TaggedDictBuilder::new(tag); let mut top_level = TaggedDictBuilder::new(tag.clone());
for (key, value) in v.iter() { for (key, value) in v.iter() {
top_level.insert_tagged(key.clone(), convert_ini_second_to_nu_value(value, tag)); top_level.insert_tagged(
key.clone(),
convert_ini_second_to_nu_value(value, tag.clone()),
);
} }
top_level.into_tagged_value() top_level.into_tagged_value()
@ -75,7 +78,7 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
for value in values { for value in values {
let value_tag = value.tag(); let value_tag = value.tag();
latest_tag = Some(value_tag); latest_tag = Some(value_tag.clone());
match value.item { match value.item {
Value::Primitive(Primitive::String(s)) => { Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s); concat_string.push_str(&s);
@ -84,15 +87,15 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
tag, &tag,
"value originates from here", "value originates from here",
value_tag, &value_tag,
)), )),
} }
} }
match from_ini_string_to_value(concat_string, tag) { match from_ini_string_to_value(concat_string, tag.clone()) {
Ok(x) => match x { Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => { Tagged { item: Value::Table(list), .. } => {
for l in list { for l in list {
@ -105,7 +108,7 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as INI", "Could not parse as INI",
"input cannot be parsed as INI", "input cannot be parsed as INI",
tag, &tag,
"value originates from here", "value originates from here",
last_tag, last_tag,
)) ))

View File

@ -35,24 +35,24 @@ fn convert_json_value_to_nu_value(v: &serde_hjson::Value, tag: impl Into<Tag>) -
let tag = tag.into(); let tag = tag.into();
match v { match v {
serde_hjson::Value::Null => Value::Primitive(Primitive::Nothing).tagged(tag), serde_hjson::Value::Null => Value::Primitive(Primitive::Nothing).tagged(&tag),
serde_hjson::Value::Bool(b) => Value::boolean(*b).tagged(tag), serde_hjson::Value::Bool(b) => Value::boolean(*b).tagged(&tag),
serde_hjson::Value::F64(n) => Value::number(n).tagged(tag), serde_hjson::Value::F64(n) => Value::number(n).tagged(&tag),
serde_hjson::Value::U64(n) => Value::number(n).tagged(tag), serde_hjson::Value::U64(n) => Value::number(n).tagged(&tag),
serde_hjson::Value::I64(n) => Value::number(n).tagged(tag), serde_hjson::Value::I64(n) => Value::number(n).tagged(&tag),
serde_hjson::Value::String(s) => { serde_hjson::Value::String(s) => {
Value::Primitive(Primitive::String(String::from(s))).tagged(tag) Value::Primitive(Primitive::String(String::from(s))).tagged(&tag)
} }
serde_hjson::Value::Array(a) => Value::Table( serde_hjson::Value::Array(a) => Value::Table(
a.iter() a.iter()
.map(|x| convert_json_value_to_nu_value(x, tag)) .map(|x| convert_json_value_to_nu_value(x, &tag))
.collect(), .collect(),
) )
.tagged(tag), .tagged(tag),
serde_hjson::Value::Object(o) => { serde_hjson::Value::Object(o) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(&tag);
for (k, v) in o.iter() { for (k, v) in o.iter() {
collected.insert_tagged(k.clone(), convert_json_value_to_nu_value(v, tag)); collected.insert_tagged(k.clone(), convert_json_value_to_nu_value(v, &tag));
} }
collected.into_tagged_value() collected.into_tagged_value()
@ -82,7 +82,7 @@ fn from_json(
for value in values { for value in values {
let value_tag = value.tag(); let value_tag = value.tag();
latest_tag = Some(value_tag); latest_tag = Some(value_tag.clone());
match value.item { match value.item {
Value::Primitive(Primitive::String(s)) => { Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s); concat_string.push_str(&s);
@ -91,9 +91,9 @@ fn from_json(
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
name_tag, &name_tag,
"value originates from here", "value originates from here",
value_tag, &value_tag,
)), )),
} }
@ -106,15 +106,15 @@ fn from_json(
continue; continue;
} }
match from_json_string_to_value(json_str.to_string(), name_tag) { match from_json_string_to_value(json_str.to_string(), &name_tag) {
Ok(x) => Ok(x) =>
yield ReturnSuccess::value(x), yield ReturnSuccess::value(x),
Err(_) => { Err(_) => {
if let Some(last_tag) = latest_tag { if let Some(ref last_tag) = latest_tag {
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Could nnot parse as JSON", "Could nnot parse as JSON",
"input cannot be parsed as JSON", "input cannot be parsed as JSON",
name_tag, &name_tag,
"value originates from here", "value originates from here",
last_tag)) last_tag))
} }
@ -122,7 +122,7 @@ fn from_json(
} }
} }
} else { } else {
match from_json_string_to_value(concat_string, name_tag) { match from_json_string_to_value(concat_string, name_tag.clone()) {
Ok(x) => Ok(x) =>
match x { match x {
Tagged { item: Value::Table(list), .. } => { Tagged { item: Value::Table(list), .. } => {

View File

@ -138,7 +138,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
let value_tag = value.tag(); let value_tag = value.tag();
match value.item { match value.item {
Value::Primitive(Primitive::Binary(vb)) => Value::Primitive(Primitive::Binary(vb)) =>
match from_sqlite_bytes_to_value(vb, tag) { match from_sqlite_bytes_to_value(vb, tag.clone()) {
Ok(x) => match x { Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => { Tagged { item: Value::Table(list), .. } => {
for l in list { for l in list {
@ -151,7 +151,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as SQLite", "Could not parse as SQLite",
"input cannot be parsed as SQLite", "input cannot be parsed as SQLite",
tag, &tag,
"value originates from here", "value originates from here",
value_tag, value_tag,
)) ))
@ -160,7 +160,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
tag, &tag,
"value originates from here", "value originates from here",
value_tag, value_tag,
)), )),

View File

@ -36,7 +36,7 @@ pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into<Tag>) -> T
toml::Value::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag), toml::Value::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag),
toml::Value::Array(a) => Value::Table( toml::Value::Array(a) => Value::Table(
a.iter() a.iter()
.map(|x| convert_toml_value_to_nu_value(x, tag)) .map(|x| convert_toml_value_to_nu_value(x, &tag))
.collect(), .collect(),
) )
.tagged(tag), .tagged(tag),
@ -44,10 +44,10 @@ pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into<Tag>) -> T
Value::Primitive(Primitive::String(dt.to_string())).tagged(tag) Value::Primitive(Primitive::String(dt.to_string())).tagged(tag)
} }
toml::Value::Table(t) => { toml::Value::Table(t) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(&tag);
for (k, v) in t.iter() { for (k, v) in t.iter() {
collected.insert_tagged(k.clone(), convert_toml_value_to_nu_value(v, tag)); collected.insert_tagged(k.clone(), convert_toml_value_to_nu_value(v, &tag));
} }
collected.into_tagged_value() collected.into_tagged_value()
@ -79,7 +79,7 @@ pub fn from_toml(
for value in values { for value in values {
let value_tag = value.tag(); let value_tag = value.tag();
latest_tag = Some(value_tag); latest_tag = Some(value_tag.clone());
match value.item { match value.item {
Value::Primitive(Primitive::String(s)) => { Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s); concat_string.push_str(&s);
@ -88,15 +88,15 @@ pub fn from_toml(
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
tag, &tag,
"value originates from here", "value originates from here",
value_tag, &value_tag,
)), )),
} }
} }
match from_toml_string_to_value(concat_string, tag) { match from_toml_string_to_value(concat_string, tag.clone()) {
Ok(x) => match x { Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => { Tagged { item: Value::Table(list), .. } => {
for l in list { for l in list {
@ -109,7 +109,7 @@ pub fn from_toml(
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as TOML", "Could not parse as TOML",
"input cannot be parsed as TOML", "input cannot be parsed as TOML",
tag, &tag,
"value originates from here", "value originates from here",
last_tag, last_tag,
)) ))

View File

@ -63,12 +63,12 @@ pub fn from_tsv_string_to_value(
if let Some(row_values) = iter.next() { if let Some(row_values) = iter.next() {
let row_values = row_values?; let row_values = row_values?;
let mut row = TaggedDictBuilder::new(tag); let mut row = TaggedDictBuilder::new(&tag);
for (idx, entry) in row_values.iter().enumerate() { for (idx, entry) in row_values.iter().enumerate() {
row.insert_tagged( row.insert_tagged(
fields.get(idx).unwrap(), fields.get(idx).unwrap(),
Value::Primitive(Primitive::String(String::from(entry))).tagged(tag), Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag),
); );
} }
@ -78,7 +78,7 @@ pub fn from_tsv_string_to_value(
} }
} }
Ok(Tagged::from_item(Value::Table(rows), tag)) Ok(Value::Table(rows).tagged(&tag))
} }
fn from_tsv( fn from_tsv(
@ -97,7 +97,7 @@ fn from_tsv(
for value in values { for value in values {
let value_tag = value.tag(); let value_tag = value.tag();
latest_tag = Some(value_tag); latest_tag = Some(value_tag.clone());
match value.item { match value.item {
Value::Primitive(Primitive::String(s)) => { Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s); concat_string.push_str(&s);
@ -106,15 +106,15 @@ fn from_tsv(
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
name_tag, &name_tag,
"value originates from here", "value originates from here",
value_tag, &value_tag,
)), )),
} }
} }
match from_tsv_string_to_value(concat_string, skip_headers, name_tag) { match from_tsv_string_to_value(concat_string, skip_headers, name_tag.clone()) {
Ok(x) => match x { Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => { Tagged { item: Value::Table(list), .. } => {
for l in list { for l in list {
@ -127,9 +127,9 @@ fn from_tsv(
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as TSV", "Could not parse as TSV",
"input cannot be parsed as TSV", "input cannot be parsed as TSV",
name_tag, &name_tag,
"value originates from here", "value originates from here",
last_tag, &last_tag,
)) ))
} , } ,
} }

View File

@ -39,7 +39,7 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
for value in values { for value in values {
let value_tag = value.tag(); let value_tag = value.tag();
latest_tag = Some(value_tag); latest_tag = Some(value_tag.clone());
match value.item { match value.item {
Value::Primitive(Primitive::String(s)) => { Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s); concat_string.push_str(&s);
@ -47,9 +47,9 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
tag, &tag,
"value originates from here", "value originates from here",
value_tag, &value_tag,
)), )),
} }

View File

@ -34,7 +34,7 @@ fn from_node_to_value<'a, 'd>(n: &roxmltree::Node<'a, 'd>, tag: impl Into<Tag>)
let mut children_values = vec![]; let mut children_values = vec![];
for c in n.children() { for c in n.children() {
children_values.push(from_node_to_value(&c, tag)); children_values.push(from_node_to_value(&c, &tag));
} }
let children_values: Vec<Tagged<Value>> = children_values let children_values: Vec<Tagged<Value>> = children_values
@ -94,7 +94,7 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
for value in values { for value in values {
let value_tag = value.tag(); let value_tag = value.tag();
latest_tag = Some(value_tag); latest_tag = Some(value_tag.clone());
match value.item { match value.item {
Value::Primitive(Primitive::String(s)) => { Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s); concat_string.push_str(&s);
@ -103,15 +103,15 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
tag, &tag,
"value originates from here", "value originates from here",
value_tag, &value_tag,
)), )),
} }
} }
match from_xml_string_to_value(concat_string, tag) { match from_xml_string_to_value(concat_string, tag.clone()) {
Ok(x) => match x { Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => { Tagged { item: Value::Table(list), .. } => {
for l in list { for l in list {
@ -124,9 +124,9 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as XML", "Could not parse as XML",
"input cannot be parsed as XML", "input cannot be parsed as XML",
tag, &tag,
"value originates from here", "value originates from here",
last_tag, &last_tag,
)) ))
} , } ,
} }

View File

@ -64,17 +64,17 @@ fn convert_yaml_value_to_nu_value(v: &serde_yaml::Value, tag: impl Into<Tag>) ->
serde_yaml::Value::String(s) => Value::string(s).tagged(tag), serde_yaml::Value::String(s) => Value::string(s).tagged(tag),
serde_yaml::Value::Sequence(a) => Value::Table( serde_yaml::Value::Sequence(a) => Value::Table(
a.iter() a.iter()
.map(|x| convert_yaml_value_to_nu_value(x, tag)) .map(|x| convert_yaml_value_to_nu_value(x, &tag))
.collect(), .collect(),
) )
.tagged(tag), .tagged(tag),
serde_yaml::Value::Mapping(t) => { serde_yaml::Value::Mapping(t) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(&tag);
for (k, v) in t.iter() { for (k, v) in t.iter() {
match k { match k {
serde_yaml::Value::String(k) => { serde_yaml::Value::String(k) => {
collected.insert_tagged(k.clone(), convert_yaml_value_to_nu_value(v, tag)); collected.insert_tagged(k.clone(), convert_yaml_value_to_nu_value(v, &tag));
} }
_ => unimplemented!("Unknown key type"), _ => unimplemented!("Unknown key type"),
} }
@ -108,7 +108,7 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
for value in values { for value in values {
let value_tag = value.tag(); let value_tag = value.tag();
latest_tag = Some(value_tag); latest_tag = Some(value_tag.clone());
match value.item { match value.item {
Value::Primitive(Primitive::String(s)) => { Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s); concat_string.push_str(&s);
@ -117,15 +117,15 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
tag, &tag,
"value originates from here", "value originates from here",
value_tag, &value_tag,
)), )),
} }
} }
match from_yaml_string_to_value(concat_string, tag) { match from_yaml_string_to_value(concat_string, tag.clone()) {
Ok(x) => match x { Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => { Tagged { item: Value::Table(list), .. } => {
for l in list { for l in list {
@ -138,9 +138,9 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as YAML", "Could not parse as YAML",
"input cannot be parsed as YAML", "input cannot be parsed as YAML",
tag, &tag,
"value originates from here", "value originates from here",
last_tag, &last_tag,
)) ))
} , } ,
} }

View File

@ -1,14 +1,16 @@
use crate::commands::WholeStreamCommand; use crate::commands::WholeStreamCommand;
use crate::data::meta::tag_for_tagged_list;
use crate::data::Value; use crate::data::Value;
use crate::errors::ShellError; use crate::errors::ShellError;
use crate::prelude::*; use crate::prelude::*;
use log::trace;
pub struct Get; pub struct Get;
#[derive(Deserialize)] #[derive(Deserialize)]
pub struct GetArgs { pub struct GetArgs {
member: Tagged<String>, member: ColumnPath,
rest: Vec<Tagged<String>>, rest: Vec<ColumnPath>,
} }
impl WholeStreamCommand for Get { impl WholeStreamCommand for Get {
@ -18,8 +20,8 @@ impl WholeStreamCommand for Get {
fn signature(&self) -> Signature { fn signature(&self) -> Signature {
Signature::build("get") Signature::build("get")
.required("member", SyntaxShape::Member) .required("member", SyntaxShape::ColumnPath)
.rest(SyntaxShape::Member) .rest(SyntaxShape::ColumnPath)
} }
fn usage(&self) -> &str { fn usage(&self) -> &str {
@ -35,38 +37,41 @@ impl WholeStreamCommand for Get {
} }
} }
fn get_member(path: &Tagged<String>, obj: &Tagged<Value>) -> Result<Tagged<Value>, ShellError> { pub type ColumnPath = Vec<Tagged<String>>;
pub fn get_column_path(
path: &ColumnPath,
obj: &Tagged<Value>,
) -> Result<Tagged<Value>, ShellError> {
let mut current = Some(obj); let mut current = Some(obj);
for p in path.split(".") { for p in path.iter() {
if let Some(obj) = current { if let Some(obj) = current {
current = match obj.get_data_by_key(p) { current = match obj.get_data_by_key(&p) {
Some(v) => Some(v), Some(v) => Some(v),
None => None =>
// Before we give up, see if they gave us a path that matches a field name by itself // Before we give up, see if they gave us a path that matches a field name by itself
{ {
match obj.get_data_by_key(&path.item) { let possibilities = obj.data_descriptors();
Some(v) => return Ok(v.clone()),
None => {
let possibilities = obj.data_descriptors();
let mut possible_matches: Vec<_> = possibilities let mut possible_matches: Vec<_> = possibilities
.iter() .iter()
.map(|x| { .map(|x| (natural::distance::levenshtein_distance(x, &p), x))
(natural::distance::levenshtein_distance(x, &path.item), x) .collect();
})
.collect();
possible_matches.sort(); possible_matches.sort();
if possible_matches.len() > 0 { if possible_matches.len() > 0 {
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
"Unknown column", "Unknown column",
format!("did you mean '{}'?", possible_matches[0].1), format!("did you mean '{}'?", possible_matches[0].1),
path.tag(), tag_for_tagged_list(path.iter().map(|p| p.tag())),
)); ));
} } else {
None return Err(ShellError::labeled_error(
} "Unknown column",
"row does not contain this column",
tag_for_tagged_list(path.iter().map(|p| p.tag())),
));
} }
} }
} }
@ -85,7 +90,7 @@ fn get_member(path: &Tagged<String>, obj: &Tagged<Value>) -> Result<Tagged<Value
item: Value::Primitive(Primitive::Path(_)), item: Value::Primitive(Primitive::Path(_)),
.. ..
} => Ok(obj.clone()), } => Ok(obj.clone()),
_ => Ok(Value::nothing().tagged(obj.tag)), _ => Ok(Value::nothing().tagged(&obj.tag)),
}, },
} }
} }
@ -97,6 +102,8 @@ pub fn get(
}: GetArgs, }: GetArgs,
RunnableContext { input, .. }: RunnableContext, RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
trace!("get {:?} {:?}", member, fields);
let stream = input let stream = input
.values .values
.map(move |item| { .map(move |item| {
@ -107,10 +114,10 @@ pub fn get(
let fields = vec![&member, &fields] let fields = vec![&member, &fields]
.into_iter() .into_iter()
.flatten() .flatten()
.collect::<Vec<&Tagged<String>>>(); .collect::<Vec<&ColumnPath>>();
for field in &fields { for column_path in &fields {
match get_member(field, &item) { match get_column_path(column_path, &item) {
Ok(Tagged { Ok(Tagged {
item: Value::Table(l), item: Value::Table(l),
.. ..

View File

@ -26,7 +26,7 @@ impl PerItemCommand for Help {
_raw_args: &RawCommandArgs, _raw_args: &RawCommandArgs,
_input: Tagged<Value>, _input: Tagged<Value>,
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
let tag = call_info.name_tag; let tag = &call_info.name_tag;
match call_info.args.nth(0) { match call_info.args.nth(0) {
Some(Tagged { Some(Tagged {

View File

@ -58,7 +58,7 @@ fn lines(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
result.push_back(Err(ShellError::labeled_error_with_secondary( result.push_back(Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
tag, &tag,
"value originates from here", "value originates from here",
v.tag(), v.tag(),
))); )));

View File

@ -34,5 +34,5 @@ impl WholeStreamCommand for LS {
} }
fn ls(LsArgs { path }: LsArgs, context: RunnableContext) -> Result<OutputStream, ShellError> { fn ls(LsArgs { path }: LsArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
context.shell_manager.ls(path, context.name) context.shell_manager.ls(path, &context)
} }

View File

@ -7,7 +7,6 @@ use crate::parser::hir::SyntaxShape;
use crate::parser::registry::Signature; use crate::parser::registry::Signature;
use crate::prelude::*; use crate::prelude::*;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use uuid::Uuid;
pub struct Open; pub struct Open;
impl PerItemCommand for Open { impl PerItemCommand for Open {
@ -45,16 +44,18 @@ fn run(
let cwd = PathBuf::from(shell_manager.path()); let cwd = PathBuf::from(shell_manager.path());
let full_path = PathBuf::from(cwd); let full_path = PathBuf::from(cwd);
let path = match call_info let path = match call_info.args.nth(0).ok_or_else(|| {
.args ShellError::labeled_error(
.nth(0) "No file or directory specified",
.ok_or_else(|| ShellError::string(&format!("No file or directory specified")))? "for command",
{ &call_info.name_tag,
)
})? {
file => file, file => file,
}; };
let path_buf = path.as_path()?; let path_buf = path.as_path()?;
let path_str = path_buf.display().to_string(); let path_str = path_buf.display().to_string();
let path_span = path.span(); let path_span = path.tag.span;
let has_raw = call_info.args.has("raw"); let has_raw = call_info.args.has("raw");
let registry = registry.clone(); let registry = registry.clone();
let raw_args = raw_args.clone(); let raw_args = raw_args.clone();
@ -67,7 +68,7 @@ fn run(
yield Err(e); yield Err(e);
return; return;
} }
let (file_extension, contents, contents_tag, anchor_location) = result.unwrap(); let (file_extension, contents, contents_tag) = result.unwrap();
let file_extension = if has_raw { let file_extension = if has_raw {
None None
@ -77,21 +78,14 @@ fn run(
file_extension.or(path_str.split('.').last().map(String::from)) file_extension.or(path_str.split('.').last().map(String::from))
}; };
if contents_tag.anchor != uuid::Uuid::nil() { let tagged_contents = contents.tagged(&contents_tag);
// If we have loaded something, track its source
yield ReturnSuccess::action(CommandAction::AddAnchorLocation(
contents_tag.anchor,
anchor_location,
));
}
let tagged_contents = contents.tagged(contents_tag);
if let Some(extension) = file_extension { if let Some(extension) = file_extension {
let command_name = format!("from-{}", extension); let command_name = format!("from-{}", extension);
if let Some(converter) = registry.get_command(&command_name) { if let Some(converter) = registry.get_command(&command_name) {
let new_args = RawCommandArgs { let new_args = RawCommandArgs {
host: raw_args.host, host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager, shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo { call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call { args: crate::parser::hir::Call {
@ -100,7 +94,6 @@ fn run(
named: None named: None
}, },
source: raw_args.call_info.source, source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag, name_tag: raw_args.call_info.name_tag,
} }
}; };
@ -114,7 +107,7 @@ fn run(
} }
} }
Ok(ReturnSuccess::Value(Tagged { item, .. })) => { Ok(ReturnSuccess::Value(Tagged { item, .. })) => {
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag })); yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() }));
} }
x => yield x, x => yield x,
} }
@ -134,7 +127,7 @@ pub async fn fetch(
cwd: &PathBuf, cwd: &PathBuf,
location: &str, location: &str,
span: Span, span: Span,
) -> Result<(Option<String>, Value, Tag, AnchorLocation), ShellError> { ) -> Result<(Option<String>, Value, Tag), ShellError> {
let mut cwd = cwd.clone(); let mut cwd = cwd.clone();
cwd.push(Path::new(location)); cwd.push(Path::new(location));
@ -147,9 +140,8 @@ pub async fn fetch(
Value::string(s), Value::string(s),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::File(cwd.to_string_lossy().to_string())),
}, },
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)), )),
Err(_) => { Err(_) => {
//Non utf8 data. //Non utf8 data.
@ -166,18 +158,20 @@ pub async fn fetch(
Value::string(s), Value::string(s),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
}, },
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)), )),
Err(_) => Ok(( Err(_) => Ok((
None, None,
Value::binary(bytes), Value::binary(bytes),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
}, },
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)), )),
} }
} else { } else {
@ -186,9 +180,10 @@ pub async fn fetch(
Value::binary(bytes), Value::binary(bytes),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
}, },
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)) ))
} }
} }
@ -204,18 +199,20 @@ pub async fn fetch(
Value::string(s), Value::string(s),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
}, },
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)), )),
Err(_) => Ok(( Err(_) => Ok((
None, None,
Value::binary(bytes), Value::binary(bytes),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
}, },
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)), )),
} }
} else { } else {
@ -224,9 +221,10 @@ pub async fn fetch(
Value::binary(bytes), Value::binary(bytes),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
}, },
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)) ))
} }
} }
@ -235,9 +233,10 @@ pub async fn fetch(
Value::binary(bytes), Value::binary(bytes),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
}, },
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)), )),
} }
} }

View File

@ -104,7 +104,7 @@ pub fn pivot(args: PivotArgs, context: RunnableContext) -> Result<OutputStream,
for desc in descs { for desc in descs {
let mut column_num: usize = 0; let mut column_num: usize = 0;
let mut dict = TaggedDictBuilder::new(context.name); let mut dict = TaggedDictBuilder::new(&context.name);
if !args.ignore_titles && !args.header_row { if !args.ignore_titles && !args.header_row {
dict.insert(headers[column_num].clone(), Value::string(desc.clone())); dict.insert(headers[column_num].clone(), Value::string(desc.clone()));

View File

@ -128,7 +128,7 @@ pub fn filter_plugin(
}, },
Err(e) => { Err(e) => {
let mut result = VecDeque::new(); let mut result = VecDeque::new();
result.push_back(Err(ShellError::string(format!( result.push_back(Err(ShellError::untagged_runtime_error(format!(
"Error while processing begin_filter response: {:?} {}", "Error while processing begin_filter response: {:?} {}",
e, input e, input
)))); ))));
@ -138,7 +138,7 @@ pub fn filter_plugin(
} }
Err(e) => { Err(e) => {
let mut result = VecDeque::new(); let mut result = VecDeque::new();
result.push_back(Err(ShellError::string(format!( result.push_back(Err(ShellError::untagged_runtime_error(format!(
"Error while reading begin_filter response: {:?}", "Error while reading begin_filter response: {:?}",
e e
)))); ))));
@ -189,7 +189,7 @@ pub fn filter_plugin(
}, },
Err(e) => { Err(e) => {
let mut result = VecDeque::new(); let mut result = VecDeque::new();
result.push_back(Err(ShellError::string(format!( result.push_back(Err(ShellError::untagged_runtime_error(format!(
"Error while processing end_filter response: {:?} {}", "Error while processing end_filter response: {:?} {}",
e, input e, input
)))); ))));
@ -199,7 +199,7 @@ pub fn filter_plugin(
} }
Err(e) => { Err(e) => {
let mut result = VecDeque::new(); let mut result = VecDeque::new();
result.push_back(Err(ShellError::string(format!( result.push_back(Err(ShellError::untagged_runtime_error(format!(
"Error while reading end_filter: {:?}", "Error while reading end_filter: {:?}",
e e
)))); ))));
@ -236,7 +236,7 @@ pub fn filter_plugin(
}, },
Err(e) => { Err(e) => {
let mut result = VecDeque::new(); let mut result = VecDeque::new();
result.push_back(Err(ShellError::string(format!( result.push_back(Err(ShellError::untagged_runtime_error(format!(
"Error while processing filter response: {:?} {}", "Error while processing filter response: {:?} {}",
e, input e, input
)))); ))));
@ -246,7 +246,7 @@ pub fn filter_plugin(
} }
Err(e) => { Err(e) => {
let mut result = VecDeque::new(); let mut result = VecDeque::new();
result.push_back(Err(ShellError::string(format!( result.push_back(Err(ShellError::untagged_runtime_error(format!(
"Error while reading filter response: {:?}", "Error while reading filter response: {:?}",
e e
)))); ))));

View File

@ -54,21 +54,20 @@ fn run(
registry: &CommandRegistry, registry: &CommandRegistry,
raw_args: &RawCommandArgs, raw_args: &RawCommandArgs,
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
let name_tag = call_info.name_tag.clone();
let call_info = call_info.clone(); let call_info = call_info.clone();
let path = match call_info let path =
.args match call_info.args.nth(0).ok_or_else(|| {
.nth(0) ShellError::labeled_error("No url specified", "for command", &name_tag)
.ok_or_else(|| ShellError::string(&format!("No url specified")))? })? {
{ file => file.clone(),
file => file.clone(), };
}; let body =
let body = match call_info match call_info.args.nth(1).ok_or_else(|| {
.args ShellError::labeled_error("No body specified", "for command", &name_tag)
.nth(1) })? {
.ok_or_else(|| ShellError::string(&format!("No body specified")))? file => file.clone(),
{ };
file => file.clone(),
};
let path_str = path.as_string()?; let path_str = path.as_string()?;
let path_span = path.tag(); let path_span = path.tag();
let has_raw = call_info.args.has("raw"); let has_raw = call_info.args.has("raw");
@ -83,7 +82,7 @@ fn run(
let headers = get_headers(&call_info)?; let headers = get_headers(&call_info)?;
let stream = async_stream! { let stream = async_stream! {
let (file_extension, contents, contents_tag, anchor_location) = let (file_extension, contents, contents_tag) =
post(&path_str, &body, user, password, &headers, path_span, &registry, &raw_args).await.unwrap(); post(&path_str, &body, user, password, &headers, path_span, &registry, &raw_args).await.unwrap();
let file_extension = if has_raw { let file_extension = if has_raw {
@ -94,21 +93,14 @@ fn run(
file_extension.or(path_str.split('.').last().map(String::from)) file_extension.or(path_str.split('.').last().map(String::from))
}; };
if contents_tag.anchor != uuid::Uuid::nil() { let tagged_contents = contents.tagged(&contents_tag);
// If we have loaded something, track its source
yield ReturnSuccess::action(CommandAction::AddAnchorLocation(
contents_tag.anchor,
anchor_location,
));
}
let tagged_contents = contents.tagged(contents_tag);
if let Some(extension) = file_extension { if let Some(extension) = file_extension {
let command_name = format!("from-{}", extension); let command_name = format!("from-{}", extension);
if let Some(converter) = registry.get_command(&command_name) { if let Some(converter) = registry.get_command(&command_name) {
let new_args = RawCommandArgs { let new_args = RawCommandArgs {
host: raw_args.host, host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager, shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo { call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call { args: crate::parser::hir::Call {
@ -117,7 +109,6 @@ fn run(
named: None named: None
}, },
source: raw_args.call_info.source, source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag, name_tag: raw_args.call_info.name_tag,
} }
}; };
@ -131,7 +122,7 @@ fn run(
} }
} }
Ok(ReturnSuccess::Value(Tagged { item, .. })) => { Ok(ReturnSuccess::Value(Tagged { item, .. })) => {
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag })); yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() }));
} }
x => yield x, x => yield x,
} }
@ -211,7 +202,7 @@ pub async fn post(
tag: Tag, tag: Tag,
registry: &CommandRegistry, registry: &CommandRegistry,
raw_args: &RawCommandArgs, raw_args: &RawCommandArgs,
) -> Result<(Option<String>, Value, Tag, AnchorLocation), ShellError> { ) -> Result<(Option<String>, Value, Tag), ShellError> {
let registry = registry.clone(); let registry = registry.clone();
let raw_args = raw_args.clone(); let raw_args = raw_args.clone();
if location.starts_with("http:") || location.starts_with("https:") { if location.starts_with("http:") || location.starts_with("https:") {
@ -252,6 +243,7 @@ pub async fn post(
if let Some(converter) = registry.get_command("to-json") { if let Some(converter) = registry.get_command("to-json") {
let new_args = RawCommandArgs { let new_args = RawCommandArgs {
host: raw_args.host, host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager, shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo { call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call { args: crate::parser::hir::Call {
@ -260,7 +252,6 @@ pub async fn post(
named: None, named: None,
}, },
source: raw_args.call_info.source, source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag, name_tag: raw_args.call_info.name_tag,
}, },
}; };
@ -284,7 +275,7 @@ pub async fn post(
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
"Save could not successfully save", "Save could not successfully save",
"unexpected data during save", "unexpected data during save",
*tag, tag,
)); ));
} }
} }
@ -300,7 +291,7 @@ pub async fn post(
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
"Could not automatically convert table", "Could not automatically convert table",
"needs manual conversion", "needs manual conversion",
*tag, tag,
)); ));
} }
} }
@ -316,11 +307,13 @@ pub async fn post(
ShellError::labeled_error( ShellError::labeled_error(
"Could not load text from remote url", "Could not load text from remote url",
"could not load", "could not load",
tag, &tag,
) )
})?), })?),
tag, Tag {
AnchorLocation::Url(location.to_string()), anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)), )),
(mime::APPLICATION, mime::JSON) => Ok(( (mime::APPLICATION, mime::JSON) => Ok((
Some("json".to_string()), Some("json".to_string()),
@ -328,25 +321,29 @@ pub async fn post(
ShellError::labeled_error( ShellError::labeled_error(
"Could not load text from remote url", "Could not load text from remote url",
"could not load", "could not load",
tag, &tag,
) )
})?), })?),
tag, Tag {
AnchorLocation::Url(location.to_string()), anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)), )),
(mime::APPLICATION, mime::OCTET_STREAM) => { (mime::APPLICATION, mime::OCTET_STREAM) => {
let buf: Vec<u8> = r.body_bytes().await.map_err(|_| { let buf: Vec<u8> = r.body_bytes().await.map_err(|_| {
ShellError::labeled_error( ShellError::labeled_error(
"Could not load binary file", "Could not load binary file",
"could not load", "could not load",
tag, &tag,
) )
})?; })?;
Ok(( Ok((
None, None,
Value::binary(buf), Value::binary(buf),
tag, Tag {
AnchorLocation::Url(location.to_string()), anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)) ))
} }
(mime::IMAGE, image_ty) => { (mime::IMAGE, image_ty) => {
@ -354,14 +351,16 @@ pub async fn post(
ShellError::labeled_error( ShellError::labeled_error(
"Could not load image file", "Could not load image file",
"could not load", "could not load",
tag, &tag,
) )
})?; })?;
Ok(( Ok((
Some(image_ty.to_string()), Some(image_ty.to_string()),
Value::binary(buf), Value::binary(buf),
tag, Tag {
AnchorLocation::Url(location.to_string()), anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)) ))
} }
(mime::TEXT, mime::HTML) => Ok(( (mime::TEXT, mime::HTML) => Ok((
@ -370,11 +369,13 @@ pub async fn post(
ShellError::labeled_error( ShellError::labeled_error(
"Could not load text from remote url", "Could not load text from remote url",
"could not load", "could not load",
tag, &tag,
) )
})?), })?),
tag, Tag {
AnchorLocation::Url(location.to_string()), anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)), )),
(mime::TEXT, mime::PLAIN) => { (mime::TEXT, mime::PLAIN) => {
let path_extension = url::Url::parse(location) let path_extension = url::Url::parse(location)
@ -394,11 +395,13 @@ pub async fn post(
ShellError::labeled_error( ShellError::labeled_error(
"Could not load text from remote url", "Could not load text from remote url",
"could not load", "could not load",
tag, &tag,
) )
})?), })?),
tag, Tag {
AnchorLocation::Url(location.to_string()), anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)) ))
} }
(ty, sub_ty) => Ok(( (ty, sub_ty) => Ok((
@ -407,16 +410,20 @@ pub async fn post(
"Not yet supported MIME type: {} {}", "Not yet supported MIME type: {} {}",
ty, sub_ty ty, sub_ty
)), )),
tag, Tag {
AnchorLocation::Url(location.to_string()), anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)), )),
} }
} }
None => Ok(( None => Ok((
None, None,
Value::string(format!("No content type found")), Value::string(format!("No content type found")),
tag, Tag {
AnchorLocation::Url(location.to_string()), anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)), )),
}, },
Err(_) => { Err(_) => {

View File

@ -119,49 +119,48 @@ fn save(
input, input,
name, name,
shell_manager, shell_manager,
source_map,
host, host,
ctrl_c,
commands: registry, commands: registry,
.. ..
}: RunnableContext, }: RunnableContext,
raw_args: RawCommandArgs, raw_args: RawCommandArgs,
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
let mut full_path = PathBuf::from(shell_manager.path()); let mut full_path = PathBuf::from(shell_manager.path());
let name_tag = name; let name_tag = name.clone();
let source_map = source_map.clone();
let stream = async_stream! { let stream = async_stream! {
let input: Vec<Tagged<Value>> = input.values.collect().await; let input: Vec<Tagged<Value>> = input.values.collect().await;
if path.is_none() { if path.is_none() {
// If there is no filename, check the metadata for the anchor filename // If there is no filename, check the metadata for the anchor filename
if input.len() > 0 { if input.len() > 0 {
let anchor = input[0].anchor(); let anchor = input[0].anchor();
match source_map.get(&anchor) { match anchor {
Some(path) => match path { Some(path) => match path {
AnchorLocation::File(file) => { AnchorLocation::File(file) => {
full_path.push(Path::new(file)); full_path.push(Path::new(&file));
} }
_ => { _ => {
yield Err(ShellError::labeled_error( yield Err(ShellError::labeled_error(
"Save requires a filepath", "Save requires a filepath (1)",
"needs path", "needs path",
name_tag, name_tag.clone(),
)); ));
} }
}, },
None => { None => {
yield Err(ShellError::labeled_error( yield Err(ShellError::labeled_error(
"Save requires a filepath", "Save requires a filepath (2)",
"needs path", "needs path",
name_tag, name_tag.clone(),
)); ));
} }
} }
} else { } else {
yield Err(ShellError::labeled_error( yield Err(ShellError::labeled_error(
"Save requires a filepath", "Save requires a filepath (3)",
"needs path", "needs path",
name_tag, name_tag.clone(),
)); ));
} }
} else { } else {
@ -179,6 +178,7 @@ fn save(
if let Some(converter) = registry.get_command(&command_name) { if let Some(converter) = registry.get_command(&command_name) {
let new_args = RawCommandArgs { let new_args = RawCommandArgs {
host, host,
ctrl_c,
shell_manager, shell_manager,
call_info: UnevaluatedCallInfo { call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call { args: crate::parser::hir::Call {
@ -187,7 +187,6 @@ fn save(
named: None named: None
}, },
source: raw_args.call_info.source, source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag, name_tag: raw_args.call_info.name_tag,
} }
}; };
@ -212,9 +211,9 @@ fn save(
match content { match content {
Ok(save_data) => match std::fs::write(full_path, save_data) { Ok(save_data) => match std::fs::write(full_path, save_data) {
Ok(o) => o, Ok(o) => o,
Err(e) => yield Err(ShellError::string(e.to_string())), Err(e) => yield Err(ShellError::labeled_error(e.to_string(), "for command", name)),
}, },
Err(e) => yield Err(ShellError::string(e.to_string())), Err(e) => yield Err(ShellError::labeled_error(e.to_string(), "for command", name)),
} }
}; };

View File

@ -2,6 +2,7 @@ use crate::commands::WholeStreamCommand;
use crate::data::TaggedDictBuilder; use crate::data::TaggedDictBuilder;
use crate::errors::ShellError; use crate::errors::ShellError;
use crate::prelude::*; use crate::prelude::*;
use std::sync::atomic::Ordering;
pub struct Shells; pub struct Shells;
@ -32,14 +33,14 @@ fn shells(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream
let tag = args.call_info.name_tag; let tag = args.call_info.name_tag;
for (index, shell) in args.shell_manager.shells.lock().unwrap().iter().enumerate() { for (index, shell) in args.shell_manager.shells.lock().unwrap().iter().enumerate() {
let mut dict = TaggedDictBuilder::new(tag); let mut dict = TaggedDictBuilder::new(&tag);
if index == args.shell_manager.current_shell { if index == (*args.shell_manager.current_shell).load(Ordering::SeqCst) {
dict.insert(" ", "X".to_string()); dict.insert(" ", "X".to_string());
} else { } else {
dict.insert(" ", " ".to_string()); dict.insert(" ", " ".to_string());
} }
dict.insert("name", shell.name(&args.call_info.source_map)); dict.insert("name", shell.name());
dict.insert("path", shell.path()); dict.insert("path", shell.path());
shells_out.push_back(dict.into_tagged_value()); shells_out.push_back(dict.into_tagged_value());

View File

@ -37,7 +37,7 @@ fn size(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream,
_ => Err(ShellError::labeled_error_with_secondary( _ => Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
tag, &tag,
"value originates from here", "value originates from here",
v.tag(), v.tag(),
)), )),

View File

@ -1,6 +1,7 @@
use crate::commands::WholeStreamCommand; use crate::commands::WholeStreamCommand;
use crate::errors::ShellError; use crate::errors::ShellError;
use crate::prelude::*; use crate::prelude::*;
use log::trace;
pub struct SkipWhile; pub struct SkipWhile;
@ -38,7 +39,9 @@ pub fn skip_while(
RunnableContext { input, .. }: RunnableContext, RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
let objects = input.values.skip_while(move |item| { let objects = input.values.skip_while(move |item| {
trace!("ITEM = {:?}", item);
let result = condition.invoke(&item); let result = condition.invoke(&item);
trace!("RESULT = {:?}", result);
let return_value = match result { let return_value = match result {
Ok(ref v) if v.is_true() => true, Ok(ref v) if v.is_true() => true,

View File

@ -94,7 +94,7 @@ fn split_column(
_ => Err(ShellError::labeled_error_with_secondary( _ => Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
name, &name,
"value originates from here", "value originates from here",
v.tag(), v.tag(),
)), )),

View File

@ -60,7 +60,7 @@ fn split_row(
result.push_back(Err(ShellError::labeled_error_with_secondary( result.push_back(Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
name, &name,
"value originates from here", "value originates from here",
v.tag(), v.tag(),
))); )));

View File

@ -5,16 +5,13 @@ use crate::prelude::*;
pub struct Table; pub struct Table;
#[derive(Deserialize)]
pub struct TableArgs {}
impl WholeStreamCommand for Table { impl WholeStreamCommand for Table {
fn name(&self) -> &str { fn name(&self) -> &str {
"table" "table"
} }
fn signature(&self) -> Signature { fn signature(&self) -> Signature {
Signature::build("table") Signature::build("table").named("start_number", SyntaxShape::Number)
} }
fn usage(&self) -> &str { fn usage(&self) -> &str {
@ -26,16 +23,29 @@ impl WholeStreamCommand for Table {
args: CommandArgs, args: CommandArgs,
registry: &CommandRegistry, registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
args.process(registry, table)?.run() table(args, registry)
} }
} }
pub fn table(_args: TableArgs, context: RunnableContext) -> Result<OutputStream, ShellError> { fn table(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;
let stream = async_stream! { let stream = async_stream! {
let input: Vec<Tagged<Value>> = context.input.into_vec().await; let host = args.host.clone();
let start_number = match args.get("start_number") {
Some(Tagged { item: Value::Primitive(Primitive::Int(i)), .. }) => {
i.to_usize().unwrap()
}
_ => {
0
}
};
let input: Vec<Tagged<Value>> = args.input.into_vec().await;
if input.len() > 0 { if input.len() > 0 {
let mut host = context.host.lock().unwrap(); let mut host = host.lock().unwrap();
let view = TableView::from_list(&input); let view = TableView::from_list(&input, start_number);
if let Some(view) = view { if let Some(view) = view {
handle_unexpected(&mut *host, |host| crate::format::print_view(&view, host)); handle_unexpected(&mut *host, |host| crate::format::print_view(&view, host));
} }

View File

@ -28,7 +28,6 @@ impl WholeStreamCommand for Tags {
} }
fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream, ShellError> { fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let source_map = args.call_info.source_map.clone();
Ok(args Ok(args
.input .input
.values .values
@ -38,11 +37,11 @@ fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream,
let anchor = v.anchor(); let anchor = v.anchor();
let span = v.tag().span; let span = v.tag().span;
let mut dict = TaggedDictBuilder::new(v.tag()); let mut dict = TaggedDictBuilder::new(v.tag());
dict.insert("start", Value::int(span.start as i64)); dict.insert("start", Value::int(span.start() as i64));
dict.insert("end", Value::int(span.end as i64)); dict.insert("end", Value::int(span.end() as i64));
tags.insert_tagged("span", dict.into_tagged_value()); tags.insert_tagged("span", dict.into_tagged_value());
match source_map.get(&anchor) { match anchor {
Some(AnchorLocation::File(source)) => { Some(AnchorLocation::File(source)) => {
tags.insert("anchor", Value::string(source)); tags.insert("anchor", Value::string(source));
} }

View File

@ -46,7 +46,7 @@ pub fn value_to_bson_value(v: &Tagged<Value>) -> Result<Bson, ShellError> {
Value::Primitive(Primitive::BeginningOfStream) => Bson::Null, Value::Primitive(Primitive::BeginningOfStream) => Bson::Null,
Value::Primitive(Primitive::Decimal(d)) => Bson::FloatingPoint(d.to_f64().unwrap()), Value::Primitive(Primitive::Decimal(d)) => Bson::FloatingPoint(d.to_f64().unwrap()),
Value::Primitive(Primitive::Int(i)) => { Value::Primitive(Primitive::Int(i)) => {
Bson::I64(i.tagged(v.tag).coerce_into("converting to BSON")?) Bson::I64(i.tagged(&v.tag).coerce_into("converting to BSON")?)
} }
Value::Primitive(Primitive::Nothing) => Bson::Null, Value::Primitive(Primitive::Nothing) => Bson::Null,
Value::Primitive(Primitive::String(s)) => Bson::String(s.clone()), Value::Primitive(Primitive::String(s)) => Bson::String(s.clone()),
@ -58,6 +58,7 @@ pub fn value_to_bson_value(v: &Tagged<Value>) -> Result<Bson, ShellError> {
.collect::<Result<_, _>>()?, .collect::<Result<_, _>>()?,
), ),
Value::Block(_) => Bson::Null, Value::Block(_) => Bson::Null,
Value::Error(e) => return Err(e.clone()),
Value::Primitive(Primitive::Binary(b)) => Bson::Binary(BinarySubtype::Generic, b.clone()), Value::Primitive(Primitive::Binary(b)) => Bson::Binary(BinarySubtype::Generic, b.clone()),
Value::Row(o) => object_value_to_bson(o)?, Value::Row(o) => object_value_to_bson(o)?,
}) })
@ -170,7 +171,7 @@ fn get_binary_subtype<'a>(tagged_value: &'a Tagged<Value>) -> Result<BinarySubty
_ => unreachable!(), _ => unreachable!(),
}), }),
Value::Primitive(Primitive::Int(i)) => Ok(BinarySubtype::UserDefined( Value::Primitive(Primitive::Int(i)) => Ok(BinarySubtype::UserDefined(
i.tagged(tagged_value.tag) i.tagged(&tagged_value.tag)
.coerce_into("converting to BSON binary subtype")?, .coerce_into("converting to BSON binary subtype")?,
)), )),
_ => Err(ShellError::type_error( _ => Err(ShellError::type_error(
@ -207,12 +208,12 @@ fn bson_value_to_bytes(bson: Bson, tag: Tag) -> Result<Vec<u8>, ShellError> {
Bson::Array(a) => { Bson::Array(a) => {
for v in a.into_iter() { for v in a.into_iter() {
match v { match v {
Bson::Document(d) => shell_encode_document(&mut out, d, tag)?, Bson::Document(d) => shell_encode_document(&mut out, d, tag.clone())?,
_ => { _ => {
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
format!("All top level values must be Documents, got {:?}", v), format!("All top level values must be Documents, got {:?}", v),
"requires BSON-compatible document", "requires BSON-compatible document",
tag, &tag,
)) ))
} }
} }
@ -237,7 +238,7 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
let input: Vec<Tagged<Value>> = args.input.values.collect().await; let input: Vec<Tagged<Value>> = args.input.values.collect().await;
let to_process_input = if input.len() > 1 { let to_process_input = if input.len() > 1 {
let tag = input[0].tag; let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ] vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 { } else if input.len() == 1 {
input input
@ -248,14 +249,14 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
for value in to_process_input { for value in to_process_input {
match value_to_bson_value(&value) { match value_to_bson_value(&value) {
Ok(bson_value) => { Ok(bson_value) => {
match bson_value_to_bytes(bson_value, name_tag) { match bson_value_to_bytes(bson_value, name_tag.clone()) {
Ok(x) => yield ReturnSuccess::value( Ok(x) => yield ReturnSuccess::value(
Value::binary(x).tagged(name_tag), Value::binary(x).tagged(&name_tag),
), ),
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with BSON-compatible structure.tag() from pipeline", "Expected a table with BSON-compatible structure.tag() from pipeline",
"requires BSON-compatible input", "requires BSON-compatible input",
name_tag, &name_tag,
"originates from here".to_string(), "originates from here".to_string(),
value.tag(), value.tag(),
)), )),
@ -264,7 +265,7 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
_ => yield Err(ShellError::labeled_error( _ => yield Err(ShellError::labeled_error(
"Expected a table with BSON-compatible structure from pipeline", "Expected a table with BSON-compatible structure from pipeline",
"requires BSON-compatible input", "requires BSON-compatible input",
name_tag)) &name_tag))
} }
} }
}; };

View File

@ -32,8 +32,8 @@ impl WholeStreamCommand for ToCSV {
} }
} }
pub fn value_to_csv_value(v: &Value) -> Value { pub fn value_to_csv_value(v: &Tagged<Value>) -> Tagged<Value> {
match v { match &v.item {
Value::Primitive(Primitive::String(s)) => Value::Primitive(Primitive::String(s.clone())), Value::Primitive(Primitive::String(s)) => Value::Primitive(Primitive::String(s.clone())),
Value::Primitive(Primitive::Nothing) => Value::Primitive(Primitive::Nothing), Value::Primitive(Primitive::Nothing) => Value::Primitive(Primitive::Nothing),
Value::Primitive(Primitive::Boolean(b)) => Value::Primitive(Primitive::Boolean(b.clone())), Value::Primitive(Primitive::Boolean(b)) => Value::Primitive(Primitive::Boolean(b.clone())),
@ -47,10 +47,11 @@ pub fn value_to_csv_value(v: &Value) -> Value {
Value::Block(_) => Value::Primitive(Primitive::Nothing), Value::Block(_) => Value::Primitive(Primitive::Nothing),
_ => Value::Primitive(Primitive::Nothing), _ => Value::Primitive(Primitive::Nothing),
} }
.tagged(v.tag.clone())
} }
fn to_string_helper(v: &Value) -> Result<String, ShellError> { fn to_string_helper(v: &Tagged<Value>) -> Result<String, ShellError> {
match v { match &v.item {
Value::Primitive(Primitive::Date(d)) => Ok(d.to_string()), Value::Primitive(Primitive::Date(d)) => Ok(d.to_string()),
Value::Primitive(Primitive::Bytes(b)) => Ok(format!("{}", b)), Value::Primitive(Primitive::Bytes(b)) => Ok(format!("{}", b)),
Value::Primitive(Primitive::Boolean(_)) => Ok(v.as_string()?), Value::Primitive(Primitive::Boolean(_)) => Ok(v.as_string()?),
@ -60,7 +61,13 @@ fn to_string_helper(v: &Value) -> Result<String, ShellError> {
Value::Table(_) => return Ok(String::from("[Table]")), Value::Table(_) => return Ok(String::from("[Table]")),
Value::Row(_) => return Ok(String::from("[Row]")), Value::Row(_) => return Ok(String::from("[Row]")),
Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()), Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()),
_ => return Err(ShellError::string("Unexpected value")), _ => {
return Err(ShellError::labeled_error(
"Unexpected value",
"",
v.tag.clone(),
))
}
} }
} }
@ -76,7 +83,9 @@ fn merge_descriptors(values: &[Tagged<Value>]) -> Vec<String> {
ret ret
} }
pub fn to_string(v: &Value) -> Result<String, ShellError> { pub fn to_string(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
let v = &tagged_value.item;
match v { match v {
Value::Row(o) => { Value::Row(o) => {
let mut wtr = WriterBuilder::new().from_writer(vec![]); let mut wtr = WriterBuilder::new().from_writer(vec![]);
@ -92,11 +101,20 @@ pub fn to_string(v: &Value) -> Result<String, ShellError> {
wtr.write_record(fields).expect("can not write."); wtr.write_record(fields).expect("can not write.");
wtr.write_record(values).expect("can not write."); wtr.write_record(values).expect("can not write.");
return Ok(String::from_utf8( return Ok(String::from_utf8(wtr.into_inner().map_err(|_| {
wtr.into_inner() ShellError::labeled_error(
.map_err(|_| ShellError::string("Could not convert record"))?, "Could not convert record",
) "original value",
.map_err(|_| ShellError::string("Could not convert record"))?); &tagged_value.tag,
)
})?)
.map_err(|_| {
ShellError::labeled_error(
"Could not convert record",
"original value",
&tagged_value.tag,
)
})?);
} }
Value::Table(list) => { Value::Table(list) => {
let mut wtr = WriterBuilder::new().from_writer(vec![]); let mut wtr = WriterBuilder::new().from_writer(vec![]);
@ -120,13 +138,22 @@ pub fn to_string(v: &Value) -> Result<String, ShellError> {
wtr.write_record(&row).expect("can not write"); wtr.write_record(&row).expect("can not write");
} }
return Ok(String::from_utf8( return Ok(String::from_utf8(wtr.into_inner().map_err(|_| {
wtr.into_inner() ShellError::labeled_error(
.map_err(|_| ShellError::string("Could not convert record"))?, "Could not convert record",
) "original value",
.map_err(|_| ShellError::string("Could not convert record"))?); &tagged_value.tag,
)
})?)
.map_err(|_| {
ShellError::labeled_error(
"Could not convert record",
"original value",
&tagged_value.tag,
)
})?);
} }
_ => return to_string_helper(&v), _ => return to_string_helper(tagged_value),
} }
} }
@ -139,7 +166,7 @@ fn to_csv(
let input: Vec<Tagged<Value>> = input.values.collect().await; let input: Vec<Tagged<Value>> = input.values.collect().await;
let to_process_input = if input.len() > 1 { let to_process_input = if input.len() > 1 {
let tag = input[0].tag; let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ] vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 { } else if input.len() == 1 {
input input
@ -148,20 +175,20 @@ fn to_csv(
}; };
for value in to_process_input { for value in to_process_input {
match to_string(&value_to_csv_value(&value.item)) { match to_string(&value_to_csv_value(&value)) {
Ok(x) => { Ok(x) => {
let converted = if headerless { let converted = if headerless {
x.lines().skip(1).collect() x.lines().skip(1).collect()
} else { } else {
x x
}; };
yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(name_tag)) yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(&name_tag))
} }
_ => { _ => {
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with CSV-compatible structure.tag() from pipeline", "Expected a table with CSV-compatible structure.tag() from pipeline",
"requires CSV-compatible input", "requires CSV-compatible input",
name_tag, &name_tag,
"originates from here".to_string(), "originates from here".to_string(),
value.tag(), value.tag(),
)) ))

View File

@ -42,7 +42,7 @@ pub fn value_to_json_value(v: &Tagged<Value>) -> Result<serde_json::Value, Shell
.unwrap(), .unwrap(),
), ),
Value::Primitive(Primitive::Int(i)) => serde_json::Value::Number(serde_json::Number::from( Value::Primitive(Primitive::Int(i)) => serde_json::Value::Number(serde_json::Number::from(
CoerceInto::<i64>::coerce_into(i.tagged(v.tag), "converting to JSON number")?, CoerceInto::<i64>::coerce_into(i.tagged(&v.tag), "converting to JSON number")?,
)), )),
Value::Primitive(Primitive::Nothing) => serde_json::Value::Null, Value::Primitive(Primitive::Nothing) => serde_json::Value::Null,
Value::Primitive(Primitive::Pattern(s)) => serde_json::Value::String(s.clone()), Value::Primitive(Primitive::Pattern(s)) => serde_json::Value::String(s.clone()),
@ -50,6 +50,7 @@ pub fn value_to_json_value(v: &Tagged<Value>) -> Result<serde_json::Value, Shell
Value::Primitive(Primitive::Path(s)) => serde_json::Value::String(s.display().to_string()), Value::Primitive(Primitive::Path(s)) => serde_json::Value::String(s.display().to_string()),
Value::Table(l) => serde_json::Value::Array(json_list(l)?), Value::Table(l) => serde_json::Value::Array(json_list(l)?),
Value::Error(e) => return Err(e.clone()),
Value::Block(_) => serde_json::Value::Null, Value::Block(_) => serde_json::Value::Null,
Value::Primitive(Primitive::Binary(b)) => serde_json::Value::Array( Value::Primitive(Primitive::Binary(b)) => serde_json::Value::Array(
b.iter() b.iter()
@ -85,7 +86,7 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
let input: Vec<Tagged<Value>> = args.input.values.collect().await; let input: Vec<Tagged<Value>> = args.input.values.collect().await;
let to_process_input = if input.len() > 1 { let to_process_input = if input.len() > 1 {
let tag = input[0].tag; let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ] vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 { } else if input.len() == 1 {
input input
@ -98,12 +99,12 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
Ok(json_value) => { Ok(json_value) => {
match serde_json::to_string(&json_value) { match serde_json::to_string(&json_value) {
Ok(x) => yield ReturnSuccess::value( Ok(x) => yield ReturnSuccess::value(
Value::Primitive(Primitive::String(x)).tagged(name_tag), Value::Primitive(Primitive::String(x)).tagged(&name_tag),
), ),
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with JSON-compatible structure.tag() from pipeline", "Expected a table with JSON-compatible structure.tag() from pipeline",
"requires JSON-compatible input", "requires JSON-compatible input",
name_tag, &name_tag,
"originates from here".to_string(), "originates from here".to_string(),
value.tag(), value.tag(),
)), )),
@ -112,7 +113,7 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
_ => yield Err(ShellError::labeled_error( _ => yield Err(ShellError::labeled_error(
"Expected a table with JSON-compatible structure from pipeline", "Expected a table with JSON-compatible structure from pipeline",
"requires JSON-compatible input", "requires JSON-compatible input",
name_tag)) &name_tag))
} }
} }
}; };

View File

@ -38,10 +38,10 @@ pub fn value_to_toml_value(v: &Tagged<Value>) -> Result<toml::Value, ShellError>
toml::Value::String("<Beginning of Stream>".to_string()) toml::Value::String("<Beginning of Stream>".to_string())
} }
Value::Primitive(Primitive::Decimal(f)) => { Value::Primitive(Primitive::Decimal(f)) => {
toml::Value::Float(f.tagged(v.tag).coerce_into("converting to TOML float")?) toml::Value::Float(f.tagged(&v.tag).coerce_into("converting to TOML float")?)
} }
Value::Primitive(Primitive::Int(i)) => { Value::Primitive(Primitive::Int(i)) => {
toml::Value::Integer(i.tagged(v.tag).coerce_into("converting to TOML integer")?) toml::Value::Integer(i.tagged(&v.tag).coerce_into("converting to TOML integer")?)
} }
Value::Primitive(Primitive::Nothing) => toml::Value::String("<Nothing>".to_string()), Value::Primitive(Primitive::Nothing) => toml::Value::String("<Nothing>".to_string()),
Value::Primitive(Primitive::Pattern(s)) => toml::Value::String(s.clone()), Value::Primitive(Primitive::Pattern(s)) => toml::Value::String(s.clone()),
@ -49,6 +49,7 @@ pub fn value_to_toml_value(v: &Tagged<Value>) -> Result<toml::Value, ShellError>
Value::Primitive(Primitive::Path(s)) => toml::Value::String(s.display().to_string()), Value::Primitive(Primitive::Path(s)) => toml::Value::String(s.display().to_string()),
Value::Table(l) => toml::Value::Array(collect_values(l)?), Value::Table(l) => toml::Value::Array(collect_values(l)?),
Value::Error(e) => return Err(e.clone()),
Value::Block(_) => toml::Value::String("<Block>".to_string()), Value::Block(_) => toml::Value::String("<Block>".to_string()),
Value::Primitive(Primitive::Binary(b)) => { Value::Primitive(Primitive::Binary(b)) => {
toml::Value::Array(b.iter().map(|x| toml::Value::Integer(*x as i64)).collect()) toml::Value::Array(b.iter().map(|x| toml::Value::Integer(*x as i64)).collect())
@ -80,7 +81,7 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
let input: Vec<Tagged<Value>> = args.input.values.collect().await; let input: Vec<Tagged<Value>> = args.input.values.collect().await;
let to_process_input = if input.len() > 1 { let to_process_input = if input.len() > 1 {
let tag = input[0].tag; let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ] vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 { } else if input.len() == 1 {
input input
@ -93,12 +94,12 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
Ok(toml_value) => { Ok(toml_value) => {
match toml::to_string(&toml_value) { match toml::to_string(&toml_value) {
Ok(x) => yield ReturnSuccess::value( Ok(x) => yield ReturnSuccess::value(
Value::Primitive(Primitive::String(x)).tagged(name_tag), Value::Primitive(Primitive::String(x)).tagged(&name_tag),
), ),
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with TOML-compatible structure.tag() from pipeline", "Expected a table with TOML-compatible structure.tag() from pipeline",
"requires TOML-compatible input", "requires TOML-compatible input",
name_tag, &name_tag,
"originates from here".to_string(), "originates from here".to_string(),
value.tag(), value.tag(),
)), )),
@ -107,7 +108,7 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
_ => yield Err(ShellError::labeled_error( _ => yield Err(ShellError::labeled_error(
"Expected a table with TOML-compatible structure from pipeline", "Expected a table with TOML-compatible structure from pipeline",
"requires TOML-compatible input", "requires TOML-compatible input",
name_tag)) &name_tag))
} }
} }
}; };

View File

@ -32,7 +32,9 @@ impl WholeStreamCommand for ToTSV {
} }
} }
pub fn value_to_tsv_value(v: &Value) -> Value { pub fn value_to_tsv_value(tagged_value: &Tagged<Value>) -> Tagged<Value> {
let v = &tagged_value.item;
match v { match v {
Value::Primitive(Primitive::String(s)) => Value::Primitive(Primitive::String(s.clone())), Value::Primitive(Primitive::String(s)) => Value::Primitive(Primitive::String(s.clone())),
Value::Primitive(Primitive::Nothing) => Value::Primitive(Primitive::Nothing), Value::Primitive(Primitive::Nothing) => Value::Primitive(Primitive::Nothing),
@ -47,20 +49,28 @@ pub fn value_to_tsv_value(v: &Value) -> Value {
Value::Block(_) => Value::Primitive(Primitive::Nothing), Value::Block(_) => Value::Primitive(Primitive::Nothing),
_ => Value::Primitive(Primitive::Nothing), _ => Value::Primitive(Primitive::Nothing),
} }
.tagged(&tagged_value.tag)
} }
fn to_string_helper(v: &Value) -> Result<String, ShellError> { fn to_string_helper(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
let v = &tagged_value.item;
match v { match v {
Value::Primitive(Primitive::Date(d)) => Ok(d.to_string()), Value::Primitive(Primitive::Date(d)) => Ok(d.to_string()),
Value::Primitive(Primitive::Bytes(b)) => Ok(format!("{}", b)), Value::Primitive(Primitive::Bytes(b)) => Ok(format!("{}", b)),
Value::Primitive(Primitive::Boolean(_)) => Ok(v.as_string()?), Value::Primitive(Primitive::Boolean(_)) => Ok(tagged_value.as_string()?),
Value::Primitive(Primitive::Decimal(_)) => Ok(v.as_string()?), Value::Primitive(Primitive::Decimal(_)) => Ok(tagged_value.as_string()?),
Value::Primitive(Primitive::Int(_)) => Ok(v.as_string()?), Value::Primitive(Primitive::Int(_)) => Ok(tagged_value.as_string()?),
Value::Primitive(Primitive::Path(_)) => Ok(v.as_string()?), Value::Primitive(Primitive::Path(_)) => Ok(tagged_value.as_string()?),
Value::Table(_) => return Ok(String::from("[table]")), Value::Table(_) => return Ok(String::from("[table]")),
Value::Row(_) => return Ok(String::from("[row]")), Value::Row(_) => return Ok(String::from("[row]")),
Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()), Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()),
_ => return Err(ShellError::string("Unexpected value")), _ => {
return Err(ShellError::labeled_error(
"Unexpected value",
"original value",
&tagged_value.tag,
))
}
} }
} }
@ -76,7 +86,9 @@ fn merge_descriptors(values: &[Tagged<Value>]) -> Vec<String> {
ret ret
} }
pub fn to_string(v: &Value) -> Result<String, ShellError> { pub fn to_string(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
let v = &tagged_value.item;
match v { match v {
Value::Row(o) => { Value::Row(o) => {
let mut wtr = WriterBuilder::new().delimiter(b'\t').from_writer(vec![]); let mut wtr = WriterBuilder::new().delimiter(b'\t').from_writer(vec![]);
@ -91,11 +103,20 @@ pub fn to_string(v: &Value) -> Result<String, ShellError> {
wtr.write_record(fields).expect("can not write."); wtr.write_record(fields).expect("can not write.");
wtr.write_record(values).expect("can not write."); wtr.write_record(values).expect("can not write.");
return Ok(String::from_utf8( return Ok(String::from_utf8(wtr.into_inner().map_err(|_| {
wtr.into_inner() ShellError::labeled_error(
.map_err(|_| ShellError::string("Could not convert record"))?, "Could not convert record",
) "original value",
.map_err(|_| ShellError::string("Could not convert record"))?); &tagged_value.tag,
)
})?)
.map_err(|_| {
ShellError::labeled_error(
"Could not convert record",
"original value",
&tagged_value.tag,
)
})?);
} }
Value::Table(list) => { Value::Table(list) => {
let mut wtr = WriterBuilder::new().delimiter(b'\t').from_writer(vec![]); let mut wtr = WriterBuilder::new().delimiter(b'\t').from_writer(vec![]);
@ -119,13 +140,22 @@ pub fn to_string(v: &Value) -> Result<String, ShellError> {
wtr.write_record(&row).expect("can not write"); wtr.write_record(&row).expect("can not write");
} }
return Ok(String::from_utf8( return Ok(String::from_utf8(wtr.into_inner().map_err(|_| {
wtr.into_inner() ShellError::labeled_error(
.map_err(|_| ShellError::string("Could not convert record"))?, "Could not convert record",
) "original value",
.map_err(|_| ShellError::string("Could not convert record"))?); &tagged_value.tag,
)
})?)
.map_err(|_| {
ShellError::labeled_error(
"Could not convert record",
"original value",
&tagged_value.tag,
)
})?);
} }
_ => return to_string_helper(&v), _ => return to_string_helper(tagged_value),
} }
} }
@ -138,7 +168,7 @@ fn to_tsv(
let input: Vec<Tagged<Value>> = input.values.collect().await; let input: Vec<Tagged<Value>> = input.values.collect().await;
let to_process_input = if input.len() > 1 { let to_process_input = if input.len() > 1 {
let tag = input[0].tag; let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ] vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 { } else if input.len() == 1 {
input input
@ -147,20 +177,20 @@ fn to_tsv(
}; };
for value in to_process_input { for value in to_process_input {
match to_string(&value_to_tsv_value(&value.item)) { match to_string(&value_to_tsv_value(&value)) {
Ok(x) => { Ok(x) => {
let converted = if headerless { let converted = if headerless {
x.lines().skip(1).collect() x.lines().skip(1).collect()
} else { } else {
x x
}; };
yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(name_tag)) yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(&name_tag))
} }
_ => { _ => {
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with TSV-compatible structure.tag() from pipeline", "Expected a table with TSV-compatible structure.tag() from pipeline",
"requires TSV-compatible input", "requires TSV-compatible input",
name_tag, &name_tag,
"originates from here".to_string(), "originates from here".to_string(),
value.tag(), value.tag(),
)) ))

View File

@ -47,7 +47,7 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Expected table with string values", "Expected table with string values",
"requires table with strings", "requires table with strings",
tag, &tag,
"value originates from here", "value originates from here",
v.tag, v.tag,
)) ))
@ -57,13 +57,13 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
match serde_urlencoded::to_string(row_vec) { match serde_urlencoded::to_string(row_vec) {
Ok(s) => { Ok(s) => {
yield ReturnSuccess::value(Value::string(s).tagged(tag)); yield ReturnSuccess::value(Value::string(s).tagged(&tag));
} }
_ => { _ => {
yield Err(ShellError::labeled_error( yield Err(ShellError::labeled_error(
"Failed to convert to url-encoded", "Failed to convert to url-encoded",
"cannot url-encode", "cannot url-encode",
tag, &tag,
)) ))
} }
} }
@ -72,7 +72,7 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Expected a table from pipeline", "Expected a table from pipeline",
"requires table input", "requires table input",
tag, &tag,
"value originates from here", "value originates from here",
value_tag, value_tag,
)) ))

View File

@ -39,7 +39,7 @@ pub fn value_to_yaml_value(v: &Tagged<Value>) -> Result<serde_yaml::Value, Shell
serde_yaml::Value::Number(serde_yaml::Number::from(f.to_f64().unwrap())) serde_yaml::Value::Number(serde_yaml::Number::from(f.to_f64().unwrap()))
} }
Value::Primitive(Primitive::Int(i)) => serde_yaml::Value::Number(serde_yaml::Number::from( Value::Primitive(Primitive::Int(i)) => serde_yaml::Value::Number(serde_yaml::Number::from(
CoerceInto::<i64>::coerce_into(i.tagged(v.tag), "converting to YAML number")?, CoerceInto::<i64>::coerce_into(i.tagged(&v.tag), "converting to YAML number")?,
)), )),
Value::Primitive(Primitive::Nothing) => serde_yaml::Value::Null, Value::Primitive(Primitive::Nothing) => serde_yaml::Value::Null,
Value::Primitive(Primitive::Pattern(s)) => serde_yaml::Value::String(s.clone()), Value::Primitive(Primitive::Pattern(s)) => serde_yaml::Value::String(s.clone()),
@ -55,6 +55,7 @@ pub fn value_to_yaml_value(v: &Tagged<Value>) -> Result<serde_yaml::Value, Shell
serde_yaml::Value::Sequence(out) serde_yaml::Value::Sequence(out)
} }
Value::Error(e) => return Err(e.clone()),
Value::Block(_) => serde_yaml::Value::Null, Value::Block(_) => serde_yaml::Value::Null,
Value::Primitive(Primitive::Binary(b)) => serde_yaml::Value::Sequence( Value::Primitive(Primitive::Binary(b)) => serde_yaml::Value::Sequence(
b.iter() b.iter()
@ -81,7 +82,7 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
let input: Vec<Tagged<Value>> = args.input.values.collect().await; let input: Vec<Tagged<Value>> = args.input.values.collect().await;
let to_process_input = if input.len() > 1 { let to_process_input = if input.len() > 1 {
let tag = input[0].tag; let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ] vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 { } else if input.len() == 1 {
input input
@ -94,12 +95,12 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
Ok(yaml_value) => { Ok(yaml_value) => {
match serde_yaml::to_string(&yaml_value) { match serde_yaml::to_string(&yaml_value) {
Ok(x) => yield ReturnSuccess::value( Ok(x) => yield ReturnSuccess::value(
Value::Primitive(Primitive::String(x)).tagged(name_tag), Value::Primitive(Primitive::String(x)).tagged(&name_tag),
), ),
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with YAML-compatible structure.tag() from pipeline", "Expected a table with YAML-compatible structure.tag() from pipeline",
"requires YAML-compatible input", "requires YAML-compatible input",
name_tag, &name_tag,
"originates from here".to_string(), "originates from here".to_string(),
value.tag(), value.tag(),
)), )),
@ -108,7 +109,7 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
_ => yield Err(ShellError::labeled_error( _ => yield Err(ShellError::labeled_error(
"Expected a table with YAML-compatible structure from pipeline", "Expected a table with YAML-compatible structure from pipeline",
"requires YAML-compatible input", "requires YAML-compatible input",
name_tag)) &name_tag))
} }
} }
}; };

View File

@ -31,14 +31,14 @@ impl WholeStreamCommand for Version {
pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> { pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?; let args = args.evaluate_once(registry)?;
let tag = args.call_info.name_tag; let tag = args.call_info.name_tag.clone();
let mut indexmap = IndexMap::new(); let mut indexmap = IndexMap::new();
indexmap.insert( indexmap.insert(
"version".to_string(), "version".to_string(),
Value::string(clap::crate_version!()).tagged(tag), Value::string(clap::crate_version!()).tagged(&tag),
); );
let value = Value::Row(Dictionary::from(indexmap)).tagged(tag); let value = Value::Row(Dictionary::from(indexmap)).tagged(&tag);
Ok(OutputStream::one(value)) Ok(OutputStream::one(value))
} }

View File

@ -49,7 +49,7 @@ impl PerItemCommand for Where {
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
"Expected a condition", "Expected a condition",
"where needs a condition", "where needs a condition",
*tag, tag,
)) ))
} }
}; };

View File

@ -33,7 +33,7 @@ pub fn which(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
let args = args.evaluate_once(registry)?; let args = args.evaluate_once(registry)?;
let mut which_out = VecDeque::new(); let mut which_out = VecDeque::new();
let tag = args.call_info.name_tag; let tag = args.call_info.name_tag.clone();
if let Some(v) = &args.call_info.args.positional { if let Some(v) = &args.call_info.args.positional {
if v.len() > 0 { if v.len() > 0 {
@ -52,7 +52,7 @@ pub fn which(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
"Expected a filename to find", "Expected a filename to find",
"needs a filename", "needs a filename",
*tag, tag,
)); ));
} }
} }

View File

@ -1,39 +1,20 @@
use crate::commands::{Command, UnevaluatedCallInfo}; use crate::commands::{Command, UnevaluatedCallInfo};
use crate::parser::hir; use crate::parser::{hir, hir::syntax_shape::ExpandContext};
use crate::prelude::*; use crate::prelude::*;
use derive_new::new; use derive_new::new;
use indexmap::IndexMap; use indexmap::IndexMap;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::error::Error; use std::error::Error;
use std::sync::Arc; use std::sync::atomic::AtomicBool;
use uuid::Uuid; use std::sync::{Arc, Mutex};
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum AnchorLocation { pub enum AnchorLocation {
Url(String), Url(String),
File(String), File(String),
Source(Text), Source(Text),
} }
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct SourceMap(HashMap<Uuid, AnchorLocation>);
impl SourceMap {
pub fn insert(&mut self, uuid: Uuid, anchor_location: AnchorLocation) {
self.0.insert(uuid, anchor_location);
}
pub fn get(&self, uuid: &Uuid) -> Option<&AnchorLocation> {
self.0.get(uuid)
}
pub fn new() -> SourceMap {
SourceMap(HashMap::new())
}
}
#[derive(Clone, new)] #[derive(Clone, new)]
pub struct CommandRegistry { pub struct CommandRegistry {
#[new(value = "Arc::new(Mutex::new(IndexMap::default()))")] #[new(value = "Arc::new(Mutex::new(IndexMap::default()))")]
@ -53,13 +34,17 @@ impl CommandRegistry {
registry.get(name).map(|c| c.clone()) registry.get(name).map(|c| c.clone())
} }
pub(crate) fn expect_command(&self, name: &str) -> Arc<Command> {
self.get_command(name).unwrap()
}
pub(crate) fn has(&self, name: &str) -> bool { pub(crate) fn has(&self, name: &str) -> bool {
let registry = self.registry.lock().unwrap(); let registry = self.registry.lock().unwrap();
registry.contains_key(name) registry.contains_key(name)
} }
fn insert(&mut self, name: impl Into<String>, command: Arc<Command>) { pub(crate) fn insert(&mut self, name: impl Into<String>, command: Arc<Command>) {
let mut registry = self.registry.lock().unwrap(); let mut registry = self.registry.lock().unwrap();
registry.insert(name.into(), command); registry.insert(name.into(), command);
} }
@ -73,8 +58,8 @@ impl CommandRegistry {
#[derive(Clone)] #[derive(Clone)]
pub struct Context { pub struct Context {
registry: CommandRegistry, registry: CommandRegistry,
pub(crate) source_map: SourceMap,
host: Arc<Mutex<dyn Host + Send>>, host: Arc<Mutex<dyn Host + Send>>,
pub ctrl_c: Arc<AtomicBool>,
pub(crate) shell_manager: ShellManager, pub(crate) shell_manager: ShellManager,
} }
@ -83,12 +68,20 @@ impl Context {
&self.registry &self.registry
} }
pub(crate) fn expand_context<'context>(
&'context self,
source: &'context Text,
span: Span,
) -> ExpandContext<'context> {
ExpandContext::new(&self.registry, span, source, self.shell_manager.homedir())
}
pub(crate) fn basic() -> Result<Context, Box<dyn Error>> { pub(crate) fn basic() -> Result<Context, Box<dyn Error>> {
let registry = CommandRegistry::new(); let registry = CommandRegistry::new();
Ok(Context { Ok(Context {
registry: registry.clone(), registry: registry.clone(),
source_map: SourceMap::new(),
host: Arc::new(Mutex::new(crate::env::host::BasicHost)), host: Arc::new(Mutex::new(crate::env::host::BasicHost)),
ctrl_c: Arc::new(AtomicBool::new(false)),
shell_manager: ShellManager::basic(registry)?, shell_manager: ShellManager::basic(registry)?,
}) })
} }
@ -105,43 +98,31 @@ impl Context {
} }
} }
pub fn add_anchor_location(&mut self, uuid: Uuid, anchor_location: AnchorLocation) { pub(crate) fn get_command(&self, name: &str) -> Option<Arc<Command>> {
self.source_map.insert(uuid, anchor_location); self.registry.get_command(name)
} }
pub(crate) fn has_command(&self, name: &str) -> bool { pub(crate) fn expect_command(&self, name: &str) -> Arc<Command> {
self.registry.has(name) self.registry.expect_command(name)
}
pub(crate) fn get_command(&self, name: &str) -> Arc<Command> {
self.registry.get_command(name).unwrap()
} }
pub(crate) fn run_command<'a>( pub(crate) fn run_command<'a>(
&mut self, &mut self,
command: Arc<Command>, command: Arc<Command>,
name_tag: Tag, name_tag: Tag,
source_map: SourceMap,
args: hir::Call, args: hir::Call,
source: &Text, source: &Text,
input: InputStream, input: InputStream,
is_first_command: bool, is_first_command: bool,
) -> OutputStream { ) -> OutputStream {
let command_args = self.command_args(args, input, source, source_map, name_tag); let command_args = self.command_args(args, input, source, name_tag);
command.run(command_args, self.registry(), is_first_command) command.run(command_args, self.registry(), is_first_command)
} }
fn call_info( fn call_info(&self, args: hir::Call, source: &Text, name_tag: Tag) -> UnevaluatedCallInfo {
&self,
args: hir::Call,
source: &Text,
source_map: SourceMap,
name_tag: Tag,
) -> UnevaluatedCallInfo {
UnevaluatedCallInfo { UnevaluatedCallInfo {
args, args,
source: source.clone(), source: source.clone(),
source_map,
name_tag, name_tag,
} }
} }
@ -151,13 +132,13 @@ impl Context {
args: hir::Call, args: hir::Call,
input: InputStream, input: InputStream,
source: &Text, source: &Text,
source_map: SourceMap,
name_tag: Tag, name_tag: Tag,
) -> CommandArgs { ) -> CommandArgs {
CommandArgs { CommandArgs {
host: self.host.clone(), host: self.host.clone(),
ctrl_c: self.ctrl_c.clone(),
shell_manager: self.shell_manager.clone(), shell_manager: self.shell_manager.clone(),
call_info: self.call_info(args, source, source_map, name_tag), call_info: self.call_info(args, source, name_tag),
input, input,
} }
} }

View File

@ -8,6 +8,7 @@ use crate::Text;
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use chrono_humanize::Humanize; use chrono_humanize::Humanize;
use derive_new::new; use derive_new::new;
use log::trace;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::fmt; use std::fmt;
use std::path::PathBuf; use std::path::PathBuf;
@ -212,11 +213,19 @@ impl Block {
let scope = Scope::new(value.clone()); let scope = Scope::new(value.clone());
if self.expressions.len() == 0 { if self.expressions.len() == 0 {
return Ok(Value::nothing().tagged(self.tag)); return Ok(Value::nothing().tagged(&self.tag));
} }
let mut last = None; let mut last = None;
trace!(
"EXPRS = {:?}",
self.expressions
.iter()
.map(|e| format!("{}", e))
.collect::<Vec<_>>()
);
for expr in self.expressions.iter() { for expr in self.expressions.iter() {
last = Some(evaluate_baseline_expr( last = Some(evaluate_baseline_expr(
&expr, &expr,
@ -236,6 +245,9 @@ pub enum Value {
Row(crate::data::Dictionary), Row(crate::data::Dictionary),
Table(Vec<Tagged<Value>>), Table(Vec<Tagged<Value>>),
// Errors are a type of value too
Error(ShellError),
Block(Block), Block(Block),
} }
@ -284,14 +296,15 @@ impl fmt::Debug for ValueDebug<'_> {
Value::Row(o) => o.debug(f), Value::Row(o) => o.debug(f),
Value::Table(l) => debug_list(l).fmt(f), Value::Table(l) => debug_list(l).fmt(f),
Value::Block(_) => write!(f, "[[block]]"), Value::Block(_) => write!(f, "[[block]]"),
Value::Error(_) => write!(f, "[[error]]"),
} }
} }
} }
impl Tagged<Value> { impl Tagged<Value> {
pub(crate) fn tagged_type_name(&self) -> Tagged<String> { pub fn tagged_type_name(&self) -> Tagged<String> {
let name = self.type_name(); let name = self.type_name();
Tagged::from_item(name, self.tag()) name.tagged(self.tag())
} }
} }
@ -303,7 +316,7 @@ impl std::convert::TryFrom<&Tagged<Value>> for Block {
Value::Block(block) => Ok(block.clone()), Value::Block(block) => Ok(block.clone()),
v => Err(ShellError::type_error( v => Err(ShellError::type_error(
"Block", "Block",
value.copy_tag(v.type_name()), v.type_name().tagged(value.tag()),
)), )),
} }
} }
@ -315,11 +328,11 @@ impl std::convert::TryFrom<&Tagged<Value>> for i64 {
fn try_from(value: &Tagged<Value>) -> Result<i64, ShellError> { fn try_from(value: &Tagged<Value>) -> Result<i64, ShellError> {
match value.item() { match value.item() {
Value::Primitive(Primitive::Int(int)) => { Value::Primitive(Primitive::Int(int)) => {
int.tagged(value.tag).coerce_into("converting to i64") int.tagged(&value.tag).coerce_into("converting to i64")
} }
v => Err(ShellError::type_error( v => Err(ShellError::type_error(
"Integer", "Integer",
value.copy_tag(v.type_name()), v.type_name().tagged(value.tag()),
)), )),
} }
} }
@ -333,7 +346,7 @@ impl std::convert::TryFrom<&Tagged<Value>> for String {
Value::Primitive(Primitive::String(s)) => Ok(s.clone()), Value::Primitive(Primitive::String(s)) => Ok(s.clone()),
v => Err(ShellError::type_error( v => Err(ShellError::type_error(
"String", "String",
value.copy_tag(v.type_name()), v.type_name().tagged(value.tag()),
)), )),
} }
} }
@ -347,7 +360,7 @@ impl std::convert::TryFrom<&Tagged<Value>> for Vec<u8> {
Value::Primitive(Primitive::Binary(b)) => Ok(b.clone()), Value::Primitive(Primitive::Binary(b)) => Ok(b.clone()),
v => Err(ShellError::type_error( v => Err(ShellError::type_error(
"Binary", "Binary",
value.copy_tag(v.type_name()), v.type_name().tagged(value.tag()),
)), )),
} }
} }
@ -361,7 +374,7 @@ impl<'a> std::convert::TryFrom<&'a Tagged<Value>> for &'a crate::data::Dictionar
Value::Row(d) => Ok(d), Value::Row(d) => Ok(d),
v => Err(ShellError::type_error( v => Err(ShellError::type_error(
"Dictionary", "Dictionary",
value.copy_tag(v.type_name()), v.type_name().tagged(value.tag()),
)), )),
} }
} }
@ -383,7 +396,7 @@ impl std::convert::TryFrom<Option<&Tagged<Value>>> for Switch {
Value::Primitive(Primitive::Boolean(true)) => Ok(Switch::Present), Value::Primitive(Primitive::Boolean(true)) => Ok(Switch::Present),
v => Err(ShellError::type_error( v => Err(ShellError::type_error(
"Boolean", "Boolean",
value.copy_tag(v.type_name()), v.type_name().tagged(value.tag()),
)), )),
}, },
} }
@ -394,15 +407,54 @@ impl Tagged<Value> {
pub(crate) fn debug(&self) -> ValueDebug<'_> { pub(crate) fn debug(&self) -> ValueDebug<'_> {
ValueDebug { value: self } ValueDebug { value: self }
} }
pub fn as_column_path(&self) -> Result<Tagged<Vec<Tagged<String>>>, ShellError> {
let mut out: Vec<Tagged<String>> = vec![];
match &self.item {
Value::Table(table) => {
for item in table {
out.push(item.as_string()?.tagged(&item.tag));
}
}
other => {
return Err(ShellError::type_error(
"column name",
other.type_name().tagged(&self.tag),
))
}
}
Ok(out.tagged(&self.tag))
}
pub(crate) fn as_string(&self) -> Result<String, ShellError> {
match &self.item {
Value::Primitive(Primitive::String(s)) => Ok(s.clone()),
Value::Primitive(Primitive::Boolean(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Decimal(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Int(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Path(x)) => Ok(format!("{}", x.display())),
// TODO: this should definitely be more general with better errors
other => Err(ShellError::labeled_error(
"Expected string",
other.type_name(),
&self.tag,
)),
}
}
} }
impl Value { impl Value {
pub(crate) fn type_name(&self) -> String { pub fn type_name(&self) -> String {
match self { match self {
Value::Primitive(p) => p.type_name(), Value::Primitive(p) => p.type_name(),
Value::Row(_) => format!("object"), Value::Row(_) => format!("row"),
Value::Table(_) => format!("list"), Value::Table(_) => format!("list"),
Value::Block(_) => format!("block"), Value::Block(_) => format!("block"),
Value::Error(_) => format!("error"),
} }
} }
@ -418,6 +470,7 @@ impl Value {
.collect(), .collect(),
Value::Block(_) => vec![], Value::Block(_) => vec![],
Value::Table(_) => vec![], Value::Table(_) => vec![],
Value::Error(_) => vec![],
} }
} }
@ -443,6 +496,22 @@ impl Value {
} }
} }
pub fn get_data_by_column_path(
&self,
tag: Tag,
path: &Vec<Tagged<String>>,
) -> Option<Tagged<&Value>> {
let mut current = self;
for p in path {
match current.get_data_by_key(p) {
Some(v) => current = v,
None => return None,
}
}
Some(current.tagged(tag))
}
pub fn get_data_by_path(&self, tag: Tag, path: &str) -> Option<Tagged<&Value>> { pub fn get_data_by_path(&self, tag: Tag, path: &str) -> Option<Tagged<&Value>> {
let mut current = self; let mut current = self;
for p in path.split(".") { for p in path.split(".") {
@ -452,7 +521,7 @@ impl Value {
} }
} }
Some(Tagged::from_item(current, tag)) Some(current.tagged(tag))
} }
pub fn insert_data_at_path( pub fn insert_data_at_path(
@ -472,8 +541,8 @@ impl Value {
// Special case for inserting at the top level // Special case for inserting at the top level
current current
.entries .entries
.insert(path.to_string(), Tagged::from_item(new_value, tag)); .insert(path.to_string(), new_value.tagged(&tag));
return Some(Tagged::from_item(new_obj, tag)); return Some(new_obj.tagged(&tag));
} }
for idx in 0..split_path.len() { for idx in 0..split_path.len() {
@ -484,13 +553,64 @@ impl Value {
Value::Row(o) => { Value::Row(o) => {
o.entries.insert( o.entries.insert(
split_path[idx + 1].to_string(), split_path[idx + 1].to_string(),
Tagged::from_item(new_value, tag), new_value.tagged(&tag),
); );
} }
_ => {} _ => {}
} }
return Some(Tagged::from_item(new_obj, tag)); return Some(new_obj.tagged(&tag));
} else {
match next.item {
Value::Row(ref mut o) => {
current = o;
}
_ => return None,
}
}
}
_ => return None,
}
}
}
None
}
pub fn insert_data_at_column_path(
&self,
tag: Tag,
split_path: &Vec<Tagged<String>>,
new_value: Value,
) -> Option<Tagged<Value>> {
let mut new_obj = self.clone();
if let Value::Row(ref mut o) = new_obj {
let mut current = o;
if split_path.len() == 1 {
// Special case for inserting at the top level
current
.entries
.insert(split_path[0].item.clone(), new_value.tagged(&tag));
return Some(new_obj.tagged(&tag));
}
for idx in 0..split_path.len() {
match current.entries.get_mut(&split_path[idx].item) {
Some(next) => {
if idx == (split_path.len() - 2) {
match &mut next.item {
Value::Row(o) => {
o.entries.insert(
split_path[idx + 1].to_string(),
new_value.tagged(&tag),
);
}
_ => {}
}
return Some(new_obj.tagged(&tag));
} else { } else {
match next.item { match next.item {
Value::Row(ref mut o) => { Value::Row(ref mut o) => {
@ -524,8 +644,41 @@ impl Value {
match current.entries.get_mut(split_path[idx]) { match current.entries.get_mut(split_path[idx]) {
Some(next) => { Some(next) => {
if idx == (split_path.len() - 1) { if idx == (split_path.len() - 1) {
*next = Tagged::from_item(replaced_value, tag); *next = replaced_value.tagged(&tag);
return Some(Tagged::from_item(new_obj, tag)); return Some(new_obj.tagged(&tag));
} else {
match next.item {
Value::Row(ref mut o) => {
current = o;
}
_ => return None,
}
}
}
_ => return None,
}
}
}
None
}
pub fn replace_data_at_column_path(
&self,
tag: Tag,
split_path: &Vec<Tagged<String>>,
replaced_value: Value,
) -> Option<Tagged<Value>> {
let mut new_obj = self.clone();
if let Value::Row(ref mut o) = new_obj {
let mut current = o;
for idx in 0..split_path.len() {
match current.entries.get_mut(&split_path[idx].item) {
Some(next) => {
if idx == (split_path.len() - 1) {
*next = replaced_value.tagged(&tag);
return Some(new_obj.tagged(&tag));
} else { } else {
match next.item { match next.item {
Value::Row(ref mut o) => { Value::Row(ref mut o) => {
@ -549,6 +702,7 @@ impl Value {
Value::Row(o) => o.get_data(desc), Value::Row(o) => o.get_data(desc),
Value::Block(_) => MaybeOwned::Owned(Value::nothing()), Value::Block(_) => MaybeOwned::Owned(Value::nothing()),
Value::Table(_) => MaybeOwned::Owned(Value::nothing()), Value::Table(_) => MaybeOwned::Owned(Value::nothing()),
Value::Error(_) => MaybeOwned::Owned(Value::nothing()),
} }
} }
@ -558,7 +712,7 @@ impl Value {
Value::Block(b) => itertools::join( Value::Block(b) => itertools::join(
b.expressions b.expressions
.iter() .iter()
.map(|e| e.source(&b.source).to_string()), .map(|e| e.span.slice(&b.source).to_string()),
"; ", "; ",
), ),
Value::Row(_) => format!("[table: 1 row]"), Value::Row(_) => format!("[table: 1 row]"),
@ -567,6 +721,7 @@ impl Value {
l.len(), l.len(),
if l.len() == 1 { "row" } else { "rows" } if l.len() == 1 { "row" } else { "rows" }
), ),
Value::Error(_) => format!("[error]"),
} }
} }
@ -607,22 +762,6 @@ impl Value {
} }
} }
pub(crate) fn as_string(&self) -> Result<String, ShellError> {
match self {
Value::Primitive(Primitive::String(s)) => Ok(s.clone()),
Value::Primitive(Primitive::Boolean(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Decimal(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Int(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Path(x)) => Ok(format!("{}", x.display())),
// TODO: this should definitely be more general with better errors
other => Err(ShellError::string(format!(
"Expected string, got {:?}",
other
))),
}
}
pub(crate) fn is_true(&self) -> bool { pub(crate) fn is_true(&self) -> bool {
match self { match self {
Value::Primitive(Primitive::Boolean(true)) => true, Value::Primitive(Primitive::Boolean(true)) => true,
@ -675,9 +814,14 @@ impl Value {
Value::Primitive(Primitive::Date(s.into())) Value::Primitive(Primitive::Date(s.into()))
} }
pub fn date_from_str(s: &str) -> Result<Value, ShellError> { pub fn date_from_str(s: Tagged<&str>) -> Result<Value, ShellError> {
let date = DateTime::parse_from_rfc3339(s) let date = DateTime::parse_from_rfc3339(s.item).map_err(|err| {
.map_err(|err| ShellError::string(&format!("Date parse error: {}", err)))?; ShellError::labeled_error(
&format!("Date parse error: {}", err),
"original value",
s.tag,
)
})?;
let date = date.with_timezone(&chrono::offset::Utc); let date = date.with_timezone(&chrono::offset::Utc);

View File

@ -7,7 +7,7 @@ use std::ops::Deref;
pub(crate) fn command_dict(command: Arc<Command>, tag: impl Into<Tag>) -> Tagged<Value> { pub(crate) fn command_dict(command: Arc<Command>, tag: impl Into<Tag>) -> Tagged<Value> {
let tag = tag.into(); let tag = tag.into();
let mut cmd_dict = TaggedDictBuilder::new(tag); let mut cmd_dict = TaggedDictBuilder::new(&tag);
cmd_dict.insert("name", Value::string(command.name())); cmd_dict.insert("name", Value::string(command.name()));
@ -42,7 +42,7 @@ fn for_spec(name: &str, ty: &str, required: bool, tag: impl Into<Tag>) -> Tagged
fn signature_dict(signature: Signature, tag: impl Into<Tag>) -> Tagged<Value> { fn signature_dict(signature: Signature, tag: impl Into<Tag>) -> Tagged<Value> {
let tag = tag.into(); let tag = tag.into();
let mut sig = TaggedListBuilder::new(tag); let mut sig = TaggedListBuilder::new(&tag);
for arg in signature.positional.iter() { for arg in signature.positional.iter() {
let is_required = match arg { let is_required = match arg {
@ -50,19 +50,19 @@ fn signature_dict(signature: Signature, tag: impl Into<Tag>) -> Tagged<Value> {
PositionalType::Optional(_, _) => false, PositionalType::Optional(_, _) => false,
}; };
sig.insert_tagged(for_spec(arg.name(), "argument", is_required, tag)); sig.insert_tagged(for_spec(arg.name(), "argument", is_required, &tag));
} }
if let Some(_) = signature.rest_positional { if let Some(_) = signature.rest_positional {
let is_required = false; let is_required = false;
sig.insert_tagged(for_spec("rest", "argument", is_required, tag)); sig.insert_tagged(for_spec("rest", "argument", is_required, &tag));
} }
for (name, ty) in signature.named.iter() { for (name, ty) in signature.named.iter() {
match ty { match ty {
NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, tag)), NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, &tag)),
NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, tag)), NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, &tag)),
NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, tag)), NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, &tag)),
} }
} }

View File

@ -51,8 +51,9 @@ pub fn user_data() -> Result<PathBuf, ShellError> {
} }
pub fn app_path(app_data_type: AppDataType, display: &str) -> Result<PathBuf, ShellError> { pub fn app_path(app_data_type: AppDataType, display: &str) -> Result<PathBuf, ShellError> {
let path = app_root(app_data_type, &APP_INFO) let path = app_root(app_data_type, &APP_INFO).map_err(|err| {
.map_err(|err| ShellError::string(&format!("Couldn't open {} path:\n{}", display, err)))?; ShellError::untagged_runtime_error(&format!("Couldn't open {} path:\n{}", display, err))
})?;
Ok(path) Ok(path)
} }
@ -74,11 +75,22 @@ pub fn read(
let tag = tag.into(); let tag = tag.into();
let contents = fs::read_to_string(filename) let contents = fs::read_to_string(filename)
.map(|v| v.tagged(tag)) .map(|v| v.tagged(&tag))
.map_err(|err| ShellError::string(&format!("Couldn't read config file:\n{}", err)))?; .map_err(|err| {
ShellError::labeled_error(
&format!("Couldn't read config file:\n{}", err),
"file name",
&tag,
)
})?;
let parsed: toml::Value = toml::from_str(&contents) let parsed: toml::Value = toml::from_str(&contents).map_err(|err| {
.map_err(|err| ShellError::string(&format!("Couldn't parse config file:\n{}", err)))?; ShellError::labeled_error(
&format!("Couldn't parse config file:\n{}", err),
"file name",
&tag,
)
})?;
let value = convert_toml_value_to_nu_value(&parsed, tag); let value = convert_toml_value_to_nu_value(&parsed, tag);
let tag = value.tag(); let tag = value.tag();
@ -86,7 +98,7 @@ pub fn read(
Value::Row(Dictionary { entries }) => Ok(entries), Value::Row(Dictionary { entries }) => Ok(entries),
other => Err(ShellError::type_error( other => Err(ShellError::type_error(
"Dictionary", "Dictionary",
other.type_name().tagged(tag), other.type_name().tagged(&tag),
)), )),
} }
} }

View File

@ -115,7 +115,7 @@ impl TaggedListBuilder {
} }
pub fn push(&mut self, value: impl Into<Value>) { pub fn push(&mut self, value: impl Into<Value>) {
self.list.push(value.into().tagged(self.tag)); self.list.push(value.into().tagged(&self.tag));
} }
pub fn insert_tagged(&mut self, value: impl Into<Tagged<Value>>) { pub fn insert_tagged(&mut self, value: impl Into<Tagged<Value>>) {
@ -155,7 +155,7 @@ impl TaggedDictBuilder {
} }
pub fn insert(&mut self, key: impl Into<String>, value: impl Into<Value>) { pub fn insert(&mut self, key: impl Into<String>, value: impl Into<Value>) {
self.dict.insert(key.into(), value.into().tagged(self.tag)); self.dict.insert(key.into(), value.into().tagged(&self.tag));
} }
pub fn insert_tagged(&mut self, key: impl Into<String>, value: impl Into<Tagged<Value>>) { pub fn insert_tagged(&mut self, key: impl Into<String>, value: impl Into<Tagged<Value>>) {

View File

@ -1,14 +1,52 @@
use crate::context::{AnchorLocation, SourceMap}; use crate::context::AnchorLocation;
use crate::parser::parse::parser::TracableContext;
use crate::prelude::*; use crate::prelude::*;
use crate::Text;
use derive_new::new; use derive_new::new;
use getset::Getters; use getset::Getters;
use serde::Deserialize; use serde::Deserialize;
use serde::Serialize; use serde::Serialize;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use uuid::Uuid;
#[derive(new, Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)] #[derive(new, Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub struct Spanned<T> {
pub span: Span,
pub item: T,
}
impl<T> Spanned<T> {
pub fn map<U>(self, input: impl FnOnce(T) -> U) -> Spanned<U> {
let span = self.span;
let mapped = input(self.item);
mapped.spanned(span)
}
}
pub trait SpannedItem: Sized {
fn spanned(self, span: impl Into<Span>) -> Spanned<Self> {
Spanned {
item: self,
span: span.into(),
}
}
fn spanned_unknown(self) -> Spanned<Self> {
Spanned {
item: self,
span: Span::unknown(),
}
}
}
impl<T> SpannedItem for T {}
impl<T> std::ops::Deref for Spanned<T> {
type Target = T;
fn deref(&self) -> &T {
&self.item
}
}
#[derive(new, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub struct Tagged<T> { pub struct Tagged<T> {
pub tag: Tag, pub tag: Tag,
pub item: T, pub item: T,
@ -16,7 +54,7 @@ pub struct Tagged<T> {
impl<T> HasTag for Tagged<T> { impl<T> HasTag for Tagged<T> {
fn tag(&self) -> Tag { fn tag(&self) -> Tag {
self.tag self.tag.clone()
} }
} }
@ -28,20 +66,23 @@ impl AsRef<Path> for Tagged<PathBuf> {
pub trait TaggedItem: Sized { pub trait TaggedItem: Sized {
fn tagged(self, tag: impl Into<Tag>) -> Tagged<Self> { fn tagged(self, tag: impl Into<Tag>) -> Tagged<Self> {
Tagged::from_item(self, tag.into()) Tagged {
item: self,
tag: tag.into(),
}
} }
// For now, this is a temporary facility. In many cases, there are other useful spans that we // For now, this is a temporary facility. In many cases, there are other useful spans that we
// could be using, such as the original source spans of JSON or Toml files, but we don't yet // could be using, such as the original source spans of JSON or Toml files, but we don't yet
// have the infrastructure to make that work. // have the infrastructure to make that work.
fn tagged_unknown(self) -> Tagged<Self> { fn tagged_unknown(self) -> Tagged<Self> {
Tagged::from_item( Tagged {
self, item: self,
Tag { tag: Tag {
span: Span::unknown(), span: Span::unknown(),
anchor: uuid::Uuid::nil(), anchor: None,
}, },
) }
} }
} }
@ -56,48 +97,29 @@ impl<T> std::ops::Deref for Tagged<T> {
} }
impl<T> Tagged<T> { impl<T> Tagged<T> {
pub fn with_tag(self, tag: impl Into<Tag>) -> Tagged<T> {
Tagged::from_item(self.item, tag)
}
pub fn from_item(item: T, tag: impl Into<Tag>) -> Tagged<T> {
Tagged {
item,
tag: tag.into(),
}
}
pub fn map<U>(self, input: impl FnOnce(T) -> U) -> Tagged<U> { pub fn map<U>(self, input: impl FnOnce(T) -> U) -> Tagged<U> {
let tag = self.tag(); let tag = self.tag();
let mapped = input(self.item); let mapped = input(self.item);
Tagged::from_item(mapped, tag) mapped.tagged(tag)
}
pub(crate) fn copy_tag<U>(&self, output: U) -> Tagged<U> {
Tagged::from_item(output, self.tag())
}
pub fn source(&self, source: &Text) -> Text {
Text::from(self.tag().slice(source))
} }
pub fn tag(&self) -> Tag { pub fn tag(&self) -> Tag {
self.tag self.tag.clone()
} }
pub fn span(&self) -> Span { pub fn span(&self) -> Span {
self.tag.span self.tag.span
} }
pub fn anchor(&self) -> uuid::Uuid { pub fn anchor(&self) -> Option<AnchorLocation> {
self.tag.anchor self.tag.anchor.clone()
} }
pub fn anchor_name(&self, source_map: &SourceMap) -> Option<String> { pub fn anchor_name(&self) -> Option<String> {
match source_map.get(&self.tag.anchor) { match self.tag.anchor {
Some(AnchorLocation::File(file)) => Some(file.clone()), Some(AnchorLocation::File(ref file)) => Some(file.clone()),
Some(AnchorLocation::Url(url)) => Some(url.clone()), Some(AnchorLocation::Url(ref url)) => Some(url.clone()),
_ => None, _ => None,
} }
} }
@ -113,29 +135,32 @@ impl<T> Tagged<T> {
impl From<&Tag> for Tag { impl From<&Tag> for Tag {
fn from(input: &Tag) -> Tag { fn from(input: &Tag) -> Tag {
*input input.clone()
} }
} }
impl From<nom_locate::LocatedSpanEx<&str, Uuid>> for Span { impl From<nom_locate::LocatedSpanEx<&str, TracableContext>> for Span {
fn from(input: nom_locate::LocatedSpanEx<&str, Uuid>) -> Span { fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Span {
Span { Span::new(input.offset, input.offset + input.fragment.len())
start: input.offset, }
end: input.offset + input.fragment.len(), }
}
impl From<nom_locate::LocatedSpanEx<&str, u64>> for Span {
fn from(input: nom_locate::LocatedSpanEx<&str, u64>) -> Span {
Span::new(input.offset, input.offset + input.fragment.len())
} }
} }
impl<T> impl<T>
From<( From<(
nom_locate::LocatedSpanEx<T, Uuid>, nom_locate::LocatedSpanEx<T, u64>,
nom_locate::LocatedSpanEx<T, Uuid>, nom_locate::LocatedSpanEx<T, u64>,
)> for Span )> for Span
{ {
fn from( fn from(
input: ( input: (
nom_locate::LocatedSpanEx<T, Uuid>, nom_locate::LocatedSpanEx<T, u64>,
nom_locate::LocatedSpanEx<T, Uuid>, nom_locate::LocatedSpanEx<T, u64>,
), ),
) -> Span { ) -> Span {
Span { Span {
@ -147,10 +172,7 @@ impl<T>
impl From<(usize, usize)> for Span { impl From<(usize, usize)> for Span {
fn from(input: (usize, usize)) -> Span { fn from(input: (usize, usize)) -> Span {
Span { Span::new(input.0, input.1)
start: input.0,
end: input.1,
}
} }
} }
@ -164,61 +186,60 @@ impl From<&std::ops::Range<usize>> for Span {
} }
#[derive( #[derive(
Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters, Debug, Clone, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters, new,
)] )]
pub struct Tag { pub struct Tag {
pub anchor: Uuid, pub anchor: Option<AnchorLocation>,
pub span: Span, pub span: Span,
} }
impl From<Span> for Tag { impl From<Span> for Tag {
fn from(span: Span) -> Self { fn from(span: Span) -> Self {
Tag { Tag { anchor: None, span }
anchor: uuid::Uuid::nil(),
span,
}
} }
} }
impl From<&Span> for Tag { impl From<&Span> for Tag {
fn from(span: &Span) -> Self { fn from(span: &Span) -> Self {
Tag { Tag {
anchor: uuid::Uuid::nil(), anchor: None,
span: *span, span: *span,
} }
} }
} }
impl From<(usize, usize, Uuid)> for Tag { impl From<(usize, usize, TracableContext)> for Tag {
fn from((start, end, anchor): (usize, usize, Uuid)) -> Self { fn from((start, end, _context): (usize, usize, TracableContext)) -> Self {
Tag {
anchor: None,
span: Span::new(start, end),
}
}
}
impl From<(usize, usize, AnchorLocation)> for Tag {
fn from((start, end, anchor): (usize, usize, AnchorLocation)) -> Self {
Tag {
anchor: Some(anchor),
span: Span::new(start, end),
}
}
}
impl From<(usize, usize, Option<AnchorLocation>)> for Tag {
fn from((start, end, anchor): (usize, usize, Option<AnchorLocation>)) -> Self {
Tag { Tag {
anchor, anchor,
span: Span { start, end }, span: Span::new(start, end),
} }
} }
} }
impl From<(usize, usize, Option<Uuid>)> for Tag { impl From<nom_locate::LocatedSpanEx<&str, TracableContext>> for Tag {
fn from((start, end, anchor): (usize, usize, Option<Uuid>)) -> Self { fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Tag {
Tag { Tag {
anchor: if let Some(uuid) = anchor { anchor: None,
uuid span: Span::new(input.offset, input.offset + input.fragment.len()),
} else {
uuid::Uuid::nil()
},
span: Span { start, end },
}
}
}
impl From<nom_locate::LocatedSpanEx<&str, Uuid>> for Tag {
fn from(input: nom_locate::LocatedSpanEx<&str, Uuid>) -> Tag {
Tag {
anchor: input.extra,
span: Span {
start: input.offset,
end: input.offset + input.fragment.len(),
},
} }
} }
} }
@ -237,22 +258,29 @@ impl From<&Tag> for Span {
impl Tag { impl Tag {
pub fn unknown_anchor(span: Span) -> Tag { pub fn unknown_anchor(span: Span) -> Tag {
Tag { anchor: None, span }
}
pub fn for_char(pos: usize, anchor: AnchorLocation) -> Tag {
Tag { Tag {
anchor: uuid::Uuid::nil(), anchor: Some(anchor),
span, span: Span {
start: pos,
end: pos + 1,
},
} }
} }
pub fn unknown_span(anchor: Uuid) -> Tag { pub fn unknown_span(anchor: AnchorLocation) -> Tag {
Tag { Tag {
anchor, anchor: Some(anchor),
span: Span::unknown(), span: Span::unknown(),
} }
} }
pub fn unknown() -> Tag { pub fn unknown() -> Tag {
Tag { Tag {
anchor: uuid::Uuid::nil(), anchor: None,
span: Span::unknown(), span: Span::unknown(),
} }
} }
@ -265,29 +293,73 @@ impl Tag {
); );
Tag { Tag {
span: Span { span: Span::new(self.span.start, other.span.end),
start: self.span.start, anchor: self.anchor.clone(),
end: other.span.end, }
}, }
anchor: self.anchor,
pub fn until_option(&self, other: Option<impl Into<Tag>>) -> Tag {
match other {
Some(other) => {
let other = other.into();
debug_assert!(
self.anchor == other.anchor,
"Can only merge two tags with the same anchor"
);
Tag {
span: Span::new(self.span.start, other.span.end),
anchor: self.anchor.clone(),
}
}
None => self.clone(),
} }
} }
pub fn slice<'a>(&self, source: &'a str) -> &'a str { pub fn slice<'a>(&self, source: &'a str) -> &'a str {
self.span.slice(source) self.span.slice(source)
} }
pub fn string<'a>(&self, source: &'a str) -> String {
self.span.slice(source).to_string()
}
pub fn tagged_slice<'a>(&self, source: &'a str) -> Tagged<&'a str> {
self.span.slice(source).tagged(self)
}
pub fn tagged_string<'a>(&self, source: &'a str) -> Tagged<String> {
self.span.slice(source).to_string().tagged(self)
}
}
#[allow(unused)]
pub fn tag_for_tagged_list(mut iter: impl Iterator<Item = Tag>) -> Tag {
let first = iter.next();
let first = match first {
None => return Tag::unknown(),
Some(first) => first,
};
let last = iter.last();
match last {
None => first,
Some(last) => first.until(last),
}
} }
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub struct Span { pub struct Span {
pub(crate) start: usize, start: usize,
pub(crate) end: usize, end: usize,
} }
impl From<Option<Span>> for Span { impl From<Option<Span>> for Span {
fn from(input: Option<Span>) -> Span { fn from(input: Option<Span>) -> Span {
match input { match input {
None => Span { start: 0, end: 0 }, None => Span::new(0, 0),
Some(span) => span, Some(span) => span,
} }
} }
@ -295,7 +367,54 @@ impl From<Option<Span>> for Span {
impl Span { impl Span {
pub fn unknown() -> Span { pub fn unknown() -> Span {
Span { start: 0, end: 0 } Span::new(0, 0)
}
pub fn new(start: usize, end: usize) -> Span {
assert!(
end >= start,
"Can't create a Span whose end < start, start={}, end={}",
start,
end
);
Span { start, end }
}
pub fn for_char(pos: usize) -> Span {
Span {
start: pos,
end: pos + 1,
}
}
pub fn until(&self, other: impl Into<Span>) -> Span {
let other = other.into();
Span::new(self.start, other.end)
}
pub fn until_option(&self, other: Option<impl Into<Span>>) -> Span {
match other {
Some(other) => {
let other = other.into();
Span::new(self.start, other.end)
}
None => *self,
}
}
pub fn string<'a>(&self, source: &'a str) -> String {
self.slice(source).to_string()
}
pub fn spanned_slice<'a>(&self, source: &'a str) -> Spanned<&'a str> {
self.slice(source).spanned(*self)
}
pub fn spanned_string<'a>(&self, source: &'a str) -> Spanned<String> {
self.slice(source).to_string().spanned(*self)
} }
/* /*
@ -308,6 +427,14 @@ impl Span {
} }
*/ */
pub fn start(&self) -> usize {
self.start
}
pub fn end(&self) -> usize {
self.end
}
pub fn is_unknown(&self) -> bool { pub fn is_unknown(&self) -> bool {
self.start == 0 && self.end == 0 self.start == 0 && self.end == 0
} }
@ -319,17 +446,11 @@ impl Span {
impl language_reporting::ReportingSpan for Span { impl language_reporting::ReportingSpan for Span {
fn with_start(&self, start: usize) -> Self { fn with_start(&self, start: usize) -> Self {
Span { Span::new(start, self.end)
start,
end: self.end,
}
} }
fn with_end(&self, end: usize) -> Self { fn with_end(&self, end: usize) -> Self {
Span { Span::new(self.start, end)
start: self.start,
end,
}
} }
fn start(&self) -> usize { fn start(&self) -> usize {
@ -340,33 +461,3 @@ impl language_reporting::ReportingSpan for Span {
self.end self.end
} }
} }
impl language_reporting::ReportingSpan for Tag {
fn with_start(&self, start: usize) -> Self {
Tag {
span: Span {
start,
end: self.span.end,
},
anchor: self.anchor,
}
}
fn with_end(&self, end: usize) -> Self {
Tag {
span: Span {
start: self.span.start,
end,
},
anchor: self.anchor,
}
}
fn start(&self) -> usize {
self.span.start
}
fn end(&self) -> usize {
self.span.end
}
}

View File

@ -54,7 +54,7 @@ impl ExtractType for i64 {
&Tagged { &Tagged {
item: Value::Primitive(Primitive::Int(int)), item: Value::Primitive(Primitive::Int(int)),
.. ..
} => Ok(int.tagged(value.tag).coerce_into("converting to i64")?), } => Ok(int.tagged(&value.tag).coerce_into("converting to i64")?),
other => Err(ShellError::type_error("Integer", other.tagged_type_name())), other => Err(ShellError::type_error("Integer", other.tagged_type_name())),
} }
} }
@ -68,7 +68,7 @@ impl ExtractType for u64 {
&Tagged { &Tagged {
item: Value::Primitive(Primitive::Int(int)), item: Value::Primitive(Primitive::Int(int)),
.. ..
} => Ok(int.tagged(value.tag).coerce_into("converting to u64")?), } => Ok(int.tagged(&value.tag).coerce_into("converting to u64")?),
other => Err(ShellError::type_error("Integer", other.tagged_type_name())), other => Err(ShellError::type_error("Integer", other.tagged_type_name())),
} }
} }

View File

@ -1,5 +1,6 @@
use crate::prelude::*; use crate::prelude::*;
use crate::parser::parse::parser::TracableContext;
use ansi_term::Color; use ansi_term::Color;
use derive_new::new; use derive_new::new;
use language_reporting::{Diagnostic, Label, Severity}; use language_reporting::{Diagnostic, Label, Severity};
@ -13,12 +14,20 @@ pub enum Description {
} }
impl Description { impl Description {
fn into_label(self) -> Result<Label<Tag>, String> { fn into_label(self) -> Result<Label<Span>, String> {
match self { match self {
Description::Source(s) => Ok(Label::new_primary(s.tag()).with_message(s.item)), Description::Source(s) => Ok(Label::new_primary(s.span()).with_message(s.item)),
Description::Synthetic(s) => Err(s), Description::Synthetic(s) => Err(s),
} }
} }
#[allow(unused)]
fn tag(&self) -> Tag {
match self {
Description::Source(tagged) => tagged.tag.clone(),
Description::Synthetic(_) => Tag::unknown(),
}
}
} }
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)] #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)]
@ -35,6 +44,13 @@ pub struct ShellError {
cause: Option<Box<ProximateShellError>>, cause: Option<Box<ProximateShellError>>,
} }
impl ShellError {
#[allow(unused)]
pub(crate) fn tag(&self) -> Option<Tag> {
self.error.tag()
}
}
impl ToDebug for ShellError { impl ToDebug for ShellError {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
self.error.fmt_debug(f, source) self.error.fmt_debug(f, source)
@ -46,12 +62,12 @@ impl serde::de::Error for ShellError {
where where
T: std::fmt::Display, T: std::fmt::Display,
{ {
ShellError::string(msg.to_string()) ShellError::untagged_runtime_error(msg.to_string())
} }
} }
impl ShellError { impl ShellError {
pub(crate) fn type_error( pub fn type_error(
expected: impl Into<String>, expected: impl Into<String>,
actual: Tagged<impl Into<String>>, actual: Tagged<impl Into<String>>,
) -> ShellError { ) -> ShellError {
@ -62,6 +78,21 @@ impl ShellError {
.start() .start()
} }
pub fn untagged_runtime_error(error: impl Into<String>) -> ShellError {
ProximateShellError::UntaggedRuntimeError {
reason: error.into(),
}
.start()
}
pub(crate) fn unexpected_eof(expected: impl Into<String>, tag: impl Into<Tag>) -> ShellError {
ProximateShellError::UnexpectedEof {
expected: expected.into(),
tag: tag.into(),
}
.start()
}
pub(crate) fn range_error( pub(crate) fn range_error(
expected: impl Into<ExpectedRange>, expected: impl Into<ExpectedRange>,
actual: &Tagged<impl fmt::Debug>, actual: &Tagged<impl fmt::Debug>,
@ -69,7 +100,7 @@ impl ShellError {
) -> ShellError { ) -> ShellError {
ProximateShellError::RangeError { ProximateShellError::RangeError {
kind: expected.into(), kind: expected.into(),
actual_kind: actual.copy_tag(format!("{:?}", actual.item)), actual_kind: format!("{:?}", actual.item).tagged(actual.tag()),
operation, operation,
} }
.start() .start()
@ -82,6 +113,7 @@ impl ShellError {
.start() .start()
} }
#[allow(unused)]
pub(crate) fn invalid_command(problem: impl Into<Tag>) -> ShellError { pub(crate) fn invalid_command(problem: impl Into<Tag>) -> ShellError {
ProximateShellError::InvalidCommand { ProximateShellError::InvalidCommand {
command: problem.into(), command: problem.into(),
@ -111,29 +143,29 @@ impl ShellError {
pub(crate) fn argument_error( pub(crate) fn argument_error(
command: impl Into<String>, command: impl Into<String>,
kind: ArgumentError, kind: ArgumentError,
tag: Tag, tag: impl Into<Tag>,
) -> ShellError { ) -> ShellError {
ProximateShellError::ArgumentError { ProximateShellError::ArgumentError {
command: command.into(), command: command.into(),
error: kind, error: kind,
tag, tag: tag.into(),
} }
.start() .start()
} }
pub(crate) fn invalid_external_word(tag: Tag) -> ShellError { pub(crate) fn invalid_external_word(tag: impl Into<Tag>) -> ShellError {
ProximateShellError::ArgumentError { ProximateShellError::ArgumentError {
command: "Invalid argument to Nu command (did you mean to call an external command?)" command: "Invalid argument to Nu command (did you mean to call an external command?)"
.into(), .into(),
error: ArgumentError::InvalidExternalWord, error: ArgumentError::InvalidExternalWord,
tag, tag: tag.into(),
} }
.start() .start()
} }
pub(crate) fn parse_error( pub(crate) fn parse_error(
error: nom::Err<( error: nom::Err<(
nom_locate::LocatedSpanEx<&str, uuid::Uuid>, nom_locate::LocatedSpanEx<&str, TracableContext>,
nom::error::ErrorKind, nom::error::ErrorKind,
)>, )>,
) -> ShellError { ) -> ShellError {
@ -151,25 +183,22 @@ impl ShellError {
} }
nom::Err::Failure(span) | nom::Err::Error(span) => { nom::Err::Failure(span) | nom::Err::Error(span) => {
let diagnostic = Diagnostic::new(Severity::Error, format!("Parse Error")) let diagnostic = Diagnostic::new(Severity::Error, format!("Parse Error"))
.with_label(Label::new_primary(Tag::from(span.0))); .with_label(Label::new_primary(Span::from(span.0)));
ShellError::diagnostic(diagnostic) ShellError::diagnostic(diagnostic)
} }
} }
} }
pub(crate) fn diagnostic(diagnostic: Diagnostic<Tag>) -> ShellError { pub(crate) fn diagnostic(diagnostic: Diagnostic<Span>) -> ShellError {
ProximateShellError::Diagnostic(ShellDiagnostic { diagnostic }).start() ProximateShellError::Diagnostic(ShellDiagnostic { diagnostic }).start()
} }
pub(crate) fn to_diagnostic(self) -> Diagnostic<Tag> { pub(crate) fn to_diagnostic(self) -> Diagnostic<Span> {
match self.error { match self.error {
ProximateShellError::String(StringError { title, .. }) => {
Diagnostic::new(Severity::Error, title)
}
ProximateShellError::InvalidCommand { command } => { ProximateShellError::InvalidCommand { command } => {
Diagnostic::new(Severity::Error, "Invalid command") Diagnostic::new(Severity::Error, "Invalid command")
.with_label(Label::new_primary(command)) .with_label(Label::new_primary(command.span))
} }
ProximateShellError::MissingValue { tag, reason } => { ProximateShellError::MissingValue { tag, reason } => {
let mut d = Diagnostic::new( let mut d = Diagnostic::new(
@ -178,7 +207,7 @@ impl ShellError {
); );
if let Some(tag) = tag { if let Some(tag) = tag {
d = d.with_label(Label::new_primary(tag)); d = d.with_label(Label::new_primary(tag.span));
} }
d d
@ -191,7 +220,7 @@ impl ShellError {
ArgumentError::InvalidExternalWord => Diagnostic::new( ArgumentError::InvalidExternalWord => Diagnostic::new(
Severity::Error, Severity::Error,
format!("Invalid bare word for Nu command (did you intend to invoke an external command?)")) format!("Invalid bare word for Nu command (did you intend to invoke an external command?)"))
.with_label(Label::new_primary(tag)), .with_label(Label::new_primary(tag.span)),
ArgumentError::MissingMandatoryFlag(name) => Diagnostic::new( ArgumentError::MissingMandatoryFlag(name) => Diagnostic::new(
Severity::Error, Severity::Error,
format!( format!(
@ -201,7 +230,7 @@ impl ShellError {
Color::Black.bold().paint(name) Color::Black.bold().paint(name)
), ),
) )
.with_label(Label::new_primary(tag)), .with_label(Label::new_primary(tag.span)),
ArgumentError::MissingMandatoryPositional(name) => Diagnostic::new( ArgumentError::MissingMandatoryPositional(name) => Diagnostic::new(
Severity::Error, Severity::Error,
format!( format!(
@ -211,7 +240,7 @@ impl ShellError {
), ),
) )
.with_label( .with_label(
Label::new_primary(tag).with_message(format!("requires {} parameter", name)), Label::new_primary(tag.span).with_message(format!("requires {} parameter", name)),
), ),
ArgumentError::MissingValueForName(name) => Diagnostic::new( ArgumentError::MissingValueForName(name) => Diagnostic::new(
Severity::Error, Severity::Error,
@ -222,7 +251,7 @@ impl ShellError {
Color::Black.bold().paint(name) Color::Black.bold().paint(name)
), ),
) )
.with_label(Label::new_primary(tag)), .with_label(Label::new_primary(tag.span)),
}, },
ProximateShellError::TypeError { ProximateShellError::TypeError {
expected, expected,
@ -232,10 +261,9 @@ impl ShellError {
tag, tag,
}, },
} => Diagnostic::new(Severity::Error, "Type Error").with_label( } => Diagnostic::new(Severity::Error, "Type Error").with_label(
Label::new_primary(tag) Label::new_primary(tag.span)
.with_message(format!("Expected {}, found {}", expected, actual)), .with_message(format!("Expected {}, found {}", expected, actual)),
), ),
ProximateShellError::TypeError { ProximateShellError::TypeError {
expected, expected,
actual: actual:
@ -244,7 +272,12 @@ impl ShellError {
tag tag
}, },
} => Diagnostic::new(Severity::Error, "Type Error") } => Diagnostic::new(Severity::Error, "Type Error")
.with_label(Label::new_primary(tag).with_message(expected)), .with_label(Label::new_primary(tag.span).with_message(expected)),
ProximateShellError::UnexpectedEof {
expected, tag
} => Diagnostic::new(Severity::Error, format!("Unexpected end of input"))
.with_label(Label::new_primary(tag.span).with_message(format!("Expected {}", expected))),
ProximateShellError::RangeError { ProximateShellError::RangeError {
kind, kind,
@ -255,7 +288,7 @@ impl ShellError {
tag tag
}, },
} => Diagnostic::new(Severity::Error, "Range Error").with_label( } => Diagnostic::new(Severity::Error, "Range Error").with_label(
Label::new_primary(tag).with_message(format!( Label::new_primary(tag.span).with_message(format!(
"Expected to convert {} to {} while {}, but it was out of range", "Expected to convert {} to {} while {}, but it was out of range",
item, item,
kind.desc(), kind.desc(),
@ -267,12 +300,12 @@ impl ShellError {
problem: problem:
Tagged { Tagged {
tag, tag,
.. item
}, },
} => Diagnostic::new(Severity::Error, "Syntax Error") } => Diagnostic::new(Severity::Error, "Syntax Error")
.with_label(Label::new_primary(tag).with_message("Unexpected external command")), .with_label(Label::new_primary(tag.span).with_message(item)),
ProximateShellError::MissingProperty { subpath, expr } => { ProximateShellError::MissingProperty { subpath, expr, .. } => {
let subpath = subpath.into_label(); let subpath = subpath.into_label();
let expr = expr.into_label(); let expr = expr.into_label();
@ -293,9 +326,11 @@ impl ShellError {
ProximateShellError::Diagnostic(diag) => diag.diagnostic, ProximateShellError::Diagnostic(diag) => diag.diagnostic,
ProximateShellError::CoerceError { left, right } => { ProximateShellError::CoerceError { left, right } => {
Diagnostic::new(Severity::Error, "Coercion error") Diagnostic::new(Severity::Error, "Coercion error")
.with_label(Label::new_primary(left.tag()).with_message(left.item)) .with_label(Label::new_primary(left.tag().span).with_message(left.item))
.with_label(Label::new_secondary(right.tag()).with_message(right.item)) .with_label(Label::new_secondary(right.tag().span).with_message(right.item))
} }
ProximateShellError::UntaggedRuntimeError { reason } => Diagnostic::new(Severity::Error, format!("Error: {}", reason))
} }
} }
@ -306,7 +341,7 @@ impl ShellError {
) -> ShellError { ) -> ShellError {
ShellError::diagnostic( ShellError::diagnostic(
Diagnostic::new(Severity::Error, msg.into()) Diagnostic::new(Severity::Error, msg.into())
.with_label(Label::new_primary(tag.into()).with_message(label.into())), .with_label(Label::new_primary(tag.into().span).with_message(label.into())),
) )
} }
@ -320,25 +355,29 @@ impl ShellError {
ShellError::diagnostic( ShellError::diagnostic(
Diagnostic::new_error(msg.into()) Diagnostic::new_error(msg.into())
.with_label( .with_label(
Label::new_primary(primary_span.into()).with_message(primary_label.into()), Label::new_primary(primary_span.into().span).with_message(primary_label.into()),
) )
.with_label( .with_label(
Label::new_secondary(secondary_span.into()) Label::new_secondary(secondary_span.into().span)
.with_message(secondary_label.into()), .with_message(secondary_label.into()),
), ),
) )
} }
pub fn string(title: impl Into<String>) -> ShellError { // pub fn string(title: impl Into<String>) -> ShellError {
ProximateShellError::String(StringError::new(title.into(), Value::nothing())).start() // ProximateShellError::String(StringError::new(title.into(), String::new())).start()
} // }
pub(crate) fn unimplemented(title: impl Into<String>) -> ShellError { pub(crate) fn unimplemented(title: impl Into<String>) -> ShellError {
ShellError::string(&format!("Unimplemented: {}", title.into())) ShellError::untagged_runtime_error(&format!("Unimplemented: {}", title.into()))
} }
pub(crate) fn unexpected(title: impl Into<String>) -> ShellError { pub(crate) fn unexpected(title: impl Into<String>) -> ShellError {
ShellError::string(&format!("Unexpected: {}", title.into())) ShellError::untagged_runtime_error(&format!("Unexpected: {}", title.into()))
}
pub(crate) fn unreachable(title: impl Into<String>) -> ShellError {
ShellError::untagged_runtime_error(&format!("BUG: Unreachable: {}", title.into()))
} }
} }
@ -383,10 +422,13 @@ impl ExpectedRange {
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)] #[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)]
pub enum ProximateShellError { pub enum ProximateShellError {
String(StringError),
SyntaxError { SyntaxError {
problem: Tagged<String>, problem: Tagged<String>,
}, },
UnexpectedEof {
expected: String,
tag: Tag,
},
InvalidCommand { InvalidCommand {
command: Tag, command: Tag,
}, },
@ -397,6 +439,7 @@ pub enum ProximateShellError {
MissingProperty { MissingProperty {
subpath: Description, subpath: Description,
expr: Description, expr: Description,
tag: Tag,
}, },
MissingValue { MissingValue {
tag: Option<Tag>, tag: Option<Tag>,
@ -417,6 +460,9 @@ pub enum ProximateShellError {
left: Tagged<String>, left: Tagged<String>,
right: Tagged<String>, right: Tagged<String>,
}, },
UntaggedRuntimeError {
reason: String,
},
} }
impl ProximateShellError { impl ProximateShellError {
@ -426,6 +472,22 @@ impl ProximateShellError {
error: self, error: self,
} }
} }
pub(crate) fn tag(&self) -> Option<Tag> {
Some(match self {
ProximateShellError::SyntaxError { problem } => problem.tag(),
ProximateShellError::UnexpectedEof { tag, .. } => tag.clone(),
ProximateShellError::InvalidCommand { command } => command.clone(),
ProximateShellError::TypeError { actual, .. } => actual.tag.clone(),
ProximateShellError::MissingProperty { tag, .. } => tag.clone(),
ProximateShellError::MissingValue { tag, .. } => return tag.clone(),
ProximateShellError::ArgumentError { tag, .. } => tag.clone(),
ProximateShellError::RangeError { actual_kind, .. } => actual_kind.tag.clone(),
ProximateShellError::Diagnostic(..) => return None,
ProximateShellError::UntaggedRuntimeError { .. } => return None,
ProximateShellError::CoerceError { left, right } => left.tag.until(&right.tag),
})
}
} }
impl ToDebug for ProximateShellError { impl ToDebug for ProximateShellError {
@ -437,7 +499,7 @@ impl ToDebug for ProximateShellError {
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ShellDiagnostic { pub struct ShellDiagnostic {
pub(crate) diagnostic: Diagnostic<Tag>, pub(crate) diagnostic: Diagnostic<Span>,
} }
impl PartialEq for ShellDiagnostic { impl PartialEq for ShellDiagnostic {
@ -463,22 +525,23 @@ impl std::cmp::Ord for ShellDiagnostic {
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, new, Clone, Serialize, Deserialize)] #[derive(Debug, Ord, PartialOrd, Eq, PartialEq, new, Clone, Serialize, Deserialize)]
pub struct StringError { pub struct StringError {
title: String, title: String,
error: Value, error: String,
} }
impl std::fmt::Display for ShellError { impl std::fmt::Display for ShellError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match &self.error { match &self.error {
ProximateShellError::String(s) => write!(f, "{}", &s.title),
ProximateShellError::MissingValue { .. } => write!(f, "MissingValue"), ProximateShellError::MissingValue { .. } => write!(f, "MissingValue"),
ProximateShellError::InvalidCommand { .. } => write!(f, "InvalidCommand"), ProximateShellError::InvalidCommand { .. } => write!(f, "InvalidCommand"),
ProximateShellError::TypeError { .. } => write!(f, "TypeError"), ProximateShellError::TypeError { .. } => write!(f, "TypeError"),
ProximateShellError::UnexpectedEof { .. } => write!(f, "UnexpectedEof"),
ProximateShellError::RangeError { .. } => write!(f, "RangeError"), ProximateShellError::RangeError { .. } => write!(f, "RangeError"),
ProximateShellError::SyntaxError { .. } => write!(f, "SyntaxError"), ProximateShellError::SyntaxError { .. } => write!(f, "SyntaxError"),
ProximateShellError::MissingProperty { .. } => write!(f, "MissingProperty"), ProximateShellError::MissingProperty { .. } => write!(f, "MissingProperty"),
ProximateShellError::ArgumentError { .. } => write!(f, "ArgumentError"), ProximateShellError::ArgumentError { .. } => write!(f, "ArgumentError"),
ProximateShellError::Diagnostic(_) => write!(f, "<diagnostic>"), ProximateShellError::Diagnostic(_) => write!(f, "<diagnostic>"),
ProximateShellError::CoerceError { .. } => write!(f, "CoerceError"), ProximateShellError::CoerceError { .. } => write!(f, "CoerceError"),
ProximateShellError::UntaggedRuntimeError { .. } => write!(f, "UntaggedRuntimeError"),
} }
} }
} }
@ -487,71 +550,43 @@ impl std::error::Error for ShellError {}
impl std::convert::From<Box<dyn std::error::Error>> for ShellError { impl std::convert::From<Box<dyn std::error::Error>> for ShellError {
fn from(input: Box<dyn std::error::Error>) -> ShellError { fn from(input: Box<dyn std::error::Error>) -> ShellError {
ProximateShellError::String(StringError { ShellError::untagged_runtime_error(format!("{}", input))
title: format!("{}", input),
error: Value::nothing(),
})
.start()
} }
} }
impl std::convert::From<std::io::Error> for ShellError { impl std::convert::From<std::io::Error> for ShellError {
fn from(input: std::io::Error) -> ShellError { fn from(input: std::io::Error) -> ShellError {
ProximateShellError::String(StringError { ShellError::untagged_runtime_error(format!("{}", input))
title: format!("{}", input),
error: Value::nothing(),
})
.start()
} }
} }
impl std::convert::From<subprocess::PopenError> for ShellError { impl std::convert::From<subprocess::PopenError> for ShellError {
fn from(input: subprocess::PopenError) -> ShellError { fn from(input: subprocess::PopenError) -> ShellError {
ProximateShellError::String(StringError { ShellError::untagged_runtime_error(format!("{}", input))
title: format!("{}", input),
error: Value::nothing(),
})
.start()
} }
} }
impl std::convert::From<serde_yaml::Error> for ShellError { impl std::convert::From<serde_yaml::Error> for ShellError {
fn from(input: serde_yaml::Error) -> ShellError { fn from(input: serde_yaml::Error) -> ShellError {
ProximateShellError::String(StringError { ShellError::untagged_runtime_error(format!("{:?}", input))
title: format!("{:?}", input),
error: Value::nothing(),
})
.start()
} }
} }
impl std::convert::From<toml::ser::Error> for ShellError { impl std::convert::From<toml::ser::Error> for ShellError {
fn from(input: toml::ser::Error) -> ShellError { fn from(input: toml::ser::Error) -> ShellError {
ProximateShellError::String(StringError { ShellError::untagged_runtime_error(format!("{:?}", input))
title: format!("{:?}", input),
error: Value::nothing(),
})
.start()
} }
} }
impl std::convert::From<serde_json::Error> for ShellError { impl std::convert::From<serde_json::Error> for ShellError {
fn from(input: serde_json::Error) -> ShellError { fn from(input: serde_json::Error) -> ShellError {
ProximateShellError::String(StringError { ShellError::untagged_runtime_error(format!("{:?}", input))
title: format!("{:?}", input),
error: Value::nothing(),
})
.start()
} }
} }
impl std::convert::From<Box<dyn std::error::Error + Send + Sync>> for ShellError { impl std::convert::From<Box<dyn std::error::Error + Send + Sync>> for ShellError {
fn from(input: Box<dyn std::error::Error + Send + Sync>) -> ShellError { fn from(input: Box<dyn std::error::Error + Send + Sync>) -> ShellError {
ProximateShellError::String(StringError { ShellError::untagged_runtime_error(format!("{:?}", input))
title: format!("{:?}", input),
error: Value::nothing(),
})
.start()
} }
} }
@ -567,7 +602,6 @@ impl<T> ShellErrorUtils<Tagged<T>> for Option<Tagged<T>> {
} }
} }
} }
pub trait CoerceInto<U> { pub trait CoerceInto<U> {
fn coerce_into(self, operation: impl Into<String>) -> Result<U, ShellError>; fn coerce_into(self, operation: impl Into<String>) -> Result<U, ShellError>;
} }

View File

@ -7,6 +7,8 @@ use crate::parser::{
use crate::prelude::*; use crate::prelude::*;
use derive_new::new; use derive_new::new;
use indexmap::IndexMap; use indexmap::IndexMap;
use log::trace;
use std::fmt;
#[derive(new)] #[derive(new)]
pub struct Scope { pub struct Scope {
@ -15,6 +17,15 @@ pub struct Scope {
vars: IndexMap<String, Tagged<Value>>, vars: IndexMap<String, Tagged<Value>>,
} }
impl fmt::Display for Scope {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_map()
.entry(&"$it", &format!("{:?}", self.it.item))
.entries(self.vars.iter().map(|(k, v)| (k, &v.item)))
.finish()
}
}
impl Scope { impl Scope {
pub(crate) fn empty() -> Scope { pub(crate) fn empty() -> Scope {
Scope { Scope {
@ -37,28 +48,41 @@ pub(crate) fn evaluate_baseline_expr(
scope: &Scope, scope: &Scope,
source: &Text, source: &Text,
) -> Result<Tagged<Value>, ShellError> { ) -> Result<Tagged<Value>, ShellError> {
let tag = Tag {
span: expr.span,
anchor: None,
};
match &expr.item { match &expr.item {
RawExpression::Literal(literal) => Ok(evaluate_literal(expr.copy_tag(literal), source)), RawExpression::Literal(literal) => Ok(evaluate_literal(literal.tagged(tag), source)),
RawExpression::ExternalWord => Err(ShellError::argument_error( RawExpression::ExternalWord => Err(ShellError::argument_error(
"Invalid external word", "Invalid external word",
ArgumentError::InvalidExternalWord, ArgumentError::InvalidExternalWord,
expr.tag(), tag,
)), )),
RawExpression::FilePath(path) => Ok(Value::path(path.clone()).tagged(expr.tag())), RawExpression::FilePath(path) => Ok(Value::path(path.clone()).tagged(tag)),
RawExpression::Synthetic(hir::Synthetic::String(s)) => { RawExpression::Synthetic(hir::Synthetic::String(s)) => {
Ok(Value::string(s).tagged_unknown()) Ok(Value::string(s).tagged_unknown())
} }
RawExpression::Variable(var) => evaluate_reference(var, scope, source), RawExpression::Variable(var) => evaluate_reference(var, scope, source, tag),
RawExpression::Command(_) => evaluate_command(tag, scope, source),
RawExpression::ExternalCommand(external) => evaluate_external(external, scope, source), RawExpression::ExternalCommand(external) => evaluate_external(external, scope, source),
RawExpression::Binary(binary) => { RawExpression::Binary(binary) => {
let left = evaluate_baseline_expr(binary.left(), registry, scope, source)?; let left = evaluate_baseline_expr(binary.left(), registry, scope, source)?;
let right = evaluate_baseline_expr(binary.right(), registry, scope, source)?; let right = evaluate_baseline_expr(binary.right(), registry, scope, source)?;
trace!("left={:?} right={:?}", left.item, right.item);
match left.compare(binary.op(), &*right) { match left.compare(binary.op(), &*right) {
Ok(result) => Ok(Value::boolean(result).tagged(expr.tag())), Ok(result) => Ok(Value::boolean(result).tagged(tag)),
Err((left_type, right_type)) => Err(ShellError::coerce_error( Err((left_type, right_type)) => Err(ShellError::coerce_error(
binary.left().copy_tag(left_type), left_type.tagged(Tag {
binary.right().copy_tag(right_type), span: binary.left().span,
anchor: None,
}),
right_type.tagged(Tag {
span: binary.right().span,
anchor: None,
}),
)), )),
} }
} }
@ -70,13 +94,10 @@ pub(crate) fn evaluate_baseline_expr(
exprs.push(expr); exprs.push(expr);
} }
Ok(Value::Table(exprs).tagged(expr.tag())) Ok(Value::Table(exprs).tagged(tag))
} }
RawExpression::Block(block) => { RawExpression::Block(block) => {
Ok( Ok(Value::Block(Block::new(block.clone(), source.clone(), tag.clone())).tagged(&tag))
Value::Block(Block::new(block.clone(), source.clone(), expr.tag()))
.tagged(expr.tag()),
)
} }
RawExpression::Path(path) => { RawExpression::Path(path) => {
let value = evaluate_baseline_expr(path.head(), registry, scope, source)?; let value = evaluate_baseline_expr(path.head(), registry, scope, source)?;
@ -96,19 +117,27 @@ pub(crate) fn evaluate_baseline_expr(
possible_matches.sort(); possible_matches.sort();
return Err(ShellError::labeled_error( if possible_matches.len() > 0 {
"Unknown column", return Err(ShellError::labeled_error(
format!("did you mean '{}'?", possible_matches[0].1), "Unknown column",
expr.tag(), format!("did you mean '{}'?", possible_matches[0].1),
)); &tag,
));
} else {
return Err(ShellError::labeled_error(
"Unknown column",
"row does not have this column",
&tag,
));
}
} }
Some(next) => { Some(next) => {
item = next.clone().item.tagged(expr.tag()); item = next.clone().item.tagged(&tag);
} }
}; };
} }
Ok(item.item().clone().tagged(expr.tag())) Ok(item.item().clone().tagged(tag))
} }
RawExpression::Boolean(_boolean) => unimplemented!(), RawExpression::Boolean(_boolean) => unimplemented!(),
} }
@ -130,14 +159,16 @@ fn evaluate_reference(
name: &hir::Variable, name: &hir::Variable,
scope: &Scope, scope: &Scope,
source: &Text, source: &Text,
tag: Tag,
) -> Result<Tagged<Value>, ShellError> { ) -> Result<Tagged<Value>, ShellError> {
trace!("Evaluating {} with Scope {}", name, scope);
match name { match name {
hir::Variable::It(tag) => Ok(scope.it.item.clone().tagged(*tag)), hir::Variable::It(_) => Ok(scope.it.item.clone().tagged(tag)),
hir::Variable::Other(tag) => Ok(scope hir::Variable::Other(inner) => Ok(scope
.vars .vars
.get(tag.slice(source)) .get(inner.slice(source))
.map(|v| v.clone()) .map(|v| v.clone())
.unwrap_or_else(|| Value::nothing().tagged(*tag))), .unwrap_or_else(|| Value::nothing().tagged(tag))),
} }
} }
@ -150,3 +181,7 @@ fn evaluate_external(
"Unexpected external command".tagged(*external.name()), "Unexpected external command".tagged(*external.name()),
)) ))
} }
fn evaluate_command(tag: Tag, _scope: &Scope, _source: &Text) -> Result<Tagged<Value>, ShellError> {
Err(ShellError::syntax_error("Unexpected command".tagged(tag)))
}

View File

@ -14,7 +14,7 @@ impl RenderView for GenericView<'_> {
match self.value { match self.value {
Value::Primitive(p) => Ok(host.stdout(&p.format(None))), Value::Primitive(p) => Ok(host.stdout(&p.format(None))),
Value::Table(l) => { Value::Table(l) => {
let view = TableView::from_list(l); let view = TableView::from_list(l, 0);
if let Some(view) = view { if let Some(view) = view {
view.render_view(host)?; view.render_view(host)?;
@ -35,6 +35,8 @@ impl RenderView for GenericView<'_> {
view.render_view(host)?; view.render_view(host)?;
Ok(()) Ok(())
} }
Value::Error(e) => Err(e.clone()),
} }
} }
} }

View File

@ -34,7 +34,7 @@ impl TableView {
ret ret
} }
pub fn from_list(values: &[Tagged<Value>]) -> Option<TableView> { pub fn from_list(values: &[Tagged<Value>], starting_idx: usize) -> Option<TableView> {
if values.len() == 0 { if values.len() == 0 {
return None; return None;
} }
@ -42,7 +42,7 @@ impl TableView {
let mut headers = TableView::merge_descriptors(values); let mut headers = TableView::merge_descriptors(values);
if headers.len() == 0 { if headers.len() == 0 {
headers.push("value".to_string()); headers.push("<unknown>".to_string());
} }
let mut entries = vec![]; let mut entries = vec![];
@ -68,7 +68,7 @@ impl TableView {
if values.len() > 1 { if values.len() > 1 {
// Indices are black, bold, right-aligned: // Indices are black, bold, right-aligned:
row.insert(0, (format!("{}", idx.to_string()), "Fdbr")); row.insert(0, (format!("{}", (starting_idx + idx).to_string()), "Fdbr"));
} }
entries.push(row); entries.push(row);

View File

@ -1,4 +1,4 @@
#![recursion_limit = "512"] #![recursion_limit = "1024"]
#[macro_use] #[macro_use]
mod prelude; mod prelude;
@ -21,7 +21,7 @@ mod traits;
mod utils; mod utils;
pub use crate::commands::command::{CallInfo, ReturnSuccess, ReturnValue}; pub use crate::commands::command::{CallInfo, ReturnSuccess, ReturnValue};
pub use crate::context::{AnchorLocation, SourceMap}; pub use crate::context::AnchorLocation;
pub use crate::env::host::BasicHost; pub use crate::env::host::BasicHost;
pub use crate::parser::hir::SyntaxShape; pub use crate::parser::hir::SyntaxShape;
pub use crate::parser::parse::token_tree_builder::TokenTreeBuilder; pub use crate::parser::parse::token_tree_builder::TokenTreeBuilder;
@ -31,7 +31,7 @@ pub use cli::cli;
pub use data::base::{Primitive, Value}; pub use data::base::{Primitive, Value};
pub use data::config::{config_path, APP_INFO}; pub use data::config::{config_path, APP_INFO};
pub use data::dict::{Dictionary, TaggedDictBuilder}; pub use data::dict::{Dictionary, TaggedDictBuilder};
pub use data::meta::{Tag, Tagged, TaggedItem}; pub use data::meta::{Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem};
pub use errors::{CoerceInto, ShellError}; pub use errors::{CoerceInto, ShellError};
pub use num_traits::cast::ToPrimitive; pub use num_traits::cast::ToPrimitive;
pub use parser::parse::text::Text; pub use parser::parse::text::Text;

View File

@ -3,6 +3,9 @@ use log::LevelFilter;
use std::error::Error; use std::error::Error;
fn main() -> Result<(), Box<dyn Error>> { fn main() -> Result<(), Box<dyn Error>> {
#[cfg(feature1)]
println!("feature1 is enabled");
let matches = App::new("nushell") let matches = App::new("nushell")
.version(clap::crate_version!()) .version(clap::crate_version!())
.arg( .arg(

View File

@ -7,24 +7,24 @@ pub(crate) mod registry;
use crate::errors::ShellError; use crate::errors::ShellError;
pub(crate) use deserializer::ConfigDeserializer; pub(crate) use deserializer::ConfigDeserializer;
pub(crate) use hir::baseline_parse_tokens::baseline_parse_tokens; pub(crate) use hir::syntax_shape::flat_shape::FlatShape;
pub(crate) use hir::TokensIterator;
pub(crate) use parse::call_node::CallNode; pub(crate) use parse::call_node::CallNode;
pub(crate) use parse::files::Files; pub(crate) use parse::files::Files;
pub(crate) use parse::flag::Flag; pub(crate) use parse::flag::{Flag, FlagKind};
pub(crate) use parse::operator::Operator; pub(crate) use parse::operator::Operator;
pub(crate) use parse::parser::{nom_input, pipeline}; pub(crate) use parse::parser::{nom_input, pipeline};
pub(crate) use parse::pipeline::{Pipeline, PipelineElement}; pub(crate) use parse::pipeline::{Pipeline, PipelineElement};
pub(crate) use parse::text::Text; pub(crate) use parse::text::Text;
pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, PathNode, TokenNode}; pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
pub(crate) use parse::tokens::{RawToken, Token}; pub(crate) use parse::tokens::{RawNumber, RawToken};
pub(crate) use parse::unit::Unit; pub(crate) use parse::unit::Unit;
pub(crate) use parse_command::parse_command;
pub(crate) use registry::CommandRegistry; pub(crate) use registry::CommandRegistry;
pub fn parse(input: &str, anchor: uuid::Uuid) -> Result<TokenNode, ShellError> { pub fn parse(input: &str) -> Result<TokenNode, ShellError> {
let _ = pretty_env_logger::try_init(); let _ = pretty_env_logger::try_init();
match pipeline(nom_input(input, anchor)) { match pipeline(nom_input(input)) {
Ok((_rest, val)) => Ok(val), Ok((_rest, val)) => Ok(val),
Err(err) => Err(ShellError::parse_error(err)), Err(err) => Err(ShellError::parse_error(err)),
} }

View File

@ -52,7 +52,7 @@ impl<'de> ConfigDeserializer<'de> {
self.stack.push(DeserializerItem { self.stack.push(DeserializerItem {
key_struct_field: Some((name.to_string(), name)), key_struct_field: Some((name.to_string(), name)),
val: value.unwrap_or_else(|| Value::nothing().tagged(self.call.name_tag)), val: value.unwrap_or_else(|| Value::nothing().tagged(&self.call.name_tag)),
}); });
Ok(()) Ok(())
@ -310,9 +310,10 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> {
return Ok(r); return Ok(r);
} }
trace!( trace!(
"deserializing struct {:?} {:?} (stack={:?})", "deserializing struct {:?} {:?} (saw_root={} stack={:?})",
name, name,
fields, fields,
self.saw_root,
self.stack self.stack
); );
@ -326,6 +327,12 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> {
let type_name = std::any::type_name::<V::Value>(); let type_name = std::any::type_name::<V::Value>();
let tagged_val_name = std::any::type_name::<Tagged<Value>>(); let tagged_val_name = std::any::type_name::<Tagged<Value>>();
trace!(
"type_name={} tagged_val_name={}",
type_name,
tagged_val_name
);
if type_name == tagged_val_name { if type_name == tagged_val_name {
return visit::<Tagged<Value>, _>(value.val, name, fields, visitor); return visit::<Tagged<Value>, _>(value.val, name, fields, visitor);
} }

View File

@ -1,11 +1,13 @@
pub(crate) mod baseline_parse; pub(crate) mod baseline_parse;
pub(crate) mod baseline_parse_tokens;
pub(crate) mod binary; pub(crate) mod binary;
pub(crate) mod expand_external_tokens;
pub(crate) mod external_command; pub(crate) mod external_command;
pub(crate) mod named; pub(crate) mod named;
pub(crate) mod path; pub(crate) mod path;
pub(crate) mod syntax_shape;
pub(crate) mod tokens_iterator;
use crate::parser::{registry, Unit}; use crate::parser::{registry, Operator, Unit};
use crate::prelude::*; use crate::prelude::*;
use derive_new::new; use derive_new::new;
use getset::Getters; use getset::Getters;
@ -14,27 +16,18 @@ use std::fmt;
use std::path::PathBuf; use std::path::PathBuf;
use crate::evaluate::Scope; use crate::evaluate::Scope;
use crate::parser::parse::tokens::RawNumber;
use crate::traits::ToDebug;
pub(crate) use self::baseline_parse::{
baseline_parse_single_token, baseline_parse_token_as_number, baseline_parse_token_as_path,
baseline_parse_token_as_pattern, baseline_parse_token_as_string,
};
pub(crate) use self::baseline_parse_tokens::{baseline_parse_next_expr, TokensIterator};
pub(crate) use self::binary::Binary; pub(crate) use self::binary::Binary;
pub(crate) use self::external_command::ExternalCommand; pub(crate) use self::external_command::ExternalCommand;
pub(crate) use self::named::NamedArguments; pub(crate) use self::named::NamedArguments;
pub(crate) use self::path::Path; pub(crate) use self::path::Path;
pub(crate) use self::syntax_shape::ExpandContext;
pub(crate) use self::tokens_iterator::debug::debug_tokens;
pub(crate) use self::tokens_iterator::TokensIterator;
pub use self::baseline_parse_tokens::SyntaxShape; pub use self::syntax_shape::SyntaxShape;
pub fn path(head: impl Into<Expression>, tail: Vec<Tagged<impl Into<String>>>) -> Path {
Path::new(
head.into(),
tail.into_iter()
.map(|item| item.map(|string| string.into()))
.collect(),
)
}
#[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)] #[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)]
pub struct Call { pub struct Call {
@ -93,6 +86,7 @@ pub enum RawExpression {
FilePath(PathBuf), FilePath(PathBuf),
ExternalCommand(ExternalCommand), ExternalCommand(ExternalCommand),
Command(Span),
Boolean(bool), Boolean(bool),
} }
@ -115,73 +109,148 @@ impl RawExpression {
match self { match self {
RawExpression::Literal(literal) => literal.type_name(), RawExpression::Literal(literal) => literal.type_name(),
RawExpression::Synthetic(synthetic) => synthetic.type_name(), RawExpression::Synthetic(synthetic) => synthetic.type_name(),
RawExpression::ExternalWord => "externalword", RawExpression::Command(..) => "command",
RawExpression::FilePath(..) => "filepath", RawExpression::ExternalWord => "external word",
RawExpression::FilePath(..) => "file path",
RawExpression::Variable(..) => "variable", RawExpression::Variable(..) => "variable",
RawExpression::List(..) => "list", RawExpression::List(..) => "list",
RawExpression::Binary(..) => "binary", RawExpression::Binary(..) => "binary",
RawExpression::Block(..) => "block", RawExpression::Block(..) => "block",
RawExpression::Path(..) => "path", RawExpression::Path(..) => "variable path",
RawExpression::Boolean(..) => "boolean", RawExpression::Boolean(..) => "boolean",
RawExpression::ExternalCommand(..) => "external", RawExpression::ExternalCommand(..) => "external",
} }
} }
} }
pub type Expression = Tagged<RawExpression>; pub type Expression = Spanned<RawExpression>;
impl std::fmt::Display for Expression {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let span = self.span;
match &self.item {
RawExpression::Literal(literal) => write!(f, "{}", literal.tagged(self.span)),
RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{}", s),
RawExpression::Command(_) => write!(f, "Command{{ {}..{} }}", span.start(), span.end()),
RawExpression::ExternalWord => {
write!(f, "ExternalWord{{ {}..{} }}", span.start(), span.end())
}
RawExpression::FilePath(file) => write!(f, "Path{{ {} }}", file.display()),
RawExpression::Variable(variable) => write!(f, "{}", variable),
RawExpression::List(list) => f
.debug_list()
.entries(list.iter().map(|e| format!("{}", e)))
.finish(),
RawExpression::Binary(binary) => write!(f, "{}", binary),
RawExpression::Block(items) => {
write!(f, "Block")?;
f.debug_set()
.entries(items.iter().map(|i| format!("{}", i)))
.finish()
}
RawExpression::Path(path) => write!(f, "{}", path),
RawExpression::Boolean(b) => write!(f, "${}", b),
RawExpression::ExternalCommand(..) => {
write!(f, "ExternalComment{{ {}..{} }}", span.start(), span.end())
}
}
}
}
impl Expression { impl Expression {
pub(crate) fn number(i: impl Into<Number>, tag: impl Into<Tag>) -> Expression { pub(crate) fn number(i: impl Into<Number>, span: impl Into<Span>) -> Expression {
RawExpression::Literal(Literal::Number(i.into())).tagged(tag.into()) RawExpression::Literal(Literal::Number(i.into())).spanned(span.into())
} }
pub(crate) fn size( pub(crate) fn size(
i: impl Into<Number>, i: impl Into<Number>,
unit: impl Into<Unit>, unit: impl Into<Unit>,
tag: impl Into<Tag>, span: impl Into<Span>,
) -> Expression { ) -> Expression {
RawExpression::Literal(Literal::Size(i.into(), unit.into())).tagged(tag.into()) RawExpression::Literal(Literal::Size(i.into(), unit.into())).spanned(span.into())
} }
pub(crate) fn synthetic_string(s: impl Into<String>) -> Expression { pub(crate) fn synthetic_string(s: impl Into<String>) -> Expression {
RawExpression::Synthetic(Synthetic::String(s.into())).tagged_unknown() RawExpression::Synthetic(Synthetic::String(s.into())).spanned_unknown()
} }
pub(crate) fn string(inner: impl Into<Tag>, outer: impl Into<Tag>) -> Expression { pub(crate) fn string(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
RawExpression::Literal(Literal::String(inner.into())).tagged(outer.into()) RawExpression::Literal(Literal::String(inner.into())).spanned(outer.into())
} }
pub(crate) fn file_path(path: impl Into<PathBuf>, outer: impl Into<Tag>) -> Expression { pub(crate) fn path(
RawExpression::FilePath(path.into()).tagged(outer) head: Expression,
tail: Vec<Spanned<impl Into<String>>>,
span: impl Into<Span>,
) -> Expression {
let tail = tail.into_iter().map(|t| t.map(|s| s.into())).collect();
RawExpression::Path(Box::new(Path::new(head, tail))).spanned(span.into())
} }
pub(crate) fn bare(tag: impl Into<Tag>) -> Expression { pub(crate) fn dot_member(head: Expression, next: Spanned<impl Into<String>>) -> Expression {
RawExpression::Literal(Literal::Bare).tagged(tag) let Spanned { item, span } = head;
let new_span = head.span.until(next.span);
match item {
RawExpression::Path(path) => {
let (head, mut tail) = path.parts();
tail.push(next.map(|i| i.into()));
Expression::path(head, tail, new_span)
}
other => Expression::path(other.spanned(span), vec![next], new_span),
}
} }
pub(crate) fn pattern(tag: impl Into<Tag>) -> Expression { pub(crate) fn infix(
RawExpression::Literal(Literal::GlobPattern).tagged(tag.into()) left: Expression,
op: Spanned<impl Into<Operator>>,
right: Expression,
) -> Expression {
let new_span = left.span.until(right.span);
RawExpression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right)))
.spanned(new_span)
} }
pub(crate) fn variable(inner: impl Into<Tag>, outer: impl Into<Tag>) -> Expression { pub(crate) fn file_path(path: impl Into<PathBuf>, outer: impl Into<Span>) -> Expression {
RawExpression::Variable(Variable::Other(inner.into())).tagged(outer) RawExpression::FilePath(path.into()).spanned(outer)
} }
pub(crate) fn external_command(inner: impl Into<Tag>, outer: impl Into<Tag>) -> Expression { pub(crate) fn list(list: Vec<Expression>, span: impl Into<Span>) -> Expression {
RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).tagged(outer) RawExpression::List(list).spanned(span)
} }
pub(crate) fn it_variable(inner: impl Into<Tag>, outer: impl Into<Tag>) -> Expression { pub(crate) fn bare(span: impl Into<Span>) -> Expression {
RawExpression::Variable(Variable::It(inner.into())).tagged(outer) RawExpression::Literal(Literal::Bare).spanned(span)
}
pub(crate) fn pattern(span: impl Into<Span>) -> Expression {
RawExpression::Literal(Literal::GlobPattern).spanned(span.into())
}
pub(crate) fn variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
RawExpression::Variable(Variable::Other(inner.into())).spanned(outer)
}
pub(crate) fn external_command(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).spanned(outer)
}
pub(crate) fn it_variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
RawExpression::Variable(Variable::It(inner.into())).spanned(outer)
} }
} }
impl ToDebug for Expression { impl ToDebug for Expression {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
match self.item() { match &self.item {
RawExpression::Literal(l) => l.tagged(self.tag()).fmt_debug(f, source), RawExpression::Literal(l) => l.spanned(self.span).fmt_debug(f, source),
RawExpression::FilePath(p) => write!(f, "{}", p.display()), RawExpression::FilePath(p) => write!(f, "{}", p.display()),
RawExpression::ExternalWord => write!(f, "{}", self.tag().slice(source)), RawExpression::ExternalWord => write!(f, "{}", self.span.slice(source)),
RawExpression::Command(tag) => write!(f, "{}", tag.slice(source)),
RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{:?}", s), RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{:?}", s),
RawExpression::Variable(Variable::It(_)) => write!(f, "$it"), RawExpression::Variable(Variable::It(_)) => write!(f, "$it"),
RawExpression::Variable(Variable::Other(s)) => write!(f, "${}", s.slice(source)), RawExpression::Variable(Variable::Other(s)) => write!(f, "${}", s.slice(source)),
@ -212,8 +281,8 @@ impl ToDebug for Expression {
} }
} }
impl From<Tagged<Path>> for Expression { impl From<Spanned<Path>> for Expression {
fn from(path: Tagged<Path>) -> Expression { fn from(path: Spanned<Path>) -> Expression {
path.map(|p| RawExpression::Path(Box::new(p))) path.map(|p| RawExpression::Path(Box::new(p)))
} }
} }
@ -227,19 +296,39 @@ impl From<Tagged<Path>> for Expression {
pub enum Literal { pub enum Literal {
Number(Number), Number(Number),
Size(Number, Unit), Size(Number, Unit),
String(Tag), String(Span),
GlobPattern, GlobPattern,
Bare, Bare,
} }
impl ToDebug for Tagged<&Literal> { impl std::fmt::Display for Tagged<Literal> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", Tagged::new(self.tag.clone(), &self.item))
}
}
impl std::fmt::Display for Tagged<&Literal> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let span = self.tag.span;
match &self.item {
Literal::Number(number) => write!(f, "{}", number),
Literal::Size(number, unit) => write!(f, "{}{}", number, unit.as_str()),
Literal::String(_) => write!(f, "String{{ {}..{} }}", span.start(), span.end()),
Literal::GlobPattern => write!(f, "Glob{{ {}..{} }}", span.start(), span.end()),
Literal::Bare => write!(f, "Bare{{ {}..{} }}", span.start(), span.end()),
}
}
}
impl ToDebug for Spanned<&Literal> {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
match self.item() { match self.item {
Literal::Number(number) => write!(f, "{:?}", *number), Literal::Number(number) => write!(f, "{:?}", number),
Literal::Size(number, unit) => write!(f, "{:?}{:?}", *number, unit), Literal::Size(number, unit) => write!(f, "{:?}{:?}", *number, unit),
Literal::String(tag) => write!(f, "{}", tag.slice(source)), Literal::String(tag) => write!(f, "{}", tag.slice(source)),
Literal::GlobPattern => write!(f, "{}", self.tag().slice(source)), Literal::GlobPattern => write!(f, "{}", self.span.slice(source)),
Literal::Bare => write!(f, "{}", self.tag().slice(source)), Literal::Bare => write!(f, "{}", self.span.slice(source)),
} }
} }
} }
@ -258,6 +347,15 @@ impl Literal {
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum Variable { pub enum Variable {
It(Tag), It(Span),
Other(Tag), Other(Span),
}
impl std::fmt::Display for Variable {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Variable::It(_) => write!(f, "$it"),
Variable::Other(span) => write!(f, "${{ {}..{} }}", span.start(), span.end()),
}
}
} }

View File

@ -1,140 +1,2 @@
use crate::context::Context; #[cfg(test)]
use crate::errors::ShellError; mod tests;
use crate::parser::{hir, RawToken, Token};
use crate::TaggedItem;
use crate::Text;
use std::path::PathBuf;
pub fn baseline_parse_single_token(
token: &Token,
source: &Text,
) -> Result<hir::Expression, ShellError> {
Ok(match *token.item() {
RawToken::Number(number) => hir::Expression::number(number.to_number(source), token.tag()),
RawToken::Size(int, unit) => {
hir::Expression::size(int.to_number(source), unit, token.tag())
}
RawToken::String(tag) => hir::Expression::string(tag, token.tag()),
RawToken::Variable(tag) if tag.slice(source) == "it" => {
hir::Expression::it_variable(tag, token.tag())
}
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()),
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
RawToken::GlobPattern => hir::Expression::pattern(token.tag()),
RawToken::Bare => hir::Expression::bare(token.tag()),
})
}
pub fn baseline_parse_token_as_number(
token: &Token,
source: &Text,
) -> Result<hir::Expression, ShellError> {
Ok(match *token.item() {
RawToken::Variable(tag) if tag.slice(source) == "it" => {
hir::Expression::it_variable(tag, token.tag())
}
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()),
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
RawToken::Number(number) => hir::Expression::number(number.to_number(source), token.tag()),
RawToken::Size(number, unit) => {
hir::Expression::size(number.to_number(source), unit, token.tag())
}
RawToken::Bare => hir::Expression::bare(token.tag()),
RawToken::GlobPattern => {
return Err(ShellError::type_error(
"Number",
"glob pattern".to_string().tagged(token.tag()),
))
}
RawToken::String(tag) => hir::Expression::string(tag, token.tag()),
})
}
pub fn baseline_parse_token_as_string(
token: &Token,
source: &Text,
) -> Result<hir::Expression, ShellError> {
Ok(match *token.item() {
RawToken::Variable(tag) if tag.slice(source) == "it" => {
hir::Expression::it_variable(tag, token.tag())
}
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()),
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
RawToken::Number(_) => hir::Expression::bare(token.tag()),
RawToken::Size(_, _) => hir::Expression::bare(token.tag()),
RawToken::Bare => hir::Expression::bare(token.tag()),
RawToken::GlobPattern => {
return Err(ShellError::type_error(
"String",
"glob pattern".tagged(token.tag()),
))
}
RawToken::String(tag) => hir::Expression::string(tag, token.tag()),
})
}
pub fn baseline_parse_token_as_path(
token: &Token,
context: &Context,
source: &Text,
) -> Result<hir::Expression, ShellError> {
Ok(match *token.item() {
RawToken::Variable(tag) if tag.slice(source) == "it" => {
hir::Expression::it_variable(tag, token.tag())
}
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()),
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
RawToken::Number(_) => hir::Expression::bare(token.tag()),
RawToken::Size(_, _) => hir::Expression::bare(token.tag()),
RawToken::Bare => {
hir::Expression::file_path(expand_path(token.tag().slice(source), context), token.tag())
}
RawToken::GlobPattern => {
return Err(ShellError::type_error(
"Path",
"glob pattern".tagged(token.tag()),
))
}
RawToken::String(tag) => {
hir::Expression::file_path(expand_path(tag.slice(source), context), token.tag())
}
})
}
pub fn baseline_parse_token_as_pattern(
token: &Token,
context: &Context,
source: &Text,
) -> Result<hir::Expression, ShellError> {
Ok(match *token.item() {
RawToken::Variable(tag) if tag.slice(source) == "it" => {
hir::Expression::it_variable(tag, token.tag())
}
RawToken::ExternalCommand(_) => {
return Err(ShellError::syntax_error(
"Invalid external command".to_string().tagged(token.tag()),
))
}
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
RawToken::Number(_) => hir::Expression::bare(token.tag()),
RawToken::Size(_, _) => hir::Expression::bare(token.tag()),
RawToken::GlobPattern => hir::Expression::pattern(token.tag()),
RawToken::Bare => {
hir::Expression::file_path(expand_path(token.tag().slice(source), context), token.tag())
}
RawToken::String(tag) => {
hir::Expression::file_path(expand_path(tag.slice(source), context), token.tag())
}
})
}
pub fn expand_path(string: &str, context: &Context) -> PathBuf {
let expanded = shellexpand::tilde_with_context(string, || context.shell_manager.homedir());
PathBuf::from(expanded.as_ref())
}

View File

@ -0,0 +1,139 @@
use crate::commands::classified::InternalCommand;
use crate::commands::ClassifiedCommand;
use crate::env::host::BasicHost;
use crate::parser::hir;
use crate::parser::hir::syntax_shape::*;
use crate::parser::hir::TokensIterator;
use crate::parser::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b};
use crate::parser::TokenNode;
use crate::{Span, SpannedItem, Tag, Tagged, Text};
use pretty_assertions::assert_eq;
use std::fmt::Debug;
#[test]
fn test_parse_string() {
parse_tokens(StringShape, vec![b::string("hello")], |tokens| {
hir::Expression::string(inner_string_span(tokens[0].span()), tokens[0].span())
});
}
#[test]
fn test_parse_path() {
parse_tokens(
VariablePathShape,
vec![b::var("it"), b::op("."), b::bare("cpu")],
|tokens| {
let (outer_var, inner_var) = tokens[0].expect_var();
let bare = tokens[2].expect_bare();
hir::Expression::path(
hir::Expression::it_variable(inner_var, outer_var),
vec!["cpu".spanned(bare)],
outer_var.until(bare),
)
},
);
parse_tokens(
VariablePathShape,
vec![
b::var("cpu"),
b::op("."),
b::bare("amount"),
b::op("."),
b::string("max ghz"),
],
|tokens| {
let (outer_var, inner_var) = tokens[0].expect_var();
let amount = tokens[2].expect_bare();
let (outer_max_ghz, _) = tokens[4].expect_string();
hir::Expression::path(
hir::Expression::variable(inner_var, outer_var),
vec!["amount".spanned(amount), "max ghz".spanned(outer_max_ghz)],
outer_var.until(outer_max_ghz),
)
},
);
}
#[test]
fn test_parse_command() {
parse_tokens(
ClassifiedCommandShape,
vec![b::bare("ls"), b::sp(), b::pattern("*.txt")],
|tokens| {
let bare = tokens[0].expect_bare();
let pat = tokens[2].span();
ClassifiedCommand::Internal(InternalCommand::new(
"ls".to_string(),
Tag {
span: bare,
anchor: None,
},
hir::Call {
head: Box::new(hir::RawExpression::Command(bare).spanned(bare)),
positional: Some(vec![hir::Expression::pattern(pat)]),
named: None,
},
))
// hir::Expression::path(
// hir::Expression::variable(inner_var, outer_var),
// vec!["cpu".tagged(bare)],
// outer_var.until(bare),
// )
},
);
parse_tokens(
VariablePathShape,
vec![
b::var("cpu"),
b::op("."),
b::bare("amount"),
b::op("."),
b::string("max ghz"),
],
|tokens| {
let (outer_var, inner_var) = tokens[0].expect_var();
let amount = tokens[2].expect_bare();
let (outer_max_ghz, _) = tokens[4].expect_string();
hir::Expression::path(
hir::Expression::variable(inner_var, outer_var),
vec!["amount".spanned(amount), "max ghz".spanned(outer_max_ghz)],
outer_var.until(outer_max_ghz),
)
},
);
}
fn parse_tokens<T: Eq + Debug>(
shape: impl ExpandSyntax<Output = T>,
tokens: Vec<CurriedToken>,
expected: impl FnOnce(Tagged<&[TokenNode]>) -> T,
) {
let tokens = b::token_list(tokens);
let (tokens, source) = b::build(tokens);
ExpandContext::with_empty(&Text::from(source), |context| {
let tokens = tokens.expect_list();
let mut iterator = TokensIterator::all(tokens.item, *context.span());
let expr = expand_syntax(&shape, &mut iterator, &context);
let expr = match expr {
Ok(expr) => expr,
Err(err) => {
crate::cli::print_err(err, &BasicHost, context.source().clone());
panic!("Parse failed");
}
};
assert_eq!(expr, expected(tokens));
})
}
fn inner_string_span(span: Span) -> Span {
Span::new(span.start() + 1, span.end() - 1)
}

View File

@ -1,459 +0,0 @@
use crate::context::Context;
use crate::errors::ShellError;
use crate::parser::{
hir,
hir::{
baseline_parse_single_token, baseline_parse_token_as_number, baseline_parse_token_as_path,
baseline_parse_token_as_pattern, baseline_parse_token_as_string,
},
DelimitedNode, Delimiter, PathNode, RawToken, TokenNode,
};
use crate::{Tag, Tagged, TaggedItem, Text};
use derive_new::new;
use log::trace;
use serde::{Deserialize, Serialize};
pub fn baseline_parse_tokens(
token_nodes: &mut TokensIterator<'_>,
context: &Context,
source: &Text,
syntax_type: SyntaxShape,
) -> Result<Vec<hir::Expression>, ShellError> {
let mut exprs: Vec<hir::Expression> = vec![];
loop {
if token_nodes.at_end() {
break;
}
let expr = baseline_parse_next_expr(token_nodes, context, source, syntax_type)?;
exprs.push(expr);
}
Ok(exprs)
}
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub enum SyntaxShape {
Any,
List,
Literal,
String,
Member,
Variable,
Number,
Path,
Pattern,
Binary,
Block,
Boolean,
}
impl std::fmt::Display for SyntaxShape {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
SyntaxShape::Any => write!(f, "Any"),
SyntaxShape::List => write!(f, "List"),
SyntaxShape::Literal => write!(f, "Literal"),
SyntaxShape::String => write!(f, "String"),
SyntaxShape::Member => write!(f, "Member"),
SyntaxShape::Variable => write!(f, "Variable"),
SyntaxShape::Number => write!(f, "Number"),
SyntaxShape::Path => write!(f, "Path"),
SyntaxShape::Pattern => write!(f, "Pattern"),
SyntaxShape::Binary => write!(f, "Binary"),
SyntaxShape::Block => write!(f, "Block"),
SyntaxShape::Boolean => write!(f, "Boolean"),
}
}
}
pub fn baseline_parse_next_expr(
tokens: &mut TokensIterator,
context: &Context,
source: &Text,
syntax_type: SyntaxShape,
) -> Result<hir::Expression, ShellError> {
let next = tokens
.next()
.ok_or_else(|| ShellError::string("Expected token, found none"))?;
trace!(target: "nu::parser::parse_one_expr", "syntax_type={:?}, token={:?}", syntax_type, next);
match (syntax_type, next) {
(SyntaxShape::Path, TokenNode::Token(token)) => {
return baseline_parse_token_as_path(token, context, source)
}
(SyntaxShape::Path, token) => {
return Err(ShellError::type_error(
"Path",
token.type_name().tagged(token.tag()),
))
}
(SyntaxShape::Pattern, TokenNode::Token(token)) => {
return baseline_parse_token_as_pattern(token, context, source)
}
(SyntaxShape::Pattern, token) => {
return Err(ShellError::type_error(
"Path",
token.type_name().tagged(token.tag()),
))
}
(SyntaxShape::String, TokenNode::Token(token)) => {
return baseline_parse_token_as_string(token, source);
}
(SyntaxShape::String, token) => {
return Err(ShellError::type_error(
"String",
token.type_name().tagged(token.tag()),
))
}
(SyntaxShape::Number, TokenNode::Token(token)) => {
return Ok(baseline_parse_token_as_number(token, source)?);
}
(SyntaxShape::Number, token) => {
return Err(ShellError::type_error(
"Numeric",
token.type_name().tagged(token.tag()),
))
}
// TODO: More legit member processing
(SyntaxShape::Member, TokenNode::Token(token)) => {
return baseline_parse_token_as_string(token, source);
}
(SyntaxShape::Member, token) => {
return Err(ShellError::type_error(
"member",
token.type_name().tagged(token.tag()),
))
}
(SyntaxShape::Any, _) => {}
(SyntaxShape::List, _) => {}
(SyntaxShape::Literal, _) => {}
(SyntaxShape::Variable, _) => {}
(SyntaxShape::Binary, _) => {}
(SyntaxShape::Block, _) => {}
(SyntaxShape::Boolean, _) => {}
};
let first = baseline_parse_semantic_token(next, context, source)?;
let possible_op = tokens.peek();
let op = match possible_op {
Some(TokenNode::Operator(op)) => op.clone(),
_ => return Ok(first),
};
tokens.next();
let second = match tokens.next() {
None => {
return Err(ShellError::labeled_error(
"Expected something after an operator",
"operator",
op.tag(),
))
}
Some(token) => baseline_parse_semantic_token(token, context, source)?,
};
// We definitely have a binary expression here -- let's see if we should coerce it into a block
match syntax_type {
SyntaxShape::Any => {
let tag = first.tag().until(second.tag());
let binary = hir::Binary::new(first, op, second);
let binary = hir::RawExpression::Binary(Box::new(binary));
let binary = binary.tagged(tag);
Ok(binary)
}
SyntaxShape::Block => {
let tag = first.tag().until(second.tag());
let path: Tagged<hir::RawExpression> = match first {
Tagged {
item: hir::RawExpression::Literal(hir::Literal::Bare),
tag,
} => {
let string = tag.slice(source).to_string().tagged(tag);
let path = hir::Path::new(
// TODO: Deal with synthetic nodes that have no representation at all in source
hir::RawExpression::Variable(hir::Variable::It(Tag::unknown()))
.tagged(Tag::unknown()),
vec![string],
);
let path = hir::RawExpression::Path(Box::new(path));
path.tagged(first.tag())
}
Tagged {
item: hir::RawExpression::Literal(hir::Literal::String(inner)),
tag,
} => {
let string = inner.slice(source).to_string().tagged(tag);
let path = hir::Path::new(
// TODO: Deal with synthetic nodes that have no representation at all in source
hir::RawExpression::Variable(hir::Variable::It(Tag::unknown()))
.tagged_unknown(),
vec![string],
);
let path = hir::RawExpression::Path(Box::new(path));
path.tagged(first.tag())
}
Tagged {
item: hir::RawExpression::Variable(..),
..
} => first,
Tagged { tag, item } => {
return Err(ShellError::labeled_error(
"The first part of an un-braced block must be a column name",
item.type_name(),
tag,
))
}
};
let binary = hir::Binary::new(path, op, second);
let binary = hir::RawExpression::Binary(Box::new(binary));
let binary = binary.tagged(tag);
let block = hir::RawExpression::Block(vec![binary]);
let block = block.tagged(tag);
Ok(block)
}
other => Err(ShellError::unimplemented(format!(
"coerce hint {:?}",
other
))),
}
}
pub fn baseline_parse_semantic_token(
token: &TokenNode,
context: &Context,
source: &Text,
) -> Result<hir::Expression, ShellError> {
match token {
TokenNode::Token(token) => baseline_parse_single_token(token, source),
TokenNode::Call(_call) => unimplemented!(),
TokenNode::Delimited(delimited) => baseline_parse_delimited(delimited, context, source),
TokenNode::Pipeline(_pipeline) => unimplemented!(),
TokenNode::Operator(op) => Err(ShellError::syntax_error(
"Unexpected operator".tagged(op.tag),
)),
TokenNode::Flag(flag) => Err(ShellError::syntax_error("Unexpected flag".tagged(flag.tag))),
TokenNode::Member(tag) => Err(ShellError::syntax_error(
"BUG: Top-level member".tagged(*tag),
)),
TokenNode::Whitespace(tag) => Err(ShellError::syntax_error(
"BUG: Whitespace found during parse".tagged(*tag),
)),
TokenNode::Error(error) => Err(*error.item.clone()),
TokenNode::Path(path) => baseline_parse_path(path, context, source),
}
}
pub fn baseline_parse_delimited(
token: &Tagged<DelimitedNode>,
context: &Context,
source: &Text,
) -> Result<hir::Expression, ShellError> {
match token.delimiter() {
Delimiter::Brace => {
let children = token.children();
let exprs = baseline_parse_tokens(
&mut TokensIterator::new(children),
context,
source,
SyntaxShape::Any,
)?;
let expr = hir::RawExpression::Block(exprs);
Ok(expr.tagged(token.tag()))
}
Delimiter::Paren => unimplemented!(),
Delimiter::Square => {
let children = token.children();
let exprs = baseline_parse_tokens(
&mut TokensIterator::new(children),
context,
source,
SyntaxShape::Any,
)?;
let expr = hir::RawExpression::List(exprs);
Ok(expr.tagged(token.tag()))
}
}
}
pub fn baseline_parse_path(
token: &Tagged<PathNode>,
context: &Context,
source: &Text,
) -> Result<hir::Expression, ShellError> {
let head = baseline_parse_semantic_token(token.head(), context, source)?;
let mut tail = vec![];
for part in token.tail() {
let string = match part {
TokenNode::Token(token) => match token.item() {
RawToken::Bare => token.tag().slice(source),
RawToken::String(tag) => tag.slice(source),
RawToken::Number(_)
| RawToken::Size(..)
| RawToken::Variable(_)
| RawToken::ExternalCommand(_)
| RawToken::GlobPattern
| RawToken::ExternalWord => {
return Err(ShellError::type_error(
"String",
token.type_name().tagged(part.tag()),
))
}
},
TokenNode::Member(tag) => tag.slice(source),
// TODO: Make this impossible
other => {
return Err(ShellError::syntax_error(
format!("{} in path", other.type_name()).tagged(other.tag()),
))
}
}
.to_string();
tail.push(string.tagged(part.tag()));
}
Ok(hir::path(head, tail).tagged(token.tag()).into())
}
#[derive(Debug, new)]
pub struct TokensIterator<'a> {
tokens: &'a [TokenNode],
#[new(default)]
index: usize,
#[new(default)]
seen: indexmap::IndexSet<usize>,
}
impl TokensIterator<'_> {
pub fn remove(&mut self, position: usize) {
self.seen.insert(position);
}
pub fn len(&self) -> usize {
self.tokens.len()
}
pub fn at_end(&self) -> bool {
for index in self.index..self.tokens.len() {
if !self.seen.contains(&index) {
return false;
}
}
true
}
pub fn advance(&mut self) {
self.seen.insert(self.index);
self.index += 1;
}
pub fn extract<T>(&mut self, f: impl Fn(&TokenNode) -> Option<T>) -> Option<(usize, T)> {
for (i, item) in self.tokens.iter().enumerate() {
if self.seen.contains(&i) {
continue;
}
match f(item) {
None => {
continue;
}
Some(value) => {
self.seen.insert(i);
return Some((i, value));
}
}
}
None
}
pub fn move_to(&mut self, pos: usize) {
self.index = pos;
}
pub fn restart(&mut self) {
self.index = 0;
}
pub fn clone(&self) -> TokensIterator {
TokensIterator {
tokens: self.tokens,
index: self.index,
seen: self.seen.clone(),
}
}
pub fn peek(&self) -> Option<&TokenNode> {
let mut tokens = self.clone();
tokens.next()
}
pub fn debug_remaining(&self) -> Vec<TokenNode> {
let mut tokens = self.clone();
tokens.restart();
tokens.cloned().collect()
}
}
impl<'a> Iterator for TokensIterator<'a> {
type Item = &'a TokenNode;
fn next(&mut self) -> Option<&'a TokenNode> {
loop {
if self.index >= self.tokens.len() {
return None;
}
if self.seen.contains(&self.index) {
self.advance();
continue;
}
if self.index >= self.tokens.len() {
return None;
}
match &self.tokens[self.index] {
TokenNode::Whitespace(_) => {
self.advance();
}
other => {
self.advance();
return Some(other);
}
}
}
}
}

View File

@ -1,6 +1,6 @@
use crate::parser::{hir::Expression, Operator}; use crate::parser::{hir::Expression, Operator};
use crate::prelude::*; use crate::prelude::*;
use crate::Tagged;
use derive_new::new; use derive_new::new;
use getset::Getters; use getset::Getters;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -12,10 +12,16 @@ use std::fmt;
#[get = "pub(crate)"] #[get = "pub(crate)"]
pub struct Binary { pub struct Binary {
left: Expression, left: Expression,
op: Tagged<Operator>, op: Spanned<Operator>,
right: Expression, right: Expression,
} }
impl fmt::Display for Binary {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "({} {} {})", self.op.as_str(), self.left, self.right)
}
}
impl ToDebug for Binary { impl ToDebug for Binary {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
write!(f, "{}", self.left.debug(source))?; write!(f, "{}", self.left.debug(source))?;

View File

@ -0,0 +1,159 @@
use crate::errors::ShellError;
use crate::parser::{
hir::syntax_shape::{
color_syntax, expand_atom, AtomicToken, ColorSyntax, ExpandContext, ExpansionRule,
MaybeSpaceShape,
},
FlatShape, TokenNode, TokensIterator,
};
use crate::{Span, Spanned, Text};
pub fn expand_external_tokens(
token_nodes: &mut TokensIterator<'_>,
source: &Text,
) -> Result<Vec<Spanned<String>>, ShellError> {
let mut out: Vec<Spanned<String>> = vec![];
loop {
if let Some(span) = expand_next_expression(token_nodes)? {
out.push(span.spanned_string(source));
} else {
break;
}
}
Ok(out)
}
#[derive(Debug, Copy, Clone)]
pub struct ExternalTokensShape;
impl ColorSyntax for ExternalTokensShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Self::Info {
loop {
// Allow a space
color_syntax(&MaybeSpaceShape, token_nodes, context, shapes);
// Process an external expression. External expressions are mostly words, with a
// few exceptions (like $variables and path expansion rules)
match color_syntax(&ExternalExpression, token_nodes, context, shapes).1 {
ExternalExpressionResult::Eof => break,
ExternalExpressionResult::Processed => continue,
}
}
}
}
pub fn expand_next_expression(
token_nodes: &mut TokensIterator<'_>,
) -> Result<Option<Span>, ShellError> {
let first = token_nodes.next_non_ws();
let first = match first {
None => return Ok(None),
Some(v) => v,
};
let first = triage_external_head(first)?;
let mut last = first;
loop {
let continuation = triage_continuation(token_nodes)?;
if let Some(continuation) = continuation {
last = continuation;
} else {
break;
}
}
Ok(Some(first.until(last)))
}
fn triage_external_head(node: &TokenNode) -> Result<Span, ShellError> {
Ok(match node {
TokenNode::Token(token) => token.span,
TokenNode::Call(_call) => unimplemented!("TODO: OMG"),
TokenNode::Nodes(_nodes) => unimplemented!("TODO: OMG"),
TokenNode::Delimited(_delimited) => unimplemented!("TODO: OMG"),
TokenNode::Pipeline(_pipeline) => unimplemented!("TODO: OMG"),
TokenNode::Flag(flag) => flag.span,
TokenNode::Whitespace(_whitespace) => {
unreachable!("This function should be called after next_non_ws()")
}
TokenNode::Error(_error) => unimplemented!("TODO: OMG"),
})
}
/// Peeks at the next node and, if it can continue the current external
/// expression (a token or flag), consumes it and returns its span.
/// Whitespace or EOF ends the expression and returns `None`.
fn triage_continuation<'a, 'b>(
    nodes: &'a mut TokensIterator<'b>,
) -> Result<Option<Span>, ShellError> {
    let mut peeked = nodes.peek_any();
    let node = match peeked.node {
        None => return Ok(None),
        Some(node) => node,
    };
    match &node {
        // Whitespace terminates the external expression
        node if node.is_whitespace() => return Ok(None),
        TokenNode::Token(..) | TokenNode::Flag(..) => {}
        TokenNode::Call(..) => unimplemented!("call"),
        TokenNode::Nodes(..) => unimplemented!("nodes"),
        TokenNode::Delimited(..) => unimplemented!("delimited"),
        TokenNode::Pipeline(..) => unimplemented!("pipeline"),
        TokenNode::Whitespace(..) => unimplemented!("whitespace"),
        TokenNode::Error(..) => unimplemented!("error"),
    }
    // Only consume the node once we know it belongs to this expression
    peeked.commit();
    Ok(Some(node.span()))
}
/// Outcome of coloring a single external expression.
#[must_use]
enum ExternalExpressionResult {
    // The token stream was exhausted before an expression was found.
    Eof,
    // One external expression was consumed and colored.
    Processed,
}
/// Colors one expression inside an external command invocation.
#[derive(Debug, Copy, Clone)]
struct ExternalExpression;
impl ColorSyntax for ExternalExpression {
    type Info = ExternalExpressionResult;
    type Input = ();
    /// Expands one permissive atom and pushes its flat shapes, reporting
    /// whether any input remained at the cursor.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> ExternalExpressionResult {
        let atom = match expand_atom(
            token_nodes,
            "external word",
            context,
            ExpansionRule::permissive(),
        ) {
            // Permissive expansion is expected to produce an atom for any input
            Err(_) => unreachable!("TODO: separate infallible expand_atom"),
            Ok(Spanned {
                item: AtomicToken::Eof { .. },
                ..
            }) => return ExternalExpressionResult::Eof,
            Ok(atom) => atom,
        };
        atom.color_tokens(shapes);
        return ExternalExpressionResult::Processed;
    }
}

View File

@ -9,7 +9,7 @@ use std::fmt;
)] )]
#[get = "pub(crate)"] #[get = "pub(crate)"]
pub struct ExternalCommand { pub struct ExternalCommand {
name: Tag, pub(crate) name: Span,
} }
impl ToDebug for ExternalCommand { impl ToDebug for ExternalCommand {

View File

@ -43,9 +43,13 @@ impl NamedArguments {
match switch { match switch {
None => self.named.insert(name.into(), NamedValue::AbsentSwitch), None => self.named.insert(name.into(), NamedValue::AbsentSwitch),
Some(flag) => self Some(flag) => self.named.insert(
.named name,
.insert(name, NamedValue::PresentSwitch(*flag.name())), NamedValue::PresentSwitch(Tag {
span: *flag.name(),
anchor: None,
}),
),
}; };
} }

View File

@ -1,18 +1,47 @@
use crate::parser::hir::Expression; use crate::parser::hir::Expression;
use crate::prelude::*; use crate::prelude::*;
use crate::Tagged;
use derive_new::new; use derive_new::new;
use getset::Getters; use getset::{Getters, MutGetters};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::fmt; use std::fmt;
#[derive( #[derive(
Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new, Debug,
Clone,
Eq,
PartialEq,
Ord,
PartialOrd,
Hash,
Getters,
MutGetters,
Serialize,
Deserialize,
new,
)] )]
#[get = "pub(crate)"] #[get = "pub(crate)"]
pub struct Path { pub struct Path {
head: Expression, head: Expression,
tail: Vec<Tagged<String>>, #[get_mut = "pub(crate)"]
tail: Vec<Spanned<String>>,
}
impl fmt::Display for Path {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.head)?;
for entry in &self.tail {
write!(f, ".{}", entry.item)?;
}
Ok(())
}
}
impl Path {
pub(crate) fn parts(self) -> (Expression, Vec<Spanned<String>>) {
(self.head, self.tail)
}
} }
impl ToDebug for Path { impl ToDebug for Path {
@ -20,7 +49,7 @@ impl ToDebug for Path {
write!(f, "{}", self.head.debug(source))?; write!(f, "{}", self.head.debug(source))?;
for part in &self.tail { for part in &self.tail {
write!(f, ".{}", part.item())?; write!(f, ".{}", part.item)?;
} }
Ok(()) Ok(())

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,330 @@
use crate::errors::ShellError;
use crate::parser::{
hir,
hir::syntax_shape::{
color_fallible_syntax, color_syntax_with, continue_expression, expand_expr, expand_syntax,
DelimitedShape, ExpandContext, ExpandExpression, ExpressionContinuationShape,
ExpressionListShape, FallibleColorSyntax, FlatShape, MemberShape, PathTailShape,
VariablePathShape,
},
hir::tokens_iterator::TokensIterator,
parse::token_tree::Delimiter,
RawToken, TokenNode,
};
use crate::{Span, Spanned, SpannedItem};
/// Shape for any block: a literal `{ ... }` block or a shorthand block.
#[derive(Debug, Copy, Clone)]
pub struct AnyBlockShape;
impl FallibleColorSyntax for AnyBlockShape {
    type Info = ();
    type Input = ();
    /// Colors either a literal brace block or a shorthand block at the cursor.
    ///
    /// A missing block (EOF) is not an error here: highlighting is
    /// best-effort, so we simply color nothing.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        let block = match token_nodes.peek_non_ws().not_eof("block") {
            Err(_) => return Ok(()),
            Ok(block) => block,
        };
        // If it's a literal block, color it as a brace-delimited group
        // (idiomatic `if let` instead of `match` with an empty catch-all)
        if let Some((children, spans)) = block.node.as_block() {
            let mut token_nodes = TokensIterator::new(children.item, context.span, false);
            color_syntax_with(
                &DelimitedShape,
                &(Delimiter::Brace, spans.0, spans.1),
                &mut token_nodes,
                context,
                shapes,
            );
            return Ok(());
        }
        // Otherwise, look for a shorthand block. If none found, fail
        color_fallible_syntax(&ShorthandBlock, token_nodes, context, shapes)
    }
}
impl ExpandExpression for AnyBlockShape {
    /// Expands either a literal `{ ... }` block (children expanded as an
    /// expression list) or a shorthand block into a `Block` expression.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        let block = token_nodes.peek_non_ws().not_eof("block")?;
        // If it's a literal block, expand its children as an expression list
        // (idiomatic `if let` instead of `match` with an empty catch-all)
        if let Some((block, _tags)) = block.node.as_block() {
            let mut iterator = TokensIterator::new(&block.item, context.span, false);
            let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?;
            return Ok(hir::RawExpression::Block(exprs).spanned(block.span));
        }
        // Otherwise, fall back to the shorthand block form
        expand_syntax(&ShorthandBlock, token_nodes, context)
    }
}
/// Shape for a shorthand block: a shorthand path head plus continuations.
#[derive(Debug, Copy, Clone)]
pub struct ShorthandBlock;
impl FallibleColorSyntax for ShorthandBlock {
    type Info = ();
    type Input = ();
    /// Colors a shorthand block. The head is mandatory; any number of
    /// expression continuations after it are consumed greedily.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        // Try to find a shorthand head. If none found, fail
        color_fallible_syntax(&ShorthandPath, token_nodes, context, shapes)?;
        // Keep consuming continuations until one fails to parse; a failed
        // continuation simply ends the block
        while color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes)
            .is_ok()
        {}
        Ok(())
    }
}
impl ExpandExpression for ShorthandBlock {
    /// Expands a shorthand block: a shorthand path head plus any trailing
    /// continuations, wrapped as a one-expression `Block`.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        let path = expand_expr(&ShorthandPath, token_nodes, context)?;
        let start = path.span;
        let expr = continue_expression(path, token_nodes, context)?;
        let end = expr.span;
        // The combined expression becomes the single element of the block
        let block = hir::RawExpression::Block(vec![expr]).spanned(start.until(end));
        Ok(block)
    }
}
/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block
#[derive(Debug, Copy, Clone)]
pub struct ShorthandPath;
impl FallibleColorSyntax for ShorthandPath {
    type Info = ();
    type Input = ();
    /// Colors a shorthand path: either a full variable path, or a member
    /// (with optional path tail) whose implied head is `$it`. Runs inside
    /// `atomic` so a partial match consumes nothing.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        token_nodes.atomic(|token_nodes| {
            let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context, shapes);
            match variable {
                Ok(_) => {
                    // if it's a variable path, that's the head part
                    return Ok(());
                }
                Err(_) => {
                    // otherwise, we'll try to find a member path
                }
            }
            // look for a member (`<member>` -> `$it.<member>`)
            color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
            // Now that we've synthesized the head, of the path, proceed to expand the tail of the path
            // like any other path.
            let tail = color_fallible_syntax(&PathTailShape, token_nodes, context, shapes);
            match tail {
                Ok(_) => {}
                Err(_) => {
                    // It's ok if there's no path tail; a single member is sufficient
                }
            }
            Ok(())
        })
    }
}
impl ExpandExpression for ShorthandPath {
    /// Expands a shorthand path into HIR: either a full variable path, or a
    /// synthesized `$it.<member>` head followed by an optional path tail.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        // if it's a variable path, that's the head part
        // (idiomatic `if let Ok` instead of `match` with an empty Err arm)
        if let Ok(path) = expand_expr(&VariablePathShape, token_nodes, context) {
            return Ok(path);
        }
        // Synthesize the head of the shorthand path (`<member>` -> `$it.<member>`)
        let mut head = expand_expr(&ShorthandHeadShape, token_nodes, context)?;
        // Now that we've synthesized the head of the path, proceed to expand
        // the tail of the path like any other path. A missing tail is fine:
        // a single member is a complete shorthand path.
        if let Ok((tail, _)) = expand_syntax(&PathTailShape, token_nodes, context) {
            // For each member that `PathTailShape` expanded, join it onto the
            // existing expression to form a new path
            for member in tail {
                head = hir::Expression::dot_member(head, member);
            }
        }
        Ok(head)
    }
}
/// The head of a shorthand path: a bare word or string that stands for a
/// member of the implicit `$it` variable (e.g. `foo` -> `$it.foo`)
#[derive(Debug, Copy, Clone)]
pub struct ShorthandHeadShape;
impl FallibleColorSyntax for ShorthandHeadShape {
    type Info = ();
    type Input = ();
    /// Colors the head token as a bare or string member; any other token
    /// kind is a type error.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        // A shorthand path must not be at EOF
        let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?;
        match peeked.node {
            // If the head of a shorthand path is a bare token, it expands to `$it.bare`
            TokenNode::Token(Spanned {
                item: RawToken::Bare,
                span,
            }) => {
                peeked.commit();
                shapes.push(FlatShape::BareMember.spanned(*span));
                Ok(())
            }
            // If the head of a shorthand path is a string, it expands to `$it."some string"`
            TokenNode::Token(Spanned {
                item: RawToken::String(_),
                span: outer,
            }) => {
                peeked.commit();
                shapes.push(FlatShape::StringMember.spanned(*outer));
                Ok(())
            }
            other => Err(ShellError::type_error(
                "shorthand head",
                other.tagged_type_name(),
            )),
        }
    }
}
impl ExpandExpression for ShorthandHeadShape {
    /// Expands the shorthand head into a `$it.<member>` path expression,
    /// where the member comes from a bare word or a string token.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        // A shorthand path must not be at EOF
        let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?;
        match peeked.node {
            // If the head of a shorthand path is a bare token, it expands to `$it.bare`
            TokenNode::Token(Spanned {
                item: RawToken::Bare,
                span,
            }) => {
                // Commit the peeked token
                peeked.commit();
                // Synthesize an `$it` expression
                let it = synthetic_it();
                // Make a path out of `$it` and the bare token as a member
                Ok(hir::Expression::path(
                    it,
                    vec![span.spanned_string(context.source)],
                    *span,
                ))
            }
            // If the head of a shorthand path is a string, it expands to `$it."some string"`
            TokenNode::Token(Spanned {
                item: RawToken::String(inner),
                span: outer,
            }) => {
                // Commit the peeked token
                peeked.commit();
                // Synthesize an `$it` expression
                let it = synthetic_it();
                // Make a path out of `$it` and the bare token as a member
                Ok(hir::Expression::path(
                    it,
                    vec![inner.string(context.source).spanned(*outer)],
                    *outer,
                ))
            }
            // Any other token is not a valid bare head
            other => {
                return Err(ShellError::type_error(
                    "shorthand path",
                    other.tagged_type_name(),
                ))
            }
        }
    }
}
/// Builds the implicit `$it` variable expression used as the head of a
/// shorthand path; both spans are unknown because the variable does not
/// appear in the source text.
fn synthetic_it() -> hir::Expression {
    hir::Expression::it_variable(Span::unknown(), Span::unknown())
}

View File

@ -0,0 +1,308 @@
pub(crate) mod atom;
pub(crate) mod delimited;
pub(crate) mod file_path;
pub(crate) mod list;
pub(crate) mod number;
pub(crate) mod pattern;
pub(crate) mod string;
pub(crate) mod unit;
pub(crate) mod variable_path;
use crate::parser::hir::syntax_shape::{
color_delimited_square, color_fallible_syntax, color_fallible_syntax_with, expand_atom,
expand_delimited_square, expand_expr, expand_syntax, AtomicToken, BareShape, ColorableDotShape,
DotShape, ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, ExpressionContinuation,
ExpressionContinuationShape, FallibleColorSyntax, FlatShape,
};
use crate::parser::{
hir,
hir::{Expression, TokensIterator},
};
use crate::prelude::*;
use std::path::PathBuf;
/// Shape for any complete expression: a start expression plus continuations.
#[derive(Debug, Copy, Clone)]
pub struct AnyExpressionShape;
impl ExpandExpression for AnyExpressionShape {
    /// Expands the leading expression and then folds any continuations
    /// (`.member` access, infix operations) onto it.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        // Look for an expression at the cursor
        let head = expand_expr(&AnyExpressionStartShape, token_nodes, context)?;
        continue_expression(head, token_nodes, context)
    }
}
impl FallibleColorSyntax for AnyExpressionShape {
    type Info = ();
    type Input = ();
    /// Colors a complete expression: the start expression is mandatory,
    /// and any continuations after it are colored best-effort.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        // Look for an expression at the cursor
        color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context, shapes)?;
        // It's fine for there to be no continuation, so the result is
        // deliberately ignored (was a `match` with two empty arms)
        let _ = continue_coloring_expression(token_nodes, context, shapes);
        Ok(())
    }
}
/// Repeatedly folds expression continuations onto `head` until no further
/// continuation can be parsed, returning the combined expression.
pub(crate) fn continue_expression(
    mut head: hir::Expression,
    token_nodes: &mut TokensIterator<'_>,
    context: &ExpandContext,
) -> Result<hir::Expression, ShellError> {
    loop {
        // Check to see whether there's any continuation after the head expression
        let continuation = expand_syntax(&ExpressionContinuationShape, token_nodes, context);
        match continuation {
            // If there's no continuation, return the head
            Err(_) => return Ok(head),
            // Otherwise, form a new expression by combining the head with the continuation
            Ok(continuation) => match continuation {
                // If the continuation is a `.member`, form a path with the new member
                ExpressionContinuation::DotSuffix(_dot, member) => {
                    head = Expression::dot_member(head, member);
                }
                // Otherwise, if the continuation is an infix suffix, form an infix expression
                ExpressionContinuation::InfixSuffix(op, expr) => {
                    head = Expression::infix(head, op, expr);
                }
            },
        }
    }
}
/// Colors one-or-more expression continuations. Fails if not even one
/// continuation is present; after the first, further continuations are
/// consumed greedily until one fails to parse.
pub(crate) fn continue_coloring_expression(
    token_nodes: &mut TokensIterator<'_>,
    context: &ExpandContext,
    shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
    // if there's not even one expression continuation, fail
    color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes)?;
    // We already saw one continuation; keep consuming until one fails
    while color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes)
        .is_ok()
    {}
    Ok(())
}
/// Shape for the leading atom of an expression.
#[derive(Debug, Copy, Clone)]
pub struct AnyExpressionStartShape;
impl ExpandExpression for AnyExpressionStartShape {
    /// Expands the leading atom into HIR: a size, a square-bracket list, a
    /// bare word/dot sequence (possibly multi-token), or any other atom via
    /// `into_hir`.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        let atom = expand_atom(token_nodes, "expression", context, ExpansionRule::new())?;
        match atom.item {
            AtomicToken::Size { number, unit } => {
                return Ok(hir::Expression::size(
                    number.to_number(context.source),
                    unit.item,
                    Tag {
                        span: atom.span,
                        anchor: None,
                    },
                ))
            }
            AtomicToken::SquareDelimited { nodes, .. } => {
                expand_delimited_square(&nodes, atom.span.into(), context)
            }
            AtomicToken::Word { .. } | AtomicToken::Dot { .. } => {
                // A bare word may extend over several word/dot tokens; absorb the tail
                let end = expand_syntax(&BareTailShape, token_nodes, context)?;
                Ok(hir::Expression::bare(atom.span.until_option(end)))
            }
            other => return other.spanned(atom.span).into_hir(context, "expression"),
        }
    }
}
impl FallibleColorSyntax for AnyExpressionStartShape {
    type Info = ();
    type Input = ();
    /// Colors the leading atom of an expression. Expansion is permissive:
    /// a failed atom is colored as an error shape rather than propagated.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        let atom = token_nodes.spanned(|token_nodes| {
            expand_atom(
                token_nodes,
                "expression",
                context,
                ExpansionRule::permissive(),
            )
        });
        let atom = match atom {
            // Expansion failure is painted as an error shape, not returned
            Spanned {
                item: Err(_err),
                span,
            } => {
                shapes.push(FlatShape::Error.spanned(span));
                return Ok(());
            }
            Spanned {
                item: Ok(value), ..
            } => value,
        };
        match atom.item {
            AtomicToken::Size { number, unit } => shapes.push(
                FlatShape::Size {
                    number: number.span.into(),
                    unit: unit.span.into(),
                }
                .spanned(atom.span),
            ),
            AtomicToken::SquareDelimited { nodes, spans } => {
                color_delimited_square(spans, &nodes, atom.span.into(), context, shapes)
            }
            AtomicToken::Word { .. } | AtomicToken::Dot { .. } => {
                shapes.push(FlatShape::Word.spanned(atom.span));
            }
            // All other atoms know how to color themselves
            _ => atom.color_tokens(shapes),
        }
        Ok(())
    }
}
/// Shape for the tail of a bare word: further words and dots without spaces.
#[derive(Debug, Copy, Clone)]
pub struct BareTailShape;
impl FallibleColorSyntax for BareTailShape {
    type Info = ();
    type Input = ();
    /// Colors alternating bare words and dots as `Word` shapes; fails only
    /// if nothing at all was matched.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        // Remember the shape count so we can tell whether anything matched
        let len = shapes.len();
        loop {
            let word = color_fallible_syntax_with(
                &BareShape,
                &FlatShape::Word,
                token_nodes,
                context,
                shapes,
            );
            match word {
                // if a word was found, continue
                Ok(_) => continue,
                // if a word wasn't found, try to find a dot
                Err(_) => {}
            }
            // try to find a dot
            let dot = color_fallible_syntax_with(
                &ColorableDotShape,
                &FlatShape::Word,
                token_nodes,
                context,
                shapes,
            );
            match dot {
                // if a dot was found, try to find another word
                Ok(_) => continue,
                // otherwise, we're done
                Err(_) => break,
            }
        }
        if shapes.len() > len {
            Ok(())
        } else {
            Err(ShellError::syntax_error(
                "No tokens matched BareTailShape".tagged_unknown(),
            ))
        }
    }
}
impl ExpandSyntax for BareTailShape {
    type Output = Option<Span>;
    /// Expands a possibly-empty tail of alternating bare words and dots,
    /// returning the span of the last token consumed (or `None` if the tail
    /// was empty).
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Option<Span>, ShellError> {
        let mut end: Option<Span> = None;
        loop {
            // Prefer a bare word; fall back to a dot; stop when neither matches
            if let Ok(bare) = expand_syntax(&BareShape, token_nodes, context) {
                end = Some(bare.span);
            } else if let Ok(dot) = expand_syntax(&DotShape, token_nodes, context) {
                end = Some(dot);
            } else {
                break;
            }
        }
        Ok(end)
    }
}
/// Expands a leading `~` in `string` against the context's home directory
/// and returns the result as a `PathBuf`.
pub fn expand_file_path(string: &str, context: &ExpandContext) -> PathBuf {
    let tilde_expanded = shellexpand::tilde_with_context(string, || context.homedir());
    let expanded: &str = tilde_expanded.as_ref();
    PathBuf::from(expanded)
}

View File

@ -0,0 +1,580 @@
use crate::parser::hir::syntax_shape::{
expand_syntax, expression::expand_file_path, parse_single_node, BarePathShape,
BarePatternShape, ExpandContext, UnitShape,
};
use crate::parser::{
hir,
hir::{Expression, RawNumber, TokensIterator},
parse::flag::{Flag, FlagKind},
DelimitedNode, Delimiter, FlatShape, RawToken, TokenNode, Unit,
};
use crate::prelude::*;
use crate::{Span, Spanned};
/// The smallest unit the expander works with: one classified token (or
/// delimited group), borrowing any child nodes from the token tree.
#[derive(Debug)]
pub enum AtomicToken<'tokens> {
    Eof {
        span: Span,
    },
    Error {
        error: Spanned<ShellError>,
    },
    Number {
        number: RawNumber,
    },
    // A number immediately followed by a unit (e.g. `10kb`)
    Size {
        number: Spanned<RawNumber>,
        unit: Spanned<Unit>,
    },
    String {
        body: Span,
    },
    // The special `$it` variable
    ItVariable {
        name: Span,
    },
    Variable {
        name: Span,
    },
    ExternalCommand {
        command: Span,
    },
    ExternalWord {
        text: Span,
    },
    GlobPattern {
        pattern: Span,
    },
    FilePath {
        path: Span,
    },
    Word {
        text: Span,
    },
    // `[ ... ]`; `spans` are the delimiters, `nodes` the children
    SquareDelimited {
        spans: (Span, Span),
        nodes: &'tokens Vec<TokenNode>,
    },
    // `( ... )`
    ParenDelimited {
        span: (Span, Span),
        nodes: &'tokens Vec<TokenNode>,
    },
    // `{ ... }`
    BraceDelimited {
        spans: (Span, Span),
        nodes: &'tokens Vec<TokenNode>,
    },
    Pipeline {
        pipe: Option<Span>,
        elements: Spanned<&'tokens Vec<TokenNode>>,
    },
    // `-f`
    ShorthandFlag {
        name: Span,
    },
    // `--flag`
    LonghandFlag {
        name: Span,
    },
    Dot {
        text: Span,
    },
    Operator {
        text: Span,
    },
    Whitespace {
        text: Span,
    },
}
pub type SpannedAtomicToken<'tokens> = Spanned<AtomicToken<'tokens>>;
impl<'tokens> SpannedAtomicToken<'tokens> {
pub fn into_hir(
&self,
context: &ExpandContext,
expected: &'static str,
) -> Result<hir::Expression, ShellError> {
Ok(match &self.item {
AtomicToken::Eof { .. } => {
return Err(ShellError::type_error(
expected,
"eof atomic token".tagged(self.span),
))
}
AtomicToken::Error { .. } => {
return Err(ShellError::type_error(
expected,
"eof atomic token".tagged(self.span),
))
}
AtomicToken::Operator { .. } => {
return Err(ShellError::type_error(
expected,
"operator".tagged(self.span),
))
}
AtomicToken::ShorthandFlag { .. } => {
return Err(ShellError::type_error(
expected,
"shorthand flag".tagged(self.span),
))
}
AtomicToken::LonghandFlag { .. } => {
return Err(ShellError::type_error(expected, "flag".tagged(self.span)))
}
AtomicToken::Whitespace { .. } => {
return Err(ShellError::unimplemented("whitespace in AtomicToken"))
}
AtomicToken::Dot { .. } => {
return Err(ShellError::type_error(expected, "dot".tagged(self.span)))
}
AtomicToken::Number { number } => {
Expression::number(number.to_number(context.source), self.span)
}
AtomicToken::FilePath { path } => Expression::file_path(
expand_file_path(path.slice(context.source), context),
self.span,
),
AtomicToken::Size { number, unit } => {
Expression::size(number.to_number(context.source), **unit, self.span)
}
AtomicToken::String { body } => Expression::string(*body, self.span),
AtomicToken::ItVariable { name } => Expression::it_variable(*name, self.span),
AtomicToken::Variable { name } => Expression::variable(*name, self.span),
AtomicToken::ExternalCommand { command } => {
Expression::external_command(*command, self.span)
}
AtomicToken::ExternalWord { text } => Expression::string(*text, self.span),
AtomicToken::GlobPattern { pattern } => Expression::pattern(*pattern),
AtomicToken::Word { text } => Expression::string(*text, *text),
AtomicToken::SquareDelimited { .. } => unimplemented!("into_hir"),
AtomicToken::ParenDelimited { .. } => unimplemented!("into_hir"),
AtomicToken::BraceDelimited { .. } => unimplemented!("into_hir"),
AtomicToken::Pipeline { .. } => unimplemented!("into_hir"),
})
}
pub fn spanned_type_name(&self) -> Spanned<&'static str> {
match &self.item {
AtomicToken::Eof { .. } => "eof",
AtomicToken::Error { .. } => "error",
AtomicToken::Operator { .. } => "operator",
AtomicToken::ShorthandFlag { .. } => "shorthand flag",
AtomicToken::LonghandFlag { .. } => "flag",
AtomicToken::Whitespace { .. } => "whitespace",
AtomicToken::Dot { .. } => "dot",
AtomicToken::Number { .. } => "number",
AtomicToken::FilePath { .. } => "file path",
AtomicToken::Size { .. } => "size",
AtomicToken::String { .. } => "string",
AtomicToken::ItVariable { .. } => "$it",
AtomicToken::Variable { .. } => "variable",
AtomicToken::ExternalCommand { .. } => "external command",
AtomicToken::ExternalWord { .. } => "external word",
AtomicToken::GlobPattern { .. } => "file pattern",
AtomicToken::Word { .. } => "word",
AtomicToken::SquareDelimited { .. } => "array literal",
AtomicToken::ParenDelimited { .. } => "parenthesized expression",
AtomicToken::BraceDelimited { .. } => "block",
AtomicToken::Pipeline { .. } => "pipeline",
}
.spanned(self.span)
}
pub fn tagged_type_name(&self) -> Tagged<&'static str> {
match &self.item {
AtomicToken::Eof { .. } => "eof",
AtomicToken::Error { .. } => "error",
AtomicToken::Operator { .. } => "operator",
AtomicToken::ShorthandFlag { .. } => "shorthand flag",
AtomicToken::LonghandFlag { .. } => "flag",
AtomicToken::Whitespace { .. } => "whitespace",
AtomicToken::Dot { .. } => "dot",
AtomicToken::Number { .. } => "number",
AtomicToken::FilePath { .. } => "file path",
AtomicToken::Size { .. } => "size",
AtomicToken::String { .. } => "string",
AtomicToken::ItVariable { .. } => "$it",
AtomicToken::Variable { .. } => "variable",
AtomicToken::ExternalCommand { .. } => "external command",
AtomicToken::ExternalWord { .. } => "external word",
AtomicToken::GlobPattern { .. } => "file pattern",
AtomicToken::Word { .. } => "word",
AtomicToken::SquareDelimited { .. } => "array literal",
AtomicToken::ParenDelimited { .. } => "parenthesized expression",
AtomicToken::BraceDelimited { .. } => "block",
AtomicToken::Pipeline { .. } => "pipeline",
}
.tagged(self.span)
}
pub(crate) fn color_tokens(&self, shapes: &mut Vec<Spanned<FlatShape>>) {
match &self.item {
AtomicToken::Eof { .. } => {}
AtomicToken::Error { .. } => return shapes.push(FlatShape::Error.spanned(self.span)),
AtomicToken::Operator { .. } => {
return shapes.push(FlatShape::Operator.spanned(self.span));
}
AtomicToken::ShorthandFlag { .. } => {
return shapes.push(FlatShape::ShorthandFlag.spanned(self.span));
}
AtomicToken::LonghandFlag { .. } => {
return shapes.push(FlatShape::Flag.spanned(self.span));
}
AtomicToken::Whitespace { .. } => {
return shapes.push(FlatShape::Whitespace.spanned(self.span));
}
AtomicToken::FilePath { .. } => return shapes.push(FlatShape::Path.spanned(self.span)),
AtomicToken::Dot { .. } => return shapes.push(FlatShape::Dot.spanned(self.span)),
AtomicToken::Number {
number: RawNumber::Decimal(_),
} => {
return shapes.push(FlatShape::Decimal.spanned(self.span));
}
AtomicToken::Number {
number: RawNumber::Int(_),
} => {
return shapes.push(FlatShape::Int.spanned(self.span));
}
AtomicToken::Size { number, unit } => {
return shapes.push(
FlatShape::Size {
number: number.span,
unit: unit.span,
}
.spanned(self.span),
);
}
AtomicToken::String { .. } => return shapes.push(FlatShape::String.spanned(self.span)),
AtomicToken::ItVariable { .. } => {
return shapes.push(FlatShape::ItVariable.spanned(self.span))
}
AtomicToken::Variable { .. } => {
return shapes.push(FlatShape::Variable.spanned(self.span))
}
AtomicToken::ExternalCommand { .. } => {
return shapes.push(FlatShape::ExternalCommand.spanned(self.span));
}
AtomicToken::ExternalWord { .. } => {
return shapes.push(FlatShape::ExternalWord.spanned(self.span))
}
AtomicToken::GlobPattern { .. } => {
return shapes.push(FlatShape::GlobPattern.spanned(self.span))
}
AtomicToken::Word { .. } => return shapes.push(FlatShape::Word.spanned(self.span)),
_ => return shapes.push(FlatShape::Error.spanned(self.span)),
}
}
}
/// How `expand_atom` reacts to whitespace at the cursor.
#[derive(Debug)]
pub enum WhitespaceHandling {
    // Return whitespace as an `AtomicToken::Whitespace` atom
    #[allow(unused)]
    AllowWhitespace,
    // Treat whitespace at the cursor as a syntax error
    RejectWhitespace,
}
/// Controls which token kinds `expand_atom` will accept and how whitespace
/// and errors are handled during expansion.
#[derive(Debug)]
pub struct ExpansionRule {
    pub(crate) allow_external_command: bool,
    pub(crate) allow_external_word: bool,
    pub(crate) allow_operator: bool,
    pub(crate) allow_eof: bool,
    pub(crate) treat_size_as_word: bool,
    pub(crate) commit_errors: bool,
    pub(crate) whitespace: WhitespaceHandling,
}
impl Default for ExpansionRule {
    /// Same as `ExpansionRule::new()`: the strictest rule set.
    /// (Addresses clippy's `new_without_default`.)
    fn default() -> ExpansionRule {
        ExpansionRule::new()
    }
}
impl ExpansionRule {
    /// The strictest rule set: everything optional is disallowed.
    pub fn new() -> ExpansionRule {
        ExpansionRule {
            allow_external_command: false,
            allow_external_word: false,
            allow_operator: false,
            allow_eof: false,
            treat_size_as_word: false,
            commit_errors: false,
            whitespace: WhitespaceHandling::RejectWhitespace,
        }
    }
    /// The intent of permissive mode is to return an atomic token for every possible
    /// input token. This is important for error-correcting parsing, such as the
    /// syntax highlighter.
    pub fn permissive() -> ExpansionRule {
        ExpansionRule {
            allow_external_command: true,
            allow_external_word: true,
            allow_operator: true,
            allow_eof: true,
            treat_size_as_word: false,
            commit_errors: true,
            whitespace: WhitespaceHandling::AllowWhitespace,
        }
    }
    // Builder-style toggles for individual flags; each consumes and
    // returns `self` so rules can be chained fluently.
    #[allow(unused)]
    pub fn allow_external_command(mut self) -> ExpansionRule {
        self.allow_external_command = true;
        self
    }
    #[allow(unused)]
    pub fn allow_operator(mut self) -> ExpansionRule {
        self.allow_operator = true;
        self
    }
    #[allow(unused)]
    pub fn no_operator(mut self) -> ExpansionRule {
        self.allow_operator = false;
        self
    }
    #[allow(unused)]
    pub fn no_external_command(mut self) -> ExpansionRule {
        self.allow_external_command = false;
        self
    }
    #[allow(unused)]
    pub fn allow_external_word(mut self) -> ExpansionRule {
        self.allow_external_word = true;
        self
    }
    #[allow(unused)]
    pub fn no_external_word(mut self) -> ExpansionRule {
        self.allow_external_word = false;
        self
    }
    #[allow(unused)]
    pub fn treat_size_as_word(mut self) -> ExpansionRule {
        self.treat_size_as_word = true;
        self
    }
    #[allow(unused)]
    pub fn commit_errors(mut self) -> ExpansionRule {
        self.commit_errors = true;
        self
    }
    #[allow(unused)]
    pub fn allow_whitespace(mut self) -> ExpansionRule {
        self.whitespace = WhitespaceHandling::AllowWhitespace;
        self
    }
    #[allow(unused)]
    pub fn reject_whitespace(mut self) -> ExpansionRule {
        self.whitespace = WhitespaceHandling::RejectWhitespace;
        self
    }
}
/// If the caller of expand_atom throws away the returned atomic token returned, it
/// must use a checkpoint to roll it back.
pub fn expand_atom<'me, 'content>(
token_nodes: &'me mut TokensIterator<'content>,
expected: &'static str,
context: &ExpandContext,
rule: ExpansionRule,
) -> Result<SpannedAtomicToken<'content>, ShellError> {
if token_nodes.at_end() {
match rule.allow_eof {
true => {
return Ok(AtomicToken::Eof {
span: Span::unknown(),
}
.spanned(Span::unknown()))
}
false => return Err(ShellError::unexpected_eof("anything", Tag::unknown())),
}
}
// First, we'll need to handle the situation where more than one token corresponds
// to a single atomic token
// If treat_size_as_word, don't try to parse the head of the token stream
// as a size.
match rule.treat_size_as_word {
true => {}
false => match expand_syntax(&UnitShape, token_nodes, context) {
// If the head of the stream isn't a valid unit, we'll try to parse
// it again next as a word
Err(_) => {}
// But if it was a valid unit, we're done here
Ok(Spanned {
item: (number, unit),
span,
}) => return Ok(AtomicToken::Size { number, unit }.spanned(span)),
},
}
// Try to parse the head of the stream as a bare path. A bare path includes
// words as well as `.`s, connected together without whitespace.
match expand_syntax(&BarePathShape, token_nodes, context) {
// If we didn't find a bare path
Err(_) => {}
Ok(span) => {
let next = token_nodes.peek_any();
match next.node {
Some(token) if token.is_pattern() => {
// if the very next token is a pattern, we're looking at a glob, not a
// word, and we should try to parse it as a glob next
}
_ => return Ok(AtomicToken::Word { text: span }.spanned(span)),
}
}
}
// Try to parse the head of the stream as a pattern. A pattern includes
// words, words with `*` as well as `.`s, connected together without whitespace.
match expand_syntax(&BarePatternShape, token_nodes, context) {
// If we didn't find a bare path
Err(_) => {}
Ok(span) => return Ok(AtomicToken::GlobPattern { pattern: span }.spanned(span)),
}
// The next token corresponds to at most one atomic token
// We need to `peek` because `parse_single_node` doesn't cover all of the
// cases that `expand_atom` covers. We should probably collapse the two
// if possible.
let peeked = token_nodes.peek_any().not_eof(expected)?;
match peeked.node {
TokenNode::Token(_) => {
// handle this next
}
TokenNode::Error(error) => {
peeked.commit();
return Ok(AtomicToken::Error {
error: error.clone(),
}
.spanned(error.span));
}
// [ ... ]
TokenNode::Delimited(Spanned {
item:
DelimitedNode {
delimiter: Delimiter::Square,
spans,
children,
},
span,
}) => {
peeked.commit();
let span = *span;
return Ok(AtomicToken::SquareDelimited {
nodes: children,
spans: *spans,
}
.spanned(span));
}
TokenNode::Flag(Spanned {
item:
Flag {
kind: FlagKind::Shorthand,
name,
},
span,
}) => {
peeked.commit();
return Ok(AtomicToken::ShorthandFlag { name: *name }.spanned(*span));
}
TokenNode::Flag(Spanned {
item:
Flag {
kind: FlagKind::Longhand,
name,
},
span,
}) => {
peeked.commit();
return Ok(AtomicToken::ShorthandFlag { name: *name }.spanned(*span));
}
// If we see whitespace, process the whitespace according to the whitespace
// handling rules
TokenNode::Whitespace(span) => match rule.whitespace {
// if whitespace is allowed, return a whitespace token
WhitespaceHandling::AllowWhitespace => {
peeked.commit();
return Ok(AtomicToken::Whitespace { text: *span }.spanned(*span));
}
// if whitespace is disallowed, return an error
WhitespaceHandling::RejectWhitespace => {
return Err(ShellError::syntax_error("Unexpected whitespace".tagged(
Tag {
span: *span,
anchor: None,
},
)))
}
},
other => {
let span = peeked.node.span();
peeked.commit();
return Ok(AtomicToken::Error {
error: ShellError::type_error("token", other.tagged_type_name()).spanned(span),
}
.spanned(span));
}
}
parse_single_node(token_nodes, expected, |token, token_span, err| {
Ok(match token {
// First, the error cases. Each error case corresponds to a expansion rule
// flag that can be used to allow the case
// rule.allow_operator
RawToken::Operator(_) if !rule.allow_operator => return Err(err.error()),
// rule.allow_external_command
RawToken::ExternalCommand(_) if !rule.allow_external_command => {
return Err(ShellError::type_error(
expected,
token.type_name().tagged(Tag {
span: token_span,
anchor: None,
}),
))
}
// rule.allow_external_word
RawToken::ExternalWord if !rule.allow_external_word => {
return Err(ShellError::invalid_external_word(Tag {
span: token_span,
anchor: None,
}))
}
RawToken::Number(number) => AtomicToken::Number { number }.spanned(token_span),
RawToken::Operator(_) => AtomicToken::Operator { text: token_span }.spanned(token_span),
RawToken::String(body) => AtomicToken::String { body }.spanned(token_span),
RawToken::Variable(name) if name.slice(context.source) == "it" => {
AtomicToken::ItVariable { name }.spanned(token_span)
}
RawToken::Variable(name) => AtomicToken::Variable { name }.spanned(token_span),
RawToken::ExternalCommand(command) => {
AtomicToken::ExternalCommand { command }.spanned(token_span)
}
RawToken::ExternalWord => {
AtomicToken::ExternalWord { text: token_span }.spanned(token_span)
}
RawToken::GlobPattern => AtomicToken::GlobPattern {
pattern: token_span,
}
.spanned(token_span),
RawToken::Bare => AtomicToken::Word { text: token_span }.spanned(token_span),
})
})
}

View File

@ -0,0 +1,49 @@
use crate::parser::hir::syntax_shape::{
color_syntax, expand_syntax, ColorSyntax, ExpandContext, ExpressionListShape, TokenNode,
};
use crate::parser::{hir, hir::TokensIterator, Delimiter, FlatShape};
use crate::prelude::*;
/// Expand the contents of a square-bracket-delimited node into a list
/// expression covering the bracketed span.
pub fn expand_delimited_square(
    children: &Vec<TokenNode>,
    span: Span,
    context: &ExpandContext,
) -> Result<hir::Expression, ShellError> {
    let mut iterator = TokensIterator::new(&children, span, false);

    // Expand the interior as a whitespace-separated expression list; any
    // failure propagates to the caller unchanged.
    let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?;

    let tag = Tag { span, anchor: None };
    Ok(hir::Expression::list(exprs, tag))
}
/// Color a square-bracket-delimited node: the opening bracket, the interior
/// expression list, then the closing bracket.
pub fn color_delimited_square(
    (open, close): (Span, Span),
    children: &Vec<TokenNode>,
    span: Span,
    context: &ExpandContext,
    shapes: &mut Vec<Spanned<FlatShape>>,
) {
    // Open delimiter first so shapes stay in source order.
    shapes.push(FlatShape::OpenDelimiter(Delimiter::Square).spanned(open));

    let mut iterator = TokensIterator::new(&children, span, false);
    let _ = color_syntax(&ExpressionListShape, &mut iterator, context, shapes);

    shapes.push(FlatShape::CloseDelimiter(Delimiter::Square).spanned(close));
}
/// Colors any delimited region, given the delimiter kind and the spans of its
/// opening and closing tokens as input.
#[derive(Debug, Copy, Clone)]
pub struct DelimitedShape;

impl ColorSyntax for DelimitedShape {
    type Info = ();
    type Input = (Delimiter, Span, Span);

    /// Push the open-delimiter shape, color the interior as an expression
    /// list, then push the close-delimiter shape.
    fn color_syntax<'a, 'b>(
        &self,
        (delimiter, open, close): &(Delimiter, Span, Span),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Self::Info {
        shapes.push(FlatShape::OpenDelimiter(*delimiter).spanned(*open));
        color_syntax(&ExpressionListShape, token_nodes, context, shapes);
        shapes.push(FlatShape::CloseDelimiter(*delimiter).spanned(*close));
    }
}

View File

@ -0,0 +1,71 @@
use crate::parser::hir::syntax_shape::expression::atom::{expand_atom, AtomicToken, ExpansionRule};
use crate::parser::hir::syntax_shape::{
expression::expand_file_path, ExpandContext, ExpandExpression, FallibleColorSyntax, FlatShape,
};
use crate::parser::{hir, hir::TokensIterator};
use crate::prelude::*;
/// The shape of a file path argument.
#[derive(Debug, Copy, Clone)]
pub struct FilePathShape;

impl FallibleColorSyntax for FilePathShape {
    type Info = ();
    type Input = ();

    /// Color the next atom as a path when it is word-like (word, string,
    /// number or size); other atoms keep their default coloring.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        let atom = expand_atom(
            token_nodes,
            "file path",
            context,
            ExpansionRule::permissive(),
        );

        // Failing to expand an atom is not an error here: color nothing.
        let atom = match atom {
            Err(_) => return Ok(()),
            Ok(atom) => atom,
        };

        match atom.item {
            AtomicToken::Word { .. }
            | AtomicToken::String { .. }
            | AtomicToken::Number { .. }
            | AtomicToken::Size { .. } => {
                shapes.push(FlatShape::Path.spanned(atom.span));
            }

            _ => atom.color_tokens(shapes),
        }

        Ok(())
    }
}

impl ExpandExpression for FilePathShape {
    /// Expand the next atom into a file-path expression. Words and strings go
    /// through `expand_file_path` (presumably to resolve things like `~` —
    /// confirm against its definition); numbers and sizes use their source
    /// text verbatim.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        let atom = expand_atom(token_nodes, "file path", context, ExpansionRule::new())?;

        match atom.item {
            AtomicToken::Word { text: body } | AtomicToken::String { body } => {
                let path = expand_file_path(body.slice(context.source), context);
                return Ok(hir::Expression::file_path(path, atom.span));
            }

            AtomicToken::Number { .. } | AtomicToken::Size { .. } => {
                let path = atom.span.slice(context.source);
                return Ok(hir::Expression::file_path(path, atom.span));
            }

            // Any other atom kind is converted (or rejected) by the generic
            // atom-to-HIR conversion.
            _ => return atom.into_hir(context, "file path"),
        }
    }
}

View File

@ -0,0 +1,176 @@
use crate::errors::ShellError;
use crate::parser::{
hir,
hir::syntax_shape::{
color_fallible_syntax, color_syntax, expand_atom, expand_expr, maybe_spaced, spaced,
AnyExpressionShape, ColorSyntax, ExpandContext, ExpandSyntax, ExpansionRule,
MaybeSpaceShape, SpaceShape,
},
hir::TokensIterator,
FlatShape,
};
use crate::Spanned;
/// A whitespace-separated list of expressions, e.g. the contents of `[ ... ]`.
#[derive(Debug, Copy, Clone)]
pub struct ExpressionListShape;

impl ExpandSyntax for ExpressionListShape {
    type Output = Vec<hir::Expression>;

    /// Expand expressions until the stream ends (possibly on trailing
    /// whitespace). The first expression may omit leading whitespace; every
    /// subsequent one must be preceded by it.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<Vec<hir::Expression>, ShellError> {
        let mut exprs = vec![];

        if token_nodes.at_end_possible_ws() {
            return Ok(exprs);
        }

        // First expression: whitespace before it is optional.
        let expr = expand_expr(&maybe_spaced(AnyExpressionShape), token_nodes, context)?;

        exprs.push(expr);

        loop {
            if token_nodes.at_end_possible_ws() {
                return Ok(exprs);
            }

            // Subsequent expressions: whitespace separator is required.
            let expr = expand_expr(&spaced(AnyExpressionShape), token_nodes, context)?;

            exprs.push(expr);
        }
    }
}

impl ColorSyntax for ExpressionListShape {
    type Info = ();
    type Input = ();

    /// The intent of this method is to fully color an expression list shape infallibly.
    /// This means that if we can't expand a token into an expression, we fall back to
    /// a simpler coloring strategy.
    ///
    /// This would apply to something like `where x >`, which includes an incomplete
    /// binary operator. Since we will fail to process it as a binary operator, we'll
    /// fall back to a simpler coloring and move on.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) {
        // We encountered a parsing error and will continue with simpler coloring ("backoff
        // coloring mode")
        let mut backoff = false;

        // Consume any leading whitespace
        color_syntax(&MaybeSpaceShape, token_nodes, context, shapes);

        loop {
            // If we reached the very end of the token stream, we're done
            if token_nodes.at_end() {
                return;
            }

            if backoff {
                let len = shapes.len();

                // If we previously encountered a parsing error, use backoff coloring mode
                color_syntax(&SimplestExpression, token_nodes, context, shapes);

                if len == shapes.len() && !token_nodes.at_end() {
                    // This should never happen, but if it does, a panic is better than an infinite loop
                    panic!("Unexpected tokens left that couldn't be colored even with SimplestExpression")
                }
            } else {
                // Try to color the head of the stream as an expression
                match color_fallible_syntax(&AnyExpressionShape, token_nodes, context, shapes) {
                    // If no expression was found, switch to backoff coloring mode
                    Err(_) => {
                        backoff = true;
                        continue;
                    }
                    Ok(_) => {}
                }

                // If an expression was found, consume a space
                match color_fallible_syntax(&SpaceShape, token_nodes, context, shapes) {
                    Err(_) => {
                        // If no space was found, we're either at the end or there's an error.
                        // Either way, switch to backoff coloring mode. If we're at the end
                        // it won't have any consequences.
                        backoff = true;
                    }
                    Ok(_) => {
                        // Otherwise, move on to the next expression
                    }
                }
            }
        }
    }
}
/// `BackoffColoringMode` consumes all of the remaining tokens in an infallible way.
///
/// It is the "give up" strategy: once normal expression coloring has failed,
/// it repeatedly applies `SimplestExpression` until the stream is exhausted.
#[derive(Debug, Copy, Clone)]
pub struct BackoffColoringMode;

impl ColorSyntax for BackoffColoringMode {
    type Info = ();
    type Input = ();

    /// Apply `SimplestExpression` until the token stream is exhausted.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &Self::Input,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Self::Info {
        loop {
            if token_nodes.at_end() {
                break;
            }

            // Track the shape count so we can detect a pass that made no
            // progress (which would otherwise loop forever).
            let len = shapes.len();
            color_syntax(&SimplestExpression, token_nodes, context, shapes);

            if len == shapes.len() && !token_nodes.at_end() {
                // This shouldn't happen, but if it does, a panic is better than an infinite loop
                panic!("SimplestExpression failed to consume any tokens, but it's not at the end. This is unexpected\n== token nodes==\n{:#?}\n\n== shapes ==\n{:#?}", token_nodes, shapes);
            }
        }
    }
}
/// The point of `SimplestExpression` is to serve as an infallible base case for coloring.
/// As a last ditch effort, if we can't find any way to parse the head of the stream as an
/// expression, fall back to simple coloring.
#[derive(Debug, Copy, Clone)]
pub struct SimplestExpression;

impl ColorSyntax for SimplestExpression {
    type Info = ();
    type Input = ();

    /// Expand one atom permissively and color its tokens; expansion failure
    /// (e.g. EOF) colors nothing and is swallowed.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) {
        let atom = expand_atom(
            token_nodes,
            "any token",
            context,
            ExpansionRule::permissive(),
        );

        match atom {
            Err(_) => {}
            Ok(atom) => atom.color_tokens(shapes),
        }
    }
}

View File

@ -0,0 +1,136 @@
use crate::parser::hir::syntax_shape::{
expand_atom, parse_single_node, ExpandContext, ExpandExpression, ExpansionRule,
FallibleColorSyntax, FlatShape,
};
use crate::parser::{
hir,
hir::{RawNumber, TokensIterator},
RawToken,
};
use crate::prelude::*;
/// The shape of a numeric literal (integer or decimal).
#[derive(Debug, Copy, Clone)]
pub struct NumberShape;

impl ExpandExpression for NumberShape {
    /// Parse a single token in "number position". Variables, external
    /// commands, bare words and strings are also accepted and converted;
    /// glob patterns, operators and external words are rejected.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        parse_single_node(token_nodes, "Number", |token, token_span, err| {
            Ok(match token {
                RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()),
                // `$it` is special-cased as its own expression kind.
                RawToken::Variable(tag) if tag.slice(context.source) == "it" => {
                    hir::Expression::it_variable(tag, token_span)
                }
                RawToken::ExternalCommand(tag) => {
                    hir::Expression::external_command(tag, token_span)
                }
                RawToken::ExternalWord => {
                    return Err(ShellError::invalid_external_word(Tag {
                        span: token_span,
                        anchor: None,
                    }))
                }
                RawToken::Variable(tag) => hir::Expression::variable(tag, token_span),
                RawToken::Number(number) => {
                    hir::Expression::number(number.to_number(context.source), token_span)
                }
                RawToken::Bare => hir::Expression::bare(token_span),
                RawToken::String(tag) => hir::Expression::string(tag, token_span),
            })
        })
    }
}

impl FallibleColorSyntax for NumberShape {
    type Info = ();
    type Input = ();

    /// Color the next atom as a number; if expansion fails, color the
    /// consumed span as an error rather than returning a failure.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        let atom = token_nodes.spanned(|token_nodes| {
            expand_atom(token_nodes, "number", context, ExpansionRule::permissive())
        });

        let atom = match atom {
            Spanned { item: Err(_), span } => {
                shapes.push(FlatShape::Error.spanned(span));
                return Ok(());
            }
            Spanned { item: Ok(atom), .. } => atom,
        };

        atom.color_tokens(shapes);

        Ok(())
    }
}
/// The shape of an integer literal (decimals are rejected).
#[derive(Debug, Copy, Clone)]
pub struct IntShape;

impl ExpandExpression for IntShape {
    /// Like `NumberShape::expand_expr`, but only `RawNumber::Int` tokens are
    /// accepted as numbers; any other numeric token is an error.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        parse_single_node(token_nodes, "Integer", |token, token_span, err| {
            Ok(match token {
                RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()),
                RawToken::ExternalWord => {
                    return Err(ShellError::invalid_external_word(token_span))
                }
                // `$it` is special-cased as its own expression kind.
                RawToken::Variable(span) if span.slice(context.source) == "it" => {
                    hir::Expression::it_variable(span, token_span)
                }
                RawToken::ExternalCommand(span) => {
                    hir::Expression::external_command(span, token_span)
                }
                RawToken::Variable(span) => hir::Expression::variable(span, token_span),
                RawToken::Number(number @ RawNumber::Int(_)) => {
                    hir::Expression::number(number.to_number(context.source), token_span)
                }
                // Non-integer numbers (decimals) are rejected here.
                RawToken::Number(_) => return Err(err.error()),
                RawToken::Bare => hir::Expression::bare(token_span),
                RawToken::String(span) => hir::Expression::string(span, token_span),
            })
        })
    }
}

impl FallibleColorSyntax for IntShape {
    type Info = ();
    type Input = ();

    /// Color the next atom as an integer; if expansion fails, color the
    /// consumed span as an error rather than returning a failure.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        let atom = token_nodes.spanned(|token_nodes| {
            expand_atom(token_nodes, "integer", context, ExpansionRule::permissive())
        });

        let atom = match atom {
            Spanned { item: Err(_), span } => {
                shapes.push(FlatShape::Error.spanned(span));
                return Ok(());
            }
            Spanned { item: Ok(atom), .. } => atom,
        };

        atom.color_tokens(shapes);

        Ok(())
    }
}

View File

@ -0,0 +1,112 @@
use crate::parser::hir::syntax_shape::{
expand_atom, expand_bare, expand_syntax, expression::expand_file_path, parse_single_node,
AtomicToken, ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, FallibleColorSyntax,
FlatShape,
};
use crate::parser::{hir, hir::TokensIterator, Operator, RawToken, TokenNode};
use crate::prelude::*;
/// The shape of a glob pattern argument.
#[derive(Debug, Copy, Clone)]
pub struct PatternShape;

impl FallibleColorSyntax for PatternShape {
    type Info = ();
    type Input = ();

    /// Color the next atom as a glob pattern if it is a pattern or a word;
    /// otherwise fail. The `atomic` wrapper rolls back consumption on failure.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        token_nodes.atomic(|token_nodes| {
            let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?;

            match &atom.item {
                AtomicToken::GlobPattern { .. } | AtomicToken::Word { .. } => {
                    shapes.push(FlatShape::GlobPattern.spanned(atom.span));
                    Ok(())
                }

                _ => Err(ShellError::type_error("pattern", atom.tagged_type_name())),
            }
        })
    }
}

impl ExpandExpression for PatternShape {
    /// Expand a pattern: first try a bare pattern run (`foo.*`); if that
    /// fails, fall back to a single token. Glob, dot and bare tokens are
    /// unreachable in the fallback because `BarePatternShape` would already
    /// have matched them.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        let pattern = expand_syntax(&BarePatternShape, token_nodes, context);

        match pattern {
            Ok(tag) => {
                return Ok(hir::Expression::pattern(tag));
            }
            Err(_) => {}
        }

        parse_single_node(token_nodes, "Pattern", |token, token_tag, _| {
            Ok(match token {
                RawToken::GlobPattern => {
                    return Err(ShellError::unreachable(
                        "glob pattern after glob already returned",
                    ))
                }
                RawToken::Operator(..) => {
                    return Err(ShellError::unreachable("dot after glob already returned"))
                }
                RawToken::Bare => {
                    return Err(ShellError::unreachable("bare after glob already returned"))
                }
                // `$it` is special-cased as its own expression kind.
                RawToken::Variable(tag) if tag.slice(context.source) == "it" => {
                    hir::Expression::it_variable(tag, token_tag)
                }
                RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag),
                RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag),
                RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)),
                RawToken::Number(_) => hir::Expression::bare(token_tag),
                // A quoted string in pattern position is treated as a file path.
                RawToken::String(tag) => hir::Expression::file_path(
                    expand_file_path(tag.slice(context.source), context),
                    token_tag,
                ),
            })
        })
    }
}
/// A glob pattern written as an unquoted run of bare words, dots and glob
/// tokens (e.g. `src/*.rs`).
#[derive(Debug, Copy, Clone)]
pub struct BarePatternShape;

impl ExpandSyntax for BarePatternShape {
    type Output = Span;

    /// Consume a contiguous run of bare/dot/glob tokens and return the span
    /// covering the whole run.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Span, ShellError> {
        expand_bare(token_nodes, context, |token| match token {
            TokenNode::Token(Spanned {
                item: RawToken::Bare,
                ..
            })
            | TokenNode::Token(Spanned {
                item: RawToken::Operator(Operator::Dot),
                ..
            })
            | TokenNode::Token(Spanned {
                item: RawToken::GlobPattern,
                ..
            }) => true,

            _ => false,
        })
    }
}

View File

@ -0,0 +1,94 @@
use crate::parser::hir::syntax_shape::{
expand_atom, expand_variable, parse_single_node, AtomicToken, ExpandContext, ExpandExpression,
ExpansionRule, FallibleColorSyntax, FlatShape, TestSyntax,
};
use crate::parser::hir::tokens_iterator::Peeked;
use crate::parser::{hir, hir::TokensIterator, RawToken, TokenNode};
use crate::prelude::*;
/// The shape of a string argument.
#[derive(Debug, Copy, Clone)]
pub struct StringShape;

impl FallibleColorSyntax for StringShape {
    type Info = ();
    // The caller supplies the flat shape to paint string atoms with
    // (e.g. `FlatShape::StringMember` inside a column path).
    type Input = FlatShape;

    fn color_syntax<'a, 'b>(
        &self,
        input: &FlatShape,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive());

        // Failing to expand an atom is not an error here: color nothing.
        let atom = match atom {
            Err(_) => return Ok(()),
            Ok(atom) => atom,
        };

        match atom {
            Spanned {
                item: AtomicToken::String { .. },
                span,
            } => shapes.push((*input).spanned(span)),
            // Anything else keeps its default coloring.
            other => other.color_tokens(shapes),
        }

        Ok(())
    }
}

impl ExpandExpression for StringShape {
    /// Parse a single token in "string position". Variables expand via
    /// `expand_variable`; numbers and bare words become bare expressions;
    /// glob patterns, operators and external words are rejected.
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        parse_single_node(token_nodes, "String", |token, token_span, _| {
            Ok(match token {
                RawToken::GlobPattern => {
                    return Err(ShellError::type_error(
                        "String",
                        "glob pattern".tagged(token_span),
                    ))
                }
                RawToken::Operator(..) => {
                    return Err(ShellError::type_error(
                        "String",
                        "operator".tagged(token_span),
                    ))
                }
                RawToken::Variable(span) => expand_variable(span, token_span, &context.source),
                RawToken::ExternalCommand(span) => {
                    hir::Expression::external_command(span, token_span)
                }
                RawToken::ExternalWord => {
                    return Err(ShellError::invalid_external_word(token_span))
                }
                RawToken::Number(_) => hir::Expression::bare(token_span),
                RawToken::Bare => hir::Expression::bare(token_span),
                RawToken::String(span) => hir::Expression::string(span, token_span),
            })
        })
    }
}
impl TestSyntax for StringShape {
    /// Peek at the head of the stream and report whether it is a string
    /// token, without committing the peek.
    fn test<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
    ) -> Option<Peeked<'a, 'b>> {
        let peeked = token_nodes.peek_any();

        if let Some(TokenNode::Token(token)) = peeked.node {
            if let RawToken::String(_) = token.item {
                return Some(peeked);
            }
        }

        None
    }
}

View File

@ -0,0 +1,92 @@
use crate::data::meta::Span;
use crate::parser::hir::syntax_shape::{ExpandContext, ExpandSyntax};
use crate::parser::parse::tokens::RawNumber;
use crate::parser::parse::unit::Unit;
use crate::parser::{hir::TokensIterator, RawToken, TokenNode};
use crate::prelude::*;
use nom::branch::alt;
use nom::bytes::complete::tag;
use nom::character::complete::digit1;
use nom::combinator::{all_consuming, opt, value};
use nom::IResult;
/// The shape of a size literal such as `10kb`, lexed as a single bare word.
#[derive(Debug, Copy, Clone)]
pub struct UnitShape;

impl ExpandSyntax for UnitShape {
    type Output = Spanned<(Spanned<RawNumber>, Spanned<Unit>)>;

    /// Peek a bare token and try to parse it as `<number><unit>` via
    /// `unit_size`; the token is committed only on success.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Spanned<(Spanned<RawNumber>, Spanned<Unit>)>, ShellError> {
        let peeked = token_nodes.peek_any().not_eof("unit")?;

        let span = match peeked.node {
            TokenNode::Token(Spanned {
                item: RawToken::Bare,
                span,
            }) => span,
            _ => return Err(peeked.type_error("unit")),
        };

        let unit = unit_size(span.slice(context.source), *span);

        // `unit_size` returns `Ok((remaining_input, (number, unit)))`; the
        // outer destructuring discards the remaining input.
        let (_, (number, unit)) = match unit {
            Err(_) => {
                return Err(ShellError::type_error(
                    "unit",
                    "word".tagged(Tag::unknown()),
                ))
            }
            Ok((number, unit)) => (number, unit),
        };

        peeked.commit();
        Ok((number, unit).spanned(*span))
    }
}
/// Parse a bare word like `10kb` or `3.5GB` into a raw number and a size unit.
///
/// `bare_span` is the span of the whole bare word in the original source; the
/// returned number/unit spans are computed as offsets into it. The unit suffix
/// must consume the remainder of the word (`all_consuming`).
fn unit_size(input: &str, bare_span: Span) -> IResult<&str, (Spanned<RawNumber>, Spanned<Unit>)> {
    // Numeric prefix: digits, optionally followed by `.` and more digits.
    let (input, digits) = digit1(input)?;

    let (input, dot) = opt(tag("."))(input)?;

    let (input, number) = match dot {
        Some(dot) => {
            let (input, rest) = digit1(input)?;
            (
                input,
                RawNumber::decimal(Span::new(
                    bare_span.start(),
                    bare_span.start() + digits.len() + dot.len() + rest.len(),
                )),
            )
        }

        None => (
            input,
            RawNumber::int(Span::new(
                bare_span.start(),
                bare_span.start() + digits.len(),
            )),
        ),
    };

    // Fix: GB/TB/PB previously all mapped to `Unit::MB` (copy-paste error);
    // each suffix now yields its own `Unit` variant.
    let (input, unit) = all_consuming(alt((
        value(Unit::B, alt((tag("B"), tag("b")))),
        value(Unit::KB, alt((tag("KB"), tag("kb"), tag("Kb")))),
        value(Unit::MB, alt((tag("MB"), tag("mb"), tag("Mb")))),
        value(Unit::GB, alt((tag("GB"), tag("gb"), tag("Gb")))),
        value(Unit::TB, alt((tag("TB"), tag("tb"), tag("Tb")))),
        value(Unit::PB, alt((tag("PB"), tag("pb"), tag("Pb")))),
    )))(input)?;

    // The unit's span starts exactly where the number's span ends.
    let unit_start = number.span.end();

    Ok((
        input,
        (number, unit.spanned(Span::new(unit_start, bare_span.end()))),
    ))
}

View File

@ -0,0 +1,735 @@
use crate::parser::hir::syntax_shape::{
color_fallible_syntax, color_fallible_syntax_with, expand_atom, expand_expr, expand_syntax,
parse_single_node, AnyExpressionShape, AtomicToken, BareShape, ExpandContext, ExpandExpression,
ExpandSyntax, ExpansionRule, FallibleColorSyntax, FlatShape, Peeked, SkipSyntax, StringShape,
TestSyntax, WhitespaceShape,
};
use crate::parser::{hir, hir::Expression, hir::TokensIterator, Operator, RawToken};
use crate::prelude::*;
/// A variable followed by `.member` segments, e.g. `$it.name.first`.
#[derive(Debug, Copy, Clone)]
pub struct VariablePathShape;

impl ExpandExpression for VariablePathShape {
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        // 1. let the head be the first token, expecting a variable
        // 2. let the tail be an empty list of members
        // 2. while the next token (excluding ws) is a dot:
        //    1. consume the dot
        //    2. consume the next token as a member and push it onto tail

        let head = expand_expr(&VariableShape, token_nodes, context)?;
        let start = head.span;
        let mut end = start;
        let mut tail: Vec<Spanned<String>> = vec![];

        loop {
            // No more dots means the path is complete.
            match DotShape.skip(token_nodes, context) {
                Err(_) => break,
                Ok(_) => {}
            }

            // A dot *must* be followed by a member.
            let syntax = expand_syntax(&MemberShape, token_nodes, context)?;
            let member = syntax.to_spanned_string(context.source);

            end = member.span;
            tail.push(member);
        }

        Ok(hir::Expression::path(head, tail, start.until(end)))
    }
}

impl FallibleColorSyntax for VariablePathShape {
    type Info = ();
    type Input = ();

    /// Color `$var.a.b...` atomically: a variable head, then alternating
    /// dot/member pairs. A dot without a following member fails the whole
    /// shape (and the atomic wrapper rolls everything back).
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        token_nodes.atomic(|token_nodes| {
            // If the head of the token stream is not a variable, fail
            color_fallible_syntax(&VariableShape, token_nodes, context, shapes)?;

            loop {
                // look for a dot at the head of a stream
                let dot = color_fallible_syntax_with(
                    &ColorableDotShape,
                    &FlatShape::Dot,
                    token_nodes,
                    context,
                    shapes,
                );

                // if there's no dot, we're done
                match dot {
                    Err(_) => break,
                    Ok(_) => {}
                }

                // otherwise, look for a member, and if you don't find one, fail
                color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
            }

            Ok(())
        })
    }
}
/// The tail of a path: a sequence of `.member` segments with no head.
#[derive(Debug, Copy, Clone)]
pub struct PathTailShape;

/// The failure mode of `PathTailShape` is a dot followed by a non-member
impl FallibleColorSyntax for PathTailShape {
    type Info = ();
    type Input = ();

    /// Color `.member` pairs until no dot is found; a dot without a member
    /// fails the whole (atomic) shape.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        token_nodes.atomic(|token_nodes| loop {
            let result = color_fallible_syntax_with(
                &ColorableDotShape,
                &FlatShape::Dot,
                token_nodes,
                context,
                shapes,
            );

            // No dot at the head: the tail is complete.
            match result {
                Err(_) => return Ok(()),
                Ok(_) => {}
            }

            // If we've seen a dot but not a member, fail
            color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
        })
    }
}

impl ExpandSyntax for PathTailShape {
    type Output = (Vec<Spanned<String>>, Span);

    /// Expand `.member` pairs; fails with a type error if not even one member
    /// was found, using the token at the cursor for the error span.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ShellError> {
        let mut end: Option<Span> = None;
        let mut tail = vec![];

        loop {
            match DotShape.skip(token_nodes, context) {
                Err(_) => break,
                Ok(_) => {}
            }

            let syntax = expand_syntax(&MemberShape, token_nodes, context)?;
            let member = syntax.to_spanned_string(context.source);
            end = Some(member.span);
            tail.push(member);
        }

        match end {
            None => {
                return Err(ShellError::type_error("path tail", {
                    let typed_span = token_nodes.typed_span_at_cursor();

                    Tagged {
                        tag: typed_span.span.into(),
                        item: typed_span.item,
                    }
                }))
            }

            Some(end) => Ok((tail, end)),
        }
    }
}
/// The parsed continuation of an expression: either a `.member` path suffix
/// or an infix operator followed by a right-hand expression.
#[derive(Debug)]
pub enum ExpressionContinuation {
    DotSuffix(Span, Spanned<String>),
    InfixSuffix(Spanned<Operator>, Expression),
}

/// An expression continuation
#[derive(Debug, Copy, Clone)]
pub struct ExpressionContinuationShape;

impl ExpandSyntax for ExpressionContinuationShape {
    type Output = ExpressionContinuation;

    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<ExpressionContinuation, ShellError> {
        // Try to expand a `.`
        let dot = expand_syntax(&DotShape, token_nodes, context);

        match dot {
            // If a `.` was matched, it's a `Path`, and we expect a `Member` next
            Ok(dot) => {
                let syntax = expand_syntax(&MemberShape, token_nodes, context)?;
                let member = syntax.to_spanned_string(context.source);

                Ok(ExpressionContinuation::DotSuffix(dot, member))
            }

            // Otherwise, we expect an infix operator and an expression next
            Err(_) => {
                let (_, op, _) = expand_syntax(&InfixShape, token_nodes, context)?;
                let next = expand_expr(&AnyExpressionShape, token_nodes, context)?;

                Ok(ExpressionContinuation::InfixSuffix(op, next))
            }
        }
    }
}
/// Reports which kind of continuation was colored: a dot-member suffix or an
/// infix operator followed by an expression.
pub enum ContinuationInfo {
    Dot,
    Infix,
}

impl FallibleColorSyntax for ExpressionContinuationShape {
    type Info = ContinuationInfo;
    type Input = ();

    /// Color either `.member` or `<op> <expr>`. The infix branch colors into
    /// a scratch buffer first so nothing is pushed unless both the operator
    /// and the following expression succeed.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<ContinuationInfo, ShellError> {
        token_nodes.atomic(|token_nodes| {
            // Try to expand a `.`
            let dot = color_fallible_syntax_with(
                &ColorableDotShape,
                &FlatShape::Dot,
                token_nodes,
                context,
                shapes,
            );

            match dot {
                Ok(_) => {
                    // we found a dot, so let's keep looking for a member; if no member was found, fail
                    color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;

                    Ok(ContinuationInfo::Dot)
                }
                Err(_) => {
                    let mut new_shapes = vec![];
                    let result = token_nodes.atomic(|token_nodes| {
                        // we didn't find a dot, so let's see if we're looking at an infix. If not found, fail
                        color_fallible_syntax(&InfixShape, token_nodes, context, &mut new_shapes)?;

                        // now that we've seen an infix shape, look for any expression. If not found, fail
                        color_fallible_syntax(
                            &AnyExpressionShape,
                            token_nodes,
                            context,
                            &mut new_shapes,
                        )?;

                        Ok(ContinuationInfo::Infix)
                    })?;

                    shapes.extend(new_shapes);
                    Ok(result)
                }
            }
        })
    }
}
/// The shape of a variable reference (`$name`); `$it` is special-cased.
#[derive(Debug, Copy, Clone)]
pub struct VariableShape;

impl ExpandExpression for VariableShape {
    fn expand_expr<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<hir::Expression, ShellError> {
        parse_single_node(token_nodes, "variable", |token, token_tag, _| {
            Ok(match token {
                RawToken::Variable(tag) => {
                    // `$it` gets its own expression kind.
                    if tag.slice(context.source) == "it" {
                        hir::Expression::it_variable(tag, token_tag)
                    } else {
                        hir::Expression::variable(tag, token_tag)
                    }
                }
                _ => {
                    return Err(ShellError::type_error(
                        "variable",
                        token.type_name().tagged(token_tag),
                    ))
                }
            })
        })
    }
}
impl FallibleColorSyntax for VariableShape {
    type Info = ();
    type Input = ();

    /// Color a variable token: `$it` is painted `FlatShape::ItVariable`, any
    /// other variable `FlatShape::Variable`; anything else is a type error.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        // Propagate expansion failure with `?` instead of the previous
        // hand-rolled `match { Err(e) => return Err(e), Ok(a) => a }`.
        let atom = expand_atom(
            token_nodes,
            "variable",
            context,
            ExpansionRule::permissive(),
        )?;

        match &atom.item {
            AtomicToken::Variable { .. } => {
                shapes.push(FlatShape::Variable.spanned(atom.span));
                Ok(())
            }
            AtomicToken::ItVariable { .. } => {
                shapes.push(FlatShape::ItVariable.spanned(atom.span));
                Ok(())
            }
            _ => Err(ShellError::type_error("variable", atom.tagged_type_name())),
        }
    }
}
/// A single segment of a column path: a quoted string (with separate outer
/// and inner spans) or a bare word.
#[derive(Debug, Clone, Copy)]
pub enum Member {
    String(/* outer */ Span, /* inner */ Span),
    Bare(Span),
}

impl Member {
    /// Convert this member into a string expression; bare words use the same
    /// span for both content and extent.
    pub(crate) fn to_expr(&self) -> hir::Expression {
        match self {
            Member::String(outer, inner) => hir::Expression::string(*inner, *outer),
            Member::Bare(span) => hir::Expression::string(*span, *span),
        }
    }

    /// The member's outermost span (including quotes for strings).
    pub(crate) fn span(&self) -> Span {
        match self {
            Member::String(outer, _inner) => *outer,
            Member::Bare(span) => *span,
        }
    }

    /// Extract the member's text from `source`, spanned with its outer span.
    pub(crate) fn to_spanned_string(&self, source: &str) -> Spanned<String> {
        match self {
            Member::String(outer, inner) => inner.string(source).spanned(*outer),
            Member::Bare(span) => span.spanned_string(source),
        }
    }

    /// The member's type name ("string" or "word"), tagged with its span for
    /// error reporting.
    pub(crate) fn tagged_type_name(&self) -> Tagged<&'static str> {
        match self {
            Member::String(outer, _inner) => "string".tagged(outer),
            Member::Bare(span) => "word".tagged(Tag {
                span: *span,
                anchor: None,
            }),
        }
    }
}
/// State machine used while parsing a column path like `a.b."c"`.
enum ColumnPathState {
    // Nothing consumed yet.
    Initial,
    // A dot was seen before any member.
    LeadingDot(Span),
    // One or more members followed by a trailing dot.
    Dot(Span, Vec<Member>, Span),
    // One or more members, no trailing dot (the only complete state).
    Member(Span, Vec<Member>),
    // An invalid transition occurred; sticky until `into_path`.
    Error(ShellError),
}

impl ColumnPathState {
    /// Transition on a dot token. A dot is legal as the very first token or
    /// directly after a member; two dots in a row are an error.
    pub fn dot(self, dot: Span) -> ColumnPathState {
        match self {
            ColumnPathState::Initial => ColumnPathState::LeadingDot(dot),
            ColumnPathState::LeadingDot(_) => {
                ColumnPathState::Error(ShellError::type_error("column", "dot".tagged(dot)))
            }
            ColumnPathState::Dot(..) => {
                ColumnPathState::Error(ShellError::type_error("column", "dot".tagged(dot)))
            }
            ColumnPathState::Member(tag, members) => ColumnPathState::Dot(tag, members, dot),
            ColumnPathState::Error(err) => ColumnPathState::Error(err),
        }
    }

    /// Transition on a member token. Two adjacent members without a
    /// separating dot are an error.
    pub fn member(self, member: Member) -> ColumnPathState {
        match self {
            ColumnPathState::Initial => ColumnPathState::Member(member.span(), vec![member]),
            ColumnPathState::LeadingDot(tag) => {
                ColumnPathState::Member(tag.until(member.span()), vec![member])
            }

            ColumnPathState::Dot(tag, mut tags, _) => {
                ColumnPathState::Member(tag.until(member.span()), {
                    tags.push(member);
                    tags
                })
            }
            ColumnPathState::Member(..) => {
                ColumnPathState::Error(ShellError::type_error("column", member.tagged_type_name()))
            }
            ColumnPathState::Error(err) => ColumnPathState::Error(err),
        }
    }

    /// Finish parsing: only the `Member` state is a complete path. `next` is
    /// the upcoming token, used to produce a useful error span when nothing
    /// was consumed at all.
    pub fn into_path(self, next: Peeked) -> Result<Tagged<Vec<Member>>, ShellError> {
        match self {
            ColumnPathState::Initial => Err(next.type_error("column path")),
            ColumnPathState::LeadingDot(dot) => {
                Err(ShellError::type_error("column", "dot".tagged(dot)))
            }
            ColumnPathState::Dot(_tag, _members, dot) => {
                Err(ShellError::type_error("column", "dot".tagged(dot)))
            }
            ColumnPathState::Member(tag, tags) => Ok(tags.tagged(tag)),
            ColumnPathState::Error(err) => Err(err),
        }
    }
}
/// Drive the `ColumnPathState` machine over alternating member/dot tokens and
/// convert the final state into a path (or an error).
pub fn expand_column_path<'a, 'b>(
    token_nodes: &'b mut TokensIterator<'a>,
    context: &ExpandContext,
) -> Result<Tagged<Vec<Member>>, ShellError> {
    let mut state = ColumnPathState::Initial;

    loop {
        let member = MemberShape.expand_syntax(token_nodes, context);

        match member {
            Err(_) => break,
            Ok(member) => state = state.member(member),
        }

        let dot = DotShape.expand_syntax(token_nodes, context);

        match dot {
            Err(_) => break,
            Ok(dot) => state = state.dot(dot),
        }
    }

    // Pass the next non-whitespace token so `into_path` can report a useful
    // error span for an empty path.
    state.into_path(token_nodes.peek_non_ws())
}
/// The shape of a column path: `member(.member)*`.
#[derive(Debug, Copy, Clone)]
pub struct ColumnPathShape;

impl FallibleColorSyntax for ColumnPathShape {
    type Info = ();
    type Input = ();

    /// Color `member(.member)*`. Each `.member` pair is committed only when
    /// both halves are present, so a trailing dot is left unconsumed.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        // If there's not even one member shape, fail
        color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;

        loop {
            let checkpoint = token_nodes.checkpoint();

            match color_fallible_syntax_with(
                &ColorableDotShape,
                &FlatShape::Dot,
                checkpoint.iterator,
                context,
                shapes,
            ) {
                Err(_) => {
                    // we already saw at least one member shape, so return successfully
                    return Ok(());
                }

                Ok(_) => {
                    match color_fallible_syntax(&MemberShape, checkpoint.iterator, context, shapes)
                    {
                        Err(_) => {
                            // we saw a dot but not a member (but we saw at least one member),
                            // so don't commit the dot but return successfully
                            return Ok(());
                        }

                        Ok(_) => {
                            // we saw a dot and a member, so commit it and continue on
                            checkpoint.commit();
                        }
                    }
                }
            }
        }
    }
}

impl ExpandSyntax for ColumnPathShape {
    type Output = Tagged<Vec<Member>>;

    /// Delegates to `expand_column_path`.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ShellError> {
        expand_column_path(token_nodes, context)
    }
}
/// The shape of a single path member: a bare word or a quoted string.
#[derive(Debug, Copy, Clone)]
pub struct MemberShape;

impl FallibleColorSyntax for MemberShape {
    type Info = ();
    type Input = ();

    /// Color the member as a bare member if possible, otherwise as a string
    /// member; fails when neither matches.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        let bare = color_fallible_syntax_with(
            &BareShape,
            &FlatShape::BareMember,
            token_nodes,
            context,
            shapes,
        );

        match bare {
            Ok(_) => return Ok(()),
            Err(_) => {
                // If we don't have a bare word, we'll look for a string
            }
        }

        // Look for a string token. If we don't find one, fail
        color_fallible_syntax_with(
            &StringShape,
            &FlatShape::StringMember,
            token_nodes,
            context,
            shapes,
        )
    }
}

impl ExpandSyntax for MemberShape {
    type Output = Member;

    /// Expand the next token as a member: bare words become `Member::Bare`,
    /// strings become `Member::String` (outer and inner spans); anything else
    /// is a type error at the cursor.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<Member, ShellError> {
        let bare = BareShape.test(token_nodes, context);
        if let Some(peeked) = bare {
            let node = peeked.not_eof("column")?.commit();
            return Ok(Member::Bare(node.span()));
        }

        let string = StringShape.test(token_nodes, context);
        if let Some(peeked) = string {
            let node = peeked.not_eof("column")?.commit();
            let (outer, inner) = node.expect_string();

            return Ok(Member::String(outer, inner));
        }

        Err(token_nodes.peek_any().type_error("column"))
    }
}
/// Syntax shape for the `.` separator in a column path (expansion side).
#[derive(Debug, Copy, Clone)]
pub struct DotShape;

/// Syntax shape for the `.` separator (coloring side); the caller supplies
/// the `FlatShape` to emit for the dot.
#[derive(Debug, Copy, Clone)]
pub struct ColorableDotShape;

impl FallibleColorSyntax for ColorableDotShape {
    type Info = ();
    type Input = FlatShape;

    /// Colors the next token with `input` if it is a dot; otherwise fails
    /// with a type error and consumes nothing.
    fn color_syntax<'a, 'b>(
        &self,
        input: &FlatShape,
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
        shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        let peeked = token_nodes.peek_any().not_eof("dot")?;

        match peeked.node {
            node if node.is_dot() => {
                // Only consume the token once we know it's a dot.
                peeked.commit();
                shapes.push((*input).spanned(node.span()));
                Ok(())
            }

            other => Err(ShellError::type_error("dot", other.tagged_type_name())),
        }
    }
}

impl SkipSyntax for DotShape {
    /// Consumes a dot token, discarding its span.
    fn skip<'a, 'b>(
        &self,
        token_nodes: &mut TokensIterator<'_>,
        context: &ExpandContext,
    ) -> Result<(), ShellError> {
        expand_syntax(self, token_nodes, context)?;

        Ok(())
    }
}

impl ExpandSyntax for DotShape {
    type Output = Span;

    /// Expects the next token to be the `.` operator and returns its span;
    /// any other token is a type error.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        _context: &ExpandContext,
    ) -> Result<Self::Output, ShellError> {
        parse_single_node(token_nodes, "dot", |token, token_span, _| {
            Ok(match token {
                RawToken::Operator(Operator::Dot) => token_span,
                _ => {
                    return Err(ShellError::type_error(
                        "dot",
                        token.type_name().tagged(token_span),
                    ))
                }
            })
        })
    }
}
/// Syntax shape for an infix operator (e.g. `==`, `<`), which must be
/// surrounded by whitespace on both sides. The `.` operator is explicitly
/// excluded — it is handled by `DotShape`.
#[derive(Debug, Copy, Clone)]
pub struct InfixShape;

impl FallibleColorSyntax for InfixShape {
    type Info = ();
    type Input = ();

    /// Colors `whitespace operator whitespace`. Shapes are buffered in a
    /// local vector and only flushed to `outer_shapes` once the entire
    /// sequence matches, so a partial match leaves no stray shapes behind.
    fn color_syntax<'a, 'b>(
        &self,
        _input: &(),
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
        outer_shapes: &mut Vec<Spanned<FlatShape>>,
    ) -> Result<(), ShellError> {
        let checkpoint = token_nodes.checkpoint();
        let mut shapes = vec![];

        // An infix operator must be prefixed by whitespace. If no whitespace was found, fail
        color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context, &mut shapes)?;

        // Parse the next TokenNode after the whitespace
        parse_single_node(
            checkpoint.iterator,
            "infix operator",
            |token, token_span, _| {
                match token {
                    // If it's an operator (and not `.`), it's a match
                    RawToken::Operator(operator) if operator != Operator::Dot => {
                        shapes.push(FlatShape::Operator.spanned(token_span));
                        Ok(())
                    }

                    // Otherwise, it's not a match
                    _ => Err(ShellError::type_error(
                        "infix operator",
                        token.type_name().tagged(token_span),
                    )),
                }
            },
        )?;

        // An infix operator must be followed by whitespace. If no whitespace was found, fail
        color_fallible_syntax(&WhitespaceShape, checkpoint.iterator, context, &mut shapes)?;

        outer_shapes.extend(shapes);
        checkpoint.commit();
        Ok(())
    }
}

impl ExpandSyntax for InfixShape {
    type Output = (Span, Spanned<Operator>, Span);

    /// Expands `whitespace operator whitespace`, returning the spans of the
    /// surrounding whitespace along with the spanned operator itself.
    fn expand_syntax<'a, 'b>(
        &self,
        token_nodes: &'b mut TokensIterator<'a>,
        context: &ExpandContext,
    ) -> Result<Self::Output, ShellError> {
        let checkpoint = token_nodes.checkpoint();

        // An infix operator must be prefixed by whitespace
        let start = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?;

        // Parse the next TokenNode after the whitespace
        let operator = parse_single_node(
            checkpoint.iterator,
            "infix operator",
            |token, token_span, _| {
                Ok(match token {
                    // If it's an operator (and not `.`), it's a match
                    RawToken::Operator(operator) if operator != Operator::Dot => {
                        operator.spanned(token_span)
                    }

                    // Otherwise, it's not a match
                    _ => {
                        return Err(ShellError::type_error(
                            "infix operator",
                            token.type_name().tagged(token_span),
                        ))
                    }
                })
            },
        )?;

        // An infix operator must be followed by whitespace
        let end = expand_syntax(&WhitespaceShape, checkpoint.iterator, context)?;

        checkpoint.commit();

        Ok((start, operator, end))
    }
}

View File

@ -0,0 +1,97 @@
use crate::parser::{Delimiter, Flag, FlagKind, Operator, RawNumber, RawToken, TokenNode};
use crate::{Span, Spanned, SpannedItem, Text};
/// The flat (non-nested) classification of a token, used to drive syntax
/// highlighting: each shape is paired with a span to color in the source.
#[derive(Debug, Copy, Clone)]
pub enum FlatShape {
    OpenDelimiter(Delimiter),
    CloseDelimiter(Delimiter),
    ItVariable,
    Variable,
    Operator,
    Dot,
    InternalCommand,
    ExternalCommand,
    ExternalWord,
    BareMember,
    StringMember,
    String,
    Path,
    Word,
    Pipe,
    GlobPattern,
    Flag,
    ShorthandFlag,
    Int,
    Decimal,
    Whitespace,
    Error,
    // A size literal keeps separate spans for its numeric part and its unit.
    Size { number: Span, unit: Span },
}
impl FlatShape {
pub fn from(token: &TokenNode, source: &Text, shapes: &mut Vec<Spanned<FlatShape>>) -> () {
match token {
TokenNode::Token(token) => match token.item {
RawToken::Number(RawNumber::Int(_)) => {
shapes.push(FlatShape::Int.spanned(token.span))
}
RawToken::Number(RawNumber::Decimal(_)) => {
shapes.push(FlatShape::Decimal.spanned(token.span))
}
RawToken::Operator(Operator::Dot) => {
shapes.push(FlatShape::Dot.spanned(token.span))
}
RawToken::Operator(_) => shapes.push(FlatShape::Operator.spanned(token.span)),
RawToken::String(_) => shapes.push(FlatShape::String.spanned(token.span)),
RawToken::Variable(v) if v.slice(source) == "it" => {
shapes.push(FlatShape::ItVariable.spanned(token.span))
}
RawToken::Variable(_) => shapes.push(FlatShape::Variable.spanned(token.span)),
RawToken::ExternalCommand(_) => {
shapes.push(FlatShape::ExternalCommand.spanned(token.span))
}
RawToken::ExternalWord => shapes.push(FlatShape::ExternalWord.spanned(token.span)),
RawToken::GlobPattern => shapes.push(FlatShape::GlobPattern.spanned(token.span)),
RawToken::Bare => shapes.push(FlatShape::Word.spanned(token.span)),
},
TokenNode::Call(_) => unimplemented!(),
TokenNode::Nodes(nodes) => {
for node in &nodes.item {
FlatShape::from(node, source, shapes);
}
}
TokenNode::Delimited(v) => {
shapes.push(FlatShape::OpenDelimiter(v.item.delimiter).spanned(v.item.spans.0));
for token in &v.item.children {
FlatShape::from(token, source, shapes);
}
shapes.push(FlatShape::CloseDelimiter(v.item.delimiter).spanned(v.item.spans.1));
}
TokenNode::Pipeline(pipeline) => {
for part in &pipeline.parts {
if let Some(_) = part.pipe {
shapes.push(FlatShape::Pipe.spanned(part.span));
}
}
}
TokenNode::Flag(Spanned {
item:
Flag {
kind: FlagKind::Longhand,
..
},
span,
}) => shapes.push(FlatShape::Flag.spanned(*span)),
TokenNode::Flag(Spanned {
item:
Flag {
kind: FlagKind::Shorthand,
..
},
span,
}) => shapes.push(FlatShape::ShorthandFlag.spanned(*span)),
TokenNode::Whitespace(_) => shapes.push(FlatShape::Whitespace.spanned(token.span())),
TokenNode::Error(v) => shapes.push(FlatShape::Error.spanned(v.span)),
}
}
}

View File

@ -0,0 +1,477 @@
pub(crate) mod debug;
use crate::errors::ShellError;
use crate::parser::TokenNode;
use crate::{Span, Spanned, SpannedItem};
/// A cursor over a slice of `TokenNode`s. Tokens are never removed from the
/// underlying slice; instead, consumed positions are recorded in `seen` and
/// skipped on subsequent passes.
#[derive(Debug)]
pub struct TokensIterator<'content> {
    tokens: &'content [TokenNode],
    // The span of the whole token sequence in the source text.
    span: Span,
    // When true, iteration transparently skips whitespace tokens.
    skip_ws: bool,
    // Cursor position within `tokens`.
    index: usize,
    // Positions that have already been consumed.
    seen: indexmap::IndexSet<usize>,
}
/// A saved position in a `TokensIterator`. On drop, the iterator is rolled
/// back to the saved state unless `commit` was called first.
#[derive(Debug)]
pub struct Checkpoint<'content, 'me> {
    pub(crate) iterator: &'me mut TokensIterator<'content>,
    // Cursor position at the time the checkpoint was taken.
    index: usize,
    // Copy of the consumed-position set at the time the checkpoint was taken.
    seen: indexmap::IndexSet<usize>,
    committed: bool,
}

impl<'content, 'me> Checkpoint<'content, 'me> {
    /// Accepts everything consumed since the checkpoint was taken; the
    /// iterator keeps its current position instead of being rolled back.
    pub(crate) fn commit(mut self) {
        self.committed = true;
    }
}

impl<'content, 'me> std::ops::Drop for Checkpoint<'content, 'me> {
    fn drop(&mut self) {
        if !self.committed {
            // Roll the iterator back to the saved state. Swap the saved
            // `seen` set back in rather than cloning it: the checkpoint is
            // being destroyed, so its copy can be moved instead of
            // duplicated.
            self.iterator.index = self.index;
            std::mem::swap(&mut self.iterator.seen, &mut self.seen);
        }
    }
}
/// The result of peeking at the next token: `node` is `None` at end of
/// input. Nothing is consumed until `commit` is called.
#[derive(Debug)]
pub struct Peeked<'content, 'me> {
    pub(crate) node: Option<&'content TokenNode>,
    iterator: &'me mut TokensIterator<'content>,
    // Range of positions (`from..to`) that committing this peek marks seen.
    from: usize,
    to: usize,
}

impl<'content, 'me> Peeked<'content, 'me> {
    /// Consumes the peeked token (marking `from..to` as seen) and returns
    /// it, or `None` at end of input.
    pub fn commit(&mut self) -> Option<&'content TokenNode> {
        let Peeked {
            node,
            iterator,
            from,
            to,
        } = self;

        let node = (*node)?;
        iterator.commit(*from, *to);
        Some(node)
    }

    /// Upgrades to a `PeekedNode`, failing with an unexpected-EOF error
    /// (mentioning `expected`) if there was no token to peek.
    pub fn not_eof(
        self,
        expected: impl Into<String>,
    ) -> Result<PeekedNode<'content, 'me>, ShellError> {
        match self.node {
            None => Err(ShellError::unexpected_eof(
                expected,
                self.iterator.eof_span(),
            )),
            Some(node) => Ok(PeekedNode {
                node,
                iterator: self.iterator,
                from: self.from,
                to: self.to,
            }),
        }
    }

    /// Builds a type error describing what was `expected` at this position.
    pub fn type_error(&self, expected: impl Into<String>) -> ShellError {
        peek_error(&self.node, self.iterator.eof_span(), expected)
    }
}
/// A peek that is guaranteed to have found a token (see `Peeked::not_eof`).
#[derive(Debug)]
pub struct PeekedNode<'content, 'me> {
    pub(crate) node: &'content TokenNode,
    iterator: &'me mut TokensIterator<'content>,
    // Range of positions (`from..to`) that committing this peek marks seen.
    from: usize,
    to: usize,
}

impl<'content, 'me> PeekedNode<'content, 'me> {
    /// Consumes the peeked token (marking `from..to` as seen) and returns it.
    pub fn commit(self) -> &'content TokenNode {
        let PeekedNode {
            node,
            iterator,
            from,
            to,
        } = self;

        iterator.commit(from, to);
        node
    }

    /// Explicitly discards the peek without consuming anything.
    pub fn rollback(self) {}

    /// Builds a type error describing what was `expected` at this position.
    pub fn type_error(&self, expected: impl Into<String>) -> ShellError {
        peek_error(&Some(self.node), self.iterator.eof_span(), expected)
    }
}
/// Builds the error for an unsatisfied peek: a type error naming the token
/// that was found, or an unexpected-EOF error (at `eof_span`) when no token
/// was available.
pub fn peek_error(
    node: &Option<&TokenNode>,
    eof_span: Span,
    expected: impl Into<String>,
) -> ShellError {
    if let Some(found) = node {
        ShellError::type_error(expected, found.tagged_type_name())
    } else {
        ShellError::unexpected_eof(expected, eof_span)
    }
}
impl<'content> TokensIterator<'content> {
    /// Creates an iterator over `items`, covering `span`. When `skip_ws` is
    /// true, whitespace tokens are transparently skipped.
    pub fn new(
        items: &'content [TokenNode],
        span: Span,
        skip_ws: bool,
    ) -> TokensIterator<'content> {
        TokensIterator {
            tokens: items,
            span,
            skip_ws,
            index: 0,
            seen: indexmap::IndexSet::new(),
        }
    }

    /// Creates an iterator that yields every token, including whitespace.
    pub fn all(tokens: &'content [TokenNode], span: Span) -> TokensIterator<'content> {
        TokensIterator::new(tokens, span, false)
    }

    /// Total number of tokens in the underlying slice (including consumed
    /// ones).
    pub fn len(&self) -> usize {
        self.tokens.len()
    }

    /// Runs `block` and wraps its result in the span covering everything
    /// between the cursor position before and after the call.
    pub fn spanned<T>(
        &mut self,
        block: impl FnOnce(&mut TokensIterator<'content>) -> T,
    ) -> Spanned<T> {
        let start = self.span_at_cursor();

        let result = block(self);

        let end = self.span_at_cursor();

        result.spanned(start.until(end))
    }

    /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
    /// that you'll succeed.
    pub fn checkpoint<'me>(&'me mut self) -> Checkpoint<'content, 'me> {
        let index = self.index;
        let seen = self.seen.clone();

        Checkpoint {
            iterator: self,
            index,
            seen,
            committed: false,
        }
    }

    /// Runs `block` under a checkpoint: on `Err` the iterator is rolled back
    /// to its state before the call; on `Ok` the consumed tokens are kept.
    pub fn atomic<'me, T>(
        &'me mut self,
        block: impl FnOnce(&mut TokensIterator<'content>) -> Result<T, ShellError>,
    ) -> Result<T, ShellError> {
        let index = self.index;
        let seen = self.seen.clone();

        let checkpoint = Checkpoint {
            iterator: self,
            index,
            seen,
            committed: false,
        };

        // `?` lets the checkpoint's Drop roll the iterator back on error.
        let value = block(checkpoint.iterator)?;

        checkpoint.commit();
        Ok(value)
    }

    /// A zero-width span at the very end of the input.
    fn eof_span(&self) -> Span {
        Span::new(self.span.end(), self.span.end())
    }

    /// The type name of the next token (or `"end"`), with its span.
    pub fn typed_span_at_cursor(&mut self) -> Spanned<&'static str> {
        let next = self.peek_any();

        match next.node {
            None => "end".spanned(self.eof_span()),
            Some(node) => node.spanned_type_name(),
        }
    }

    /// The span of the next token, or the EOF span if there is none.
    pub fn span_at_cursor(&mut self) -> Span {
        let next = self.peek_any();

        match next.node {
            None => self.eof_span(),
            Some(node) => node.span(),
        }
    }

    /// Marks the token at `position` as consumed without moving the cursor.
    pub fn remove(&mut self, position: usize) {
        self.seen.insert(position);
    }

    /// True when no unconsumed tokens remain (honoring `skip_ws`).
    pub fn at_end(&self) -> bool {
        peek(self, self.skip_ws).is_none()
    }

    /// True when no unconsumed non-whitespace tokens remain.
    pub fn at_end_possible_ws(&self) -> bool {
        peek(self, true).is_none()
    }

    /// Consumes the token under the cursor and moves forward one position.
    pub fn advance(&mut self) {
        self.seen.insert(self.index);
        self.index += 1;
    }

    /// Finds the first unconsumed token for which `f` returns `Some`, marks
    /// it consumed, and returns its position with the mapped value.
    pub fn extract<T>(&mut self, f: impl Fn(&TokenNode) -> Option<T>) -> Option<(usize, T)> {
        for (i, item) in self.tokens.iter().enumerate() {
            if self.seen.contains(&i) {
                continue;
            }

            match f(item) {
                None => {
                    continue;
                }
                Some(value) => {
                    self.seen.insert(i);
                    return Some((i, value));
                }
            }
        }

        None
    }

    /// Moves the cursor to an absolute position.
    pub fn move_to(&mut self, pos: usize) {
        self.index = pos;
    }

    /// Moves the cursor back to the beginning (consumed positions stay
    /// consumed).
    pub fn restart(&mut self) {
        self.index = 0;
    }

    // NOTE: an inherent method (not a `Clone` impl); it shares the token
    // slice and clones the `seen` set.
    pub fn clone(&self) -> TokensIterator<'content> {
        TokensIterator {
            tokens: self.tokens,
            span: self.span,
            index: self.index,
            seen: self.seen.clone(),
            skip_ws: self.skip_ws,
        }
    }

    // Get the next token, not including whitespace
    pub fn next_non_ws(&mut self) -> Option<&TokenNode> {
        let mut peeked = start_next(self, true);
        peeked.commit()
    }

    // Peek the next token, not including whitespace
    pub fn peek_non_ws<'me>(&'me mut self) -> Peeked<'content, 'me> {
        start_next(self, true)
    }

    // Peek the next token, including whitespace
    pub fn peek_any<'me>(&'me mut self) -> Peeked<'content, 'me> {
        start_next(self, false)
    }

    // Peek the next token, including whitespace, but not EOF
    pub fn peek_any_token<'me, T>(
        &'me mut self,
        block: impl FnOnce(&'content TokenNode) -> Result<T, ShellError>,
    ) -> Result<T, ShellError> {
        let peeked = start_next(self, false).not_eof("invariant")?;
        let value = block(peeked.node)?;

        // Only consume the token once `block` has accepted it.
        peeked.commit();
        Ok(value)
    }

    /// Marks positions `from..to` as consumed and moves the cursor to `to`.
    fn commit(&mut self, from: usize, to: usize) {
        for index in from..to {
            self.seen.insert(index);
        }

        self.index = to;
    }

    /// Position of the next unconsumed token, or `None` at end of input.
    pub fn pos(&self, skip_ws: bool) -> Option<usize> {
        peek_pos(self, skip_ws)
    }

    /// Clones the iterator, rewinds the clone, and collects every
    /// unconsumed token — for debugging output only.
    pub fn debug_remaining(&self) -> Vec<TokenNode> {
        let mut tokens = self.clone();
        tokens.restart();
        tokens.cloned().collect()
    }
}
impl<'content> Iterator for TokensIterator<'content> {
    type Item = &'content TokenNode;

    /// Yields the next unconsumed token, honoring the iterator's `skip_ws`
    /// setting.
    fn next(&mut self) -> Option<&'content TokenNode> {
        next(self, self.skip_ws)
    }
}
/// Returns the next unconsumed token without advancing the iterator,
/// optionally skipping whitespace. Returns `None` at the end of input.
fn peek<'content, 'me>(
    iterator: &'me TokensIterator<'content>,
    skip_ws: bool,
) -> Option<&'me TokenNode> {
    let mut to = iterator.index;

    loop {
        if to >= iterator.tokens.len() {
            return None;
        }

        // Skip positions that were already consumed. (No second bounds
        // check is needed: `continue` re-enters the loop, which re-checks.)
        if iterator.seen.contains(&to) {
            to += 1;
            continue;
        }

        let node = &iterator.tokens[to];

        match node {
            TokenNode::Whitespace(_) if skip_ws => {
                to += 1;
            }
            _ => {
                return Some(node);
            }
        }
    }
}
/// Like `peek`, but returns the position of the next unconsumed token
/// instead of the token itself.
fn peek_pos<'content, 'me>(
    iterator: &'me TokensIterator<'content>,
    skip_ws: bool,
) -> Option<usize> {
    let mut to = iterator.index;

    loop {
        if to >= iterator.tokens.len() {
            return None;
        }

        // Skip positions that were already consumed. (No second bounds
        // check is needed: `continue` re-enters the loop, which re-checks.)
        if iterator.seen.contains(&to) {
            to += 1;
            continue;
        }

        let node = &iterator.tokens[to];

        match node {
            TokenNode::Whitespace(_) if skip_ws => {
                to += 1;
            }
            _ => return Some(to),
        }
    }
}
/// Peeks at the next unconsumed token (optionally skipping whitespace) and
/// packages it with the `from..to` range that `Peeked::commit` would mark as
/// consumed. The iterator itself is not advanced.
fn start_next<'content, 'me>(
    iterator: &'me mut TokensIterator<'content>,
    skip_ws: bool,
) -> Peeked<'content, 'me> {
    let from = iterator.index;
    let mut to = iterator.index;

    loop {
        // Out of tokens: produce an end-of-input peek.
        if to >= iterator.tokens.len() {
            return Peeked {
                node: None,
                iterator,
                from,
                to,
            };
        }

        // Skip positions that were already consumed. (No second bounds
        // check is needed: `continue` re-enters the loop, which re-checks.)
        if iterator.seen.contains(&to) {
            to += 1;
            continue;
        }

        let node = &iterator.tokens[to];

        match node {
            TokenNode::Whitespace(_) if skip_ws => {
                to += 1;
            }
            _ => {
                to += 1;

                return Peeked {
                    node: Some(node),
                    iterator,
                    from,
                    to,
                };
            }
        }
    }
}
/// Advances the iterator to the next unconsumed token and returns it,
/// optionally skipping whitespace. Returns `None` at the end of input.
fn next<'me, 'content>(
    iterator: &'me mut TokensIterator<'content>,
    skip_ws: bool,
) -> Option<&'content TokenNode> {
    loop {
        if iterator.index >= iterator.tokens.len() {
            return None;
        }

        // Skip positions that were already consumed. (No second bounds
        // check is needed: `continue` re-enters the loop, which re-checks.)
        if iterator.seen.contains(&iterator.index) {
            iterator.advance();
            continue;
        }

        match &iterator.tokens[iterator.index] {
            TokenNode::Whitespace(_) if skip_ws => {
                iterator.advance();
            }
            other => {
                iterator.advance();
                return Some(other);
            }
        }
    }
}

View File

@ -0,0 +1,30 @@
use crate::parser::hir::tokens_iterator::TokensIterator;
use crate::traits::ToDebug;
/// A token rendered for iterator debugging, tagged with whether the
/// iterator has already consumed it, plus a marker for the cursor position.
#[derive(Debug)]
pub(crate) enum DebugIteratorToken {
    Seen(String),
    Unseen(String),
    Cursor,
}

/// Renders every token in the iterator (in original order) as a
/// `DebugIteratorToken`, inserting `Cursor` just before the token the
/// iterator is currently pointing at.
pub(crate) fn debug_tokens(iterator: &TokensIterator, source: &str) -> Vec<DebugIteratorToken> {
    let mut out = vec![];

    for (i, token) in iterator.tokens.iter().enumerate() {
        if iterator.index == i {
            out.push(DebugIteratorToken::Cursor);
        }

        // `debug` returns a `Display` adapter; render it directly instead
        // of going through `format!("{}", ...)`.
        let rendered = token.debug(source).to_string();

        if iterator.seen.contains(&i) {
            out.push(DebugIteratorToken::Seen(rendered));
        } else {
            out.push(DebugIteratorToken::Unseen(rendered));
        }
    }

    out
}

View File

@ -1,7 +1,7 @@
use crate::Tag; use crate::Span;
use derive_new::new; use derive_new::new;
use language_reporting::{FileName, Location}; use language_reporting::{FileName, Location};
use uuid::Uuid; use log::trace;
#[derive(new, Debug, Clone)] #[derive(new, Debug, Clone)]
pub struct Files { pub struct Files {
@ -9,20 +9,20 @@ pub struct Files {
} }
impl language_reporting::ReportingFiles for Files { impl language_reporting::ReportingFiles for Files {
type Span = Tag; type Span = Span;
type FileId = Uuid; type FileId = usize;
fn byte_span( fn byte_span(
&self, &self,
file: Self::FileId, _file: Self::FileId,
from_index: usize, from_index: usize,
to_index: usize, to_index: usize,
) -> Option<Self::Span> { ) -> Option<Self::Span> {
Some(Tag::from((from_index, to_index, file))) Some(Span::new(from_index, to_index))
} }
fn file_id(&self, tag: Self::Span) -> Self::FileId { fn file_id(&self, _tag: Self::Span) -> Self::FileId {
tag.anchor 0
} }
fn file_name(&self, _file: Self::FileId) -> FileName { fn file_name(&self, _file: Self::FileId) -> FileName {
@ -38,8 +38,18 @@ impl language_reporting::ReportingFiles for Files {
let mut seen_lines = 0; let mut seen_lines = 0;
let mut seen_bytes = 0; let mut seen_bytes = 0;
for (pos, _) in source.match_indices('\n') { for (pos, slice) in source.match_indices('\n') {
if pos > byte_index { trace!(
"SEARCH={} SEEN={} POS={} SLICE={:?} LEN={} ALL={:?}",
byte_index,
seen_bytes,
pos,
slice,
source.len(),
source
);
if pos >= byte_index {
return Some(language_reporting::Location::new( return Some(language_reporting::Location::new(
seen_lines, seen_lines,
byte_index - seen_bytes, byte_index - seen_bytes,
@ -53,18 +63,18 @@ impl language_reporting::ReportingFiles for Files {
if seen_lines == 0 { if seen_lines == 0 {
Some(language_reporting::Location::new(0, byte_index)) Some(language_reporting::Location::new(0, byte_index))
} else { } else {
None panic!("byte index {} wasn't valid", byte_index);
} }
} }
fn line_span(&self, file: Self::FileId, lineno: usize) -> Option<Self::Span> { fn line_span(&self, _file: Self::FileId, lineno: usize) -> Option<Self::Span> {
let source = &self.snippet; let source = &self.snippet;
let mut seen_lines = 0; let mut seen_lines = 0;
let mut seen_bytes = 0; let mut seen_bytes = 0;
for (pos, _) in source.match_indices('\n') { for (pos, _) in source.match_indices('\n') {
if seen_lines == lineno { if seen_lines == lineno {
return Some(Tag::from((seen_bytes, pos, file))); return Some(Span::new(seen_bytes, pos + 1));
} else { } else {
seen_lines += 1; seen_lines += 1;
seen_bytes = pos + 1; seen_bytes = pos + 1;
@ -72,18 +82,20 @@ impl language_reporting::ReportingFiles for Files {
} }
if seen_lines == 0 { if seen_lines == 0 {
Some(Tag::from((0, self.snippet.len() - 1, file))) Some(Span::new(0, self.snippet.len() - 1))
} else { } else {
None None
} }
} }
fn source(&self, tag: Self::Span) -> Option<String> { fn source(&self, span: Self::Span) -> Option<String> {
if tag.span.start > tag.span.end { trace!("source(tag={:?}) snippet={:?}", span, self.snippet);
if span.start() > span.end() {
return None; return None;
} else if tag.span.end >= self.snippet.len() { } else if span.end() > self.snippet.len() {
return None; return None;
} }
Some(tag.slice(&self.snippet).to_string()) Some(span.slice(&self.snippet).to_string())
} }
} }

View File

@ -1,4 +1,5 @@
use crate::Tag; use crate::parser::hir::syntax_shape::flat_shape::FlatShape;
use crate::{Span, Spanned, SpannedItem};
use derive_new::new; use derive_new::new;
use getset::Getters; use getset::Getters;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -12,6 +13,15 @@ pub enum FlagKind {
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Getters, new)] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Getters, new)]
#[get = "pub(crate)"] #[get = "pub(crate)"]
pub struct Flag { pub struct Flag {
kind: FlagKind, pub(crate) kind: FlagKind,
name: Tag, pub(crate) name: Span,
}
impl Spanned<Flag> {
pub fn color(&self) -> Spanned<FlatShape> {
match self.item.kind {
FlagKind::Longhand => FlatShape::Flag.spanned(self.span),
FlagKind::Shorthand => FlatShape::ShorthandFlag.spanned(self.span),
}
}
} }

View File

@ -11,6 +11,7 @@ pub enum Operator {
GreaterThan, GreaterThan,
LessThanOrEqual, LessThanOrEqual,
GreaterThanOrEqual, GreaterThanOrEqual,
Dot,
} }
impl ToDebug for Operator { impl ToDebug for Operator {
@ -32,6 +33,7 @@ impl Operator {
Operator::GreaterThan => ">", Operator::GreaterThan => ">",
Operator::LessThanOrEqual => "<=", Operator::LessThanOrEqual => "<=",
Operator::GreaterThanOrEqual => ">=", Operator::GreaterThanOrEqual => ">=",
Operator::Dot => ".",
} }
} }
} }
@ -52,6 +54,7 @@ impl FromStr for Operator {
">" => Ok(Operator::GreaterThan), ">" => Ok(Operator::GreaterThan),
"<=" => Ok(Operator::LessThanOrEqual), "<=" => Ok(Operator::LessThanOrEqual),
">=" => Ok(Operator::GreaterThanOrEqual), ">=" => Ok(Operator::GreaterThanOrEqual),
"." => Ok(Operator::Dot),
_ => Err(()), _ => Err(()),
} }
} }

File diff suppressed because it is too large Load Diff

View File

@ -1,37 +1,30 @@
use crate::parser::CallNode; use crate::parser::TokenNode;
use crate::traits::ToDebug; use crate::traits::ToDebug;
use crate::{Tag, Tagged}; use crate::{Span, Spanned};
use derive_new::new; use derive_new::new;
use getset::Getters; use getset::Getters;
use std::fmt; use std::fmt;
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, new)] #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, new)]
pub struct Pipeline { pub struct Pipeline {
pub(crate) parts: Vec<PipelineElement>, pub(crate) parts: Vec<Spanned<PipelineElement>>,
pub(crate) post_ws: Option<Tag>, // pub(crate) post_ws: Option<Tag>,
} }
impl ToDebug for Pipeline { impl ToDebug for Pipeline {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
for part in &self.parts { for part in self.parts.iter() {
write!(f, "{}", part.debug(source))?; write!(f, "{}", part.debug(source))?;
} }
if let Some(post_ws) = self.post_ws {
write!(f, "{}", post_ws.slice(source))?
}
Ok(()) Ok(())
} }
} }
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
pub struct PipelineElement { pub struct PipelineElement {
pub pipe: Option<Tag>, pub pipe: Option<Span>,
pub pre_ws: Option<Tag>, pub tokens: Spanned<Vec<TokenNode>>,
#[get = "pub(crate)"]
call: Tagged<CallNode>,
pub post_ws: Option<Tag>,
} }
impl ToDebug for PipelineElement { impl ToDebug for PipelineElement {
@ -40,14 +33,8 @@ impl ToDebug for PipelineElement {
write!(f, "{}", pipe.slice(source))?; write!(f, "{}", pipe.slice(source))?;
} }
if let Some(pre_ws) = self.pre_ws { for token in &self.tokens.item {
write!(f, "{}", pre_ws.slice(source))?; write!(f, "{}", token.debug(source))?;
}
write!(f, "{}", self.call.debug(source))?;
if let Some(post_ws) = self.post_ws {
write!(f, "{}", post_ws.slice(source))?;
} }
Ok(()) Ok(())

View File

@ -1,9 +1,9 @@
use crate::errors::ShellError; use crate::errors::ShellError;
use crate::parser::parse::{call_node::*, flag::*, operator::*, pipeline::*, tokens::*}; use crate::parser::parse::{call_node::*, flag::*, operator::*, pipeline::*, tokens::*};
use crate::prelude::*;
use crate::traits::ToDebug; use crate::traits::ToDebug;
use crate::{Tag, Tagged, Text}; use crate::{Tagged, Text};
use derive_new::new; use derive_new::new;
use enum_utils::FromStr;
use getset::Getters; use getset::Getters;
use std::fmt; use std::fmt;
@ -11,16 +11,14 @@ use std::fmt;
pub enum TokenNode { pub enum TokenNode {
Token(Token), Token(Token),
Call(Tagged<CallNode>), Call(Spanned<CallNode>),
Delimited(Tagged<DelimitedNode>), Nodes(Spanned<Vec<TokenNode>>),
Pipeline(Tagged<Pipeline>), Delimited(Spanned<DelimitedNode>),
Operator(Tagged<Operator>), Pipeline(Spanned<Pipeline>),
Flag(Tagged<Flag>), Flag(Spanned<Flag>),
Member(Tag), Whitespace(Span),
Whitespace(Tag),
Error(Tagged<Box<ShellError>>), Error(Spanned<ShellError>),
Path(Tagged<PathNode>),
} }
impl ToDebug for TokenNode { impl ToDebug for TokenNode {
@ -78,48 +76,51 @@ impl fmt::Debug for DebugTokenNode<'_> {
) )
} }
TokenNode::Pipeline(pipeline) => write!(f, "{}", pipeline.debug(self.source)), TokenNode::Pipeline(pipeline) => write!(f, "{}", pipeline.debug(self.source)),
TokenNode::Error(s) => write!(f, "<error> for {:?}", s.tag().slice(self.source)), TokenNode::Error(_) => write!(f, "<error>"),
rest => write!(f, "{}", rest.tag().slice(self.source)), rest => write!(f, "{}", rest.span().slice(self.source)),
} }
} }
} }
impl From<&TokenNode> for Tag { impl From<&TokenNode> for Span {
fn from(token: &TokenNode) -> Tag { fn from(token: &TokenNode) -> Span {
token.tag() token.span()
} }
} }
impl TokenNode { impl TokenNode {
pub fn tag(&self) -> Tag { pub fn span(&self) -> Span {
match self { match self {
TokenNode::Token(t) => t.tag(), TokenNode::Token(t) => t.span,
TokenNode::Call(s) => s.tag(), TokenNode::Nodes(t) => t.span,
TokenNode::Delimited(s) => s.tag(), TokenNode::Call(s) => s.span,
TokenNode::Pipeline(s) => s.tag(), TokenNode::Delimited(s) => s.span,
TokenNode::Operator(s) => s.tag(), TokenNode::Pipeline(s) => s.span,
TokenNode::Flag(s) => s.tag(), TokenNode::Flag(s) => s.span,
TokenNode::Member(s) => *s,
TokenNode::Whitespace(s) => *s, TokenNode::Whitespace(s) => *s,
TokenNode::Error(s) => s.tag(), TokenNode::Error(s) => s.span,
TokenNode::Path(s) => s.tag(),
} }
} }
pub fn type_name(&self) -> String { pub fn type_name(&self) -> &'static str {
match self { match self {
TokenNode::Token(t) => t.type_name(), TokenNode::Token(t) => t.type_name(),
TokenNode::Nodes(_) => "nodes",
TokenNode::Call(_) => "command", TokenNode::Call(_) => "command",
TokenNode::Delimited(d) => d.type_name(), TokenNode::Delimited(d) => d.type_name(),
TokenNode::Pipeline(_) => "pipeline", TokenNode::Pipeline(_) => "pipeline",
TokenNode::Operator(_) => "operator",
TokenNode::Flag(_) => "flag", TokenNode::Flag(_) => "flag",
TokenNode::Member(_) => "member",
TokenNode::Whitespace(_) => "whitespace", TokenNode::Whitespace(_) => "whitespace",
TokenNode::Error(_) => "error", TokenNode::Error(_) => "error",
TokenNode::Path(_) => "path",
} }
.to_string() }
pub fn spanned_type_name(&self) -> Spanned<&'static str> {
self.type_name().spanned(self.span())
}
pub fn tagged_type_name(&self) -> Tagged<&'static str> {
self.type_name().tagged(self.span())
} }
pub fn old_debug<'a>(&'a self, source: &'a Text) -> DebugTokenNode<'a> { pub fn old_debug<'a>(&'a self, source: &'a Text) -> DebugTokenNode<'a> {
@ -127,16 +128,26 @@ impl TokenNode {
} }
pub fn as_external_arg(&self, source: &Text) -> String { pub fn as_external_arg(&self, source: &Text) -> String {
self.tag().slice(source).to_string() self.span().slice(source).to_string()
} }
pub fn source<'a>(&self, source: &'a Text) -> &'a str { pub fn source<'a>(&self, source: &'a Text) -> &'a str {
self.tag().slice(source) self.span().slice(source)
}
pub fn get_variable(&self) -> Result<(Span, Span), ShellError> {
match self {
TokenNode::Token(Spanned {
item: RawToken::Variable(inner_span),
span: outer_span,
}) => Ok((*outer_span, *inner_span)),
_ => Err(ShellError::type_error("variable", self.tagged_type_name())),
}
} }
pub fn is_bare(&self) -> bool { pub fn is_bare(&self) -> bool {
match self { match self {
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::Bare, item: RawToken::Bare,
.. ..
}) => true, }) => true,
@ -144,9 +155,44 @@ impl TokenNode {
} }
} }
pub fn is_pattern(&self) -> bool {
match self {
TokenNode::Token(Spanned {
item: RawToken::GlobPattern,
..
}) => true,
_ => false,
}
}
pub fn is_dot(&self) -> bool {
match self {
TokenNode::Token(Spanned {
item: RawToken::Operator(Operator::Dot),
..
}) => true,
_ => false,
}
}
pub fn as_block(&self) -> Option<(Spanned<&[TokenNode]>, (Span, Span))> {
match self {
TokenNode::Delimited(Spanned {
item:
DelimitedNode {
delimiter,
children,
spans,
},
span,
}) if *delimiter == Delimiter::Brace => Some(((&children[..]).spanned(*span), *spans)),
_ => None,
}
}
pub fn is_external(&self) -> bool { pub fn is_external(&self) -> bool {
match self { match self {
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::ExternalCommand(..), item: RawToken::ExternalCommand(..),
.. ..
}) => true, }) => true,
@ -154,20 +200,20 @@ impl TokenNode {
} }
} }
pub fn expect_external(&self) -> Tag { pub fn expect_external(&self) -> Span {
match self { match self {
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::ExternalCommand(tag), item: RawToken::ExternalCommand(span),
.. ..
}) => *tag, }) => *span,
_ => panic!("Only call expect_external if you checked is_external first"), _ => panic!("Only call expect_external if you checked is_external first"),
} }
} }
pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option<Tagged<Flag>> { pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option<Spanned<Flag>> {
match self { match self {
TokenNode::Flag( TokenNode::Flag(
flag @ Tagged { flag @ Spanned {
item: Flag { .. }, .. item: Flag { .. }, ..
}, },
) if value == flag.name().slice(source) => Some(*flag), ) if value == flag.name().slice(source) => Some(*flag),
@ -177,8 +223,58 @@ impl TokenNode {
pub fn as_pipeline(&self) -> Result<Pipeline, ShellError> { pub fn as_pipeline(&self) -> Result<Pipeline, ShellError> {
match self { match self {
TokenNode::Pipeline(Tagged { item, .. }) => Ok(item.clone()), TokenNode::Pipeline(Spanned { item, .. }) => Ok(item.clone()),
_ => Err(ShellError::string("unimplemented")), _ => Err(ShellError::unimplemented("unimplemented")),
}
}
pub fn is_whitespace(&self) -> bool {
match self {
TokenNode::Whitespace(_) => true,
_ => false,
}
}
pub fn expect_string(&self) -> (Span, Span) {
match self {
TokenNode::Token(Spanned {
item: RawToken::String(inner_span),
span: outer_span,
}) => (*outer_span, *inner_span),
other => panic!("Expected string, found {:?}", other),
}
}
}
#[cfg(test)]
impl TokenNode {
pub fn expect_list(&self) -> Tagged<&[TokenNode]> {
match self {
TokenNode::Nodes(Spanned { item, span }) => (&item[..]).tagged(Tag {
span: *span,
anchor: None,
}),
other => panic!("Expected list, found {:?}", other),
}
}
pub fn expect_var(&self) -> (Span, Span) {
match self {
TokenNode::Token(Spanned {
item: RawToken::Variable(inner_span),
span: outer_span,
}) => (*outer_span, *inner_span),
other => panic!("Expected var, found {:?}", other),
}
}
pub fn expect_bare(&self) -> Span {
match self {
TokenNode::Token(Spanned {
item: RawToken::Bare,
span,
}) => *span,
other => panic!("Expected var, found {:?}", other),
} }
} }
} }
@ -186,8 +282,9 @@ impl TokenNode {
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
#[get = "pub(crate)"] #[get = "pub(crate)"]
pub struct DelimitedNode { pub struct DelimitedNode {
delimiter: Delimiter, pub(crate) delimiter: Delimiter,
children: Vec<TokenNode>, pub(crate) spans: (Span, Span),
pub(crate) children: Vec<TokenNode>,
} }
impl DelimitedNode { impl DelimitedNode {
@ -200,13 +297,31 @@ impl DelimitedNode {
} }
} }
#[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd, FromStr)] #[derive(Debug, Copy, Clone, Eq, PartialEq, Ord, PartialOrd)]
pub enum Delimiter { pub enum Delimiter {
Paren, Paren,
Brace, Brace,
Square, Square,
} }
impl Delimiter {
pub(crate) fn open(&self) -> &'static str {
match self {
Delimiter::Paren => "(",
Delimiter::Brace => "{",
Delimiter::Square => "[",
}
}
pub(crate) fn close(&self) -> &'static str {
match self {
Delimiter::Paren => ")",
Delimiter::Brace => "}",
Delimiter::Square => "]",
}
}
}
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
#[get = "pub(crate)"] #[get = "pub(crate)"]
pub struct PathNode { pub struct PathNode {

View File

@ -3,12 +3,10 @@ use crate::prelude::*;
use crate::parser::parse::flag::{Flag, FlagKind}; use crate::parser::parse::flag::{Flag, FlagKind};
use crate::parser::parse::operator::Operator; use crate::parser::parse::operator::Operator;
use crate::parser::parse::pipeline::{Pipeline, PipelineElement}; use crate::parser::parse::pipeline::{Pipeline, PipelineElement};
use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, PathNode, TokenNode}; use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
use crate::parser::parse::tokens::{RawNumber, RawToken}; use crate::parser::parse::tokens::{RawNumber, RawToken};
use crate::parser::parse::unit::Unit;
use crate::parser::CallNode; use crate::parser::CallNode;
use derive_new::new; use derive_new::new;
use uuid::Uuid;
#[derive(new)] #[derive(new)]
pub struct TokenTreeBuilder { pub struct TokenTreeBuilder {
@ -17,74 +15,86 @@ pub struct TokenTreeBuilder {
#[new(default)] #[new(default)]
output: String, output: String,
anchor: Uuid,
} }
pub type CurriedToken = Box<dyn FnOnce(&mut TokenTreeBuilder) -> TokenNode + 'static>; pub type CurriedToken = Box<dyn FnOnce(&mut TokenTreeBuilder) -> TokenNode + 'static>;
pub type CurriedCall = Box<dyn FnOnce(&mut TokenTreeBuilder) -> Tagged<CallNode> + 'static>; pub type CurriedCall = Box<dyn FnOnce(&mut TokenTreeBuilder) -> Tagged<CallNode> + 'static>;
impl TokenTreeBuilder { impl TokenTreeBuilder {
pub fn build(anchor: Uuid, block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) { pub fn build(block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) {
let mut builder = TokenTreeBuilder::new(anchor); let mut builder = TokenTreeBuilder::new();
let node = block(&mut builder); let node = block(&mut builder);
(node, builder.output) (node, builder.output)
} }
pub fn pipeline(input: Vec<(Option<&str>, CurriedCall, Option<&str>)>) -> CurriedToken { fn build_spanned<T>(
let input: Vec<(Option<String>, CurriedCall, Option<String>)> = input &mut self,
.into_iter() callback: impl FnOnce(&mut TokenTreeBuilder) -> T,
.map(|(pre, call, post)| { ) -> Spanned<T> {
( let start = self.pos;
pre.map(|s| s.to_string()), let ret = callback(self);
call, let end = self.pos;
post.map(|s| s.to_string()),
)
})
.collect();
ret.spanned(Span::new(start, end))
}
pub fn pipeline(input: Vec<Vec<CurriedToken>>) -> CurriedToken {
Box::new(move |b| { Box::new(move |b| {
let start = b.pos; let start = b.pos;
let mut out: Vec<PipelineElement> = vec![]; let mut out: Vec<Spanned<PipelineElement>> = vec![];
let mut input = input.into_iter().peekable(); let mut input = input.into_iter().peekable();
let (pre, call, post) = input let head = input
.next() .next()
.expect("A pipeline must contain at least one element"); .expect("A pipeline must contain at least one element");
let pipe = None; let pipe = None;
let pre_tag = pre.map(|pre| b.consume_tag(&pre)); let head = b.build_spanned(|b| head.into_iter().map(|node| node(b)).collect());
let call = call(b);
let post_tag = post.map(|post| b.consume_tag(&post));
out.push(PipelineElement::new(pipe, pre_tag, call, post_tag)); let head_span: Span = head.span;
out.push(PipelineElement::new(pipe, head).spanned(head_span));
loop { loop {
match input.next() { match input.next() {
None => break, None => break,
Some((pre, call, post)) => { Some(node) => {
let pipe = Some(b.consume_tag("|")); let start = b.pos;
let pre_span = pre.map(|pre| b.consume_tag(&pre)); let pipe = Some(b.consume_span("|"));
let call = call(b); let node =
let post_span = post.map(|post| b.consume_tag(&post)); b.build_spanned(|b| node.into_iter().map(|node| node(b)).collect());
let end = b.pos;
out.push(PipelineElement::new(pipe, pre_span, call, post_span)); out.push(PipelineElement::new(pipe, node).spanned(Span::new(start, end)));
} }
} }
} }
let end = b.pos; let end = b.pos;
TokenTreeBuilder::tagged_pipeline((out, None), (start, end, b.anchor)) TokenTreeBuilder::spanned_pipeline(out, Span::new(start, end))
}) })
} }
pub fn tagged_pipeline( pub fn spanned_pipeline(
input: (Vec<PipelineElement>, Option<Tag>), input: Vec<Spanned<PipelineElement>>,
tag: impl Into<Tag>, span: impl Into<Span>,
) -> TokenNode { ) -> TokenNode {
TokenNode::Pipeline(Pipeline::new(input.0, input.1.into()).tagged(tag.into())) TokenNode::Pipeline(Pipeline::new(input).spanned(span))
}
pub fn token_list(input: Vec<CurriedToken>) -> CurriedToken {
Box::new(move |b| {
let start = b.pos;
let tokens = input.into_iter().map(|i| i(b)).collect();
let end = b.pos;
TokenTreeBuilder::tagged_token_list(tokens, (start, end, None))
})
}
pub fn tagged_token_list(input: Vec<TokenNode>, tag: impl Into<Tag>) -> TokenNode {
TokenNode::Nodes(input.spanned(tag.into().span))
} }
pub fn op(input: impl Into<Operator>) -> CurriedToken { pub fn op(input: impl Into<Operator>) -> CurriedToken {
@ -95,12 +105,12 @@ impl TokenTreeBuilder {
b.pos = end; b.pos = end;
TokenTreeBuilder::tagged_op(input, (start, end, b.anchor)) TokenTreeBuilder::spanned_op(input, Span::new(start, end))
}) })
} }
pub fn tagged_op(input: impl Into<Operator>, tag: impl Into<Tag>) -> TokenNode { pub fn spanned_op(input: impl Into<Operator>, span: impl Into<Span>) -> TokenNode {
TokenNode::Operator(input.into().tagged(tag.into())) TokenNode::Token(RawToken::Operator(input.into()).spanned(span.into()))
} }
pub fn string(input: impl Into<String>) -> CurriedToken { pub fn string(input: impl Into<String>) -> CurriedToken {
@ -112,15 +122,15 @@ impl TokenTreeBuilder {
let (_, end) = b.consume("\""); let (_, end) = b.consume("\"");
b.pos = end; b.pos = end;
TokenTreeBuilder::tagged_string( TokenTreeBuilder::spanned_string(
(inner_start, inner_end, b.anchor), Span::new(inner_start, inner_end),
(start, end, b.anchor), Span::new(start, end),
) )
}) })
} }
pub fn tagged_string(input: impl Into<Tag>, tag: impl Into<Tag>) -> TokenNode { pub fn spanned_string(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::String(input.into()).tagged(tag.into())) TokenNode::Token(RawToken::String(input.into()).spanned(span.into()))
} }
pub fn bare(input: impl Into<String>) -> CurriedToken { pub fn bare(input: impl Into<String>) -> CurriedToken {
@ -130,12 +140,12 @@ impl TokenTreeBuilder {
let (start, end) = b.consume(&input); let (start, end) = b.consume(&input);
b.pos = end; b.pos = end;
TokenTreeBuilder::tagged_bare((start, end, b.anchor)) TokenTreeBuilder::spanned_bare(Span::new(start, end))
}) })
} }
pub fn tagged_bare(tag: impl Into<Tag>) -> TokenNode { pub fn spanned_bare(span: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::Bare.tagged(tag.into())) TokenNode::Token(RawToken::Bare.spanned(span))
} }
pub fn pattern(input: impl Into<String>) -> CurriedToken { pub fn pattern(input: impl Into<String>) -> CurriedToken {
@ -145,12 +155,12 @@ impl TokenTreeBuilder {
let (start, end) = b.consume(&input); let (start, end) = b.consume(&input);
b.pos = end; b.pos = end;
TokenTreeBuilder::tagged_pattern((start, end, b.anchor)) TokenTreeBuilder::spanned_pattern(Span::new(start, end))
}) })
} }
pub fn tagged_pattern(input: impl Into<Tag>) -> TokenNode { pub fn spanned_pattern(input: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::GlobPattern.tagged(input.into())) TokenNode::Token(RawToken::GlobPattern.spanned(input.into()))
} }
pub fn external_word(input: impl Into<String>) -> CurriedToken { pub fn external_word(input: impl Into<String>) -> CurriedToken {
@ -160,16 +170,31 @@ impl TokenTreeBuilder {
let (start, end) = b.consume(&input); let (start, end) = b.consume(&input);
b.pos = end; b.pos = end;
TokenTreeBuilder::tagged_external_word((start, end, b.anchor)) TokenTreeBuilder::spanned_external_word(Span::new(start, end))
}) })
} }
pub fn tagged_external_word(input: impl Into<Tag>) -> TokenNode { pub fn spanned_external_word(input: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::ExternalWord.tagged(input.into())) TokenNode::Token(RawToken::ExternalWord.spanned(input.into()))
} }
pub fn tagged_external(input: impl Into<Tag>, tag: impl Into<Tag>) -> TokenNode { pub fn external_command(input: impl Into<String>) -> CurriedToken {
TokenNode::Token(RawToken::ExternalCommand(input.into()).tagged(tag.into())) let input = input.into();
Box::new(move |b| {
let (outer_start, _) = b.consume("^");
let (inner_start, end) = b.consume(&input);
b.pos = end;
TokenTreeBuilder::spanned_external_command(
Span::new(inner_start, end),
Span::new(outer_start, end),
)
})
}
pub fn spanned_external_command(inner: impl Into<Span>, outer: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::ExternalCommand(inner.into()).spanned(outer.into()))
} }
pub fn int(input: impl Into<BigInt>) -> CurriedToken { pub fn int(input: impl Into<BigInt>) -> CurriedToken {
@ -179,9 +204,9 @@ impl TokenTreeBuilder {
let (start, end) = b.consume(&int.to_string()); let (start, end) = b.consume(&int.to_string());
b.pos = end; b.pos = end;
TokenTreeBuilder::tagged_number( TokenTreeBuilder::spanned_number(
RawNumber::Int((start, end, b.anchor).into()), RawNumber::Int(Span::new(start, end)),
(start, end, b.anchor), Span::new(start, end),
) )
}) })
} }
@ -193,63 +218,15 @@ impl TokenTreeBuilder {
let (start, end) = b.consume(&decimal.to_string()); let (start, end) = b.consume(&decimal.to_string());
b.pos = end; b.pos = end;
TokenTreeBuilder::tagged_number( TokenTreeBuilder::spanned_number(
RawNumber::Decimal((start, end, b.anchor).into()), RawNumber::Decimal(Span::new(start, end)),
(start, end, b.anchor), Span::new(start, end),
) )
}) })
} }
pub fn tagged_number(input: impl Into<RawNumber>, tag: impl Into<Tag>) -> TokenNode { pub fn spanned_number(input: impl Into<RawNumber>, span: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::Number(input.into()).tagged(tag.into())) TokenNode::Token(RawToken::Number(input.into()).spanned(span.into()))
}
pub fn size(int: impl Into<i64>, unit: impl Into<Unit>) -> CurriedToken {
let int = int.into();
let unit = unit.into();
Box::new(move |b| {
let (start_int, end_int) = b.consume(&int.to_string());
let (_, end_unit) = b.consume(unit.as_str());
b.pos = end_unit;
TokenTreeBuilder::tagged_size(
(RawNumber::Int((start_int, end_int, b.anchor).into()), unit),
(start_int, end_unit, b.anchor),
)
})
}
pub fn tagged_size(
input: (impl Into<RawNumber>, impl Into<Unit>),
tag: impl Into<Tag>,
) -> TokenNode {
let (int, unit) = (input.0.into(), input.1.into());
TokenNode::Token(RawToken::Size(int, unit).tagged(tag.into()))
}
pub fn path(head: CurriedToken, tail: Vec<CurriedToken>) -> CurriedToken {
Box::new(move |b| {
let start = b.pos;
let head = head(b);
let mut output = vec![];
for item in tail {
b.consume(".");
output.push(item(b));
}
let end = b.pos;
TokenTreeBuilder::tagged_path((head, output), (start, end, b.anchor))
})
}
pub fn tagged_path(input: (TokenNode, Vec<TokenNode>), tag: impl Into<Tag>) -> TokenNode {
TokenNode::Path(PathNode::new(Box::new(input.0), input.1).tagged(tag.into()))
} }
pub fn var(input: impl Into<String>) -> CurriedToken { pub fn var(input: impl Into<String>) -> CurriedToken {
@ -259,12 +236,12 @@ impl TokenTreeBuilder {
let (start, _) = b.consume("$"); let (start, _) = b.consume("$");
let (inner_start, end) = b.consume(&input); let (inner_start, end) = b.consume(&input);
TokenTreeBuilder::tagged_var((inner_start, end, b.anchor), (start, end, b.anchor)) TokenTreeBuilder::spanned_var(Span::new(inner_start, end), Span::new(start, end))
}) })
} }
pub fn tagged_var(input: impl Into<Tag>, tag: impl Into<Tag>) -> TokenNode { pub fn spanned_var(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::Variable(input.into()).tagged(tag.into())) TokenNode::Token(RawToken::Variable(input.into()).spanned(span.into()))
} }
pub fn flag(input: impl Into<String>) -> CurriedToken { pub fn flag(input: impl Into<String>) -> CurriedToken {
@ -274,12 +251,12 @@ impl TokenTreeBuilder {
let (start, _) = b.consume("--"); let (start, _) = b.consume("--");
let (inner_start, end) = b.consume(&input); let (inner_start, end) = b.consume(&input);
TokenTreeBuilder::tagged_flag((inner_start, end, b.anchor), (start, end, b.anchor)) TokenTreeBuilder::spanned_flag(Span::new(inner_start, end), Span::new(start, end))
}) })
} }
pub fn tagged_flag(input: impl Into<Tag>, tag: impl Into<Tag>) -> TokenNode { pub fn spanned_flag(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
TokenNode::Flag(Flag::new(FlagKind::Longhand, input.into()).tagged(tag.into())) TokenNode::Flag(Flag::new(FlagKind::Longhand, input.into()).spanned(span.into()))
} }
pub fn shorthand(input: impl Into<String>) -> CurriedToken { pub fn shorthand(input: impl Into<String>) -> CurriedToken {
@ -289,25 +266,12 @@ impl TokenTreeBuilder {
let (start, _) = b.consume("-"); let (start, _) = b.consume("-");
let (inner_start, end) = b.consume(&input); let (inner_start, end) = b.consume(&input);
TokenTreeBuilder::tagged_shorthand((inner_start, end, b.anchor), (start, end, b.anchor)) TokenTreeBuilder::spanned_shorthand((inner_start, end), (start, end))
}) })
} }
pub fn tagged_shorthand(input: impl Into<Tag>, tag: impl Into<Tag>) -> TokenNode { pub fn spanned_shorthand(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into()).tagged(tag.into())) TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into()).spanned(span.into()))
}
pub fn member(input: impl Into<String>) -> CurriedToken {
let input = input.into();
Box::new(move |b| {
let (start, end) = b.consume(&input);
TokenTreeBuilder::tagged_member((start, end, b.anchor))
})
}
pub fn tagged_member(tag: impl Into<Tag>) -> TokenNode {
TokenNode::Member(tag.into())
} }
pub fn call(head: CurriedToken, input: Vec<CurriedToken>) -> CurriedCall { pub fn call(head: CurriedToken, input: Vec<CurriedToken>) -> CurriedCall {
@ -323,7 +287,7 @@ impl TokenTreeBuilder {
let end = b.pos; let end = b.pos;
TokenTreeBuilder::tagged_call(nodes, (start, end, b.anchor)) TokenTreeBuilder::tagged_call(nodes, (start, end, None))
}) })
} }
@ -340,64 +304,85 @@ impl TokenTreeBuilder {
CallNode::new(Box::new(head), tail).tagged(tag.into()) CallNode::new(Box::new(head), tail).tagged(tag.into())
} }
fn consume_delimiter(
&mut self,
input: Vec<CurriedToken>,
_open: &str,
_close: &str,
) -> (Span, Span, Span, Vec<TokenNode>) {
let (start_open_paren, end_open_paren) = self.consume("(");
let mut output = vec![];
for item in input {
output.push(item(self));
}
let (start_close_paren, end_close_paren) = self.consume(")");
let open = Span::new(start_open_paren, end_open_paren);
let close = Span::new(start_close_paren, end_close_paren);
let whole = Span::new(start_open_paren, end_close_paren);
(open, close, whole, output)
}
pub fn parens(input: Vec<CurriedToken>) -> CurriedToken { pub fn parens(input: Vec<CurriedToken>) -> CurriedToken {
Box::new(move |b| { Box::new(move |b| {
let (start, _) = b.consume("("); let (open, close, whole, output) = b.consume_delimiter(input, "(", ")");
let mut output = vec![];
for item in input {
output.push(item(b));
}
let (_, end) = b.consume(")"); TokenTreeBuilder::spanned_parens(output, (open, close), whole)
TokenTreeBuilder::tagged_parens(output, (start, end, b.anchor))
}) })
} }
pub fn tagged_parens(input: impl Into<Vec<TokenNode>>, tag: impl Into<Tag>) -> TokenNode { pub fn spanned_parens(
TokenNode::Delimited(DelimitedNode::new(Delimiter::Paren, input.into()).tagged(tag.into())) input: impl Into<Vec<TokenNode>>,
spans: (Span, Span),
span: impl Into<Span>,
) -> TokenNode {
TokenNode::Delimited(
DelimitedNode::new(Delimiter::Paren, spans, input.into()).spanned(span.into()),
)
} }
pub fn square(input: Vec<CurriedToken>) -> CurriedToken { pub fn square(input: Vec<CurriedToken>) -> CurriedToken {
Box::new(move |b| { Box::new(move |b| {
let (start, _) = b.consume("["); let (open, close, whole, tokens) = b.consume_delimiter(input, "[", "]");
let mut output = vec![];
for item in input {
output.push(item(b));
}
let (_, end) = b.consume("]"); TokenTreeBuilder::spanned_square(tokens, (open, close), whole)
TokenTreeBuilder::tagged_square(output, (start, end, b.anchor))
}) })
} }
pub fn tagged_square(input: impl Into<Vec<TokenNode>>, tag: impl Into<Tag>) -> TokenNode { pub fn spanned_square(
TokenNode::Delimited(DelimitedNode::new(Delimiter::Square, input.into()).tagged(tag.into())) input: impl Into<Vec<TokenNode>>,
spans: (Span, Span),
span: impl Into<Span>,
) -> TokenNode {
TokenNode::Delimited(
DelimitedNode::new(Delimiter::Square, spans, input.into()).spanned(span.into()),
)
} }
pub fn braced(input: Vec<CurriedToken>) -> CurriedToken { pub fn braced(input: Vec<CurriedToken>) -> CurriedToken {
Box::new(move |b| { Box::new(move |b| {
let (start, _) = b.consume("{ "); let (open, close, whole, tokens) = b.consume_delimiter(input, "{", "}");
let mut output = vec![];
for item in input {
output.push(item(b));
}
let (_, end) = b.consume(" }"); TokenTreeBuilder::spanned_brace(tokens, (open, close), whole)
TokenTreeBuilder::tagged_brace(output, (start, end, b.anchor))
}) })
} }
pub fn tagged_brace(input: impl Into<Vec<TokenNode>>, tag: impl Into<Tag>) -> TokenNode { pub fn spanned_brace(
TokenNode::Delimited(DelimitedNode::new(Delimiter::Brace, input.into()).tagged(tag.into())) input: impl Into<Vec<TokenNode>>,
spans: (Span, Span),
span: impl Into<Span>,
) -> TokenNode {
TokenNode::Delimited(
DelimitedNode::new(Delimiter::Brace, spans, input.into()).spanned(span.into()),
)
} }
pub fn sp() -> CurriedToken { pub fn sp() -> CurriedToken {
Box::new(|b| { Box::new(|b| {
let (start, end) = b.consume(" "); let (start, end) = b.consume(" ");
TokenNode::Whitespace(Tag::from((start, end, b.anchor))) TokenNode::Whitespace(Span::new(start, end))
}) })
} }
@ -406,12 +391,12 @@ impl TokenTreeBuilder {
Box::new(move |b| { Box::new(move |b| {
let (start, end) = b.consume(&input); let (start, end) = b.consume(&input);
TokenTreeBuilder::tagged_ws((start, end, b.anchor)) TokenTreeBuilder::spanned_ws(Span::new(start, end))
}) })
} }
pub fn tagged_ws(tag: impl Into<Tag>) -> TokenNode { pub fn spanned_ws(span: impl Into<Span>) -> TokenNode {
TokenNode::Whitespace(tag.into()) TokenNode::Whitespace(span.into())
} }
fn consume(&mut self, input: &str) -> (usize, usize) { fn consume(&mut self, input: &str) -> (usize, usize) {
@ -421,10 +406,10 @@ impl TokenTreeBuilder {
(start, self.pos) (start, self.pos)
} }
fn consume_tag(&mut self, input: &str) -> Tag { fn consume_span(&mut self, input: &str) -> Span {
let start = self.pos; let start = self.pos;
self.pos += input.len(); self.pos += input.len();
self.output.push_str(input); self.output.push_str(input);
(start, self.pos, self.anchor).into() Span::new(start, self.pos)
} }
} }

View File

@ -1,38 +1,53 @@
use crate::parser::parse::unit::*; use crate::parser::Operator;
use crate::prelude::*; use crate::prelude::*;
use crate::{Tagged, Text}; use crate::Text;
use std::fmt; use std::fmt;
use std::str::FromStr; use std::str::FromStr;
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] #[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum RawToken { pub enum RawToken {
Number(RawNumber), Number(RawNumber),
Size(RawNumber, Unit), Operator(Operator),
String(Tag), String(Span),
Variable(Tag), Variable(Span),
ExternalCommand(Tag), ExternalCommand(Span),
ExternalWord, ExternalWord,
GlobPattern, GlobPattern,
Bare, Bare,
} }
impl RawToken {
pub fn type_name(&self) -> &'static str {
match self {
RawToken::Number(_) => "Number",
RawToken::Operator(..) => "operator",
RawToken::String(_) => "String",
RawToken::Variable(_) => "variable",
RawToken::ExternalCommand(_) => "external command",
RawToken::ExternalWord => "external word",
RawToken::GlobPattern => "glob pattern",
RawToken::Bare => "String",
}
}
}
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] #[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum RawNumber { pub enum RawNumber {
Int(Tag), Int(Span),
Decimal(Tag), Decimal(Span),
} }
impl RawNumber { impl RawNumber {
pub fn int(tag: impl Into<Tag>) -> Tagged<RawNumber> { pub fn int(span: impl Into<Span>) -> Spanned<RawNumber> {
let tag = tag.into(); let span = span.into();
RawNumber::Int(tag).tagged(tag) RawNumber::Int(span).spanned(span)
} }
pub fn decimal(tag: impl Into<Tag>) -> Tagged<RawNumber> { pub fn decimal(span: impl Into<Span>) -> Spanned<RawNumber> {
let tag = tag.into(); let span = span.into();
RawNumber::Decimal(tag).tagged(tag) RawNumber::Decimal(span).spanned(span)
} }
pub(crate) fn to_number(self, source: &Text) -> Number { pub(crate) fn to_number(self, source: &Text) -> Number {
@ -45,22 +60,7 @@ impl RawNumber {
} }
} }
impl RawToken { pub type Token = Spanned<RawToken>;
pub fn type_name(&self) -> &'static str {
match self {
RawToken::Number(_) => "Number",
RawToken::Size(..) => "Size",
RawToken::String(_) => "String",
RawToken::Variable(_) => "Variable",
RawToken::ExternalCommand(_) => "ExternalCommand",
RawToken::ExternalWord => "ExternalWord",
RawToken::GlobPattern => "GlobPattern",
RawToken::Bare => "String",
}
}
}
pub type Token = Tagged<RawToken>;
impl Token { impl Token {
pub fn debug<'a>(&self, source: &'a Text) -> DebugToken<'a> { pub fn debug<'a>(&self, source: &'a Text) -> DebugToken<'a> {
@ -69,6 +69,76 @@ impl Token {
source, source,
} }
} }
    /// Returns the number payload paired with this token's span if this is a
    /// `Number` token; `None` otherwise.
    pub fn extract_number(&self) -> Option<Spanned<RawNumber>> {
        match self.item {
            RawToken::Number(number) => Some((number).spanned(self.span)),
            _ => None,
        }
    }
    /// Returns `(digits_span, token_span)` if this is an integer `Number`
    /// token; `None` otherwise.
    pub fn extract_int(&self) -> Option<(Span, Span)> {
        match self.item {
            RawToken::Number(RawNumber::Int(int)) => Some((int, self.span)),
            _ => None,
        }
    }
    /// Returns `(digits_span, token_span)` if this is a decimal `Number`
    /// token; `None` otherwise.
    pub fn extract_decimal(&self) -> Option<(Span, Span)> {
        match self.item {
            RawToken::Number(RawNumber::Decimal(decimal)) => Some((decimal, self.span)),
            _ => None,
        }
    }
    /// Returns the operator paired with this token's span if this is an
    /// `Operator` token; `None` otherwise.
    pub fn extract_operator(&self) -> Option<Spanned<Operator>> {
        match self.item {
            RawToken::Operator(operator) => Some(operator.spanned(self.span)),
            _ => None,
        }
    }
    /// Returns `(inner_span, token_span)` if this is a `String` token — the
    /// inner span is the string's content, the outer the whole token.
    pub fn extract_string(&self) -> Option<(Span, Span)> {
        match self.item {
            RawToken::String(span) => Some((span, self.span)),
            _ => None,
        }
    }
    /// Returns `(name_span, token_span)` if this is a `Variable` token; the
    /// inner span excludes the `$` sigil.
    pub fn extract_variable(&self) -> Option<(Span, Span)> {
        match self.item {
            RawToken::Variable(span) => Some((span, self.span)),
            _ => None,
        }
    }
    /// Returns `(command_span, token_span)` if this is an `ExternalCommand`
    /// token; the inner span excludes the leading `^`.
    pub fn extract_external_command(&self) -> Option<(Span, Span)> {
        match self.item {
            RawToken::ExternalCommand(span) => Some((span, self.span)),
            _ => None,
        }
    }
pub fn extract_external_word(&self) -> Option<Span> {
match self.item {
RawToken::ExternalWord => Some(self.span),
_ => None,
}
}
pub fn extract_glob_pattern(&self) -> Option<Span> {
match self.item {
RawToken::GlobPattern => Some(self.span),
_ => None,
}
}
pub fn extract_bare(&self) -> Option<Span> {
match self.item {
RawToken::Bare => Some(self.span),
_ => None,
}
}
} }
pub struct DebugToken<'a> { pub struct DebugToken<'a> {
@ -78,6 +148,6 @@ pub struct DebugToken<'a> {
impl fmt::Debug for DebugToken<'_> { impl fmt::Debug for DebugToken<'_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.node.tag().slice(self.source)) write!(f, "{}", self.node.span.slice(self.source))
} }
} }

Some files were not shown because too many files have changed in this diff Show More