Stream support (#812)

* Moves off of draining between filters. Instead, the sink will pull on the stream, and will drain element-wise. This moves the whole stream to being lazy.
* Adds ctrl-c support and connects it into some of the key points where we pull on the stream. If a ctrl-c is detected, we immediately halt pulling on the stream and return to the prompt.
* Moves away from having a SourceMap where anchor locations are stored. Now AnchorLocation is kept directly in the Tag.
* To make this possible, split tag and span. Span is largely used in the parser and is copyable. Tag is now no longer copyable.
This commit is contained in:
Jonathan Turner 2019-10-13 17:12:43 +13:00 committed by GitHub
parent 8ca678440a
commit 193b00764b
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
110 changed files with 1988 additions and 1892 deletions

88
Cargo.lock generated
View File

@ -1604,7 +1604,6 @@ dependencies = [
"toml 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)", "toml 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
"unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
"url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)",
"which 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "which 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
@ -1920,24 +1919,6 @@ dependencies = [
"proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "rand"
version = "0.6.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "rand" name = "rand"
version = "0.7.0" version = "0.7.0"
@ -1950,15 +1931,6 @@ dependencies = [
"rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "rand_chacha"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "rand_chacha" name = "rand_chacha"
version = "0.2.1" version = "0.2.1"
@ -1989,14 +1961,6 @@ dependencies = [
"getrandom 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)", "getrandom 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "rand_hc"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "rand_hc" name = "rand_hc"
version = "0.2.0" version = "0.2.0"
@ -2005,24 +1969,6 @@ dependencies = [
"rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", "rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "rand_isaac"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rand_jitter"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "rand_os" name = "rand_os"
version = "0.1.3" version = "0.1.3"
@ -2036,23 +1982,6 @@ dependencies = [
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
] ]
[[package]]
name = "rand_pcg"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "rand_xorshift"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "raw-cpuid" name = "raw-cpuid"
version = "7.0.3" version = "7.0.3"
@ -2752,15 +2681,6 @@ name = "utf8parse"
version = "0.1.1" version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "uuid"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]] [[package]]
name = "vcpkg" name = "vcpkg"
version = "0.2.7" version = "0.2.7"
@ -3201,20 +3121,13 @@ dependencies = [
"checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0" "checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0"
"checksum quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" "checksum quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1"
"checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe" "checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe"
"checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca"
"checksum rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d47eab0e83d9693d40f825f86948aa16eff6750ead4bdffc4ab95b8b3a7f052c" "checksum rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d47eab0e83d9693d40f825f86948aa16eff6750ead4bdffc4ab95b8b3a7f052c"
"checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef"
"checksum rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853" "checksum rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853"
"checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" "checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b"
"checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" "checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc"
"checksum rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "615e683324e75af5d43d8f7a39ffe3ee4a9dc42c5c701167a71dc59c3a493aca" "checksum rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "615e683324e75af5d43d8f7a39ffe3ee4a9dc42c5c701167a71dc59c3a493aca"
"checksum rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4"
"checksum rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" "checksum rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
"checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08"
"checksum rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b"
"checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" "checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071"
"checksum rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44"
"checksum rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c"
"checksum raw-cpuid 7.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b4a349ca83373cfa5d6dbb66fd76e58b2cca08da71a5f6400de0a0a6a9bceeaf" "checksum raw-cpuid 7.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b4a349ca83373cfa5d6dbb66fd76e58b2cca08da71a5f6400de0a0a6a9bceeaf"
"checksum rawkey 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "33ec17a493dcb820725c002bc253f6f3ba4e4dc635e72c238540691b05e43897" "checksum rawkey 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "33ec17a493dcb820725c002bc253f6f3ba4e4dc635e72c238540691b05e43897"
"checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" "checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2"
@ -3297,7 +3210,6 @@ dependencies = [
"checksum url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "75b414f6c464c879d7f9babf951f23bc3743fb7313c081b2e6ca719067ea9d61" "checksum url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "75b414f6c464c879d7f9babf951f23bc3743fb7313c081b2e6ca719067ea9d61"
"checksum user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4ef4711d107b21b410a3a974b1204d9accc8b10dad75d8324b5d755de1617d47" "checksum user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4ef4711d107b21b410a3a974b1204d9accc8b10dad75d8324b5d755de1617d47"
"checksum utf8parse 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8772a4ccbb4e89959023bc5b7cb8623a795caa7092d99f3aa9501b9484d4557d" "checksum utf8parse 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8772a4ccbb4e89959023bc5b7cb8623a795caa7092d99f3aa9501b9484d4557d"
"checksum uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "90dbc611eb48397705a6b0f6e917da23ae517e4d127123d2cf7674206627d32a"
"checksum vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "33dd455d0f96e90a75803cfeb7f948768c08d70a6de9a8d2362461935698bf95" "checksum vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "33dd455d0f96e90a75803cfeb7f948768c08d70a6de9a8d2362461935698bf95"
"checksum vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a" "checksum vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a"
"checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd" "checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd"

View File

@ -66,7 +66,6 @@ hex = "0.3.2"
tempfile = "3.1.0" tempfile = "3.1.0"
semver = "0.9.0" semver = "0.9.0"
which = "2.0.1" which = "2.0.1"
uuid = {version = "0.7.4", features = [ "v4", "serde" ]}
textwrap = {version = "0.11.0", features = ["term_size"]} textwrap = {version = "0.11.0", features = ["term_size"]}
shellexpand = "1.0.0" shellexpand = "1.0.0"
futures-timer = "0.4.0" futures-timer = "0.4.0"

View File

@ -28,8 +28,7 @@ use std::error::Error;
use std::io::{BufRead, BufReader, Write}; use std::io::{BufRead, BufReader, Write};
use std::iter::Iterator; use std::iter::Iterator;
use std::path::PathBuf; use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::Ordering;
use std::sync::Arc;
#[derive(Debug)] #[derive(Debug)]
pub enum MaybeOwned<'a, T> { pub enum MaybeOwned<'a, T> {
@ -339,16 +338,15 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
// we are ok if history does not exist // we are ok if history does not exist
let _ = rl.load_history(&History::path()); let _ = rl.load_history(&History::path());
let ctrl_c = Arc::new(AtomicBool::new(false)); let cc = context.ctrl_c.clone();
let cc = ctrl_c.clone();
ctrlc::set_handler(move || { ctrlc::set_handler(move || {
cc.store(true, Ordering::SeqCst); cc.store(true, Ordering::SeqCst);
}) })
.expect("Error setting Ctrl-C handler"); .expect("Error setting Ctrl-C handler");
let mut ctrlcbreak = false; let mut ctrlcbreak = false;
loop { loop {
if ctrl_c.load(Ordering::SeqCst) { if context.ctrl_c.load(Ordering::SeqCst) {
ctrl_c.store(false, Ordering::SeqCst); context.ctrl_c.store(false, Ordering::SeqCst);
continue; continue;
} }
@ -481,7 +479,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
Ok(line) => { Ok(line) => {
let line = chomp_newline(line); let line = chomp_newline(line);
let result = match crate::parser::parse(&line, uuid::Uuid::nil()) { let result = match crate::parser::parse(&line) {
Err(err) => { Err(err) => {
return LineResult::Error(line.to_string(), err); return LineResult::Error(line.to_string(), err);
} }
@ -549,30 +547,45 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
( (
Some(ClassifiedCommand::Internal(left)), Some(ClassifiedCommand::Internal(left)),
Some(ClassifiedCommand::External(_)), Some(ClassifiedCommand::External(_)),
) => match left ) => match left.run(ctx, input, Text::from(line), is_first_command) {
.run(ctx, input, Text::from(line), is_first_command)
.await
{
Ok(val) => ClassifiedInputStream::from_input_stream(val), Ok(val) => ClassifiedInputStream::from_input_stream(val),
Err(err) => return LineResult::Error(line.to_string(), err), Err(err) => return LineResult::Error(line.to_string(), err),
}, },
(Some(ClassifiedCommand::Internal(left)), Some(_)) => { (Some(ClassifiedCommand::Internal(left)), Some(_)) => {
match left match left.run(ctx, input, Text::from(line), is_first_command) {
.run(ctx, input, Text::from(line), is_first_command)
.await
{
Ok(val) => ClassifiedInputStream::from_input_stream(val), Ok(val) => ClassifiedInputStream::from_input_stream(val),
Err(err) => return LineResult::Error(line.to_string(), err), Err(err) => return LineResult::Error(line.to_string(), err),
} }
} }
(Some(ClassifiedCommand::Internal(left)), None) => { (Some(ClassifiedCommand::Internal(left)), None) => {
match left match left.run(ctx, input, Text::from(line), is_first_command) {
.run(ctx, input, Text::from(line), is_first_command) Ok(val) => {
.await use futures::stream::TryStreamExt;
{
Ok(val) => ClassifiedInputStream::from_input_stream(val), let mut output_stream: OutputStream = val.into();
loop {
match output_stream.try_next().await {
Ok(Some(ReturnSuccess::Value(Tagged {
item: Value::Error(e),
..
}))) => {
return LineResult::Error(line.to_string(), e);
}
Ok(Some(_item)) => {
if ctx.ctrl_c.load(Ordering::SeqCst) {
break;
}
}
_ => {
break;
}
}
}
return LineResult::Success(line.to_string());
}
Err(err) => return LineResult::Error(line.to_string(), err), Err(err) => return LineResult::Error(line.to_string(), err),
} }
} }
@ -620,12 +633,12 @@ fn classify_pipeline(
source: &Text, source: &Text,
) -> Result<ClassifiedPipeline, ShellError> { ) -> Result<ClassifiedPipeline, ShellError> {
let mut pipeline_list = vec![pipeline.clone()]; let mut pipeline_list = vec![pipeline.clone()];
let mut iterator = TokensIterator::all(&mut pipeline_list, pipeline.tag()); let mut iterator = TokensIterator::all(&mut pipeline_list, pipeline.span());
expand_syntax( expand_syntax(
&PipelineShape, &PipelineShape,
&mut iterator, &mut iterator,
&context.expand_context(source, pipeline.tag()), &context.expand_context(source, pipeline.span()),
) )
} }
@ -642,7 +655,13 @@ pub(crate) fn external_command(
Ok(ClassifiedCommand::External(ExternalCommand { Ok(ClassifiedCommand::External(ExternalCommand {
name: name.to_string(), name: name.to_string(),
name_tag: name.tag(), name_tag: name.tag(),
args: arg_list_strings, args: arg_list_strings
.iter()
.map(|x| Tagged {
tag: x.span.into(),
item: x.item.clone(),
})
.collect(),
})) }))
} }

View File

@ -1,9 +1,14 @@
use crate::commands::{RawCommandArgs, WholeStreamCommand}; use crate::commands::{RawCommandArgs, WholeStreamCommand};
use crate::errors::ShellError; use crate::errors::ShellError;
use crate::parser::hir::{Expression, NamedArguments};
use crate::prelude::*; use crate::prelude::*;
use futures::stream::TryStreamExt;
use std::sync::atomic::Ordering;
pub struct Autoview; pub struct Autoview;
const STREAM_PAGE_SIZE: u64 = 50;
#[derive(Deserialize)] #[derive(Deserialize)]
pub struct AutoviewArgs {} pub struct AutoviewArgs {}
@ -31,61 +36,132 @@ impl WholeStreamCommand for Autoview {
pub fn autoview( pub fn autoview(
AutoviewArgs {}: AutoviewArgs, AutoviewArgs {}: AutoviewArgs,
mut context: RunnableContext, context: RunnableContext,
raw: RawCommandArgs, raw: RawCommandArgs,
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
Ok(OutputStream::new(async_stream! { let binary = context.get_command("binaryview");
let input = context.input.drain_vec().await; let text = context.get_command("textview");
let table = context.get_command("table");
if input.len() > 0 { Ok(OutputStream::new(async_stream! {
if let Tagged { let mut output_stream: OutputStream = context.input.into();
item: Value::Primitive(Primitive::Binary(_)),
.. match output_stream.try_next().await {
} = input[0usize] Ok(Some(x)) => {
{ match output_stream.try_next().await {
let binary = context.get_command("binaryview"); Ok(Some(y)) => {
if let Some(binary) = binary { let ctrl_c = context.ctrl_c.clone();
let result = binary.run(raw.with_input(input), &context.commands, false); let stream = async_stream! {
result.collect::<Vec<_>>().await; yield Ok(x);
} else { yield Ok(y);
for i in input {
match i.item { loop {
Value::Primitive(Primitive::Binary(b)) => { match output_stream.try_next().await {
use pretty_hex::*; Ok(Some(z)) => {
println!("{:?}", b.hex_dump()); if ctrl_c.load(Ordering::SeqCst) {
break;
}
yield Ok(z);
}
_ => break,
}
}
};
if let Some(table) = table {
let mut new_output_stream: OutputStream = stream.to_output_stream();
let mut finished = false;
let mut current_idx = 0;
loop {
let mut new_input = VecDeque::new();
for _ in 0..STREAM_PAGE_SIZE {
match new_output_stream.try_next().await {
Ok(Some(a)) => {
if let ReturnSuccess::Value(v) = a {
new_input.push_back(v);
}
}
_ => {
finished = true;
break;
}
}
}
let raw = raw.clone();
let mut command_args = raw.with_input(new_input.into());
let mut named_args = NamedArguments::new();
named_args.insert_optional("start_number", Some(Expression::number(current_idx, Tag::unknown())));
command_args.call_info.args.named = Some(named_args);
let result = table.run(command_args, &context.commands, false);
result.collect::<Vec<_>>().await;
if finished {
break;
} else {
current_idx += STREAM_PAGE_SIZE;
}
} }
_ => {}
} }
} }
}; _ => {
} else if is_single_anchored_text_value(&input) { if let ReturnSuccess::Value(x) = x {
let text = context.get_command("textview"); match x {
if let Some(text) = text { Tagged {
let result = text.run(raw.with_input(input), &context.commands, false); item: Value::Primitive(Primitive::String(ref s)),
result.collect::<Vec<_>>().await; tag: Tag { anchor, span },
} else { } if anchor.is_some() => {
for i in input { if let Some(text) = text {
match i.item { let mut stream = VecDeque::new();
Value::Primitive(Primitive::String(s)) => { stream.push_back(Value::string(s).tagged(Tag { anchor, span }));
println!("{}", s); let result = text.run(raw.with_input(stream.into()), &context.commands, false);
result.collect::<Vec<_>>().await;
} else {
println!("{}", s);
}
}
Tagged {
item: Value::Primitive(Primitive::String(s)),
..
} => {
println!("{}", s);
}
Tagged { item: Value::Primitive(Primitive::Binary(ref b)), .. } => {
if let Some(binary) = binary {
let mut stream = VecDeque::new();
stream.push_back(x.clone());
let result = binary.run(raw.with_input(stream.into()), &context.commands, false);
result.collect::<Vec<_>>().await;
} else {
use pretty_hex::*;
println!("{:?}", b.hex_dump());
}
}
Tagged { item: Value::Error(e), .. } => {
yield Err(e);
}
Tagged { item: ref item, .. } => {
if let Some(table) = table {
let mut stream = VecDeque::new();
stream.push_back(x.clone());
let result = table.run(raw.with_input(stream.into()), &context.commands, false);
result.collect::<Vec<_>>().await;
} else {
println!("{:?}", item);
}
}
} }
_ => {}
} }
} }
} }
} else if is_single_text_value(&input) { }
for i in input { _ => {
match i.item { //println!("<no results>");
Value::Primitive(Primitive::String(s)) => {
println!("{}", s);
}
_ => {}
}
}
} else {
let table = context.expect_command("table");
let result = table.run(raw.with_input(input), &context.commands, false);
result.collect::<Vec<_>>().await;
} }
} }
@ -95,35 +171,3 @@ pub fn autoview(
} }
})) }))
} }
fn is_single_text_value(input: &Vec<Tagged<Value>>) -> bool {
if input.len() != 1 {
return false;
}
if let Tagged {
item: Value::Primitive(Primitive::String(_)),
..
} = input[0]
{
true
} else {
false
}
}
#[allow(unused)]
fn is_single_anchored_text_value(input: &Vec<Tagged<Value>>) -> bool {
if input.len() != 1 {
return false;
}
if let Tagged {
item: Value::Primitive(Primitive::String(_)),
tag: Tag { anchor, .. },
} = input[0]
{
anchor != uuid::Uuid::nil()
} else {
false
}
}

View File

@ -100,7 +100,7 @@ pub(crate) struct DynamicCommand {
} }
impl InternalCommand { impl InternalCommand {
pub(crate) async fn run( pub(crate) fn run(
self, self,
context: &mut Context, context: &mut Context,
input: ClassifiedInputStream, input: ClassifiedInputStream,
@ -119,12 +119,9 @@ impl InternalCommand {
let command = context.expect_command(&self.name); let command = context.expect_command(&self.name);
let result = { let result = {
let source_map = context.source_map.lock().unwrap().clone();
context.run_command( context.run_command(
command, command,
self.name_tag.clone(), self.name_tag.clone(),
source_map,
self.args, self.args,
&source, &source,
objects, objects,
@ -134,69 +131,73 @@ impl InternalCommand {
let result = trace_out_stream!(target: "nu::trace_stream::internal", source: &source, "output" = result); let result = trace_out_stream!(target: "nu::trace_stream::internal", source: &source, "output" = result);
let mut result = result.values; let mut result = result.values;
let mut context = context.clone();
let mut stream = VecDeque::new(); let stream = async_stream! {
while let Some(item) = result.next().await { while let Some(item) = result.next().await {
match item? { match item {
ReturnSuccess::Action(action) => match action { Ok(ReturnSuccess::Action(action)) => match action {
CommandAction::ChangePath(path) => { CommandAction::ChangePath(path) => {
context.shell_manager.set_path(path); context.shell_manager.set_path(path);
} }
CommandAction::AddAnchorLocation(uuid, anchor_location) => { CommandAction::Exit => std::process::exit(0), // TODO: save history.txt
context.add_anchor_location(uuid, anchor_location); CommandAction::EnterHelpShell(value) => {
} match value {
CommandAction::Exit => std::process::exit(0), // TODO: save history.txt Tagged {
CommandAction::EnterHelpShell(value) => { item: Value::Primitive(Primitive::String(cmd)),
match value { tag,
Tagged { } => {
item: Value::Primitive(Primitive::String(cmd)), context.shell_manager.insert_at_current(Box::new(
tag, HelpShell::for_command(
} => { Value::string(cmd).tagged(tag),
context.shell_manager.insert_at_current(Box::new( &context.registry(),
HelpShell::for_command( ).unwrap(),
Value::string(cmd).tagged(tag), ));
&context.registry(), }
)?, _ => {
)); context.shell_manager.insert_at_current(Box::new(
} HelpShell::index(&context.registry()).unwrap(),
_ => { ));
context.shell_manager.insert_at_current(Box::new( }
HelpShell::index(&context.registry())?,
));
} }
} }
} CommandAction::EnterValueShell(value) => {
CommandAction::EnterValueShell(value) => { context
context .shell_manager
.shell_manager .insert_at_current(Box::new(ValueShell::new(value)));
.insert_at_current(Box::new(ValueShell::new(value)));
}
CommandAction::EnterShell(location) => {
context.shell_manager.insert_at_current(Box::new(
FilesystemShell::with_location(location, context.registry().clone())?,
));
}
CommandAction::PreviousShell => {
context.shell_manager.prev();
}
CommandAction::NextShell => {
context.shell_manager.next();
}
CommandAction::LeaveShell => {
context.shell_manager.remove_at_current();
if context.shell_manager.is_empty() {
std::process::exit(0); // TODO: save history.txt
} }
} CommandAction::EnterShell(location) => {
}, context.shell_manager.insert_at_current(Box::new(
FilesystemShell::with_location(location, context.registry().clone()).unwrap(),
));
}
CommandAction::PreviousShell => {
context.shell_manager.prev();
}
CommandAction::NextShell => {
context.shell_manager.next();
}
CommandAction::LeaveShell => {
context.shell_manager.remove_at_current();
if context.shell_manager.is_empty() {
std::process::exit(0); // TODO: save history.txt
}
}
},
ReturnSuccess::Value(v) => { Ok(ReturnSuccess::Value(v)) => {
stream.push_back(v); yield Ok(v);
}
Err(x) => {
yield Ok(Value::Error(x).tagged_unknown());
break;
}
} }
} }
} };
Ok(stream.into()) Ok(stream.to_input_stream())
} }
} }
@ -346,7 +347,7 @@ impl ExternalCommand {
let stdout = popen.stdout.take().unwrap(); let stdout = popen.stdout.take().unwrap();
let file = futures::io::AllowStdIo::new(stdout); let file = futures::io::AllowStdIo::new(stdout);
let stream = Framed::new(file, LinesCodec {}); let stream = Framed::new(file, LinesCodec {});
let stream = stream.map(move |line| Value::string(line.unwrap()).tagged(name_tag)); let stream = stream.map(move |line| Value::string(line.unwrap()).tagged(&name_tag));
Ok(ClassifiedInputStream::from_input_stream( Ok(ClassifiedInputStream::from_input_stream(
stream.boxed() as BoxStream<'static, Tagged<Value>> stream.boxed() as BoxStream<'static, Tagged<Value>>
)) ))

View File

@ -1,4 +1,3 @@
use crate::context::{AnchorLocation, SourceMap};
use crate::data::Value; use crate::data::Value;
use crate::errors::ShellError; use crate::errors::ShellError;
use crate::evaluate::Scope; use crate::evaluate::Scope;
@ -11,13 +10,12 @@ use serde::{Deserialize, Serialize};
use std::fmt; use std::fmt;
use std::ops::Deref; use std::ops::Deref;
use std::path::PathBuf; use std::path::PathBuf;
use uuid::Uuid; use std::sync::atomic::AtomicBool;
#[derive(Deserialize, Serialize, Debug, Clone)] #[derive(Deserialize, Serialize, Debug, Clone)]
pub struct UnevaluatedCallInfo { pub struct UnevaluatedCallInfo {
pub args: hir::Call, pub args: hir::Call,
pub source: Text, pub source: Text,
pub source_map: SourceMap,
pub name_tag: Tag, pub name_tag: Tag,
} }
@ -37,7 +35,6 @@ impl UnevaluatedCallInfo {
Ok(CallInfo { Ok(CallInfo {
args, args,
source_map: self.source_map,
name_tag: self.name_tag, name_tag: self.name_tag,
}) })
} }
@ -46,7 +43,6 @@ impl UnevaluatedCallInfo {
#[derive(Deserialize, Serialize, Debug, Clone)] #[derive(Deserialize, Serialize, Debug, Clone)]
pub struct CallInfo { pub struct CallInfo {
pub args: registry::EvaluatedArgs, pub args: registry::EvaluatedArgs,
pub source_map: SourceMap,
pub name_tag: Tag, pub name_tag: Tag,
} }
@ -62,7 +58,7 @@ impl CallInfo {
args: T::deserialize(&mut deserializer)?, args: T::deserialize(&mut deserializer)?,
context: RunnablePerItemContext { context: RunnablePerItemContext {
shell_manager: shell_manager.clone(), shell_manager: shell_manager.clone(),
name: self.name_tag, name: self.name_tag.clone(),
}, },
callback, callback,
}) })
@ -73,6 +69,7 @@ impl CallInfo {
#[get = "pub(crate)"] #[get = "pub(crate)"]
pub struct CommandArgs { pub struct CommandArgs {
pub host: Arc<Mutex<dyn Host>>, pub host: Arc<Mutex<dyn Host>>,
pub ctrl_c: Arc<AtomicBool>,
pub shell_manager: ShellManager, pub shell_manager: ShellManager,
pub call_info: UnevaluatedCallInfo, pub call_info: UnevaluatedCallInfo,
pub input: InputStream, pub input: InputStream,
@ -82,6 +79,7 @@ pub struct CommandArgs {
#[get = "pub(crate)"] #[get = "pub(crate)"]
pub struct RawCommandArgs { pub struct RawCommandArgs {
pub host: Arc<Mutex<dyn Host>>, pub host: Arc<Mutex<dyn Host>>,
pub ctrl_c: Arc<AtomicBool>,
pub shell_manager: ShellManager, pub shell_manager: ShellManager,
pub call_info: UnevaluatedCallInfo, pub call_info: UnevaluatedCallInfo,
} }
@ -90,6 +88,7 @@ impl RawCommandArgs {
pub fn with_input(self, input: Vec<Tagged<Value>>) -> CommandArgs { pub fn with_input(self, input: Vec<Tagged<Value>>) -> CommandArgs {
CommandArgs { CommandArgs {
host: self.host, host: self.host,
ctrl_c: self.ctrl_c,
shell_manager: self.shell_manager, shell_manager: self.shell_manager,
call_info: self.call_info, call_info: self.call_info,
input: input.into(), input: input.into(),
@ -109,12 +108,14 @@ impl CommandArgs {
registry: &registry::CommandRegistry, registry: &registry::CommandRegistry,
) -> Result<EvaluatedWholeStreamCommandArgs, ShellError> { ) -> Result<EvaluatedWholeStreamCommandArgs, ShellError> {
let host = self.host.clone(); let host = self.host.clone();
let ctrl_c = self.ctrl_c.clone();
let shell_manager = self.shell_manager.clone(); let shell_manager = self.shell_manager.clone();
let input = self.input; let input = self.input;
let call_info = self.call_info.evaluate(registry, &Scope::empty())?; let call_info = self.call_info.evaluate(registry, &Scope::empty())?;
Ok(EvaluatedWholeStreamCommandArgs::new( Ok(EvaluatedWholeStreamCommandArgs::new(
host, host,
ctrl_c,
shell_manager, shell_manager,
call_info, call_info,
input, input,
@ -127,12 +128,13 @@ impl CommandArgs {
callback: fn(T, RunnableContext) -> Result<OutputStream, ShellError>, callback: fn(T, RunnableContext) -> Result<OutputStream, ShellError>,
) -> Result<RunnableArgs<T>, ShellError> { ) -> Result<RunnableArgs<T>, ShellError> {
let shell_manager = self.shell_manager.clone(); let shell_manager = self.shell_manager.clone();
let source_map = self.call_info.source_map.clone();
let host = self.host.clone(); let host = self.host.clone();
let ctrl_c = self.ctrl_c.clone();
let args = self.evaluate_once(registry)?; let args = self.evaluate_once(registry)?;
let call_info = args.call_info.clone();
let (input, args) = args.split(); let (input, args) = args.split();
let name_tag = args.call_info.name_tag; let name_tag = args.call_info.name_tag;
let mut deserializer = ConfigDeserializer::from_call_info(args.call_info); let mut deserializer = ConfigDeserializer::from_call_info(call_info);
Ok(RunnableArgs { Ok(RunnableArgs {
args: T::deserialize(&mut deserializer)?, args: T::deserialize(&mut deserializer)?,
@ -141,8 +143,8 @@ impl CommandArgs {
commands: registry.clone(), commands: registry.clone(),
shell_manager, shell_manager,
name: name_tag, name: name_tag,
source_map,
host, host,
ctrl_c,
}, },
callback, callback,
}) })
@ -155,17 +157,20 @@ impl CommandArgs {
) -> Result<RunnableRawArgs<T>, ShellError> { ) -> Result<RunnableRawArgs<T>, ShellError> {
let raw_args = RawCommandArgs { let raw_args = RawCommandArgs {
host: self.host.clone(), host: self.host.clone(),
ctrl_c: self.ctrl_c.clone(),
shell_manager: self.shell_manager.clone(), shell_manager: self.shell_manager.clone(),
call_info: self.call_info.clone(), call_info: self.call_info.clone(),
}; };
let shell_manager = self.shell_manager.clone(); let shell_manager = self.shell_manager.clone();
let source_map = self.call_info.source_map.clone();
let host = self.host.clone(); let host = self.host.clone();
let ctrl_c = self.ctrl_c.clone();
let args = self.evaluate_once(registry)?; let args = self.evaluate_once(registry)?;
let call_info = args.call_info.clone();
let (input, args) = args.split(); let (input, args) = args.split();
let name_tag = args.call_info.name_tag; let name_tag = args.call_info.name_tag;
let mut deserializer = ConfigDeserializer::from_call_info(args.call_info); let mut deserializer = ConfigDeserializer::from_call_info(call_info.clone());
Ok(RunnableRawArgs { Ok(RunnableRawArgs {
args: T::deserialize(&mut deserializer)?, args: T::deserialize(&mut deserializer)?,
@ -174,8 +179,8 @@ impl CommandArgs {
commands: registry.clone(), commands: registry.clone(),
shell_manager, shell_manager,
name: name_tag, name: name_tag,
source_map,
host, host,
ctrl_c,
}, },
raw_args, raw_args,
callback, callback,
@ -198,18 +203,12 @@ pub struct RunnableContext {
pub input: InputStream, pub input: InputStream,
pub shell_manager: ShellManager, pub shell_manager: ShellManager,
pub host: Arc<Mutex<dyn Host>>, pub host: Arc<Mutex<dyn Host>>,
pub ctrl_c: Arc<AtomicBool>,
pub commands: CommandRegistry, pub commands: CommandRegistry,
pub source_map: SourceMap,
pub name: Tag, pub name: Tag,
} }
impl RunnableContext { impl RunnableContext {
pub fn expect_command(&self, name: &str) -> Arc<Command> {
self.commands
.get_command(name)
.expect(&format!("Expected command {}", name))
}
pub fn get_command(&self, name: &str) -> Option<Arc<Command>> { pub fn get_command(&self, name: &str) -> Option<Arc<Command>> {
self.commands.get_command(name) self.commands.get_command(name)
} }
@ -270,6 +269,7 @@ impl Deref for EvaluatedWholeStreamCommandArgs {
impl EvaluatedWholeStreamCommandArgs { impl EvaluatedWholeStreamCommandArgs {
pub fn new( pub fn new(
host: Arc<Mutex<dyn Host>>, host: Arc<Mutex<dyn Host>>,
ctrl_c: Arc<AtomicBool>,
shell_manager: ShellManager, shell_manager: ShellManager,
call_info: CallInfo, call_info: CallInfo,
input: impl Into<InputStream>, input: impl Into<InputStream>,
@ -277,6 +277,7 @@ impl EvaluatedWholeStreamCommandArgs {
EvaluatedWholeStreamCommandArgs { EvaluatedWholeStreamCommandArgs {
args: EvaluatedCommandArgs { args: EvaluatedCommandArgs {
host, host,
ctrl_c,
shell_manager, shell_manager,
call_info, call_info,
}, },
@ -285,7 +286,7 @@ impl EvaluatedWholeStreamCommandArgs {
} }
pub fn name_tag(&self) -> Tag { pub fn name_tag(&self) -> Tag {
self.args.call_info.name_tag self.args.call_info.name_tag.clone()
} }
pub fn parts(self) -> (InputStream, registry::EvaluatedArgs) { pub fn parts(self) -> (InputStream, registry::EvaluatedArgs) {
@ -317,12 +318,14 @@ impl Deref for EvaluatedFilterCommandArgs {
impl EvaluatedFilterCommandArgs { impl EvaluatedFilterCommandArgs {
pub fn new( pub fn new(
host: Arc<Mutex<dyn Host>>, host: Arc<Mutex<dyn Host>>,
ctrl_c: Arc<AtomicBool>,
shell_manager: ShellManager, shell_manager: ShellManager,
call_info: CallInfo, call_info: CallInfo,
) -> EvaluatedFilterCommandArgs { ) -> EvaluatedFilterCommandArgs {
EvaluatedFilterCommandArgs { EvaluatedFilterCommandArgs {
args: EvaluatedCommandArgs { args: EvaluatedCommandArgs {
host, host,
ctrl_c,
shell_manager, shell_manager,
call_info, call_info,
}, },
@ -334,6 +337,7 @@ impl EvaluatedFilterCommandArgs {
#[get = "pub(crate)"] #[get = "pub(crate)"]
pub struct EvaluatedCommandArgs { pub struct EvaluatedCommandArgs {
pub host: Arc<Mutex<dyn Host>>, pub host: Arc<Mutex<dyn Host>>,
pub ctrl_c: Arc<AtomicBool>,
pub shell_manager: ShellManager, pub shell_manager: ShellManager,
pub call_info: CallInfo, pub call_info: CallInfo,
} }
@ -376,7 +380,6 @@ impl EvaluatedCommandArgs {
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
pub enum CommandAction { pub enum CommandAction {
ChangePath(String), ChangePath(String),
AddAnchorLocation(Uuid, AnchorLocation),
Exit, Exit,
EnterShell(String), EnterShell(String),
EnterValueShell(Tagged<Value>), EnterValueShell(Tagged<Value>),
@ -390,9 +393,6 @@ impl ToDebug for CommandAction {
fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result { fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result {
match self { match self {
CommandAction::ChangePath(s) => write!(f, "action:change-path={}", s), CommandAction::ChangePath(s) => write!(f, "action:change-path={}", s),
CommandAction::AddAnchorLocation(u, source) => {
write!(f, "action:add-span-source={}@{:?}", u, source)
}
CommandAction::Exit => write!(f, "action:exit"), CommandAction::Exit => write!(f, "action:exit"),
CommandAction::EnterShell(s) => write!(f, "action:enter-shell={}", s), CommandAction::EnterShell(s) => write!(f, "action:enter-shell={}", s),
CommandAction::EnterValueShell(t) => { CommandAction::EnterValueShell(t) => {
@ -564,6 +564,7 @@ impl Command {
) -> OutputStream { ) -> OutputStream {
let raw_args = RawCommandArgs { let raw_args = RawCommandArgs {
host: args.host, host: args.host,
ctrl_c: args.ctrl_c,
shell_manager: args.shell_manager, shell_manager: args.shell_manager,
call_info: args.call_info, call_info: args.call_info,
}; };
@ -633,6 +634,7 @@ impl WholeStreamCommand for FnFilterCommand {
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
let CommandArgs { let CommandArgs {
host, host,
ctrl_c,
shell_manager, shell_manager,
call_info, call_info,
input, input,
@ -650,8 +652,12 @@ impl WholeStreamCommand for FnFilterCommand {
Ok(args) => args, Ok(args) => args,
}; };
let args = let args = EvaluatedFilterCommandArgs::new(
EvaluatedFilterCommandArgs::new(host.clone(), shell_manager.clone(), call_info); host.clone(),
ctrl_c.clone(),
shell_manager.clone(),
call_info,
);
match func(args) { match func(args) {
Err(err) => return OutputStream::from(vec![Err(err)]).values, Err(err) => return OutputStream::from(vec![Err(err)]).values,

View File

@ -58,7 +58,7 @@ pub fn config(
}: ConfigArgs, }: ConfigArgs,
RunnableContext { name, .. }: RunnableContext, RunnableContext { name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
let name_span = name; let name_span = name.clone();
let configuration = if let Some(supplied) = load { let configuration = if let Some(supplied) = load {
Some(supplied.item().clone()) Some(supplied.item().clone())

View File

@ -39,27 +39,27 @@ where
{ {
let mut indexmap = IndexMap::new(); let mut indexmap = IndexMap::new();
indexmap.insert("year".to_string(), Value::int(dt.year()).tagged(tag)); indexmap.insert("year".to_string(), Value::int(dt.year()).tagged(&tag));
indexmap.insert("month".to_string(), Value::int(dt.month()).tagged(tag)); indexmap.insert("month".to_string(), Value::int(dt.month()).tagged(&tag));
indexmap.insert("day".to_string(), Value::int(dt.day()).tagged(tag)); indexmap.insert("day".to_string(), Value::int(dt.day()).tagged(&tag));
indexmap.insert("hour".to_string(), Value::int(dt.hour()).tagged(tag)); indexmap.insert("hour".to_string(), Value::int(dt.hour()).tagged(&tag));
indexmap.insert("minute".to_string(), Value::int(dt.minute()).tagged(tag)); indexmap.insert("minute".to_string(), Value::int(dt.minute()).tagged(&tag));
indexmap.insert("second".to_string(), Value::int(dt.second()).tagged(tag)); indexmap.insert("second".to_string(), Value::int(dt.second()).tagged(&tag));
let tz = dt.offset(); let tz = dt.offset();
indexmap.insert( indexmap.insert(
"timezone".to_string(), "timezone".to_string(),
Value::string(format!("{}", tz)).tagged(tag), Value::string(format!("{}", tz)).tagged(&tag),
); );
Value::Row(Dictionary::from(indexmap)).tagged(tag) Value::Row(Dictionary::from(indexmap)).tagged(&tag)
} }
pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> { pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?; let args = args.evaluate_once(registry)?;
let mut date_out = VecDeque::new(); let mut date_out = VecDeque::new();
let tag = args.call_info.name_tag; let tag = args.call_info.name_tag.clone();
let value = if args.has("utc") { let value = if args.has("utc") {
let utc: DateTime<Utc> = Utc::now(); let utc: DateTime<Utc> = Utc::now();

View File

@ -35,7 +35,7 @@ fn run(
_registry: &CommandRegistry, _registry: &CommandRegistry,
_raw_args: &RawCommandArgs, _raw_args: &RawCommandArgs,
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
let name = call_info.name_tag; let name = call_info.name_tag.clone();
let mut output = String::new(); let mut output = String::new();

View File

@ -67,7 +67,7 @@ impl PerItemCommand for Enter {
let full_path = std::path::PathBuf::from(cwd); let full_path = std::path::PathBuf::from(cwd);
let (file_extension, contents, contents_tag, anchor_location) = let (file_extension, contents, contents_tag) =
crate::commands::open::fetch( crate::commands::open::fetch(
&full_path, &full_path,
&location_clone, &location_clone,
@ -75,18 +75,9 @@ impl PerItemCommand for Enter {
) )
.await.unwrap(); .await.unwrap();
if contents_tag.anchor != uuid::Uuid::nil() {
// If we have loaded something, track its source
yield ReturnSuccess::action(CommandAction::AddAnchorLocation(
contents_tag.anchor,
anchor_location,
));
}
match contents { match contents {
Value::Primitive(Primitive::String(_)) => { Value::Primitive(Primitive::String(_)) => {
let tagged_contents = contents.tagged(contents_tag); let tagged_contents = contents.tagged(&contents_tag);
if let Some(extension) = file_extension { if let Some(extension) = file_extension {
let command_name = format!("from-{}", extension); let command_name = format!("from-{}", extension);
@ -95,6 +86,7 @@ impl PerItemCommand for Enter {
{ {
let new_args = RawCommandArgs { let new_args = RawCommandArgs {
host: raw_args.host, host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager, shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo { call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call { args: crate::parser::hir::Call {
@ -103,7 +95,6 @@ impl PerItemCommand for Enter {
named: None, named: None,
}, },
source: raw_args.call_info.source, source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag, name_tag: raw_args.call_info.name_tag,
}, },
}; };
@ -123,7 +114,7 @@ impl PerItemCommand for Enter {
yield Ok(ReturnSuccess::Action(CommandAction::EnterValueShell( yield Ok(ReturnSuccess::Action(CommandAction::EnterValueShell(
Tagged { Tagged {
item, item,
tag: contents_tag, tag: contents_tag.clone(),
}))); })));
} }
x => yield x, x => yield x,

View File

@ -37,22 +37,22 @@ pub fn get_environment(tag: Tag) -> Result<Tagged<Value>, Box<dyn std::error::Er
let mut indexmap = IndexMap::new(); let mut indexmap = IndexMap::new();
let path = std::env::current_dir()?; let path = std::env::current_dir()?;
indexmap.insert("cwd".to_string(), Value::path(path).tagged(tag)); indexmap.insert("cwd".to_string(), Value::path(path).tagged(&tag));
if let Some(home) = dirs::home_dir() { if let Some(home) = dirs::home_dir() {
indexmap.insert("home".to_string(), Value::path(home).tagged(tag)); indexmap.insert("home".to_string(), Value::path(home).tagged(&tag));
} }
let config = config::default_path()?; let config = config::default_path()?;
indexmap.insert("config".to_string(), Value::path(config).tagged(tag)); indexmap.insert("config".to_string(), Value::path(config).tagged(&tag));
let history = History::path(); let history = History::path();
indexmap.insert("history".to_string(), Value::path(history).tagged(tag)); indexmap.insert("history".to_string(), Value::path(history).tagged(&tag));
let temp = std::env::temp_dir(); let temp = std::env::temp_dir();
indexmap.insert("temp".to_string(), Value::path(temp).tagged(tag)); indexmap.insert("temp".to_string(), Value::path(temp).tagged(&tag));
let mut dict = TaggedDictBuilder::new(tag); let mut dict = TaggedDictBuilder::new(&tag);
for v in std::env::vars() { for v in std::env::vars() {
dict.insert(v.0, Value::string(v.1)); dict.insert(v.0, Value::string(v.1));
} }
@ -60,14 +60,14 @@ pub fn get_environment(tag: Tag) -> Result<Tagged<Value>, Box<dyn std::error::Er
indexmap.insert("vars".to_string(), dict.into_tagged_value()); indexmap.insert("vars".to_string(), dict.into_tagged_value());
} }
Ok(Value::Row(Dictionary::from(indexmap)).tagged(tag)) Ok(Value::Row(Dictionary::from(indexmap)).tagged(&tag))
} }
pub fn env(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> { pub fn env(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?; let args = args.evaluate_once(registry)?;
let mut env_out = VecDeque::new(); let mut env_out = VecDeque::new();
let tag = args.call_info.name_tag; let tag = args.call_info.name_tag.clone();
let value = get_environment(tag)?; let value = get_environment(tag)?;
env_out.push_back(value); env_out.push_back(value);

View File

@ -10,7 +10,6 @@ use mime::Mime;
use std::path::PathBuf; use std::path::PathBuf;
use std::str::FromStr; use std::str::FromStr;
use surf::mime; use surf::mime;
use uuid::Uuid;
pub struct Fetch; pub struct Fetch;
impl PerItemCommand for Fetch { impl PerItemCommand for Fetch {
@ -48,7 +47,7 @@ fn run(
ShellError::labeled_error( ShellError::labeled_error(
"No file or directory specified", "No file or directory specified",
"for command", "for command",
call_info.name_tag, &call_info.name_tag,
) )
})? { })? {
file => file, file => file,
@ -68,7 +67,7 @@ fn run(
yield Err(e); yield Err(e);
return; return;
} }
let (file_extension, contents, contents_tag, anchor_location) = result.unwrap(); let (file_extension, contents, contents_tag) = result.unwrap();
let file_extension = if has_raw { let file_extension = if has_raw {
None None
@ -78,21 +77,14 @@ fn run(
file_extension.or(path_str.split('.').last().map(String::from)) file_extension.or(path_str.split('.').last().map(String::from))
}; };
if contents_tag.anchor != uuid::Uuid::nil() { let tagged_contents = contents.tagged(&contents_tag);
// If we have loaded something, track its source
yield ReturnSuccess::action(CommandAction::AddAnchorLocation(
contents_tag.anchor,
anchor_location,
));
}
let tagged_contents = contents.tagged(contents_tag);
if let Some(extension) = file_extension { if let Some(extension) = file_extension {
let command_name = format!("from-{}", extension); let command_name = format!("from-{}", extension);
if let Some(converter) = registry.get_command(&command_name) { if let Some(converter) = registry.get_command(&command_name) {
let new_args = RawCommandArgs { let new_args = RawCommandArgs {
host: raw_args.host, host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager, shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo { call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call { args: crate::parser::hir::Call {
@ -101,7 +93,6 @@ fn run(
named: None named: None
}, },
source: raw_args.call_info.source, source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag, name_tag: raw_args.call_info.name_tag,
} }
}; };
@ -115,7 +106,7 @@ fn run(
} }
} }
Ok(ReturnSuccess::Value(Tagged { item, .. })) => { Ok(ReturnSuccess::Value(Tagged { item, .. })) => {
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag })); yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() }));
} }
x => yield x, x => yield x,
} }
@ -131,10 +122,7 @@ fn run(
Ok(stream.to_output_stream()) Ok(stream.to_output_stream())
} }
pub async fn fetch( pub async fn fetch(location: &str, span: Span) -> Result<(Option<String>, Value, Tag), ShellError> {
location: &str,
span: Span,
) -> Result<(Option<String>, Value, Tag, AnchorLocation), ShellError> {
if let Err(_) = url::Url::parse(location) { if let Err(_) = url::Url::parse(location) {
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
"Incomplete or incorrect url", "Incomplete or incorrect url",
@ -160,9 +148,8 @@ pub async fn fetch(
})?), })?),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::Url(location.to_string())),
}, },
AnchorLocation::Url(location.to_string()),
)), )),
(mime::APPLICATION, mime::JSON) => Ok(( (mime::APPLICATION, mime::JSON) => Ok((
Some("json".to_string()), Some("json".to_string()),
@ -175,9 +162,8 @@ pub async fn fetch(
})?), })?),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::Url(location.to_string())),
}, },
AnchorLocation::Url(location.to_string()),
)), )),
(mime::APPLICATION, mime::OCTET_STREAM) => { (mime::APPLICATION, mime::OCTET_STREAM) => {
let buf: Vec<u8> = r.body_bytes().await.map_err(|_| { let buf: Vec<u8> = r.body_bytes().await.map_err(|_| {
@ -192,9 +178,8 @@ pub async fn fetch(
Value::binary(buf), Value::binary(buf),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::Url(location.to_string())),
}, },
AnchorLocation::Url(location.to_string()),
)) ))
} }
(mime::IMAGE, mime::SVG) => Ok(( (mime::IMAGE, mime::SVG) => Ok((
@ -208,9 +193,8 @@ pub async fn fetch(
})?), })?),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::Url(location.to_string())),
}, },
AnchorLocation::Url(location.to_string()),
)), )),
(mime::IMAGE, image_ty) => { (mime::IMAGE, image_ty) => {
let buf: Vec<u8> = r.body_bytes().await.map_err(|_| { let buf: Vec<u8> = r.body_bytes().await.map_err(|_| {
@ -225,9 +209,8 @@ pub async fn fetch(
Value::binary(buf), Value::binary(buf),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::Url(location.to_string())),
}, },
AnchorLocation::Url(location.to_string()),
)) ))
} }
(mime::TEXT, mime::HTML) => Ok(( (mime::TEXT, mime::HTML) => Ok((
@ -241,9 +224,8 @@ pub async fn fetch(
})?), })?),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::Url(location.to_string())),
}, },
AnchorLocation::Url(location.to_string()),
)), )),
(mime::TEXT, mime::PLAIN) => { (mime::TEXT, mime::PLAIN) => {
let path_extension = url::Url::parse(location) let path_extension = url::Url::parse(location)
@ -268,9 +250,8 @@ pub async fn fetch(
})?), })?),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::Url(location.to_string())),
}, },
AnchorLocation::Url(location.to_string()),
)) ))
} }
(ty, sub_ty) => Ok(( (ty, sub_ty) => Ok((
@ -278,9 +259,8 @@ pub async fn fetch(
Value::string(format!("Not yet supported MIME type: {} {}", ty, sub_ty)), Value::string(format!("Not yet supported MIME type: {} {}", ty, sub_ty)),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::Url(location.to_string())),
}, },
AnchorLocation::Url(location.to_string()),
)), )),
} }
} }
@ -289,9 +269,8 @@ pub async fn fetch(
Value::string(format!("No content type found")), Value::string(format!("No content type found")),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::Url(location.to_string())),
}, },
AnchorLocation::Url(location.to_string()),
)), )),
}, },
Err(_) => { Err(_) => {

View File

@ -33,7 +33,7 @@ fn bson_array(input: &Vec<Bson>, tag: Tag) -> Result<Vec<Tagged<Value>>, ShellEr
let mut out = vec![]; let mut out = vec![];
for value in input { for value in input {
out.push(convert_bson_value_to_nu_value(value, tag)?); out.push(convert_bson_value_to_nu_value(value, &tag)?);
} }
Ok(out) Ok(out)
@ -46,100 +46,100 @@ fn convert_bson_value_to_nu_value(
let tag = tag.into(); let tag = tag.into();
Ok(match v { Ok(match v {
Bson::FloatingPoint(n) => Value::Primitive(Primitive::from(*n)).tagged(tag), Bson::FloatingPoint(n) => Value::Primitive(Primitive::from(*n)).tagged(&tag),
Bson::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag), Bson::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(&tag),
Bson::Array(a) => Value::Table(bson_array(a, tag)?).tagged(tag), Bson::Array(a) => Value::Table(bson_array(a, tag.clone())?).tagged(&tag),
Bson::Document(doc) => { Bson::Document(doc) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(tag.clone());
for (k, v) in doc.iter() { for (k, v) in doc.iter() {
collected.insert_tagged(k.clone(), convert_bson_value_to_nu_value(v, tag)?); collected.insert_tagged(k.clone(), convert_bson_value_to_nu_value(v, &tag)?);
} }
collected.into_tagged_value() collected.into_tagged_value()
} }
Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(tag), Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(&tag),
Bson::Null => Value::Primitive(Primitive::Nothing).tagged(tag), Bson::Null => Value::Primitive(Primitive::Nothing).tagged(&tag),
Bson::RegExp(r, opts) => { Bson::RegExp(r, opts) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged( collected.insert_tagged(
"$regex".to_string(), "$regex".to_string(),
Value::Primitive(Primitive::String(String::from(r))).tagged(tag), Value::Primitive(Primitive::String(String::from(r))).tagged(&tag),
); );
collected.insert_tagged( collected.insert_tagged(
"$options".to_string(), "$options".to_string(),
Value::Primitive(Primitive::String(String::from(opts))).tagged(tag), Value::Primitive(Primitive::String(String::from(opts))).tagged(&tag),
); );
collected.into_tagged_value() collected.into_tagged_value()
} }
Bson::I32(n) => Value::number(n).tagged(tag), Bson::I32(n) => Value::number(n).tagged(&tag),
Bson::I64(n) => Value::number(n).tagged(tag), Bson::I64(n) => Value::number(n).tagged(&tag),
Bson::Decimal128(n) => { Bson::Decimal128(n) => {
// TODO: this really isn't great, and we should update this to do a higher // TODO: this really isn't great, and we should update this to do a higher
// fidelity translation // fidelity translation
let decimal = BigDecimal::from_str(&format!("{}", n)).map_err(|_| { let decimal = BigDecimal::from_str(&format!("{}", n)).map_err(|_| {
ShellError::range_error( ShellError::range_error(
ExpectedRange::BigDecimal, ExpectedRange::BigDecimal,
&n.tagged(tag), &n.tagged(&tag),
format!("converting BSON Decimal128 to BigDecimal"), format!("converting BSON Decimal128 to BigDecimal"),
) )
})?; })?;
Value::Primitive(Primitive::Decimal(decimal)).tagged(tag) Value::Primitive(Primitive::Decimal(decimal)).tagged(&tag)
} }
Bson::JavaScriptCode(js) => { Bson::JavaScriptCode(js) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged( collected.insert_tagged(
"$javascript".to_string(), "$javascript".to_string(),
Value::Primitive(Primitive::String(String::from(js))).tagged(tag), Value::Primitive(Primitive::String(String::from(js))).tagged(&tag),
); );
collected.into_tagged_value() collected.into_tagged_value()
} }
Bson::JavaScriptCodeWithScope(js, doc) => { Bson::JavaScriptCodeWithScope(js, doc) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged( collected.insert_tagged(
"$javascript".to_string(), "$javascript".to_string(),
Value::Primitive(Primitive::String(String::from(js))).tagged(tag), Value::Primitive(Primitive::String(String::from(js))).tagged(&tag),
); );
collected.insert_tagged( collected.insert_tagged(
"$scope".to_string(), "$scope".to_string(),
convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag)?, convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag.clone())?,
); );
collected.into_tagged_value() collected.into_tagged_value()
} }
Bson::TimeStamp(ts) => { Bson::TimeStamp(ts) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged("$timestamp".to_string(), Value::number(ts).tagged(tag)); collected.insert_tagged("$timestamp".to_string(), Value::number(ts).tagged(&tag));
collected.into_tagged_value() collected.into_tagged_value()
} }
Bson::Binary(bst, bytes) => { Bson::Binary(bst, bytes) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged( collected.insert_tagged(
"$binary_subtype".to_string(), "$binary_subtype".to_string(),
match bst { match bst {
BinarySubtype::UserDefined(u) => Value::number(u), BinarySubtype::UserDefined(u) => Value::number(u),
_ => Value::Primitive(Primitive::String(binary_subtype_to_string(*bst))), _ => Value::Primitive(Primitive::String(binary_subtype_to_string(*bst))),
} }
.tagged(tag), .tagged(&tag),
); );
collected.insert_tagged( collected.insert_tagged(
"$binary".to_string(), "$binary".to_string(),
Value::Primitive(Primitive::Binary(bytes.to_owned())).tagged(tag), Value::Primitive(Primitive::Binary(bytes.to_owned())).tagged(&tag),
); );
collected.into_tagged_value() collected.into_tagged_value()
} }
Bson::ObjectId(obj_id) => { Bson::ObjectId(obj_id) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged( collected.insert_tagged(
"$object_id".to_string(), "$object_id".to_string(),
Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(tag), Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(&tag),
); );
collected.into_tagged_value() collected.into_tagged_value()
} }
Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(tag), Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(&tag),
Bson::Symbol(s) => { Bson::Symbol(s) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged( collected.insert_tagged(
"$symbol".to_string(), "$symbol".to_string(),
Value::Primitive(Primitive::String(String::from(s))).tagged(tag), Value::Primitive(Primitive::String(String::from(s))).tagged(&tag),
); );
collected.into_tagged_value() collected.into_tagged_value()
} }
@ -208,13 +208,13 @@ fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
let value_tag = value.tag(); let value_tag = value.tag();
match value.item { match value.item {
Value::Primitive(Primitive::Binary(vb)) => Value::Primitive(Primitive::Binary(vb)) =>
match from_bson_bytes_to_value(vb, tag) { match from_bson_bytes_to_value(vb, tag.clone()) {
Ok(x) => yield ReturnSuccess::value(x), Ok(x) => yield ReturnSuccess::value(x),
Err(_) => { Err(_) => {
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as BSON", "Could not parse as BSON",
"input cannot be parsed as BSON", "input cannot be parsed as BSON",
tag, tag.clone(),
"value originates from here", "value originates from here",
value_tag, value_tag,
)) ))
@ -223,7 +223,7 @@ fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
tag, tag.clone(),
"value originates from here", "value originates from here",
value_tag, value_tag,
)), )),

View File

@ -62,12 +62,12 @@ pub fn from_csv_string_to_value(
if let Some(row_values) = iter.next() { if let Some(row_values) = iter.next() {
let row_values = row_values?; let row_values = row_values?;
let mut row = TaggedDictBuilder::new(tag); let mut row = TaggedDictBuilder::new(tag.clone());
for (idx, entry) in row_values.iter().enumerate() { for (idx, entry) in row_values.iter().enumerate() {
row.insert_tagged( row.insert_tagged(
fields.get(idx).unwrap(), fields.get(idx).unwrap(),
Value::Primitive(Primitive::String(String::from(entry))).tagged(tag), Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag),
); );
} }
@ -77,7 +77,7 @@ pub fn from_csv_string_to_value(
} }
} }
Ok(Tagged::from_item(Value::Table(rows), tag)) Ok(Value::Table(rows).tagged(&tag))
} }
fn from_csv( fn from_csv(
@ -96,7 +96,7 @@ fn from_csv(
for value in values { for value in values {
let value_tag = value.tag(); let value_tag = value.tag();
latest_tag = Some(value_tag); latest_tag = Some(value_tag.clone());
match value.item { match value.item {
Value::Primitive(Primitive::String(s)) => { Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s); concat_string.push_str(&s);
@ -105,15 +105,15 @@ fn from_csv(
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
name_tag, name_tag.clone(),
"value originates from here", "value originates from here",
value_tag, value_tag.clone(),
)), )),
} }
} }
match from_csv_string_to_value(concat_string, skip_headers, name_tag) { match from_csv_string_to_value(concat_string, skip_headers, name_tag.clone()) {
Ok(x) => match x { Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => { Tagged { item: Value::Table(list), .. } => {
for l in list { for l in list {
@ -126,9 +126,9 @@ fn from_csv(
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as CSV", "Could not parse as CSV",
"input cannot be parsed as CSV", "input cannot be parsed as CSV",
name_tag, name_tag.clone(),
"value originates from here", "value originates from here",
last_tag, last_tag.clone(),
)) ))
} , } ,
} }

View File

@ -45,10 +45,13 @@ fn convert_ini_top_to_nu_value(
tag: impl Into<Tag>, tag: impl Into<Tag>,
) -> Tagged<Value> { ) -> Tagged<Value> {
let tag = tag.into(); let tag = tag.into();
let mut top_level = TaggedDictBuilder::new(tag); let mut top_level = TaggedDictBuilder::new(tag.clone());
for (key, value) in v.iter() { for (key, value) in v.iter() {
top_level.insert_tagged(key.clone(), convert_ini_second_to_nu_value(value, tag)); top_level.insert_tagged(
key.clone(),
convert_ini_second_to_nu_value(value, tag.clone()),
);
} }
top_level.into_tagged_value() top_level.into_tagged_value()
@ -75,7 +78,7 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
for value in values { for value in values {
let value_tag = value.tag(); let value_tag = value.tag();
latest_tag = Some(value_tag); latest_tag = Some(value_tag.clone());
match value.item { match value.item {
Value::Primitive(Primitive::String(s)) => { Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s); concat_string.push_str(&s);
@ -84,15 +87,15 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
tag, &tag,
"value originates from here", "value originates from here",
value_tag, &value_tag,
)), )),
} }
} }
match from_ini_string_to_value(concat_string, tag) { match from_ini_string_to_value(concat_string, tag.clone()) {
Ok(x) => match x { Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => { Tagged { item: Value::Table(list), .. } => {
for l in list { for l in list {
@ -105,7 +108,7 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as INI", "Could not parse as INI",
"input cannot be parsed as INI", "input cannot be parsed as INI",
tag, &tag,
"value originates from here", "value originates from here",
last_tag, last_tag,
)) ))

View File

@ -35,24 +35,24 @@ fn convert_json_value_to_nu_value(v: &serde_hjson::Value, tag: impl Into<Tag>) -
let tag = tag.into(); let tag = tag.into();
match v { match v {
serde_hjson::Value::Null => Value::Primitive(Primitive::Nothing).tagged(tag), serde_hjson::Value::Null => Value::Primitive(Primitive::Nothing).tagged(&tag),
serde_hjson::Value::Bool(b) => Value::boolean(*b).tagged(tag), serde_hjson::Value::Bool(b) => Value::boolean(*b).tagged(&tag),
serde_hjson::Value::F64(n) => Value::number(n).tagged(tag), serde_hjson::Value::F64(n) => Value::number(n).tagged(&tag),
serde_hjson::Value::U64(n) => Value::number(n).tagged(tag), serde_hjson::Value::U64(n) => Value::number(n).tagged(&tag),
serde_hjson::Value::I64(n) => Value::number(n).tagged(tag), serde_hjson::Value::I64(n) => Value::number(n).tagged(&tag),
serde_hjson::Value::String(s) => { serde_hjson::Value::String(s) => {
Value::Primitive(Primitive::String(String::from(s))).tagged(tag) Value::Primitive(Primitive::String(String::from(s))).tagged(&tag)
} }
serde_hjson::Value::Array(a) => Value::Table( serde_hjson::Value::Array(a) => Value::Table(
a.iter() a.iter()
.map(|x| convert_json_value_to_nu_value(x, tag)) .map(|x| convert_json_value_to_nu_value(x, &tag))
.collect(), .collect(),
) )
.tagged(tag), .tagged(tag),
serde_hjson::Value::Object(o) => { serde_hjson::Value::Object(o) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(&tag);
for (k, v) in o.iter() { for (k, v) in o.iter() {
collected.insert_tagged(k.clone(), convert_json_value_to_nu_value(v, tag)); collected.insert_tagged(k.clone(), convert_json_value_to_nu_value(v, &tag));
} }
collected.into_tagged_value() collected.into_tagged_value()
@ -82,7 +82,7 @@ fn from_json(
for value in values { for value in values {
let value_tag = value.tag(); let value_tag = value.tag();
latest_tag = Some(value_tag); latest_tag = Some(value_tag.clone());
match value.item { match value.item {
Value::Primitive(Primitive::String(s)) => { Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s); concat_string.push_str(&s);
@ -91,9 +91,9 @@ fn from_json(
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
name_tag, &name_tag,
"value originates from here", "value originates from here",
value_tag, &value_tag,
)), )),
} }
@ -106,15 +106,15 @@ fn from_json(
continue; continue;
} }
match from_json_string_to_value(json_str.to_string(), name_tag) { match from_json_string_to_value(json_str.to_string(), &name_tag) {
Ok(x) => Ok(x) =>
yield ReturnSuccess::value(x), yield ReturnSuccess::value(x),
Err(_) => { Err(_) => {
if let Some(last_tag) = latest_tag { if let Some(ref last_tag) = latest_tag {
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Could nnot parse as JSON", "Could nnot parse as JSON",
"input cannot be parsed as JSON", "input cannot be parsed as JSON",
name_tag, &name_tag,
"value originates from here", "value originates from here",
last_tag)) last_tag))
} }
@ -122,7 +122,7 @@ fn from_json(
} }
} }
} else { } else {
match from_json_string_to_value(concat_string, name_tag) { match from_json_string_to_value(concat_string, name_tag.clone()) {
Ok(x) => Ok(x) =>
match x { match x {
Tagged { item: Value::Table(list), .. } => { Tagged { item: Value::Table(list), .. } => {

View File

@ -138,7 +138,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
let value_tag = value.tag(); let value_tag = value.tag();
match value.item { match value.item {
Value::Primitive(Primitive::Binary(vb)) => Value::Primitive(Primitive::Binary(vb)) =>
match from_sqlite_bytes_to_value(vb, tag) { match from_sqlite_bytes_to_value(vb, tag.clone()) {
Ok(x) => match x { Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => { Tagged { item: Value::Table(list), .. } => {
for l in list { for l in list {
@ -151,7 +151,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as SQLite", "Could not parse as SQLite",
"input cannot be parsed as SQLite", "input cannot be parsed as SQLite",
tag, &tag,
"value originates from here", "value originates from here",
value_tag, value_tag,
)) ))
@ -160,7 +160,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
tag, &tag,
"value originates from here", "value originates from here",
value_tag, value_tag,
)), )),

View File

@ -36,7 +36,7 @@ pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into<Tag>) -> T
toml::Value::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag), toml::Value::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag),
toml::Value::Array(a) => Value::Table( toml::Value::Array(a) => Value::Table(
a.iter() a.iter()
.map(|x| convert_toml_value_to_nu_value(x, tag)) .map(|x| convert_toml_value_to_nu_value(x, &tag))
.collect(), .collect(),
) )
.tagged(tag), .tagged(tag),
@ -44,10 +44,10 @@ pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into<Tag>) -> T
Value::Primitive(Primitive::String(dt.to_string())).tagged(tag) Value::Primitive(Primitive::String(dt.to_string())).tagged(tag)
} }
toml::Value::Table(t) => { toml::Value::Table(t) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(&tag);
for (k, v) in t.iter() { for (k, v) in t.iter() {
collected.insert_tagged(k.clone(), convert_toml_value_to_nu_value(v, tag)); collected.insert_tagged(k.clone(), convert_toml_value_to_nu_value(v, &tag));
} }
collected.into_tagged_value() collected.into_tagged_value()
@ -79,7 +79,7 @@ pub fn from_toml(
for value in values { for value in values {
let value_tag = value.tag(); let value_tag = value.tag();
latest_tag = Some(value_tag); latest_tag = Some(value_tag.clone());
match value.item { match value.item {
Value::Primitive(Primitive::String(s)) => { Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s); concat_string.push_str(&s);
@ -88,15 +88,15 @@ pub fn from_toml(
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
tag, &tag,
"value originates from here", "value originates from here",
value_tag, &value_tag,
)), )),
} }
} }
match from_toml_string_to_value(concat_string, tag) { match from_toml_string_to_value(concat_string, tag.clone()) {
Ok(x) => match x { Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => { Tagged { item: Value::Table(list), .. } => {
for l in list { for l in list {
@ -109,7 +109,7 @@ pub fn from_toml(
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as TOML", "Could not parse as TOML",
"input cannot be parsed as TOML", "input cannot be parsed as TOML",
tag, &tag,
"value originates from here", "value originates from here",
last_tag, last_tag,
)) ))

View File

@ -63,12 +63,12 @@ pub fn from_tsv_string_to_value(
if let Some(row_values) = iter.next() { if let Some(row_values) = iter.next() {
let row_values = row_values?; let row_values = row_values?;
let mut row = TaggedDictBuilder::new(tag); let mut row = TaggedDictBuilder::new(&tag);
for (idx, entry) in row_values.iter().enumerate() { for (idx, entry) in row_values.iter().enumerate() {
row.insert_tagged( row.insert_tagged(
fields.get(idx).unwrap(), fields.get(idx).unwrap(),
Value::Primitive(Primitive::String(String::from(entry))).tagged(tag), Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag),
); );
} }
@ -78,7 +78,7 @@ pub fn from_tsv_string_to_value(
} }
} }
Ok(Tagged::from_item(Value::Table(rows), tag)) Ok(Value::Table(rows).tagged(&tag))
} }
fn from_tsv( fn from_tsv(
@ -97,7 +97,7 @@ fn from_tsv(
for value in values { for value in values {
let value_tag = value.tag(); let value_tag = value.tag();
latest_tag = Some(value_tag); latest_tag = Some(value_tag.clone());
match value.item { match value.item {
Value::Primitive(Primitive::String(s)) => { Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s); concat_string.push_str(&s);
@ -106,15 +106,15 @@ fn from_tsv(
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
name_tag, &name_tag,
"value originates from here", "value originates from here",
value_tag, &value_tag,
)), )),
} }
} }
match from_tsv_string_to_value(concat_string, skip_headers, name_tag) { match from_tsv_string_to_value(concat_string, skip_headers, name_tag.clone()) {
Ok(x) => match x { Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => { Tagged { item: Value::Table(list), .. } => {
for l in list { for l in list {
@ -127,9 +127,9 @@ fn from_tsv(
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as TSV", "Could not parse as TSV",
"input cannot be parsed as TSV", "input cannot be parsed as TSV",
name_tag, &name_tag,
"value originates from here", "value originates from here",
last_tag, &last_tag,
)) ))
} , } ,
} }

View File

@ -39,7 +39,7 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
for value in values { for value in values {
let value_tag = value.tag(); let value_tag = value.tag();
latest_tag = Some(value_tag); latest_tag = Some(value_tag.clone());
match value.item { match value.item {
Value::Primitive(Primitive::String(s)) => { Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s); concat_string.push_str(&s);
@ -47,9 +47,9 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
tag, &tag,
"value originates from here", "value originates from here",
value_tag, &value_tag,
)), )),
} }

View File

@ -34,7 +34,7 @@ fn from_node_to_value<'a, 'd>(n: &roxmltree::Node<'a, 'd>, tag: impl Into<Tag>)
let mut children_values = vec![]; let mut children_values = vec![];
for c in n.children() { for c in n.children() {
children_values.push(from_node_to_value(&c, tag)); children_values.push(from_node_to_value(&c, &tag));
} }
let children_values: Vec<Tagged<Value>> = children_values let children_values: Vec<Tagged<Value>> = children_values
@ -94,7 +94,7 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
for value in values { for value in values {
let value_tag = value.tag(); let value_tag = value.tag();
latest_tag = Some(value_tag); latest_tag = Some(value_tag.clone());
match value.item { match value.item {
Value::Primitive(Primitive::String(s)) => { Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s); concat_string.push_str(&s);
@ -103,15 +103,15 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
tag, &tag,
"value originates from here", "value originates from here",
value_tag, &value_tag,
)), )),
} }
} }
match from_xml_string_to_value(concat_string, tag) { match from_xml_string_to_value(concat_string, tag.clone()) {
Ok(x) => match x { Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => { Tagged { item: Value::Table(list), .. } => {
for l in list { for l in list {
@ -124,9 +124,9 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as XML", "Could not parse as XML",
"input cannot be parsed as XML", "input cannot be parsed as XML",
tag, &tag,
"value originates from here", "value originates from here",
last_tag, &last_tag,
)) ))
} , } ,
} }

View File

@ -64,17 +64,17 @@ fn convert_yaml_value_to_nu_value(v: &serde_yaml::Value, tag: impl Into<Tag>) ->
serde_yaml::Value::String(s) => Value::string(s).tagged(tag), serde_yaml::Value::String(s) => Value::string(s).tagged(tag),
serde_yaml::Value::Sequence(a) => Value::Table( serde_yaml::Value::Sequence(a) => Value::Table(
a.iter() a.iter()
.map(|x| convert_yaml_value_to_nu_value(x, tag)) .map(|x| convert_yaml_value_to_nu_value(x, &tag))
.collect(), .collect(),
) )
.tagged(tag), .tagged(tag),
serde_yaml::Value::Mapping(t) => { serde_yaml::Value::Mapping(t) => {
let mut collected = TaggedDictBuilder::new(tag); let mut collected = TaggedDictBuilder::new(&tag);
for (k, v) in t.iter() { for (k, v) in t.iter() {
match k { match k {
serde_yaml::Value::String(k) => { serde_yaml::Value::String(k) => {
collected.insert_tagged(k.clone(), convert_yaml_value_to_nu_value(v, tag)); collected.insert_tagged(k.clone(), convert_yaml_value_to_nu_value(v, &tag));
} }
_ => unimplemented!("Unknown key type"), _ => unimplemented!("Unknown key type"),
} }
@ -108,7 +108,7 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
for value in values { for value in values {
let value_tag = value.tag(); let value_tag = value.tag();
latest_tag = Some(value_tag); latest_tag = Some(value_tag.clone());
match value.item { match value.item {
Value::Primitive(Primitive::String(s)) => { Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s); concat_string.push_str(&s);
@ -117,15 +117,15 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
tag, &tag,
"value originates from here", "value originates from here",
value_tag, &value_tag,
)), )),
} }
} }
match from_yaml_string_to_value(concat_string, tag) { match from_yaml_string_to_value(concat_string, tag.clone()) {
Ok(x) => match x { Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => { Tagged { item: Value::Table(list), .. } => {
for l in list { for l in list {
@ -138,9 +138,9 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as YAML", "Could not parse as YAML",
"input cannot be parsed as YAML", "input cannot be parsed as YAML",
tag, &tag,
"value originates from here", "value originates from here",
last_tag, &last_tag,
)) ))
} , } ,
} }

View File

@ -82,7 +82,7 @@ pub fn get_column_path(
item: Value::Primitive(Primitive::Path(_)), item: Value::Primitive(Primitive::Path(_)),
.. ..
} => Ok(obj.clone()), } => Ok(obj.clone()),
_ => Ok(Value::nothing().tagged(obj.tag)), _ => Ok(Value::nothing().tagged(&obj.tag)),
}, },
} }
} }

View File

@ -26,7 +26,7 @@ impl PerItemCommand for Help {
_raw_args: &RawCommandArgs, _raw_args: &RawCommandArgs,
_input: Tagged<Value>, _input: Tagged<Value>,
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
let tag = call_info.name_tag; let tag = &call_info.name_tag;
match call_info.args.nth(0) { match call_info.args.nth(0) {
Some(Tagged { Some(Tagged {

View File

@ -58,7 +58,7 @@ fn lines(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
result.push_back(Err(ShellError::labeled_error_with_secondary( result.push_back(Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
tag, &tag,
"value originates from here", "value originates from here",
v.tag(), v.tag(),
))); )));

View File

@ -34,5 +34,5 @@ impl WholeStreamCommand for LS {
} }
fn ls(LsArgs { path }: LsArgs, context: RunnableContext) -> Result<OutputStream, ShellError> { fn ls(LsArgs { path }: LsArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
context.shell_manager.ls(path, context.name) context.shell_manager.ls(path, &context)
} }

View File

@ -7,7 +7,6 @@ use crate::parser::hir::SyntaxShape;
use crate::parser::registry::Signature; use crate::parser::registry::Signature;
use crate::prelude::*; use crate::prelude::*;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use uuid::Uuid;
pub struct Open; pub struct Open;
impl PerItemCommand for Open { impl PerItemCommand for Open {
@ -49,7 +48,7 @@ fn run(
ShellError::labeled_error( ShellError::labeled_error(
"No file or directory specified", "No file or directory specified",
"for command", "for command",
call_info.name_tag, &call_info.name_tag,
) )
})? { })? {
file => file, file => file,
@ -69,7 +68,7 @@ fn run(
yield Err(e); yield Err(e);
return; return;
} }
let (file_extension, contents, contents_tag, anchor_location) = result.unwrap(); let (file_extension, contents, contents_tag) = result.unwrap();
let file_extension = if has_raw { let file_extension = if has_raw {
None None
@ -79,21 +78,14 @@ fn run(
file_extension.or(path_str.split('.').last().map(String::from)) file_extension.or(path_str.split('.').last().map(String::from))
}; };
if contents_tag.anchor != uuid::Uuid::nil() { let tagged_contents = contents.tagged(&contents_tag);
// If we have loaded something, track its source
yield ReturnSuccess::action(CommandAction::AddAnchorLocation(
contents_tag.anchor,
anchor_location,
));
}
let tagged_contents = contents.tagged(contents_tag);
if let Some(extension) = file_extension { if let Some(extension) = file_extension {
let command_name = format!("from-{}", extension); let command_name = format!("from-{}", extension);
if let Some(converter) = registry.get_command(&command_name) { if let Some(converter) = registry.get_command(&command_name) {
let new_args = RawCommandArgs { let new_args = RawCommandArgs {
host: raw_args.host, host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager, shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo { call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call { args: crate::parser::hir::Call {
@ -102,7 +94,6 @@ fn run(
named: None named: None
}, },
source: raw_args.call_info.source, source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag, name_tag: raw_args.call_info.name_tag,
} }
}; };
@ -116,7 +107,7 @@ fn run(
} }
} }
Ok(ReturnSuccess::Value(Tagged { item, .. })) => { Ok(ReturnSuccess::Value(Tagged { item, .. })) => {
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag })); yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() }));
} }
x => yield x, x => yield x,
} }
@ -136,7 +127,7 @@ pub async fn fetch(
cwd: &PathBuf, cwd: &PathBuf,
location: &str, location: &str,
span: Span, span: Span,
) -> Result<(Option<String>, Value, Tag, AnchorLocation), ShellError> { ) -> Result<(Option<String>, Value, Tag), ShellError> {
let mut cwd = cwd.clone(); let mut cwd = cwd.clone();
cwd.push(Path::new(location)); cwd.push(Path::new(location));
@ -149,9 +140,8 @@ pub async fn fetch(
Value::string(s), Value::string(s),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::File(cwd.to_string_lossy().to_string())),
}, },
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)), )),
Err(_) => { Err(_) => {
//Non utf8 data. //Non utf8 data.
@ -168,18 +158,20 @@ pub async fn fetch(
Value::string(s), Value::string(s),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
}, },
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)), )),
Err(_) => Ok(( Err(_) => Ok((
None, None,
Value::binary(bytes), Value::binary(bytes),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
}, },
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)), )),
} }
} else { } else {
@ -188,9 +180,10 @@ pub async fn fetch(
Value::binary(bytes), Value::binary(bytes),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
}, },
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)) ))
} }
} }
@ -206,18 +199,20 @@ pub async fn fetch(
Value::string(s), Value::string(s),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
}, },
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)), )),
Err(_) => Ok(( Err(_) => Ok((
None, None,
Value::binary(bytes), Value::binary(bytes),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
}, },
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)), )),
} }
} else { } else {
@ -226,9 +221,10 @@ pub async fn fetch(
Value::binary(bytes), Value::binary(bytes),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
}, },
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)) ))
} }
} }
@ -237,9 +233,10 @@ pub async fn fetch(
Value::binary(bytes), Value::binary(bytes),
Tag { Tag {
span, span,
anchor: Uuid::new_v4(), anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
}, },
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)), )),
} }
} }

View File

@ -104,7 +104,7 @@ pub fn pivot(args: PivotArgs, context: RunnableContext) -> Result<OutputStream,
for desc in descs { for desc in descs {
let mut column_num: usize = 0; let mut column_num: usize = 0;
let mut dict = TaggedDictBuilder::new(context.name); let mut dict = TaggedDictBuilder::new(&context.name);
if !args.ignore_titles && !args.header_row { if !args.ignore_titles && !args.header_row {
dict.insert(headers[column_num].clone(), Value::string(desc.clone())); dict.insert(headers[column_num].clone(), Value::string(desc.clone()));

View File

@ -54,17 +54,20 @@ fn run(
registry: &CommandRegistry, registry: &CommandRegistry,
raw_args: &RawCommandArgs, raw_args: &RawCommandArgs,
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
let name_tag = call_info.name_tag.clone();
let call_info = call_info.clone(); let call_info = call_info.clone();
let path = match call_info.args.nth(0).ok_or_else(|| { let path =
ShellError::labeled_error("No url specified", "for command", call_info.name_tag) match call_info.args.nth(0).ok_or_else(|| {
})? { ShellError::labeled_error("No url specified", "for command", &name_tag)
file => file.clone(), })? {
}; file => file.clone(),
let body = match call_info.args.nth(1).ok_or_else(|| { };
ShellError::labeled_error("No body specified", "for command", call_info.name_tag) let body =
})? { match call_info.args.nth(1).ok_or_else(|| {
file => file.clone(), ShellError::labeled_error("No body specified", "for command", &name_tag)
}; })? {
file => file.clone(),
};
let path_str = path.as_string()?; let path_str = path.as_string()?;
let path_span = path.tag(); let path_span = path.tag();
let has_raw = call_info.args.has("raw"); let has_raw = call_info.args.has("raw");
@ -79,7 +82,7 @@ fn run(
let headers = get_headers(&call_info)?; let headers = get_headers(&call_info)?;
let stream = async_stream! { let stream = async_stream! {
let (file_extension, contents, contents_tag, anchor_location) = let (file_extension, contents, contents_tag) =
post(&path_str, &body, user, password, &headers, path_span, &registry, &raw_args).await.unwrap(); post(&path_str, &body, user, password, &headers, path_span, &registry, &raw_args).await.unwrap();
let file_extension = if has_raw { let file_extension = if has_raw {
@ -90,21 +93,14 @@ fn run(
file_extension.or(path_str.split('.').last().map(String::from)) file_extension.or(path_str.split('.').last().map(String::from))
}; };
if contents_tag.anchor != uuid::Uuid::nil() { let tagged_contents = contents.tagged(&contents_tag);
// If we have loaded something, track its source
yield ReturnSuccess::action(CommandAction::AddAnchorLocation(
contents_tag.anchor,
anchor_location,
));
}
let tagged_contents = contents.tagged(contents_tag);
if let Some(extension) = file_extension { if let Some(extension) = file_extension {
let command_name = format!("from-{}", extension); let command_name = format!("from-{}", extension);
if let Some(converter) = registry.get_command(&command_name) { if let Some(converter) = registry.get_command(&command_name) {
let new_args = RawCommandArgs { let new_args = RawCommandArgs {
host: raw_args.host, host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager, shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo { call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call { args: crate::parser::hir::Call {
@ -113,7 +109,6 @@ fn run(
named: None named: None
}, },
source: raw_args.call_info.source, source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag, name_tag: raw_args.call_info.name_tag,
} }
}; };
@ -127,7 +122,7 @@ fn run(
} }
} }
Ok(ReturnSuccess::Value(Tagged { item, .. })) => { Ok(ReturnSuccess::Value(Tagged { item, .. })) => {
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag })); yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() }));
} }
x => yield x, x => yield x,
} }
@ -207,7 +202,7 @@ pub async fn post(
tag: Tag, tag: Tag,
registry: &CommandRegistry, registry: &CommandRegistry,
raw_args: &RawCommandArgs, raw_args: &RawCommandArgs,
) -> Result<(Option<String>, Value, Tag, AnchorLocation), ShellError> { ) -> Result<(Option<String>, Value, Tag), ShellError> {
let registry = registry.clone(); let registry = registry.clone();
let raw_args = raw_args.clone(); let raw_args = raw_args.clone();
if location.starts_with("http:") || location.starts_with("https:") { if location.starts_with("http:") || location.starts_with("https:") {
@ -248,6 +243,7 @@ pub async fn post(
if let Some(converter) = registry.get_command("to-json") { if let Some(converter) = registry.get_command("to-json") {
let new_args = RawCommandArgs { let new_args = RawCommandArgs {
host: raw_args.host, host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager, shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo { call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call { args: crate::parser::hir::Call {
@ -256,7 +252,6 @@ pub async fn post(
named: None, named: None,
}, },
source: raw_args.call_info.source, source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag, name_tag: raw_args.call_info.name_tag,
}, },
}; };
@ -280,7 +275,7 @@ pub async fn post(
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
"Save could not successfully save", "Save could not successfully save",
"unexpected data during save", "unexpected data during save",
*tag, tag,
)); ));
} }
} }
@ -296,7 +291,7 @@ pub async fn post(
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
"Could not automatically convert table", "Could not automatically convert table",
"needs manual conversion", "needs manual conversion",
*tag, tag,
)); ));
} }
} }
@ -312,11 +307,13 @@ pub async fn post(
ShellError::labeled_error( ShellError::labeled_error(
"Could not load text from remote url", "Could not load text from remote url",
"could not load", "could not load",
tag, &tag,
) )
})?), })?),
tag, Tag {
AnchorLocation::Url(location.to_string()), anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)), )),
(mime::APPLICATION, mime::JSON) => Ok(( (mime::APPLICATION, mime::JSON) => Ok((
Some("json".to_string()), Some("json".to_string()),
@ -324,25 +321,29 @@ pub async fn post(
ShellError::labeled_error( ShellError::labeled_error(
"Could not load text from remote url", "Could not load text from remote url",
"could not load", "could not load",
tag, &tag,
) )
})?), })?),
tag, Tag {
AnchorLocation::Url(location.to_string()), anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)), )),
(mime::APPLICATION, mime::OCTET_STREAM) => { (mime::APPLICATION, mime::OCTET_STREAM) => {
let buf: Vec<u8> = r.body_bytes().await.map_err(|_| { let buf: Vec<u8> = r.body_bytes().await.map_err(|_| {
ShellError::labeled_error( ShellError::labeled_error(
"Could not load binary file", "Could not load binary file",
"could not load", "could not load",
tag, &tag,
) )
})?; })?;
Ok(( Ok((
None, None,
Value::binary(buf), Value::binary(buf),
tag, Tag {
AnchorLocation::Url(location.to_string()), anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)) ))
} }
(mime::IMAGE, image_ty) => { (mime::IMAGE, image_ty) => {
@ -350,14 +351,16 @@ pub async fn post(
ShellError::labeled_error( ShellError::labeled_error(
"Could not load image file", "Could not load image file",
"could not load", "could not load",
tag, &tag,
) )
})?; })?;
Ok(( Ok((
Some(image_ty.to_string()), Some(image_ty.to_string()),
Value::binary(buf), Value::binary(buf),
tag, Tag {
AnchorLocation::Url(location.to_string()), anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)) ))
} }
(mime::TEXT, mime::HTML) => Ok(( (mime::TEXT, mime::HTML) => Ok((
@ -366,11 +369,13 @@ pub async fn post(
ShellError::labeled_error( ShellError::labeled_error(
"Could not load text from remote url", "Could not load text from remote url",
"could not load", "could not load",
tag, &tag,
) )
})?), })?),
tag, Tag {
AnchorLocation::Url(location.to_string()), anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)), )),
(mime::TEXT, mime::PLAIN) => { (mime::TEXT, mime::PLAIN) => {
let path_extension = url::Url::parse(location) let path_extension = url::Url::parse(location)
@ -390,11 +395,13 @@ pub async fn post(
ShellError::labeled_error( ShellError::labeled_error(
"Could not load text from remote url", "Could not load text from remote url",
"could not load", "could not load",
tag, &tag,
) )
})?), })?),
tag, Tag {
AnchorLocation::Url(location.to_string()), anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)) ))
} }
(ty, sub_ty) => Ok(( (ty, sub_ty) => Ok((
@ -403,16 +410,20 @@ pub async fn post(
"Not yet supported MIME type: {} {}", "Not yet supported MIME type: {} {}",
ty, sub_ty ty, sub_ty
)), )),
tag, Tag {
AnchorLocation::Url(location.to_string()), anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)), )),
} }
} }
None => Ok(( None => Ok((
None, None,
Value::string(format!("No content type found")), Value::string(format!("No content type found")),
tag, Tag {
AnchorLocation::Url(location.to_string()), anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)), )),
}, },
Err(_) => { Err(_) => {

View File

@ -119,33 +119,32 @@ fn save(
input, input,
name, name,
shell_manager, shell_manager,
source_map,
host, host,
ctrl_c,
commands: registry, commands: registry,
.. ..
}: RunnableContext, }: RunnableContext,
raw_args: RawCommandArgs, raw_args: RawCommandArgs,
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
let mut full_path = PathBuf::from(shell_manager.path()); let mut full_path = PathBuf::from(shell_manager.path());
let name_tag = name; let name_tag = name.clone();
let source_map = source_map.clone();
let stream = async_stream! { let stream = async_stream! {
let input: Vec<Tagged<Value>> = input.values.collect().await; let input: Vec<Tagged<Value>> = input.values.collect().await;
if path.is_none() { if path.is_none() {
// If there is no filename, check the metadata for the anchor filename // If there is no filename, check the metadata for the anchor filename
if input.len() > 0 { if input.len() > 0 {
let anchor = input[0].anchor(); let anchor = input[0].anchor();
match source_map.get(&anchor) { match anchor {
Some(path) => match path { Some(path) => match path {
AnchorLocation::File(file) => { AnchorLocation::File(file) => {
full_path.push(Path::new(file)); full_path.push(Path::new(&file));
} }
_ => { _ => {
yield Err(ShellError::labeled_error( yield Err(ShellError::labeled_error(
"Save requires a filepath (1)", "Save requires a filepath (1)",
"needs path", "needs path",
name_tag, name_tag.clone(),
)); ));
} }
}, },
@ -153,7 +152,7 @@ fn save(
yield Err(ShellError::labeled_error( yield Err(ShellError::labeled_error(
"Save requires a filepath (2)", "Save requires a filepath (2)",
"needs path", "needs path",
name_tag, name_tag.clone(),
)); ));
} }
} }
@ -161,7 +160,7 @@ fn save(
yield Err(ShellError::labeled_error( yield Err(ShellError::labeled_error(
"Save requires a filepath (3)", "Save requires a filepath (3)",
"needs path", "needs path",
name_tag, name_tag.clone(),
)); ));
} }
} else { } else {
@ -179,6 +178,7 @@ fn save(
if let Some(converter) = registry.get_command(&command_name) { if let Some(converter) = registry.get_command(&command_name) {
let new_args = RawCommandArgs { let new_args = RawCommandArgs {
host, host,
ctrl_c,
shell_manager, shell_manager,
call_info: UnevaluatedCallInfo { call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call { args: crate::parser::hir::Call {
@ -187,7 +187,6 @@ fn save(
named: None named: None
}, },
source: raw_args.call_info.source, source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag, name_tag: raw_args.call_info.name_tag,
} }
}; };

View File

@ -2,6 +2,7 @@ use crate::commands::WholeStreamCommand;
use crate::data::TaggedDictBuilder; use crate::data::TaggedDictBuilder;
use crate::errors::ShellError; use crate::errors::ShellError;
use crate::prelude::*; use crate::prelude::*;
use std::sync::atomic::Ordering;
pub struct Shells; pub struct Shells;
@ -32,14 +33,14 @@ fn shells(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream
let tag = args.call_info.name_tag; let tag = args.call_info.name_tag;
for (index, shell) in args.shell_manager.shells.lock().unwrap().iter().enumerate() { for (index, shell) in args.shell_manager.shells.lock().unwrap().iter().enumerate() {
let mut dict = TaggedDictBuilder::new(tag); let mut dict = TaggedDictBuilder::new(&tag);
if index == args.shell_manager.current_shell { if index == (*args.shell_manager.current_shell).load(Ordering::SeqCst) {
dict.insert(" ", "X".to_string()); dict.insert(" ", "X".to_string());
} else { } else {
dict.insert(" ", " ".to_string()); dict.insert(" ", " ".to_string());
} }
dict.insert("name", shell.name(&args.call_info.source_map)); dict.insert("name", shell.name());
dict.insert("path", shell.path()); dict.insert("path", shell.path());
shells_out.push_back(dict.into_tagged_value()); shells_out.push_back(dict.into_tagged_value());

View File

@ -37,7 +37,7 @@ fn size(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream,
_ => Err(ShellError::labeled_error_with_secondary( _ => Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
tag, &tag,
"value originates from here", "value originates from here",
v.tag(), v.tag(),
)), )),

View File

@ -94,7 +94,7 @@ fn split_column(
_ => Err(ShellError::labeled_error_with_secondary( _ => Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
name, &name,
"value originates from here", "value originates from here",
v.tag(), v.tag(),
)), )),

View File

@ -60,7 +60,7 @@ fn split_row(
result.push_back(Err(ShellError::labeled_error_with_secondary( result.push_back(Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline", "Expected a string from pipeline",
"requires string input", "requires string input",
name, &name,
"value originates from here", "value originates from here",
v.tag(), v.tag(),
))); )));

View File

@ -5,16 +5,13 @@ use crate::prelude::*;
pub struct Table; pub struct Table;
#[derive(Deserialize)]
pub struct TableArgs {}
impl WholeStreamCommand for Table { impl WholeStreamCommand for Table {
fn name(&self) -> &str { fn name(&self) -> &str {
"table" "table"
} }
fn signature(&self) -> Signature { fn signature(&self) -> Signature {
Signature::build("table") Signature::build("table").named("start_number", SyntaxShape::Number)
} }
fn usage(&self) -> &str { fn usage(&self) -> &str {
@ -26,16 +23,29 @@ impl WholeStreamCommand for Table {
args: CommandArgs, args: CommandArgs,
registry: &CommandRegistry, registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> { ) -> Result<OutputStream, ShellError> {
args.process(registry, table)?.run() table(args, registry)
} }
} }
pub fn table(_args: TableArgs, context: RunnableContext) -> Result<OutputStream, ShellError> { fn table(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;
let stream = async_stream! { let stream = async_stream! {
let input: Vec<Tagged<Value>> = context.input.into_vec().await; let host = args.host.clone();
let start_number = match args.get("start_number") {
Some(Tagged { item: Value::Primitive(Primitive::Int(i)), .. }) => {
i.to_usize().unwrap()
}
_ => {
0
}
};
let input: Vec<Tagged<Value>> = args.input.into_vec().await;
if input.len() > 0 { if input.len() > 0 {
let mut host = context.host.lock().unwrap(); let mut host = host.lock().unwrap();
let view = TableView::from_list(&input); let view = TableView::from_list(&input, start_number);
if let Some(view) = view { if let Some(view) = view {
handle_unexpected(&mut *host, |host| crate::format::print_view(&view, host)); handle_unexpected(&mut *host, |host| crate::format::print_view(&view, host));
} }

View File

@ -28,7 +28,6 @@ impl WholeStreamCommand for Tags {
} }
fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream, ShellError> { fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let source_map = args.call_info.source_map.clone();
Ok(args Ok(args
.input .input
.values .values
@ -42,7 +41,7 @@ fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream,
dict.insert("end", Value::int(span.end() as i64)); dict.insert("end", Value::int(span.end() as i64));
tags.insert_tagged("span", dict.into_tagged_value()); tags.insert_tagged("span", dict.into_tagged_value());
match source_map.get(&anchor) { match anchor {
Some(AnchorLocation::File(source)) => { Some(AnchorLocation::File(source)) => {
tags.insert("anchor", Value::string(source)); tags.insert("anchor", Value::string(source));
} }

View File

@ -46,7 +46,7 @@ pub fn value_to_bson_value(v: &Tagged<Value>) -> Result<Bson, ShellError> {
Value::Primitive(Primitive::BeginningOfStream) => Bson::Null, Value::Primitive(Primitive::BeginningOfStream) => Bson::Null,
Value::Primitive(Primitive::Decimal(d)) => Bson::FloatingPoint(d.to_f64().unwrap()), Value::Primitive(Primitive::Decimal(d)) => Bson::FloatingPoint(d.to_f64().unwrap()),
Value::Primitive(Primitive::Int(i)) => { Value::Primitive(Primitive::Int(i)) => {
Bson::I64(i.tagged(v.tag).coerce_into("converting to BSON")?) Bson::I64(i.tagged(&v.tag).coerce_into("converting to BSON")?)
} }
Value::Primitive(Primitive::Nothing) => Bson::Null, Value::Primitive(Primitive::Nothing) => Bson::Null,
Value::Primitive(Primitive::String(s)) => Bson::String(s.clone()), Value::Primitive(Primitive::String(s)) => Bson::String(s.clone()),
@ -58,6 +58,7 @@ pub fn value_to_bson_value(v: &Tagged<Value>) -> Result<Bson, ShellError> {
.collect::<Result<_, _>>()?, .collect::<Result<_, _>>()?,
), ),
Value::Block(_) => Bson::Null, Value::Block(_) => Bson::Null,
Value::Error(e) => return Err(e.clone()),
Value::Primitive(Primitive::Binary(b)) => Bson::Binary(BinarySubtype::Generic, b.clone()), Value::Primitive(Primitive::Binary(b)) => Bson::Binary(BinarySubtype::Generic, b.clone()),
Value::Row(o) => object_value_to_bson(o)?, Value::Row(o) => object_value_to_bson(o)?,
}) })
@ -170,7 +171,7 @@ fn get_binary_subtype<'a>(tagged_value: &'a Tagged<Value>) -> Result<BinarySubty
_ => unreachable!(), _ => unreachable!(),
}), }),
Value::Primitive(Primitive::Int(i)) => Ok(BinarySubtype::UserDefined( Value::Primitive(Primitive::Int(i)) => Ok(BinarySubtype::UserDefined(
i.tagged(tagged_value.tag) i.tagged(&tagged_value.tag)
.coerce_into("converting to BSON binary subtype")?, .coerce_into("converting to BSON binary subtype")?,
)), )),
_ => Err(ShellError::type_error( _ => Err(ShellError::type_error(
@ -207,12 +208,12 @@ fn bson_value_to_bytes(bson: Bson, tag: Tag) -> Result<Vec<u8>, ShellError> {
Bson::Array(a) => { Bson::Array(a) => {
for v in a.into_iter() { for v in a.into_iter() {
match v { match v {
Bson::Document(d) => shell_encode_document(&mut out, d, tag)?, Bson::Document(d) => shell_encode_document(&mut out, d, tag.clone())?,
_ => { _ => {
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
format!("All top level values must be Documents, got {:?}", v), format!("All top level values must be Documents, got {:?}", v),
"requires BSON-compatible document", "requires BSON-compatible document",
tag, &tag,
)) ))
} }
} }
@ -237,7 +238,7 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
let input: Vec<Tagged<Value>> = args.input.values.collect().await; let input: Vec<Tagged<Value>> = args.input.values.collect().await;
let to_process_input = if input.len() > 1 { let to_process_input = if input.len() > 1 {
let tag = input[0].tag; let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ] vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 { } else if input.len() == 1 {
input input
@ -248,14 +249,14 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
for value in to_process_input { for value in to_process_input {
match value_to_bson_value(&value) { match value_to_bson_value(&value) {
Ok(bson_value) => { Ok(bson_value) => {
match bson_value_to_bytes(bson_value, name_tag) { match bson_value_to_bytes(bson_value, name_tag.clone()) {
Ok(x) => yield ReturnSuccess::value( Ok(x) => yield ReturnSuccess::value(
Value::binary(x).tagged(name_tag), Value::binary(x).tagged(&name_tag),
), ),
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with BSON-compatible structure.tag() from pipeline", "Expected a table with BSON-compatible structure.tag() from pipeline",
"requires BSON-compatible input", "requires BSON-compatible input",
name_tag, &name_tag,
"originates from here".to_string(), "originates from here".to_string(),
value.tag(), value.tag(),
)), )),
@ -264,7 +265,7 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
_ => yield Err(ShellError::labeled_error( _ => yield Err(ShellError::labeled_error(
"Expected a table with BSON-compatible structure from pipeline", "Expected a table with BSON-compatible structure from pipeline",
"requires BSON-compatible input", "requires BSON-compatible input",
name_tag)) &name_tag))
} }
} }
}; };

View File

@ -47,7 +47,7 @@ pub fn value_to_csv_value(v: &Tagged<Value>) -> Tagged<Value> {
Value::Block(_) => Value::Primitive(Primitive::Nothing), Value::Block(_) => Value::Primitive(Primitive::Nothing),
_ => Value::Primitive(Primitive::Nothing), _ => Value::Primitive(Primitive::Nothing),
} }
.tagged(v.tag) .tagged(v.tag.clone())
} }
fn to_string_helper(v: &Tagged<Value>) -> Result<String, ShellError> { fn to_string_helper(v: &Tagged<Value>) -> Result<String, ShellError> {
@ -61,7 +61,13 @@ fn to_string_helper(v: &Tagged<Value>) -> Result<String, ShellError> {
Value::Table(_) => return Ok(String::from("[Table]")), Value::Table(_) => return Ok(String::from("[Table]")),
Value::Row(_) => return Ok(String::from("[Row]")), Value::Row(_) => return Ok(String::from("[Row]")),
Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()), Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()),
_ => return Err(ShellError::labeled_error("Unexpected value", "", v.tag)), _ => {
return Err(ShellError::labeled_error(
"Unexpected value",
"",
v.tag.clone(),
))
}
} }
} }
@ -99,14 +105,14 @@ pub fn to_string(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
ShellError::labeled_error( ShellError::labeled_error(
"Could not convert record", "Could not convert record",
"original value", "original value",
tagged_value.tag, &tagged_value.tag,
) )
})?) })?)
.map_err(|_| { .map_err(|_| {
ShellError::labeled_error( ShellError::labeled_error(
"Could not convert record", "Could not convert record",
"original value", "original value",
tagged_value.tag, &tagged_value.tag,
) )
})?); })?);
} }
@ -136,14 +142,14 @@ pub fn to_string(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
ShellError::labeled_error( ShellError::labeled_error(
"Could not convert record", "Could not convert record",
"original value", "original value",
tagged_value.tag, &tagged_value.tag,
) )
})?) })?)
.map_err(|_| { .map_err(|_| {
ShellError::labeled_error( ShellError::labeled_error(
"Could not convert record", "Could not convert record",
"original value", "original value",
tagged_value.tag, &tagged_value.tag,
) )
})?); })?);
} }
@ -160,7 +166,7 @@ fn to_csv(
let input: Vec<Tagged<Value>> = input.values.collect().await; let input: Vec<Tagged<Value>> = input.values.collect().await;
let to_process_input = if input.len() > 1 { let to_process_input = if input.len() > 1 {
let tag = input[0].tag; let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ] vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 { } else if input.len() == 1 {
input input
@ -176,13 +182,13 @@ fn to_csv(
} else { } else {
x x
}; };
yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(name_tag)) yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(&name_tag))
} }
_ => { _ => {
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with CSV-compatible structure.tag() from pipeline", "Expected a table with CSV-compatible structure.tag() from pipeline",
"requires CSV-compatible input", "requires CSV-compatible input",
name_tag, &name_tag,
"originates from here".to_string(), "originates from here".to_string(),
value.tag(), value.tag(),
)) ))

View File

@ -42,7 +42,7 @@ pub fn value_to_json_value(v: &Tagged<Value>) -> Result<serde_json::Value, Shell
.unwrap(), .unwrap(),
), ),
Value::Primitive(Primitive::Int(i)) => serde_json::Value::Number(serde_json::Number::from( Value::Primitive(Primitive::Int(i)) => serde_json::Value::Number(serde_json::Number::from(
CoerceInto::<i64>::coerce_into(i.tagged(v.tag), "converting to JSON number")?, CoerceInto::<i64>::coerce_into(i.tagged(&v.tag), "converting to JSON number")?,
)), )),
Value::Primitive(Primitive::Nothing) => serde_json::Value::Null, Value::Primitive(Primitive::Nothing) => serde_json::Value::Null,
Value::Primitive(Primitive::Pattern(s)) => serde_json::Value::String(s.clone()), Value::Primitive(Primitive::Pattern(s)) => serde_json::Value::String(s.clone()),
@ -50,6 +50,7 @@ pub fn value_to_json_value(v: &Tagged<Value>) -> Result<serde_json::Value, Shell
Value::Primitive(Primitive::Path(s)) => serde_json::Value::String(s.display().to_string()), Value::Primitive(Primitive::Path(s)) => serde_json::Value::String(s.display().to_string()),
Value::Table(l) => serde_json::Value::Array(json_list(l)?), Value::Table(l) => serde_json::Value::Array(json_list(l)?),
Value::Error(e) => return Err(e.clone()),
Value::Block(_) => serde_json::Value::Null, Value::Block(_) => serde_json::Value::Null,
Value::Primitive(Primitive::Binary(b)) => serde_json::Value::Array( Value::Primitive(Primitive::Binary(b)) => serde_json::Value::Array(
b.iter() b.iter()
@ -85,7 +86,7 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
let input: Vec<Tagged<Value>> = args.input.values.collect().await; let input: Vec<Tagged<Value>> = args.input.values.collect().await;
let to_process_input = if input.len() > 1 { let to_process_input = if input.len() > 1 {
let tag = input[0].tag; let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ] vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 { } else if input.len() == 1 {
input input
@ -98,12 +99,12 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
Ok(json_value) => { Ok(json_value) => {
match serde_json::to_string(&json_value) { match serde_json::to_string(&json_value) {
Ok(x) => yield ReturnSuccess::value( Ok(x) => yield ReturnSuccess::value(
Value::Primitive(Primitive::String(x)).tagged(name_tag), Value::Primitive(Primitive::String(x)).tagged(&name_tag),
), ),
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with JSON-compatible structure.tag() from pipeline", "Expected a table with JSON-compatible structure.tag() from pipeline",
"requires JSON-compatible input", "requires JSON-compatible input",
name_tag, &name_tag,
"originates from here".to_string(), "originates from here".to_string(),
value.tag(), value.tag(),
)), )),
@ -112,7 +113,7 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
_ => yield Err(ShellError::labeled_error( _ => yield Err(ShellError::labeled_error(
"Expected a table with JSON-compatible structure from pipeline", "Expected a table with JSON-compatible structure from pipeline",
"requires JSON-compatible input", "requires JSON-compatible input",
name_tag)) &name_tag))
} }
} }
}; };

View File

@ -38,10 +38,10 @@ pub fn value_to_toml_value(v: &Tagged<Value>) -> Result<toml::Value, ShellError>
toml::Value::String("<Beginning of Stream>".to_string()) toml::Value::String("<Beginning of Stream>".to_string())
} }
Value::Primitive(Primitive::Decimal(f)) => { Value::Primitive(Primitive::Decimal(f)) => {
toml::Value::Float(f.tagged(v.tag).coerce_into("converting to TOML float")?) toml::Value::Float(f.tagged(&v.tag).coerce_into("converting to TOML float")?)
} }
Value::Primitive(Primitive::Int(i)) => { Value::Primitive(Primitive::Int(i)) => {
toml::Value::Integer(i.tagged(v.tag).coerce_into("converting to TOML integer")?) toml::Value::Integer(i.tagged(&v.tag).coerce_into("converting to TOML integer")?)
} }
Value::Primitive(Primitive::Nothing) => toml::Value::String("<Nothing>".to_string()), Value::Primitive(Primitive::Nothing) => toml::Value::String("<Nothing>".to_string()),
Value::Primitive(Primitive::Pattern(s)) => toml::Value::String(s.clone()), Value::Primitive(Primitive::Pattern(s)) => toml::Value::String(s.clone()),
@ -49,6 +49,7 @@ pub fn value_to_toml_value(v: &Tagged<Value>) -> Result<toml::Value, ShellError>
Value::Primitive(Primitive::Path(s)) => toml::Value::String(s.display().to_string()), Value::Primitive(Primitive::Path(s)) => toml::Value::String(s.display().to_string()),
Value::Table(l) => toml::Value::Array(collect_values(l)?), Value::Table(l) => toml::Value::Array(collect_values(l)?),
Value::Error(e) => return Err(e.clone()),
Value::Block(_) => toml::Value::String("<Block>".to_string()), Value::Block(_) => toml::Value::String("<Block>".to_string()),
Value::Primitive(Primitive::Binary(b)) => { Value::Primitive(Primitive::Binary(b)) => {
toml::Value::Array(b.iter().map(|x| toml::Value::Integer(*x as i64)).collect()) toml::Value::Array(b.iter().map(|x| toml::Value::Integer(*x as i64)).collect())
@ -80,7 +81,7 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
let input: Vec<Tagged<Value>> = args.input.values.collect().await; let input: Vec<Tagged<Value>> = args.input.values.collect().await;
let to_process_input = if input.len() > 1 { let to_process_input = if input.len() > 1 {
let tag = input[0].tag; let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ] vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 { } else if input.len() == 1 {
input input
@ -93,12 +94,12 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
Ok(toml_value) => { Ok(toml_value) => {
match toml::to_string(&toml_value) { match toml::to_string(&toml_value) {
Ok(x) => yield ReturnSuccess::value( Ok(x) => yield ReturnSuccess::value(
Value::Primitive(Primitive::String(x)).tagged(name_tag), Value::Primitive(Primitive::String(x)).tagged(&name_tag),
), ),
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with TOML-compatible structure.tag() from pipeline", "Expected a table with TOML-compatible structure.tag() from pipeline",
"requires TOML-compatible input", "requires TOML-compatible input",
name_tag, &name_tag,
"originates from here".to_string(), "originates from here".to_string(),
value.tag(), value.tag(),
)), )),
@ -107,7 +108,7 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
_ => yield Err(ShellError::labeled_error( _ => yield Err(ShellError::labeled_error(
"Expected a table with TOML-compatible structure from pipeline", "Expected a table with TOML-compatible structure from pipeline",
"requires TOML-compatible input", "requires TOML-compatible input",
name_tag)) &name_tag))
} }
} }
}; };

View File

@ -49,7 +49,7 @@ pub fn value_to_tsv_value(tagged_value: &Tagged<Value>) -> Tagged<Value> {
Value::Block(_) => Value::Primitive(Primitive::Nothing), Value::Block(_) => Value::Primitive(Primitive::Nothing),
_ => Value::Primitive(Primitive::Nothing), _ => Value::Primitive(Primitive::Nothing),
} }
.tagged(tagged_value.tag) .tagged(&tagged_value.tag)
} }
fn to_string_helper(tagged_value: &Tagged<Value>) -> Result<String, ShellError> { fn to_string_helper(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
@ -68,7 +68,7 @@ fn to_string_helper(tagged_value: &Tagged<Value>) -> Result<String, ShellError>
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
"Unexpected value", "Unexpected value",
"original value", "original value",
tagged_value.tag, &tagged_value.tag,
)) ))
} }
} }
@ -107,14 +107,14 @@ pub fn to_string(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
ShellError::labeled_error( ShellError::labeled_error(
"Could not convert record", "Could not convert record",
"original value", "original value",
tagged_value.tag, &tagged_value.tag,
) )
})?) })?)
.map_err(|_| { .map_err(|_| {
ShellError::labeled_error( ShellError::labeled_error(
"Could not convert record", "Could not convert record",
"original value", "original value",
tagged_value.tag, &tagged_value.tag,
) )
})?); })?);
} }
@ -144,14 +144,14 @@ pub fn to_string(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
ShellError::labeled_error( ShellError::labeled_error(
"Could not convert record", "Could not convert record",
"original value", "original value",
tagged_value.tag, &tagged_value.tag,
) )
})?) })?)
.map_err(|_| { .map_err(|_| {
ShellError::labeled_error( ShellError::labeled_error(
"Could not convert record", "Could not convert record",
"original value", "original value",
tagged_value.tag, &tagged_value.tag,
) )
})?); })?);
} }
@ -168,7 +168,7 @@ fn to_tsv(
let input: Vec<Tagged<Value>> = input.values.collect().await; let input: Vec<Tagged<Value>> = input.values.collect().await;
let to_process_input = if input.len() > 1 { let to_process_input = if input.len() > 1 {
let tag = input[0].tag; let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ] vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 { } else if input.len() == 1 {
input input
@ -184,13 +184,13 @@ fn to_tsv(
} else { } else {
x x
}; };
yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(name_tag)) yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(&name_tag))
} }
_ => { _ => {
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with TSV-compatible structure.tag() from pipeline", "Expected a table with TSV-compatible structure.tag() from pipeline",
"requires TSV-compatible input", "requires TSV-compatible input",
name_tag, &name_tag,
"originates from here".to_string(), "originates from here".to_string(),
value.tag(), value.tag(),
)) ))

View File

@ -47,7 +47,7 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Expected table with string values", "Expected table with string values",
"requires table with strings", "requires table with strings",
tag, &tag,
"value originates from here", "value originates from here",
v.tag, v.tag,
)) ))
@ -57,13 +57,13 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
match serde_urlencoded::to_string(row_vec) { match serde_urlencoded::to_string(row_vec) {
Ok(s) => { Ok(s) => {
yield ReturnSuccess::value(Value::string(s).tagged(tag)); yield ReturnSuccess::value(Value::string(s).tagged(&tag));
} }
_ => { _ => {
yield Err(ShellError::labeled_error( yield Err(ShellError::labeled_error(
"Failed to convert to url-encoded", "Failed to convert to url-encoded",
"cannot url-encode", "cannot url-encode",
tag, &tag,
)) ))
} }
} }
@ -72,7 +72,7 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
yield Err(ShellError::labeled_error_with_secondary( yield Err(ShellError::labeled_error_with_secondary(
"Expected a table from pipeline", "Expected a table from pipeline",
"requires table input", "requires table input",
tag, &tag,
"value originates from here", "value originates from here",
value_tag, value_tag,
)) ))

View File

@ -39,7 +39,7 @@ pub fn value_to_yaml_value(v: &Tagged<Value>) -> Result<serde_yaml::Value, Shell
serde_yaml::Value::Number(serde_yaml::Number::from(f.to_f64().unwrap())) serde_yaml::Value::Number(serde_yaml::Number::from(f.to_f64().unwrap()))
} }
Value::Primitive(Primitive::Int(i)) => serde_yaml::Value::Number(serde_yaml::Number::from( Value::Primitive(Primitive::Int(i)) => serde_yaml::Value::Number(serde_yaml::Number::from(
CoerceInto::<i64>::coerce_into(i.tagged(v.tag), "converting to YAML number")?, CoerceInto::<i64>::coerce_into(i.tagged(&v.tag), "converting to YAML number")?,
)), )),
Value::Primitive(Primitive::Nothing) => serde_yaml::Value::Null, Value::Primitive(Primitive::Nothing) => serde_yaml::Value::Null,
Value::Primitive(Primitive::Pattern(s)) => serde_yaml::Value::String(s.clone()), Value::Primitive(Primitive::Pattern(s)) => serde_yaml::Value::String(s.clone()),
@ -55,6 +55,7 @@ pub fn value_to_yaml_value(v: &Tagged<Value>) -> Result<serde_yaml::Value, Shell
serde_yaml::Value::Sequence(out) serde_yaml::Value::Sequence(out)
} }
Value::Error(e) => return Err(e.clone()),
Value::Block(_) => serde_yaml::Value::Null, Value::Block(_) => serde_yaml::Value::Null,
Value::Primitive(Primitive::Binary(b)) => serde_yaml::Value::Sequence( Value::Primitive(Primitive::Binary(b)) => serde_yaml::Value::Sequence(
b.iter() b.iter()
@ -81,7 +82,7 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
let input: Vec<Tagged<Value>> = args.input.values.collect().await; let input: Vec<Tagged<Value>> = args.input.values.collect().await;
let to_process_input = if input.len() > 1 { let to_process_input = if input.len() > 1 {
let tag = input[0].tag; let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ] vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 { } else if input.len() == 1 {
input input
@ -94,12 +95,12 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
Ok(yaml_value) => { Ok(yaml_value) => {
match serde_yaml::to_string(&yaml_value) { match serde_yaml::to_string(&yaml_value) {
Ok(x) => yield ReturnSuccess::value( Ok(x) => yield ReturnSuccess::value(
Value::Primitive(Primitive::String(x)).tagged(name_tag), Value::Primitive(Primitive::String(x)).tagged(&name_tag),
), ),
_ => yield Err(ShellError::labeled_error_with_secondary( _ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with YAML-compatible structure.tag() from pipeline", "Expected a table with YAML-compatible structure.tag() from pipeline",
"requires YAML-compatible input", "requires YAML-compatible input",
name_tag, &name_tag,
"originates from here".to_string(), "originates from here".to_string(),
value.tag(), value.tag(),
)), )),
@ -108,7 +109,7 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
_ => yield Err(ShellError::labeled_error( _ => yield Err(ShellError::labeled_error(
"Expected a table with YAML-compatible structure from pipeline", "Expected a table with YAML-compatible structure from pipeline",
"requires YAML-compatible input", "requires YAML-compatible input",
name_tag)) &name_tag))
} }
} }
}; };

View File

@ -31,14 +31,14 @@ impl WholeStreamCommand for Version {
pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> { pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?; let args = args.evaluate_once(registry)?;
let tag = args.call_info.name_tag; let tag = args.call_info.name_tag.clone();
let mut indexmap = IndexMap::new(); let mut indexmap = IndexMap::new();
indexmap.insert( indexmap.insert(
"version".to_string(), "version".to_string(),
Value::string(clap::crate_version!()).tagged(tag), Value::string(clap::crate_version!()).tagged(&tag),
); );
let value = Value::Row(Dictionary::from(indexmap)).tagged(tag); let value = Value::Row(Dictionary::from(indexmap)).tagged(&tag);
Ok(OutputStream::one(value)) Ok(OutputStream::one(value))
} }

View File

@ -49,7 +49,7 @@ impl PerItemCommand for Where {
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
"Expected a condition", "Expected a condition",
"where needs a condition", "where needs a condition",
*tag, tag,
)) ))
} }
}; };

View File

@ -33,7 +33,7 @@ pub fn which(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
let args = args.evaluate_once(registry)?; let args = args.evaluate_once(registry)?;
let mut which_out = VecDeque::new(); let mut which_out = VecDeque::new();
let tag = args.call_info.name_tag; let tag = args.call_info.name_tag.clone();
if let Some(v) = &args.call_info.args.positional { if let Some(v) = &args.call_info.args.positional {
if v.len() > 0 { if v.len() > 0 {
@ -52,7 +52,7 @@ pub fn which(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
"Expected a filename to find", "Expected a filename to find",
"needs a filename", "needs a filename",
*tag, tag,
)); ));
} }
} }

View File

@ -1,39 +1,20 @@
use crate::commands::{Command, UnevaluatedCallInfo}; use crate::commands::{Command, UnevaluatedCallInfo};
use crate::parser::{hir, hir::syntax_shape::ExpandContext}; use crate::parser::{hir, hir::syntax_shape::ExpandContext};
use crate::prelude::*; use crate::prelude::*;
use derive_new::new; use derive_new::new;
use indexmap::IndexMap; use indexmap::IndexMap;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::error::Error; use std::error::Error;
use std::sync::atomic::AtomicBool;
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use uuid::Uuid;
#[derive(Clone, Debug, Serialize, Deserialize)] #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum AnchorLocation { pub enum AnchorLocation {
Url(String), Url(String),
File(String), File(String),
Source(Text), Source(Text),
} }
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct SourceMap(HashMap<Uuid, AnchorLocation>);
impl SourceMap {
pub fn insert(&mut self, uuid: Uuid, anchor_location: AnchorLocation) {
self.0.insert(uuid, anchor_location);
}
pub fn get(&self, uuid: &Uuid) -> Option<&AnchorLocation> {
self.0.get(uuid)
}
pub fn new() -> SourceMap {
SourceMap(HashMap::new())
}
}
#[derive(Clone, new)] #[derive(Clone, new)]
pub struct CommandRegistry { pub struct CommandRegistry {
#[new(value = "Arc::new(Mutex::new(IndexMap::default()))")] #[new(value = "Arc::new(Mutex::new(IndexMap::default()))")]
@ -77,8 +58,8 @@ impl CommandRegistry {
#[derive(Clone)] #[derive(Clone)]
pub struct Context { pub struct Context {
registry: CommandRegistry, registry: CommandRegistry,
pub(crate) source_map: Arc<Mutex<SourceMap>>,
host: Arc<Mutex<dyn Host + Send>>, host: Arc<Mutex<dyn Host + Send>>,
pub ctrl_c: Arc<AtomicBool>,
pub(crate) shell_manager: ShellManager, pub(crate) shell_manager: ShellManager,
} }
@ -90,17 +71,17 @@ impl Context {
pub(crate) fn expand_context<'context>( pub(crate) fn expand_context<'context>(
&'context self, &'context self,
source: &'context Text, source: &'context Text,
tag: Tag, span: Span,
) -> ExpandContext<'context> { ) -> ExpandContext<'context> {
ExpandContext::new(&self.registry, tag, source, self.shell_manager.homedir()) ExpandContext::new(&self.registry, span, source, self.shell_manager.homedir())
} }
pub(crate) fn basic() -> Result<Context, Box<dyn Error>> { pub(crate) fn basic() -> Result<Context, Box<dyn Error>> {
let registry = CommandRegistry::new(); let registry = CommandRegistry::new();
Ok(Context { Ok(Context {
registry: registry.clone(), registry: registry.clone(),
source_map: Arc::new(Mutex::new(SourceMap::new())),
host: Arc::new(Mutex::new(crate::env::host::BasicHost)), host: Arc::new(Mutex::new(crate::env::host::BasicHost)),
ctrl_c: Arc::new(AtomicBool::new(false)),
shell_manager: ShellManager::basic(registry)?, shell_manager: ShellManager::basic(registry)?,
}) })
} }
@ -117,12 +98,6 @@ impl Context {
} }
} }
pub fn add_anchor_location(&mut self, uuid: Uuid, anchor_location: AnchorLocation) {
let mut source_map = self.source_map.lock().unwrap();
source_map.insert(uuid, anchor_location);
}
pub(crate) fn get_command(&self, name: &str) -> Option<Arc<Command>> { pub(crate) fn get_command(&self, name: &str) -> Option<Arc<Command>> {
self.registry.get_command(name) self.registry.get_command(name)
} }
@ -135,27 +110,19 @@ impl Context {
&mut self, &mut self,
command: Arc<Command>, command: Arc<Command>,
name_tag: Tag, name_tag: Tag,
source_map: SourceMap,
args: hir::Call, args: hir::Call,
source: &Text, source: &Text,
input: InputStream, input: InputStream,
is_first_command: bool, is_first_command: bool,
) -> OutputStream { ) -> OutputStream {
let command_args = self.command_args(args, input, source, source_map, name_tag); let command_args = self.command_args(args, input, source, name_tag);
command.run(command_args, self.registry(), is_first_command) command.run(command_args, self.registry(), is_first_command)
} }
fn call_info( fn call_info(&self, args: hir::Call, source: &Text, name_tag: Tag) -> UnevaluatedCallInfo {
&self,
args: hir::Call,
source: &Text,
source_map: SourceMap,
name_tag: Tag,
) -> UnevaluatedCallInfo {
UnevaluatedCallInfo { UnevaluatedCallInfo {
args, args,
source: source.clone(), source: source.clone(),
source_map,
name_tag, name_tag,
} }
} }
@ -165,13 +132,13 @@ impl Context {
args: hir::Call, args: hir::Call,
input: InputStream, input: InputStream,
source: &Text, source: &Text,
source_map: SourceMap,
name_tag: Tag, name_tag: Tag,
) -> CommandArgs { ) -> CommandArgs {
CommandArgs { CommandArgs {
host: self.host.clone(), host: self.host.clone(),
ctrl_c: self.ctrl_c.clone(),
shell_manager: self.shell_manager.clone(), shell_manager: self.shell_manager.clone(),
call_info: self.call_info(args, source, source_map, name_tag), call_info: self.call_info(args, source, name_tag),
input, input,
} }
} }

View File

@ -213,7 +213,7 @@ impl Block {
let scope = Scope::new(value.clone()); let scope = Scope::new(value.clone());
if self.expressions.len() == 0 { if self.expressions.len() == 0 {
return Ok(Value::nothing().tagged(self.tag)); return Ok(Value::nothing().tagged(&self.tag));
} }
let mut last = None; let mut last = None;
@ -245,6 +245,9 @@ pub enum Value {
Row(crate::data::Dictionary), Row(crate::data::Dictionary),
Table(Vec<Tagged<Value>>), Table(Vec<Tagged<Value>>),
// Errors are a type of value too
Error(ShellError),
Block(Block), Block(Block),
} }
@ -293,6 +296,7 @@ impl fmt::Debug for ValueDebug<'_> {
Value::Row(o) => o.debug(f), Value::Row(o) => o.debug(f),
Value::Table(l) => debug_list(l).fmt(f), Value::Table(l) => debug_list(l).fmt(f),
Value::Block(_) => write!(f, "[[block]]"), Value::Block(_) => write!(f, "[[block]]"),
Value::Error(_) => write!(f, "[[error]]"),
} }
} }
} }
@ -300,7 +304,7 @@ impl fmt::Debug for ValueDebug<'_> {
impl Tagged<Value> { impl Tagged<Value> {
pub fn tagged_type_name(&self) -> Tagged<String> { pub fn tagged_type_name(&self) -> Tagged<String> {
let name = self.type_name(); let name = self.type_name();
Tagged::from_item(name, self.tag()) name.tagged(self.tag())
} }
} }
@ -312,7 +316,7 @@ impl std::convert::TryFrom<&Tagged<Value>> for Block {
Value::Block(block) => Ok(block.clone()), Value::Block(block) => Ok(block.clone()),
v => Err(ShellError::type_error( v => Err(ShellError::type_error(
"Block", "Block",
value.copy_tag(v.type_name()), v.type_name().tagged(value.tag()),
)), )),
} }
} }
@ -324,11 +328,11 @@ impl std::convert::TryFrom<&Tagged<Value>> for i64 {
fn try_from(value: &Tagged<Value>) -> Result<i64, ShellError> { fn try_from(value: &Tagged<Value>) -> Result<i64, ShellError> {
match value.item() { match value.item() {
Value::Primitive(Primitive::Int(int)) => { Value::Primitive(Primitive::Int(int)) => {
int.tagged(value.tag).coerce_into("converting to i64") int.tagged(&value.tag).coerce_into("converting to i64")
} }
v => Err(ShellError::type_error( v => Err(ShellError::type_error(
"Integer", "Integer",
value.copy_tag(v.type_name()), v.type_name().tagged(value.tag()),
)), )),
} }
} }
@ -342,7 +346,7 @@ impl std::convert::TryFrom<&Tagged<Value>> for String {
Value::Primitive(Primitive::String(s)) => Ok(s.clone()), Value::Primitive(Primitive::String(s)) => Ok(s.clone()),
v => Err(ShellError::type_error( v => Err(ShellError::type_error(
"String", "String",
value.copy_tag(v.type_name()), v.type_name().tagged(value.tag()),
)), )),
} }
} }
@ -356,7 +360,7 @@ impl std::convert::TryFrom<&Tagged<Value>> for Vec<u8> {
Value::Primitive(Primitive::Binary(b)) => Ok(b.clone()), Value::Primitive(Primitive::Binary(b)) => Ok(b.clone()),
v => Err(ShellError::type_error( v => Err(ShellError::type_error(
"Binary", "Binary",
value.copy_tag(v.type_name()), v.type_name().tagged(value.tag()),
)), )),
} }
} }
@ -370,7 +374,7 @@ impl<'a> std::convert::TryFrom<&'a Tagged<Value>> for &'a crate::data::Dictionar
Value::Row(d) => Ok(d), Value::Row(d) => Ok(d),
v => Err(ShellError::type_error( v => Err(ShellError::type_error(
"Dictionary", "Dictionary",
value.copy_tag(v.type_name()), v.type_name().tagged(value.tag()),
)), )),
} }
} }
@ -392,7 +396,7 @@ impl std::convert::TryFrom<Option<&Tagged<Value>>> for Switch {
Value::Primitive(Primitive::Boolean(true)) => Ok(Switch::Present), Value::Primitive(Primitive::Boolean(true)) => Ok(Switch::Present),
v => Err(ShellError::type_error( v => Err(ShellError::type_error(
"Boolean", "Boolean",
value.copy_tag(v.type_name()), v.type_name().tagged(value.tag()),
)), )),
}, },
} }
@ -410,19 +414,19 @@ impl Tagged<Value> {
match &self.item { match &self.item {
Value::Table(table) => { Value::Table(table) => {
for item in table { for item in table {
out.push(item.as_string()?.tagged(item.tag)); out.push(item.as_string()?.tagged(&item.tag));
} }
} }
other => { other => {
return Err(ShellError::type_error( return Err(ShellError::type_error(
"column name", "column name",
other.type_name().tagged(self.tag), other.type_name().tagged(&self.tag),
)) ))
} }
} }
Ok(out.tagged(self.tag)) Ok(out.tagged(&self.tag))
} }
pub(crate) fn as_string(&self) -> Result<String, ShellError> { pub(crate) fn as_string(&self) -> Result<String, ShellError> {
@ -437,7 +441,7 @@ impl Tagged<Value> {
other => Err(ShellError::labeled_error( other => Err(ShellError::labeled_error(
"Expected string", "Expected string",
other.type_name(), other.type_name(),
self.tag, &self.tag,
)), )),
} }
} }
@ -450,6 +454,7 @@ impl Value {
Value::Row(_) => format!("row"), Value::Row(_) => format!("row"),
Value::Table(_) => format!("list"), Value::Table(_) => format!("list"),
Value::Block(_) => format!("block"), Value::Block(_) => format!("block"),
Value::Error(_) => format!("error"),
} }
} }
@ -465,6 +470,7 @@ impl Value {
.collect(), .collect(),
Value::Block(_) => vec![], Value::Block(_) => vec![],
Value::Table(_) => vec![], Value::Table(_) => vec![],
Value::Error(_) => vec![],
} }
} }
@ -503,7 +509,7 @@ impl Value {
} }
} }
Some(Tagged::from_item(current, tag)) Some(current.tagged(tag))
} }
pub fn get_data_by_path(&self, tag: Tag, path: &str) -> Option<Tagged<&Value>> { pub fn get_data_by_path(&self, tag: Tag, path: &str) -> Option<Tagged<&Value>> {
@ -515,7 +521,7 @@ impl Value {
} }
} }
Some(Tagged::from_item(current, tag)) Some(current.tagged(tag))
} }
pub fn insert_data_at_path( pub fn insert_data_at_path(
@ -535,8 +541,8 @@ impl Value {
// Special case for inserting at the top level // Special case for inserting at the top level
current current
.entries .entries
.insert(path.to_string(), Tagged::from_item(new_value, tag)); .insert(path.to_string(), new_value.tagged(&tag));
return Some(Tagged::from_item(new_obj, tag)); return Some(new_obj.tagged(&tag));
} }
for idx in 0..split_path.len() { for idx in 0..split_path.len() {
@ -547,13 +553,13 @@ impl Value {
Value::Row(o) => { Value::Row(o) => {
o.entries.insert( o.entries.insert(
split_path[idx + 1].to_string(), split_path[idx + 1].to_string(),
Tagged::from_item(new_value, tag), new_value.tagged(&tag),
); );
} }
_ => {} _ => {}
} }
return Some(Tagged::from_item(new_obj, tag)); return Some(new_obj.tagged(&tag));
} else { } else {
match next.item { match next.item {
Value::Row(ref mut o) => { Value::Row(ref mut o) => {
@ -584,11 +590,10 @@ impl Value {
if split_path.len() == 1 { if split_path.len() == 1 {
// Special case for inserting at the top level // Special case for inserting at the top level
current.entries.insert( current
split_path[0].item.clone(), .entries
Tagged::from_item(new_value, tag), .insert(split_path[0].item.clone(), new_value.tagged(&tag));
); return Some(new_obj.tagged(&tag));
return Some(Tagged::from_item(new_obj, tag));
} }
for idx in 0..split_path.len() { for idx in 0..split_path.len() {
@ -599,13 +604,13 @@ impl Value {
Value::Row(o) => { Value::Row(o) => {
o.entries.insert( o.entries.insert(
split_path[idx + 1].to_string(), split_path[idx + 1].to_string(),
Tagged::from_item(new_value, tag), new_value.tagged(&tag),
); );
} }
_ => {} _ => {}
} }
return Some(Tagged::from_item(new_obj, tag)); return Some(new_obj.tagged(&tag));
} else { } else {
match next.item { match next.item {
Value::Row(ref mut o) => { Value::Row(ref mut o) => {
@ -639,8 +644,8 @@ impl Value {
match current.entries.get_mut(split_path[idx]) { match current.entries.get_mut(split_path[idx]) {
Some(next) => { Some(next) => {
if idx == (split_path.len() - 1) { if idx == (split_path.len() - 1) {
*next = Tagged::from_item(replaced_value, tag); *next = replaced_value.tagged(&tag);
return Some(Tagged::from_item(new_obj, tag)); return Some(new_obj.tagged(&tag));
} else { } else {
match next.item { match next.item {
Value::Row(ref mut o) => { Value::Row(ref mut o) => {
@ -672,8 +677,8 @@ impl Value {
match current.entries.get_mut(&split_path[idx].item) { match current.entries.get_mut(&split_path[idx].item) {
Some(next) => { Some(next) => {
if idx == (split_path.len() - 1) { if idx == (split_path.len() - 1) {
*next = Tagged::from_item(replaced_value, tag); *next = replaced_value.tagged(&tag);
return Some(Tagged::from_item(new_obj, tag)); return Some(new_obj.tagged(&tag));
} else { } else {
match next.item { match next.item {
Value::Row(ref mut o) => { Value::Row(ref mut o) => {
@ -697,6 +702,7 @@ impl Value {
Value::Row(o) => o.get_data(desc), Value::Row(o) => o.get_data(desc),
Value::Block(_) => MaybeOwned::Owned(Value::nothing()), Value::Block(_) => MaybeOwned::Owned(Value::nothing()),
Value::Table(_) => MaybeOwned::Owned(Value::nothing()), Value::Table(_) => MaybeOwned::Owned(Value::nothing()),
Value::Error(_) => MaybeOwned::Owned(Value::nothing()),
} }
} }
@ -706,7 +712,7 @@ impl Value {
Value::Block(b) => itertools::join( Value::Block(b) => itertools::join(
b.expressions b.expressions
.iter() .iter()
.map(|e| e.source(&b.source).to_string()), .map(|e| e.span.slice(&b.source).to_string()),
"; ", "; ",
), ),
Value::Row(_) => format!("[table: 1 row]"), Value::Row(_) => format!("[table: 1 row]"),
@ -715,6 +721,7 @@ impl Value {
l.len(), l.len(),
if l.len() == 1 { "row" } else { "rows" } if l.len() == 1 { "row" } else { "rows" }
), ),
Value::Error(_) => format!("[error]"),
} }
} }

View File

@ -7,7 +7,7 @@ use std::ops::Deref;
pub(crate) fn command_dict(command: Arc<Command>, tag: impl Into<Tag>) -> Tagged<Value> { pub(crate) fn command_dict(command: Arc<Command>, tag: impl Into<Tag>) -> Tagged<Value> {
let tag = tag.into(); let tag = tag.into();
let mut cmd_dict = TaggedDictBuilder::new(tag); let mut cmd_dict = TaggedDictBuilder::new(&tag);
cmd_dict.insert("name", Value::string(command.name())); cmd_dict.insert("name", Value::string(command.name()));
@ -42,7 +42,7 @@ fn for_spec(name: &str, ty: &str, required: bool, tag: impl Into<Tag>) -> Tagged
fn signature_dict(signature: Signature, tag: impl Into<Tag>) -> Tagged<Value> { fn signature_dict(signature: Signature, tag: impl Into<Tag>) -> Tagged<Value> {
let tag = tag.into(); let tag = tag.into();
let mut sig = TaggedListBuilder::new(tag); let mut sig = TaggedListBuilder::new(&tag);
for arg in signature.positional.iter() { for arg in signature.positional.iter() {
let is_required = match arg { let is_required = match arg {
@ -50,19 +50,19 @@ fn signature_dict(signature: Signature, tag: impl Into<Tag>) -> Tagged<Value> {
PositionalType::Optional(_, _) => false, PositionalType::Optional(_, _) => false,
}; };
sig.insert_tagged(for_spec(arg.name(), "argument", is_required, tag)); sig.insert_tagged(for_spec(arg.name(), "argument", is_required, &tag));
} }
if let Some(_) = signature.rest_positional { if let Some(_) = signature.rest_positional {
let is_required = false; let is_required = false;
sig.insert_tagged(for_spec("rest", "argument", is_required, tag)); sig.insert_tagged(for_spec("rest", "argument", is_required, &tag));
} }
for (name, ty) in signature.named.iter() { for (name, ty) in signature.named.iter() {
match ty { match ty {
NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, tag)), NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, &tag)),
NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, tag)), NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, &tag)),
NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, tag)), NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, &tag)),
} }
} }

View File

@ -75,12 +75,12 @@ pub fn read(
let tag = tag.into(); let tag = tag.into();
let contents = fs::read_to_string(filename) let contents = fs::read_to_string(filename)
.map(|v| v.tagged(tag)) .map(|v| v.tagged(&tag))
.map_err(|err| { .map_err(|err| {
ShellError::labeled_error( ShellError::labeled_error(
&format!("Couldn't read config file:\n{}", err), &format!("Couldn't read config file:\n{}", err),
"file name", "file name",
tag, &tag,
) )
})?; })?;
@ -88,7 +88,7 @@ pub fn read(
ShellError::labeled_error( ShellError::labeled_error(
&format!("Couldn't parse config file:\n{}", err), &format!("Couldn't parse config file:\n{}", err),
"file name", "file name",
tag, &tag,
) )
})?; })?;
@ -98,7 +98,7 @@ pub fn read(
Value::Row(Dictionary { entries }) => Ok(entries), Value::Row(Dictionary { entries }) => Ok(entries),
other => Err(ShellError::type_error( other => Err(ShellError::type_error(
"Dictionary", "Dictionary",
other.type_name().tagged(tag), other.type_name().tagged(&tag),
)), )),
} }
} }

View File

@ -115,7 +115,7 @@ impl TaggedListBuilder {
} }
pub fn push(&mut self, value: impl Into<Value>) { pub fn push(&mut self, value: impl Into<Value>) {
self.list.push(value.into().tagged(self.tag)); self.list.push(value.into().tagged(&self.tag));
} }
pub fn insert_tagged(&mut self, value: impl Into<Tagged<Value>>) { pub fn insert_tagged(&mut self, value: impl Into<Tagged<Value>>) {
@ -155,7 +155,7 @@ impl TaggedDictBuilder {
} }
pub fn insert(&mut self, key: impl Into<String>, value: impl Into<Value>) { pub fn insert(&mut self, key: impl Into<String>, value: impl Into<Value>) {
self.dict.insert(key.into(), value.into().tagged(self.tag)); self.dict.insert(key.into(), value.into().tagged(&self.tag));
} }
pub fn insert_tagged(&mut self, key: impl Into<String>, value: impl Into<Tagged<Value>>) { pub fn insert_tagged(&mut self, key: impl Into<String>, value: impl Into<Tagged<Value>>) {

View File

@ -1,15 +1,52 @@
use crate::context::{AnchorLocation, SourceMap}; use crate::context::AnchorLocation;
use crate::parser::parse::parser::TracableContext; use crate::parser::parse::parser::TracableContext;
use crate::prelude::*; use crate::prelude::*;
use crate::Text;
use derive_new::new; use derive_new::new;
use getset::Getters; use getset::Getters;
use serde::Deserialize; use serde::Deserialize;
use serde::Serialize; use serde::Serialize;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use uuid::Uuid;
#[derive(new, Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)] #[derive(new, Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub struct Spanned<T> {
pub span: Span,
pub item: T,
}
impl<T> Spanned<T> {
pub fn map<U>(self, input: impl FnOnce(T) -> U) -> Spanned<U> {
let span = self.span;
let mapped = input(self.item);
mapped.spanned(span)
}
}
pub trait SpannedItem: Sized {
fn spanned(self, span: impl Into<Span>) -> Spanned<Self> {
Spanned {
item: self,
span: span.into(),
}
}
fn spanned_unknown(self) -> Spanned<Self> {
Spanned {
item: self,
span: Span::unknown(),
}
}
}
impl<T> SpannedItem for T {}
impl<T> std::ops::Deref for Spanned<T> {
type Target = T;
fn deref(&self) -> &T {
&self.item
}
}
#[derive(new, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub struct Tagged<T> { pub struct Tagged<T> {
pub tag: Tag, pub tag: Tag,
pub item: T, pub item: T,
@ -17,7 +54,7 @@ pub struct Tagged<T> {
impl<T> HasTag for Tagged<T> { impl<T> HasTag for Tagged<T> {
fn tag(&self) -> Tag { fn tag(&self) -> Tag {
self.tag self.tag.clone()
} }
} }
@ -29,20 +66,23 @@ impl AsRef<Path> for Tagged<PathBuf> {
pub trait TaggedItem: Sized { pub trait TaggedItem: Sized {
fn tagged(self, tag: impl Into<Tag>) -> Tagged<Self> { fn tagged(self, tag: impl Into<Tag>) -> Tagged<Self> {
Tagged::from_item(self, tag.into()) Tagged {
item: self,
tag: tag.into(),
}
} }
// For now, this is a temporary facility. In many cases, there are other useful spans that we // For now, this is a temporary facility. In many cases, there are other useful spans that we
// could be using, such as the original source spans of JSON or Toml files, but we don't yet // could be using, such as the original source spans of JSON or Toml files, but we don't yet
// have the infrastructure to make that work. // have the infrastructure to make that work.
fn tagged_unknown(self) -> Tagged<Self> { fn tagged_unknown(self) -> Tagged<Self> {
Tagged::from_item( Tagged {
self, item: self,
Tag { tag: Tag {
span: Span::unknown(), span: Span::unknown(),
anchor: uuid::Uuid::nil(), anchor: None,
}, },
) }
} }
} }
@ -57,48 +97,29 @@ impl<T> std::ops::Deref for Tagged<T> {
} }
impl<T> Tagged<T> { impl<T> Tagged<T> {
pub fn with_tag(self, tag: impl Into<Tag>) -> Tagged<T> {
Tagged::from_item(self.item, tag)
}
pub fn from_item(item: T, tag: impl Into<Tag>) -> Tagged<T> {
Tagged {
item,
tag: tag.into(),
}
}
pub fn map<U>(self, input: impl FnOnce(T) -> U) -> Tagged<U> { pub fn map<U>(self, input: impl FnOnce(T) -> U) -> Tagged<U> {
let tag = self.tag(); let tag = self.tag();
let mapped = input(self.item); let mapped = input(self.item);
Tagged::from_item(mapped, tag) mapped.tagged(tag)
}
pub(crate) fn copy_tag<U>(&self, output: U) -> Tagged<U> {
Tagged::from_item(output, self.tag())
}
pub fn source(&self, source: &Text) -> Text {
Text::from(self.tag().slice(source))
} }
pub fn tag(&self) -> Tag { pub fn tag(&self) -> Tag {
self.tag self.tag.clone()
} }
pub fn span(&self) -> Span { pub fn span(&self) -> Span {
self.tag.span self.tag.span
} }
pub fn anchor(&self) -> uuid::Uuid { pub fn anchor(&self) -> Option<AnchorLocation> {
self.tag.anchor self.tag.anchor.clone()
} }
pub fn anchor_name(&self, source_map: &SourceMap) -> Option<String> { pub fn anchor_name(&self) -> Option<String> {
match source_map.get(&self.tag.anchor) { match self.tag.anchor {
Some(AnchorLocation::File(file)) => Some(file.clone()), Some(AnchorLocation::File(ref file)) => Some(file.clone()),
Some(AnchorLocation::Url(url)) => Some(url.clone()), Some(AnchorLocation::Url(ref url)) => Some(url.clone()),
_ => None, _ => None,
} }
} }
@ -114,26 +135,32 @@ impl<T> Tagged<T> {
impl From<&Tag> for Tag { impl From<&Tag> for Tag {
fn from(input: &Tag) -> Tag { fn from(input: &Tag) -> Tag {
*input input.clone()
} }
} }
impl From<nom_locate::LocatedSpanEx<&str, Uuid>> for Span { impl From<nom_locate::LocatedSpanEx<&str, TracableContext>> for Span {
fn from(input: nom_locate::LocatedSpanEx<&str, Uuid>) -> Span { fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Span {
Span::new(input.offset, input.offset + input.fragment.len())
}
}
impl From<nom_locate::LocatedSpanEx<&str, u64>> for Span {
fn from(input: nom_locate::LocatedSpanEx<&str, u64>) -> Span {
Span::new(input.offset, input.offset + input.fragment.len()) Span::new(input.offset, input.offset + input.fragment.len())
} }
} }
impl<T> impl<T>
From<( From<(
nom_locate::LocatedSpanEx<T, Uuid>, nom_locate::LocatedSpanEx<T, u64>,
nom_locate::LocatedSpanEx<T, Uuid>, nom_locate::LocatedSpanEx<T, u64>,
)> for Span )> for Span
{ {
fn from( fn from(
input: ( input: (
nom_locate::LocatedSpanEx<T, Uuid>, nom_locate::LocatedSpanEx<T, u64>,
nom_locate::LocatedSpanEx<T, Uuid>, nom_locate::LocatedSpanEx<T, u64>,
), ),
) -> Span { ) -> Span {
Span { Span {
@ -159,42 +186,48 @@ impl From<&std::ops::Range<usize>> for Span {
} }
#[derive( #[derive(
Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters, new, Debug, Clone, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters, new,
)] )]
pub struct Tag { pub struct Tag {
pub anchor: Uuid, pub anchor: Option<AnchorLocation>,
pub span: Span, pub span: Span,
} }
impl From<Span> for Tag { impl From<Span> for Tag {
fn from(span: Span) -> Self { fn from(span: Span) -> Self {
Tag { Tag { anchor: None, span }
anchor: uuid::Uuid::nil(),
span,
}
} }
} }
impl From<&Span> for Tag { impl From<&Span> for Tag {
fn from(span: &Span) -> Self { fn from(span: &Span) -> Self {
Tag { Tag {
anchor: uuid::Uuid::nil(), anchor: None,
span: *span, span: *span,
} }
} }
} }
impl From<(usize, usize, TracableContext)> for Tag { impl From<(usize, usize, TracableContext)> for Tag {
fn from((start, end, context): (usize, usize, TracableContext)) -> Self { fn from((start, end, _context): (usize, usize, TracableContext)) -> Self {
Tag { Tag {
anchor: context.origin, anchor: None,
span: Span::new(start, end), span: Span::new(start, end),
} }
} }
} }
impl From<(usize, usize, Uuid)> for Tag { impl From<(usize, usize, AnchorLocation)> for Tag {
fn from((start, end, anchor): (usize, usize, Uuid)) -> Self { fn from((start, end, anchor): (usize, usize, AnchorLocation)) -> Self {
Tag {
anchor: Some(anchor),
span: Span::new(start, end),
}
}
}
impl From<(usize, usize, Option<AnchorLocation>)> for Tag {
fn from((start, end, anchor): (usize, usize, Option<AnchorLocation>)) -> Self {
Tag { Tag {
anchor, anchor,
span: Span::new(start, end), span: Span::new(start, end),
@ -202,19 +235,10 @@ impl From<(usize, usize, Uuid)> for Tag {
} }
} }
impl From<(usize, usize, Option<Uuid>)> for Tag {
fn from((start, end, anchor): (usize, usize, Option<Uuid>)) -> Self {
Tag {
anchor: anchor.unwrap_or(uuid::Uuid::nil()),
span: Span::new(start, end),
}
}
}
impl From<nom_locate::LocatedSpanEx<&str, TracableContext>> for Tag { impl From<nom_locate::LocatedSpanEx<&str, TracableContext>> for Tag {
fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Tag { fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Tag {
Tag { Tag {
anchor: input.extra.origin, anchor: None,
span: Span::new(input.offset, input.offset + input.fragment.len()), span: Span::new(input.offset, input.offset + input.fragment.len()),
} }
} }
@ -234,15 +258,12 @@ impl From<&Tag> for Span {
impl Tag { impl Tag {
pub fn unknown_anchor(span: Span) -> Tag { pub fn unknown_anchor(span: Span) -> Tag {
Tag { Tag { anchor: None, span }
anchor: uuid::Uuid::nil(),
span,
}
} }
pub fn for_char(pos: usize, anchor: Uuid) -> Tag { pub fn for_char(pos: usize, anchor: AnchorLocation) -> Tag {
Tag { Tag {
anchor, anchor: Some(anchor),
span: Span { span: Span {
start: pos, start: pos,
end: pos + 1, end: pos + 1,
@ -250,16 +271,16 @@ impl Tag {
} }
} }
pub fn unknown_span(anchor: Uuid) -> Tag { pub fn unknown_span(anchor: AnchorLocation) -> Tag {
Tag { Tag {
anchor, anchor: Some(anchor),
span: Span::unknown(), span: Span::unknown(),
} }
} }
pub fn unknown() -> Tag { pub fn unknown() -> Tag {
Tag { Tag {
anchor: uuid::Uuid::nil(), anchor: None,
span: Span::unknown(), span: Span::unknown(),
} }
} }
@ -273,7 +294,7 @@ impl Tag {
Tag { Tag {
span: Span::new(self.span.start, other.span.end), span: Span::new(self.span.start, other.span.end),
anchor: self.anchor, anchor: self.anchor.clone(),
} }
} }
@ -288,10 +309,10 @@ impl Tag {
Tag { Tag {
span: Span::new(self.span.start, other.span.end), span: Span::new(self.span.start, other.span.end),
anchor: self.anchor, anchor: self.anchor.clone(),
} }
} }
None => *self, None => self.clone(),
} }
} }
@ -360,6 +381,42 @@ impl Span {
Span { start, end } Span { start, end }
} }
pub fn for_char(pos: usize) -> Span {
Span {
start: pos,
end: pos + 1,
}
}
pub fn until(&self, other: impl Into<Span>) -> Span {
let other = other.into();
Span::new(self.start, other.end)
}
pub fn until_option(&self, other: Option<impl Into<Span>>) -> Span {
match other {
Some(other) => {
let other = other.into();
Span::new(self.start, other.end)
}
None => *self,
}
}
pub fn string<'a>(&self, source: &'a str) -> String {
self.slice(source).to_string()
}
pub fn spanned_slice<'a>(&self, source: &'a str) -> Spanned<&'a str> {
self.slice(source).spanned(*self)
}
pub fn spanned_string<'a>(&self, source: &'a str) -> Spanned<String> {
self.slice(source).to_string().spanned(*self)
}
/* /*
pub fn unknown_with_uuid(uuid: Uuid) -> Span { pub fn unknown_with_uuid(uuid: Uuid) -> Span {
Span { Span {
@ -404,27 +461,3 @@ impl language_reporting::ReportingSpan for Span {
self.end self.end
} }
} }
impl language_reporting::ReportingSpan for Tag {
fn with_start(&self, start: usize) -> Self {
Tag {
span: Span::new(start, self.span.end),
anchor: self.anchor,
}
}
fn with_end(&self, end: usize) -> Self {
Tag {
span: Span::new(self.span.start, end),
anchor: self.anchor,
}
}
fn start(&self) -> usize {
self.span.start
}
fn end(&self) -> usize {
self.span.end
}
}

View File

@ -54,7 +54,7 @@ impl ExtractType for i64 {
&Tagged { &Tagged {
item: Value::Primitive(Primitive::Int(int)), item: Value::Primitive(Primitive::Int(int)),
.. ..
} => Ok(int.tagged(value.tag).coerce_into("converting to i64")?), } => Ok(int.tagged(&value.tag).coerce_into("converting to i64")?),
other => Err(ShellError::type_error("Integer", other.tagged_type_name())), other => Err(ShellError::type_error("Integer", other.tagged_type_name())),
} }
} }
@ -68,7 +68,7 @@ impl ExtractType for u64 {
&Tagged { &Tagged {
item: Value::Primitive(Primitive::Int(int)), item: Value::Primitive(Primitive::Int(int)),
.. ..
} => Ok(int.tagged(value.tag).coerce_into("converting to u64")?), } => Ok(int.tagged(&value.tag).coerce_into("converting to u64")?),
other => Err(ShellError::type_error("Integer", other.tagged_type_name())), other => Err(ShellError::type_error("Integer", other.tagged_type_name())),
} }
} }

View File

@ -14,9 +14,9 @@ pub enum Description {
} }
impl Description { impl Description {
fn into_label(self) -> Result<Label<Tag>, String> { fn into_label(self) -> Result<Label<Span>, String> {
match self { match self {
Description::Source(s) => Ok(Label::new_primary(s.tag()).with_message(s.item)), Description::Source(s) => Ok(Label::new_primary(s.span()).with_message(s.item)),
Description::Synthetic(s) => Err(s), Description::Synthetic(s) => Err(s),
} }
} }
@ -24,7 +24,7 @@ impl Description {
#[allow(unused)] #[allow(unused)]
fn tag(&self) -> Tag { fn tag(&self) -> Tag {
match self { match self {
Description::Source(tagged) => tagged.tag, Description::Source(tagged) => tagged.tag.clone(),
Description::Synthetic(_) => Tag::unknown(), Description::Synthetic(_) => Tag::unknown(),
} }
} }
@ -85,10 +85,10 @@ impl ShellError {
.start() .start()
} }
pub(crate) fn unexpected_eof(expected: impl Into<String>, tag: Tag) -> ShellError { pub(crate) fn unexpected_eof(expected: impl Into<String>, tag: impl Into<Tag>) -> ShellError {
ProximateShellError::UnexpectedEof { ProximateShellError::UnexpectedEof {
expected: expected.into(), expected: expected.into(),
tag, tag: tag.into(),
} }
.start() .start()
} }
@ -100,7 +100,7 @@ impl ShellError {
) -> ShellError { ) -> ShellError {
ProximateShellError::RangeError { ProximateShellError::RangeError {
kind: expected.into(), kind: expected.into(),
actual_kind: actual.copy_tag(format!("{:?}", actual.item)), actual_kind: format!("{:?}", actual.item).tagged(actual.tag()),
operation, operation,
} }
.start() .start()
@ -143,22 +143,22 @@ impl ShellError {
pub(crate) fn argument_error( pub(crate) fn argument_error(
command: impl Into<String>, command: impl Into<String>,
kind: ArgumentError, kind: ArgumentError,
tag: Tag, tag: impl Into<Tag>,
) -> ShellError { ) -> ShellError {
ProximateShellError::ArgumentError { ProximateShellError::ArgumentError {
command: command.into(), command: command.into(),
error: kind, error: kind,
tag, tag: tag.into(),
} }
.start() .start()
} }
pub(crate) fn invalid_external_word(tag: Tag) -> ShellError { pub(crate) fn invalid_external_word(tag: impl Into<Tag>) -> ShellError {
ProximateShellError::ArgumentError { ProximateShellError::ArgumentError {
command: "Invalid argument to Nu command (did you mean to call an external command?)" command: "Invalid argument to Nu command (did you mean to call an external command?)"
.into(), .into(),
error: ArgumentError::InvalidExternalWord, error: ArgumentError::InvalidExternalWord,
tag, tag: tag.into(),
} }
.start() .start()
} }
@ -183,22 +183,22 @@ impl ShellError {
} }
nom::Err::Failure(span) | nom::Err::Error(span) => { nom::Err::Failure(span) | nom::Err::Error(span) => {
let diagnostic = Diagnostic::new(Severity::Error, format!("Parse Error")) let diagnostic = Diagnostic::new(Severity::Error, format!("Parse Error"))
.with_label(Label::new_primary(Tag::from(span.0))); .with_label(Label::new_primary(Span::from(span.0)));
ShellError::diagnostic(diagnostic) ShellError::diagnostic(diagnostic)
} }
} }
} }
pub(crate) fn diagnostic(diagnostic: Diagnostic<Tag>) -> ShellError { pub(crate) fn diagnostic(diagnostic: Diagnostic<Span>) -> ShellError {
ProximateShellError::Diagnostic(ShellDiagnostic { diagnostic }).start() ProximateShellError::Diagnostic(ShellDiagnostic { diagnostic }).start()
} }
pub(crate) fn to_diagnostic(self) -> Diagnostic<Tag> { pub(crate) fn to_diagnostic(self) -> Diagnostic<Span> {
match self.error { match self.error {
ProximateShellError::InvalidCommand { command } => { ProximateShellError::InvalidCommand { command } => {
Diagnostic::new(Severity::Error, "Invalid command") Diagnostic::new(Severity::Error, "Invalid command")
.with_label(Label::new_primary(command)) .with_label(Label::new_primary(command.span))
} }
ProximateShellError::MissingValue { tag, reason } => { ProximateShellError::MissingValue { tag, reason } => {
let mut d = Diagnostic::new( let mut d = Diagnostic::new(
@ -207,7 +207,7 @@ impl ShellError {
); );
if let Some(tag) = tag { if let Some(tag) = tag {
d = d.with_label(Label::new_primary(tag)); d = d.with_label(Label::new_primary(tag.span));
} }
d d
@ -220,7 +220,7 @@ impl ShellError {
ArgumentError::InvalidExternalWord => Diagnostic::new( ArgumentError::InvalidExternalWord => Diagnostic::new(
Severity::Error, Severity::Error,
format!("Invalid bare word for Nu command (did you intend to invoke an external command?)")) format!("Invalid bare word for Nu command (did you intend to invoke an external command?)"))
.with_label(Label::new_primary(tag)), .with_label(Label::new_primary(tag.span)),
ArgumentError::MissingMandatoryFlag(name) => Diagnostic::new( ArgumentError::MissingMandatoryFlag(name) => Diagnostic::new(
Severity::Error, Severity::Error,
format!( format!(
@ -230,7 +230,7 @@ impl ShellError {
Color::Black.bold().paint(name) Color::Black.bold().paint(name)
), ),
) )
.with_label(Label::new_primary(tag)), .with_label(Label::new_primary(tag.span)),
ArgumentError::MissingMandatoryPositional(name) => Diagnostic::new( ArgumentError::MissingMandatoryPositional(name) => Diagnostic::new(
Severity::Error, Severity::Error,
format!( format!(
@ -240,7 +240,7 @@ impl ShellError {
), ),
) )
.with_label( .with_label(
Label::new_primary(tag).with_message(format!("requires {} parameter", name)), Label::new_primary(tag.span).with_message(format!("requires {} parameter", name)),
), ),
ArgumentError::MissingValueForName(name) => Diagnostic::new( ArgumentError::MissingValueForName(name) => Diagnostic::new(
Severity::Error, Severity::Error,
@ -251,7 +251,7 @@ impl ShellError {
Color::Black.bold().paint(name) Color::Black.bold().paint(name)
), ),
) )
.with_label(Label::new_primary(tag)), .with_label(Label::new_primary(tag.span)),
}, },
ProximateShellError::TypeError { ProximateShellError::TypeError {
expected, expected,
@ -261,7 +261,7 @@ impl ShellError {
tag, tag,
}, },
} => Diagnostic::new(Severity::Error, "Type Error").with_label( } => Diagnostic::new(Severity::Error, "Type Error").with_label(
Label::new_primary(tag) Label::new_primary(tag.span)
.with_message(format!("Expected {}, found {}", expected, actual)), .with_message(format!("Expected {}, found {}", expected, actual)),
), ),
ProximateShellError::TypeError { ProximateShellError::TypeError {
@ -272,12 +272,12 @@ impl ShellError {
tag tag
}, },
} => Diagnostic::new(Severity::Error, "Type Error") } => Diagnostic::new(Severity::Error, "Type Error")
.with_label(Label::new_primary(tag).with_message(expected)), .with_label(Label::new_primary(tag.span).with_message(expected)),
ProximateShellError::UnexpectedEof { ProximateShellError::UnexpectedEof {
expected, tag expected, tag
} => Diagnostic::new(Severity::Error, format!("Unexpected end of input")) } => Diagnostic::new(Severity::Error, format!("Unexpected end of input"))
.with_label(Label::new_primary(tag).with_message(format!("Expected {}", expected))), .with_label(Label::new_primary(tag.span).with_message(format!("Expected {}", expected))),
ProximateShellError::RangeError { ProximateShellError::RangeError {
kind, kind,
@ -288,7 +288,7 @@ impl ShellError {
tag tag
}, },
} => Diagnostic::new(Severity::Error, "Range Error").with_label( } => Diagnostic::new(Severity::Error, "Range Error").with_label(
Label::new_primary(tag).with_message(format!( Label::new_primary(tag.span).with_message(format!(
"Expected to convert {} to {} while {}, but it was out of range", "Expected to convert {} to {} while {}, but it was out of range",
item, item,
kind.desc(), kind.desc(),
@ -303,7 +303,7 @@ impl ShellError {
item item
}, },
} => Diagnostic::new(Severity::Error, "Syntax Error") } => Diagnostic::new(Severity::Error, "Syntax Error")
.with_label(Label::new_primary(tag).with_message(item)), .with_label(Label::new_primary(tag.span).with_message(item)),
ProximateShellError::MissingProperty { subpath, expr, .. } => { ProximateShellError::MissingProperty { subpath, expr, .. } => {
let subpath = subpath.into_label(); let subpath = subpath.into_label();
@ -326,8 +326,8 @@ impl ShellError {
ProximateShellError::Diagnostic(diag) => diag.diagnostic, ProximateShellError::Diagnostic(diag) => diag.diagnostic,
ProximateShellError::CoerceError { left, right } => { ProximateShellError::CoerceError { left, right } => {
Diagnostic::new(Severity::Error, "Coercion error") Diagnostic::new(Severity::Error, "Coercion error")
.with_label(Label::new_primary(left.tag()).with_message(left.item)) .with_label(Label::new_primary(left.tag().span).with_message(left.item))
.with_label(Label::new_secondary(right.tag()).with_message(right.item)) .with_label(Label::new_secondary(right.tag().span).with_message(right.item))
} }
ProximateShellError::UntaggedRuntimeError { reason } => Diagnostic::new(Severity::Error, format!("Error: {}", reason)) ProximateShellError::UntaggedRuntimeError { reason } => Diagnostic::new(Severity::Error, format!("Error: {}", reason))
@ -341,7 +341,7 @@ impl ShellError {
) -> ShellError { ) -> ShellError {
ShellError::diagnostic( ShellError::diagnostic(
Diagnostic::new(Severity::Error, msg.into()) Diagnostic::new(Severity::Error, msg.into())
.with_label(Label::new_primary(tag.into()).with_message(label.into())), .with_label(Label::new_primary(tag.into().span).with_message(label.into())),
) )
} }
@ -355,15 +355,19 @@ impl ShellError {
ShellError::diagnostic( ShellError::diagnostic(
Diagnostic::new_error(msg.into()) Diagnostic::new_error(msg.into())
.with_label( .with_label(
Label::new_primary(primary_span.into()).with_message(primary_label.into()), Label::new_primary(primary_span.into().span).with_message(primary_label.into()),
) )
.with_label( .with_label(
Label::new_secondary(secondary_span.into()) Label::new_secondary(secondary_span.into().span)
.with_message(secondary_label.into()), .with_message(secondary_label.into()),
), ),
) )
} }
// pub fn string(title: impl Into<String>) -> ShellError {
// ProximateShellError::String(StringError::new(title.into(), String::new())).start()
// }
pub(crate) fn unimplemented(title: impl Into<String>) -> ShellError { pub(crate) fn unimplemented(title: impl Into<String>) -> ShellError {
ShellError::untagged_runtime_error(&format!("Unimplemented: {}", title.into())) ShellError::untagged_runtime_error(&format!("Unimplemented: {}", title.into()))
} }
@ -472,16 +476,16 @@ impl ProximateShellError {
pub(crate) fn tag(&self) -> Option<Tag> { pub(crate) fn tag(&self) -> Option<Tag> {
Some(match self { Some(match self {
ProximateShellError::SyntaxError { problem } => problem.tag(), ProximateShellError::SyntaxError { problem } => problem.tag(),
ProximateShellError::UnexpectedEof { tag, .. } => *tag, ProximateShellError::UnexpectedEof { tag, .. } => tag.clone(),
ProximateShellError::InvalidCommand { command } => *command, ProximateShellError::InvalidCommand { command } => command.clone(),
ProximateShellError::TypeError { actual, .. } => actual.tag, ProximateShellError::TypeError { actual, .. } => actual.tag.clone(),
ProximateShellError::MissingProperty { tag, .. } => *tag, ProximateShellError::MissingProperty { tag, .. } => tag.clone(),
ProximateShellError::MissingValue { tag, .. } => return *tag, ProximateShellError::MissingValue { tag, .. } => return tag.clone(),
ProximateShellError::ArgumentError { tag, .. } => *tag, ProximateShellError::ArgumentError { tag, .. } => tag.clone(),
ProximateShellError::RangeError { actual_kind, .. } => actual_kind.tag, ProximateShellError::RangeError { actual_kind, .. } => actual_kind.tag.clone(),
ProximateShellError::Diagnostic(..) => return None, ProximateShellError::Diagnostic(..) => return None,
ProximateShellError::UntaggedRuntimeError { .. } => return None, ProximateShellError::UntaggedRuntimeError { .. } => return None,
ProximateShellError::CoerceError { left, right } => left.tag.until(right.tag), ProximateShellError::CoerceError { left, right } => left.tag.until(&right.tag),
}) })
} }
} }
@ -495,7 +499,7 @@ impl ToDebug for ProximateShellError {
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ShellDiagnostic { pub struct ShellDiagnostic {
pub(crate) diagnostic: Diagnostic<Tag>, pub(crate) diagnostic: Diagnostic<Span>,
} }
impl PartialEq for ShellDiagnostic { impl PartialEq for ShellDiagnostic {
@ -521,7 +525,7 @@ impl std::cmp::Ord for ShellDiagnostic {
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, new, Clone, Serialize, Deserialize)] #[derive(Debug, Ord, PartialOrd, Eq, PartialEq, new, Clone, Serialize, Deserialize)]
pub struct StringError { pub struct StringError {
title: String, title: String,
error: Value, error: String,
} }
impl std::fmt::Display for ShellError { impl std::fmt::Display for ShellError {
@ -598,7 +602,6 @@ impl<T> ShellErrorUtils<Tagged<T>> for Option<Tagged<T>> {
} }
} }
} }
pub trait CoerceInto<U> { pub trait CoerceInto<U> {
fn coerce_into(self, operation: impl Into<String>) -> Result<U, ShellError>; fn coerce_into(self, operation: impl Into<String>) -> Result<U, ShellError>;
} }

View File

@ -48,19 +48,23 @@ pub(crate) fn evaluate_baseline_expr(
scope: &Scope, scope: &Scope,
source: &Text, source: &Text,
) -> Result<Tagged<Value>, ShellError> { ) -> Result<Tagged<Value>, ShellError> {
let tag = Tag {
span: expr.span,
anchor: None,
};
match &expr.item { match &expr.item {
RawExpression::Literal(literal) => Ok(evaluate_literal(expr.copy_tag(literal), source)), RawExpression::Literal(literal) => Ok(evaluate_literal(literal.tagged(tag), source)),
RawExpression::ExternalWord => Err(ShellError::argument_error( RawExpression::ExternalWord => Err(ShellError::argument_error(
"Invalid external word", "Invalid external word",
ArgumentError::InvalidExternalWord, ArgumentError::InvalidExternalWord,
expr.tag(), tag,
)), )),
RawExpression::FilePath(path) => Ok(Value::path(path.clone()).tagged(expr.tag())), RawExpression::FilePath(path) => Ok(Value::path(path.clone()).tagged(tag)),
RawExpression::Synthetic(hir::Synthetic::String(s)) => { RawExpression::Synthetic(hir::Synthetic::String(s)) => {
Ok(Value::string(s).tagged_unknown()) Ok(Value::string(s).tagged_unknown())
} }
RawExpression::Variable(var) => evaluate_reference(var, scope, source, expr.tag()), RawExpression::Variable(var) => evaluate_reference(var, scope, source, tag),
RawExpression::Command(_) => evaluate_command(expr.tag(), scope, source), RawExpression::Command(_) => evaluate_command(tag, scope, source),
RawExpression::ExternalCommand(external) => evaluate_external(external, scope, source), RawExpression::ExternalCommand(external) => evaluate_external(external, scope, source),
RawExpression::Binary(binary) => { RawExpression::Binary(binary) => {
let left = evaluate_baseline_expr(binary.left(), registry, scope, source)?; let left = evaluate_baseline_expr(binary.left(), registry, scope, source)?;
@ -69,10 +73,16 @@ pub(crate) fn evaluate_baseline_expr(
trace!("left={:?} right={:?}", left.item, right.item); trace!("left={:?} right={:?}", left.item, right.item);
match left.compare(binary.op(), &*right) { match left.compare(binary.op(), &*right) {
Ok(result) => Ok(Value::boolean(result).tagged(expr.tag())), Ok(result) => Ok(Value::boolean(result).tagged(tag)),
Err((left_type, right_type)) => Err(ShellError::coerce_error( Err((left_type, right_type)) => Err(ShellError::coerce_error(
binary.left().copy_tag(left_type), left_type.tagged(Tag {
binary.right().copy_tag(right_type), span: binary.left().span,
anchor: None,
}),
right_type.tagged(Tag {
span: binary.right().span,
anchor: None,
}),
)), )),
} }
} }
@ -84,13 +94,10 @@ pub(crate) fn evaluate_baseline_expr(
exprs.push(expr); exprs.push(expr);
} }
Ok(Value::Table(exprs).tagged(expr.tag())) Ok(Value::Table(exprs).tagged(tag))
} }
RawExpression::Block(block) => { RawExpression::Block(block) => {
Ok( Ok(Value::Block(Block::new(block.clone(), source.clone(), tag.clone())).tagged(&tag))
Value::Block(Block::new(block.clone(), source.clone(), expr.tag()))
.tagged(expr.tag()),
)
} }
RawExpression::Path(path) => { RawExpression::Path(path) => {
let value = evaluate_baseline_expr(path.head(), registry, scope, source)?; let value = evaluate_baseline_expr(path.head(), registry, scope, source)?;
@ -113,16 +120,16 @@ pub(crate) fn evaluate_baseline_expr(
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
"Unknown column", "Unknown column",
format!("did you mean '{}'?", possible_matches[0].1), format!("did you mean '{}'?", possible_matches[0].1),
expr.tag(), &tag,
)); ));
} }
Some(next) => { Some(next) => {
item = next.clone().item.tagged(expr.tag()); item = next.clone().item.tagged(&tag);
} }
}; };
} }
Ok(item.item().clone().tagged(expr.tag())) Ok(item.item().clone().tagged(tag))
} }
RawExpression::Boolean(_boolean) => unimplemented!(), RawExpression::Boolean(_boolean) => unimplemented!(),
} }

View File

@ -14,7 +14,7 @@ impl RenderView for GenericView<'_> {
match self.value { match self.value {
Value::Primitive(p) => Ok(host.stdout(&p.format(None))), Value::Primitive(p) => Ok(host.stdout(&p.format(None))),
Value::Table(l) => { Value::Table(l) => {
let view = TableView::from_list(l); let view = TableView::from_list(l, 0);
if let Some(view) = view { if let Some(view) = view {
view.render_view(host)?; view.render_view(host)?;
@ -35,6 +35,8 @@ impl RenderView for GenericView<'_> {
view.render_view(host)?; view.render_view(host)?;
Ok(()) Ok(())
} }
Value::Error(e) => Err(e.clone()),
} }
} }
} }

View File

@ -34,7 +34,7 @@ impl TableView {
ret ret
} }
pub fn from_list(values: &[Tagged<Value>]) -> Option<TableView> { pub fn from_list(values: &[Tagged<Value>], starting_idx: usize) -> Option<TableView> {
if values.len() == 0 { if values.len() == 0 {
return None; return None;
} }
@ -68,7 +68,7 @@ impl TableView {
if values.len() > 1 { if values.len() > 1 {
// Indices are black, bold, right-aligned: // Indices are black, bold, right-aligned:
row.insert(0, (format!("{}", idx.to_string()), "Fdbr")); row.insert(0, (format!("{}", (starting_idx + idx).to_string()), "Fdbr"));
} }
entries.push(row); entries.push(row);

View File

@ -1,4 +1,4 @@
#![recursion_limit = "512"] #![recursion_limit = "1024"]
#[macro_use] #[macro_use]
mod prelude; mod prelude;
@ -21,7 +21,7 @@ mod traits;
mod utils; mod utils;
pub use crate::commands::command::{CallInfo, ReturnSuccess, ReturnValue}; pub use crate::commands::command::{CallInfo, ReturnSuccess, ReturnValue};
pub use crate::context::{AnchorLocation, SourceMap}; pub use crate::context::AnchorLocation;
pub use crate::env::host::BasicHost; pub use crate::env::host::BasicHost;
pub use crate::parser::hir::SyntaxShape; pub use crate::parser::hir::SyntaxShape;
pub use crate::parser::parse::token_tree_builder::TokenTreeBuilder; pub use crate::parser::parse::token_tree_builder::TokenTreeBuilder;
@ -31,7 +31,7 @@ pub use cli::cli;
pub use data::base::{Primitive, Value}; pub use data::base::{Primitive, Value};
pub use data::config::{config_path, APP_INFO}; pub use data::config::{config_path, APP_INFO};
pub use data::dict::{Dictionary, TaggedDictBuilder}; pub use data::dict::{Dictionary, TaggedDictBuilder};
pub use data::meta::{Span, Tag, Tagged, TaggedItem}; pub use data::meta::{Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem};
pub use errors::{CoerceInto, ShellError}; pub use errors::{CoerceInto, ShellError};
pub use num_traits::cast::ToPrimitive; pub use num_traits::cast::ToPrimitive;
pub use parser::parse::text::Text; pub use parser::parse::text::Text;

View File

@ -21,10 +21,10 @@ pub(crate) use parse::tokens::{RawNumber, RawToken};
pub(crate) use parse::unit::Unit; pub(crate) use parse::unit::Unit;
pub(crate) use registry::CommandRegistry; pub(crate) use registry::CommandRegistry;
pub fn parse(input: &str, anchor: uuid::Uuid) -> Result<TokenNode, ShellError> { pub fn parse(input: &str) -> Result<TokenNode, ShellError> {
let _ = pretty_env_logger::try_init(); let _ = pretty_env_logger::try_init();
match pipeline(nom_input(input, anchor)) { match pipeline(nom_input(input)) {
Ok((_rest, val)) => Ok(val), Ok((_rest, val)) => Ok(val),
Err(err) => Err(ShellError::parse_error(err)), Err(err) => Err(ShellError::parse_error(err)),
} }

View File

@ -52,7 +52,7 @@ impl<'de> ConfigDeserializer<'de> {
self.stack.push(DeserializerItem { self.stack.push(DeserializerItem {
key_struct_field: Some((name.to_string(), name)), key_struct_field: Some((name.to_string(), name)),
val: value.unwrap_or_else(|| Value::nothing().tagged(self.call.name_tag)), val: value.unwrap_or_else(|| Value::nothing().tagged(&self.call.name_tag)),
}); });
Ok(()) Ok(())

View File

@ -86,7 +86,7 @@ pub enum RawExpression {
FilePath(PathBuf), FilePath(PathBuf),
ExternalCommand(ExternalCommand), ExternalCommand(ExternalCommand),
Command(Tag), Command(Span),
Boolean(bool), Boolean(bool),
} }
@ -123,14 +123,14 @@ impl RawExpression {
} }
} }
pub type Expression = Tagged<RawExpression>; pub type Expression = Spanned<RawExpression>;
impl std::fmt::Display for Expression { impl std::fmt::Display for Expression {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let span = self.tag.span; let span = self.span;
match &self.item { match &self.item {
RawExpression::Literal(literal) => write!(f, "{}", literal.tagged(self.tag)), RawExpression::Literal(literal) => write!(f, "{}", literal.tagged(self.span)),
RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{}", s), RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{}", s),
RawExpression::Command(_) => write!(f, "Command{{ {}..{} }}", span.start(), span.end()), RawExpression::Command(_) => write!(f, "Command{{ {}..{} }}", span.start(), span.end()),
RawExpression::ExternalWord => { RawExpression::ExternalWord => {
@ -159,97 +159,97 @@ impl std::fmt::Display for Expression {
} }
impl Expression { impl Expression {
pub(crate) fn number(i: impl Into<Number>, tag: impl Into<Tag>) -> Expression { pub(crate) fn number(i: impl Into<Number>, span: impl Into<Span>) -> Expression {
RawExpression::Literal(Literal::Number(i.into())).tagged(tag.into()) RawExpression::Literal(Literal::Number(i.into())).spanned(span.into())
} }
pub(crate) fn size( pub(crate) fn size(
i: impl Into<Number>, i: impl Into<Number>,
unit: impl Into<Unit>, unit: impl Into<Unit>,
tag: impl Into<Tag>, span: impl Into<Span>,
) -> Expression { ) -> Expression {
RawExpression::Literal(Literal::Size(i.into(), unit.into())).tagged(tag.into()) RawExpression::Literal(Literal::Size(i.into(), unit.into())).spanned(span.into())
} }
pub(crate) fn synthetic_string(s: impl Into<String>) -> Expression { pub(crate) fn synthetic_string(s: impl Into<String>) -> Expression {
RawExpression::Synthetic(Synthetic::String(s.into())).tagged_unknown() RawExpression::Synthetic(Synthetic::String(s.into())).spanned_unknown()
} }
pub(crate) fn string(inner: impl Into<Tag>, outer: impl Into<Tag>) -> Expression { pub(crate) fn string(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
RawExpression::Literal(Literal::String(inner.into())).tagged(outer.into()) RawExpression::Literal(Literal::String(inner.into())).spanned(outer.into())
} }
pub(crate) fn path( pub(crate) fn path(
head: Expression, head: Expression,
tail: Vec<Tagged<impl Into<String>>>, tail: Vec<Spanned<impl Into<String>>>,
tag: impl Into<Tag>, span: impl Into<Span>,
) -> Expression { ) -> Expression {
let tail = tail.into_iter().map(|t| t.map(|s| s.into())).collect(); let tail = tail.into_iter().map(|t| t.map(|s| s.into())).collect();
RawExpression::Path(Box::new(Path::new(head, tail))).tagged(tag.into()) RawExpression::Path(Box::new(Path::new(head, tail))).spanned(span.into())
} }
pub(crate) fn dot_member(head: Expression, next: Tagged<impl Into<String>>) -> Expression { pub(crate) fn dot_member(head: Expression, next: Spanned<impl Into<String>>) -> Expression {
let Tagged { item, tag } = head; let Spanned { item, span } = head;
let new_tag = head.tag.until(next.tag); let new_span = head.span.until(next.span);
match item { match item {
RawExpression::Path(path) => { RawExpression::Path(path) => {
let (head, mut tail) = path.parts(); let (head, mut tail) = path.parts();
tail.push(next.map(|i| i.into())); tail.push(next.map(|i| i.into()));
Expression::path(head, tail, new_tag) Expression::path(head, tail, new_span)
} }
other => Expression::path(other.tagged(tag), vec![next], new_tag), other => Expression::path(other.spanned(span), vec![next], new_span),
} }
} }
pub(crate) fn infix( pub(crate) fn infix(
left: Expression, left: Expression,
op: Tagged<impl Into<Operator>>, op: Spanned<impl Into<Operator>>,
right: Expression, right: Expression,
) -> Expression { ) -> Expression {
let new_tag = left.tag.until(right.tag); let new_span = left.span.until(right.span);
RawExpression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right))) RawExpression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right)))
.tagged(new_tag) .spanned(new_span)
} }
pub(crate) fn file_path(path: impl Into<PathBuf>, outer: impl Into<Tag>) -> Expression { pub(crate) fn file_path(path: impl Into<PathBuf>, outer: impl Into<Span>) -> Expression {
RawExpression::FilePath(path.into()).tagged(outer) RawExpression::FilePath(path.into()).spanned(outer)
} }
pub(crate) fn list(list: Vec<Expression>, tag: impl Into<Tag>) -> Expression { pub(crate) fn list(list: Vec<Expression>, span: impl Into<Span>) -> Expression {
RawExpression::List(list).tagged(tag) RawExpression::List(list).spanned(span)
} }
pub(crate) fn bare(tag: impl Into<Tag>) -> Expression { pub(crate) fn bare(span: impl Into<Span>) -> Expression {
RawExpression::Literal(Literal::Bare).tagged(tag) RawExpression::Literal(Literal::Bare).spanned(span)
} }
pub(crate) fn pattern(tag: impl Into<Tag>) -> Expression { pub(crate) fn pattern(span: impl Into<Span>) -> Expression {
RawExpression::Literal(Literal::GlobPattern).tagged(tag.into()) RawExpression::Literal(Literal::GlobPattern).spanned(span.into())
} }
pub(crate) fn variable(inner: impl Into<Tag>, outer: impl Into<Tag>) -> Expression { pub(crate) fn variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
RawExpression::Variable(Variable::Other(inner.into())).tagged(outer) RawExpression::Variable(Variable::Other(inner.into())).spanned(outer)
} }
pub(crate) fn external_command(inner: impl Into<Tag>, outer: impl Into<Tag>) -> Expression { pub(crate) fn external_command(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).tagged(outer) RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).spanned(outer)
} }
pub(crate) fn it_variable(inner: impl Into<Tag>, outer: impl Into<Tag>) -> Expression { pub(crate) fn it_variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
RawExpression::Variable(Variable::It(inner.into())).tagged(outer) RawExpression::Variable(Variable::It(inner.into())).spanned(outer)
} }
} }
impl ToDebug for Expression { impl ToDebug for Expression {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
match self.item() { match &self.item {
RawExpression::Literal(l) => l.tagged(self.tag()).fmt_debug(f, source), RawExpression::Literal(l) => l.spanned(self.span).fmt_debug(f, source),
RawExpression::FilePath(p) => write!(f, "{}", p.display()), RawExpression::FilePath(p) => write!(f, "{}", p.display()),
RawExpression::ExternalWord => write!(f, "{}", self.tag().slice(source)), RawExpression::ExternalWord => write!(f, "{}", self.span.slice(source)),
RawExpression::Command(tag) => write!(f, "{}", tag.slice(source)), RawExpression::Command(tag) => write!(f, "{}", tag.slice(source)),
RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{:?}", s), RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{:?}", s),
RawExpression::Variable(Variable::It(_)) => write!(f, "$it"), RawExpression::Variable(Variable::It(_)) => write!(f, "$it"),
@ -281,8 +281,8 @@ impl ToDebug for Expression {
} }
} }
impl From<Tagged<Path>> for Expression { impl From<Spanned<Path>> for Expression {
fn from(path: Tagged<Path>) -> Expression { fn from(path: Spanned<Path>) -> Expression {
path.map(|p| RawExpression::Path(Box::new(p))) path.map(|p| RawExpression::Path(Box::new(p)))
} }
} }
@ -296,14 +296,14 @@ impl From<Tagged<Path>> for Expression {
pub enum Literal { pub enum Literal {
Number(Number), Number(Number),
Size(Number, Unit), Size(Number, Unit),
String(Tag), String(Span),
GlobPattern, GlobPattern,
Bare, Bare,
} }
impl std::fmt::Display for Tagged<Literal> { impl std::fmt::Display for Tagged<Literal> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", Tagged::new(self.tag, &self.item)) write!(f, "{}", Tagged::new(self.tag.clone(), &self.item))
} }
} }
@ -321,14 +321,14 @@ impl std::fmt::Display for Tagged<&Literal> {
} }
} }
impl ToDebug for Tagged<&Literal> { impl ToDebug for Spanned<&Literal> {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result { fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
match self.item() { match self.item {
Literal::Number(number) => write!(f, "{:?}", *number), Literal::Number(number) => write!(f, "{:?}", number),
Literal::Size(number, unit) => write!(f, "{:?}{:?}", *number, unit), Literal::Size(number, unit) => write!(f, "{:?}{:?}", *number, unit),
Literal::String(tag) => write!(f, "{}", tag.slice(source)), Literal::String(tag) => write!(f, "{}", tag.slice(source)),
Literal::GlobPattern => write!(f, "{}", self.tag().slice(source)), Literal::GlobPattern => write!(f, "{}", self.span.slice(source)),
Literal::Bare => write!(f, "{}", self.tag().slice(source)), Literal::Bare => write!(f, "{}", self.span.slice(source)),
} }
} }
} }
@ -347,15 +347,15 @@ impl Literal {
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum Variable { pub enum Variable {
It(Tag), It(Span),
Other(Tag), Other(Span),
} }
impl std::fmt::Display for Variable { impl std::fmt::Display for Variable {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self { match self {
Variable::It(_) => write!(f, "$it"), Variable::It(_) => write!(f, "$it"),
Variable::Other(tag) => write!(f, "${{ {}..{} }}", tag.span.start(), tag.span.end()), Variable::Other(span) => write!(f, "${{ {}..{} }}", span.start(), span.end()),
} }
} }
} }

View File

@ -6,15 +6,14 @@ use crate::parser::hir::syntax_shape::*;
use crate::parser::hir::TokensIterator; use crate::parser::hir::TokensIterator;
use crate::parser::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b}; use crate::parser::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b};
use crate::parser::TokenNode; use crate::parser::TokenNode;
use crate::{Span, Tag, Tagged, TaggedItem, Text}; use crate::{Span, SpannedItem, Tag, Tagged, Text};
use pretty_assertions::assert_eq; use pretty_assertions::assert_eq;
use std::fmt::Debug; use std::fmt::Debug;
use uuid::Uuid;
#[test] #[test]
fn test_parse_string() { fn test_parse_string() {
parse_tokens(StringShape, vec![b::string("hello")], |tokens| { parse_tokens(StringShape, vec![b::string("hello")], |tokens| {
hir::Expression::string(inner_string_tag(tokens[0].tag()), tokens[0].tag()) hir::Expression::string(inner_string_span(tokens[0].span()), tokens[0].span())
}); });
} }
@ -28,7 +27,7 @@ fn test_parse_path() {
let bare = tokens[2].expect_bare(); let bare = tokens[2].expect_bare();
hir::Expression::path( hir::Expression::path(
hir::Expression::it_variable(inner_var, outer_var), hir::Expression::it_variable(inner_var, outer_var),
vec!["cpu".tagged(bare)], vec!["cpu".spanned(bare)],
outer_var.until(bare), outer_var.until(bare),
) )
}, },
@ -50,7 +49,7 @@ fn test_parse_path() {
hir::Expression::path( hir::Expression::path(
hir::Expression::variable(inner_var, outer_var), hir::Expression::variable(inner_var, outer_var),
vec!["amount".tagged(amount), "max ghz".tagged(outer_max_ghz)], vec!["amount".spanned(amount), "max ghz".spanned(outer_max_ghz)],
outer_var.until(outer_max_ghz), outer_var.until(outer_max_ghz),
) )
}, },
@ -64,13 +63,16 @@ fn test_parse_command() {
vec![b::bare("ls"), b::sp(), b::pattern("*.txt")], vec![b::bare("ls"), b::sp(), b::pattern("*.txt")],
|tokens| { |tokens| {
let bare = tokens[0].expect_bare(); let bare = tokens[0].expect_bare();
let pat = tokens[2].tag(); let pat = tokens[2].span();
ClassifiedCommand::Internal(InternalCommand::new( ClassifiedCommand::Internal(InternalCommand::new(
"ls".to_string(), "ls".to_string(),
bare, Tag {
span: bare,
anchor: None,
},
hir::Call { hir::Call {
head: Box::new(hir::RawExpression::Command(bare).tagged(bare)), head: Box::new(hir::RawExpression::Command(bare).spanned(bare)),
positional: Some(vec![hir::Expression::pattern(pat)]), positional: Some(vec![hir::Expression::pattern(pat)]),
named: None, named: None,
}, },
@ -99,7 +101,7 @@ fn test_parse_command() {
hir::Expression::path( hir::Expression::path(
hir::Expression::variable(inner_var, outer_var), hir::Expression::variable(inner_var, outer_var),
vec!["amount".tagged(amount), "max ghz".tagged(outer_max_ghz)], vec!["amount".spanned(amount), "max ghz".spanned(outer_max_ghz)],
outer_var.until(outer_max_ghz), outer_var.until(outer_max_ghz),
) )
}, },
@ -112,11 +114,11 @@ fn parse_tokens<T: Eq + Debug>(
expected: impl FnOnce(Tagged<&[TokenNode]>) -> T, expected: impl FnOnce(Tagged<&[TokenNode]>) -> T,
) { ) {
let tokens = b::token_list(tokens); let tokens = b::token_list(tokens);
let (tokens, source) = b::build(test_origin(), tokens); let (tokens, source) = b::build(tokens);
ExpandContext::with_empty(&Text::from(source), |context| { ExpandContext::with_empty(&Text::from(source), |context| {
let tokens = tokens.expect_list(); let tokens = tokens.expect_list();
let mut iterator = TokensIterator::all(tokens.item, *context.tag()); let mut iterator = TokensIterator::all(tokens.item, *context.span());
let expr = expand_syntax(&shape, &mut iterator, &context); let expr = expand_syntax(&shape, &mut iterator, &context);
@ -132,13 +134,6 @@ fn parse_tokens<T: Eq + Debug>(
}) })
} }
fn test_origin() -> Uuid { fn inner_string_span(span: Span) -> Span {
Uuid::nil() Span::new(span.start() + 1, span.end() - 1)
}
fn inner_string_tag(tag: Tag) -> Tag {
Tag {
span: Span::new(tag.span.start() + 1, tag.span.end() - 1),
anchor: tag.anchor,
}
} }

View File

@ -1,6 +1,6 @@
use crate::parser::{hir::Expression, Operator}; use crate::parser::{hir::Expression, Operator};
use crate::prelude::*; use crate::prelude::*;
use crate::Tagged;
use derive_new::new; use derive_new::new;
use getset::Getters; use getset::Getters;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -12,7 +12,7 @@ use std::fmt;
#[get = "pub(crate)"] #[get = "pub(crate)"]
pub struct Binary { pub struct Binary {
left: Expression, left: Expression,
op: Tagged<Operator>, op: Spanned<Operator>,
right: Expression, right: Expression,
} }

View File

@ -6,17 +6,17 @@ use crate::parser::{
}, },
FlatShape, TokenNode, TokensIterator, FlatShape, TokenNode, TokensIterator,
}; };
use crate::{Tag, Tagged, Text}; use crate::{Span, Spanned, Text};
pub fn expand_external_tokens( pub fn expand_external_tokens(
token_nodes: &mut TokensIterator<'_>, token_nodes: &mut TokensIterator<'_>,
source: &Text, source: &Text,
) -> Result<Vec<Tagged<String>>, ShellError> { ) -> Result<Vec<Spanned<String>>, ShellError> {
let mut out: Vec<Tagged<String>> = vec![]; let mut out: Vec<Spanned<String>> = vec![];
loop { loop {
if let Some(tag) = expand_next_expression(token_nodes)? { if let Some(span) = expand_next_expression(token_nodes)? {
out.push(tag.tagged_string(source)); out.push(span.spanned_string(source));
} else { } else {
break; break;
} }
@ -37,7 +37,7 @@ impl ColorSyntax for ExternalTokensShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Self::Info { ) -> Self::Info {
loop { loop {
// Allow a space // Allow a space
@ -55,7 +55,7 @@ impl ColorSyntax for ExternalTokensShape {
pub fn expand_next_expression( pub fn expand_next_expression(
token_nodes: &mut TokensIterator<'_>, token_nodes: &mut TokensIterator<'_>,
) -> Result<Option<Tag>, ShellError> { ) -> Result<Option<Span>, ShellError> {
let first = token_nodes.next_non_ws(); let first = token_nodes.next_non_ws();
let first = match first { let first = match first {
@ -79,14 +79,14 @@ pub fn expand_next_expression(
Ok(Some(first.until(last))) Ok(Some(first.until(last)))
} }
fn triage_external_head(node: &TokenNode) -> Result<Tag, ShellError> { fn triage_external_head(node: &TokenNode) -> Result<Span, ShellError> {
Ok(match node { Ok(match node {
TokenNode::Token(token) => token.tag(), TokenNode::Token(token) => token.span,
TokenNode::Call(_call) => unimplemented!("TODO: OMG"), TokenNode::Call(_call) => unimplemented!("TODO: OMG"),
TokenNode::Nodes(_nodes) => unimplemented!("TODO: OMG"), TokenNode::Nodes(_nodes) => unimplemented!("TODO: OMG"),
TokenNode::Delimited(_delimited) => unimplemented!("TODO: OMG"), TokenNode::Delimited(_delimited) => unimplemented!("TODO: OMG"),
TokenNode::Pipeline(_pipeline) => unimplemented!("TODO: OMG"), TokenNode::Pipeline(_pipeline) => unimplemented!("TODO: OMG"),
TokenNode::Flag(flag) => flag.tag(), TokenNode::Flag(flag) => flag.span,
TokenNode::Whitespace(_whitespace) => { TokenNode::Whitespace(_whitespace) => {
unreachable!("This function should be called after next_non_ws()") unreachable!("This function should be called after next_non_ws()")
} }
@ -96,7 +96,7 @@ fn triage_external_head(node: &TokenNode) -> Result<Tag, ShellError> {
fn triage_continuation<'a, 'b>( fn triage_continuation<'a, 'b>(
nodes: &'a mut TokensIterator<'b>, nodes: &'a mut TokensIterator<'b>,
) -> Result<Option<Tag>, ShellError> { ) -> Result<Option<Span>, ShellError> {
let mut peeked = nodes.peek_any(); let mut peeked = nodes.peek_any();
let node = match peeked.node { let node = match peeked.node {
@ -116,7 +116,7 @@ fn triage_continuation<'a, 'b>(
} }
peeked.commit(); peeked.commit();
Ok(Some(node.tag())) Ok(Some(node.span()))
} }
#[must_use] #[must_use]
@ -137,7 +137,7 @@ impl ColorSyntax for ExternalExpression {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> ExternalExpressionResult { ) -> ExternalExpressionResult {
let atom = match expand_atom( let atom = match expand_atom(
token_nodes, token_nodes,
@ -146,7 +146,7 @@ impl ColorSyntax for ExternalExpression {
ExpansionRule::permissive(), ExpansionRule::permissive(),
) { ) {
Err(_) => unreachable!("TODO: separate infallible expand_atom"), Err(_) => unreachable!("TODO: separate infallible expand_atom"),
Ok(Tagged { Ok(Spanned {
item: AtomicToken::Eof { .. }, item: AtomicToken::Eof { .. },
.. ..
}) => return ExternalExpressionResult::Eof, }) => return ExternalExpressionResult::Eof,

View File

@ -9,7 +9,7 @@ use std::fmt;
)] )]
#[get = "pub(crate)"] #[get = "pub(crate)"]
pub struct ExternalCommand { pub struct ExternalCommand {
pub(crate) name: Tag, pub(crate) name: Span,
} }
impl ToDebug for ExternalCommand { impl ToDebug for ExternalCommand {

View File

@ -43,9 +43,13 @@ impl NamedArguments {
match switch { match switch {
None => self.named.insert(name.into(), NamedValue::AbsentSwitch), None => self.named.insert(name.into(), NamedValue::AbsentSwitch),
Some(flag) => self Some(flag) => self.named.insert(
.named name,
.insert(name, NamedValue::PresentSwitch(*flag.name())), NamedValue::PresentSwitch(Tag {
span: *flag.name(),
anchor: None,
}),
),
}; };
} }

View File

@ -1,6 +1,5 @@
use crate::parser::hir::Expression; use crate::parser::hir::Expression;
use crate::prelude::*; use crate::prelude::*;
use crate::Tagged;
use derive_new::new; use derive_new::new;
use getset::{Getters, MutGetters}; use getset::{Getters, MutGetters};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -24,7 +23,7 @@ use std::fmt;
pub struct Path { pub struct Path {
head: Expression, head: Expression,
#[get_mut = "pub(crate)"] #[get_mut = "pub(crate)"]
tail: Vec<Tagged<String>>, tail: Vec<Spanned<String>>,
} }
impl fmt::Display for Path { impl fmt::Display for Path {
@ -40,7 +39,7 @@ impl fmt::Display for Path {
} }
impl Path { impl Path {
pub(crate) fn parts(self) -> (Expression, Vec<Tagged<String>>) { pub(crate) fn parts(self) -> (Expression, Vec<Spanned<String>>) {
(self.head, self.tail) (self.head, self.tail)
} }
} }
@ -50,7 +49,7 @@ impl ToDebug for Path {
write!(f, "{}", self.head.debug(source))?; write!(f, "{}", self.head.debug(source))?;
for part in &self.tail { for part in &self.tail {
write!(f, ".{}", part.item())?; write!(f, ".{}", part.item)?;
} }
Ok(()) Ok(())

View File

@ -64,7 +64,7 @@ impl FallibleColorSyntax for SyntaxShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
match self { match self {
SyntaxShape::Any => { SyntaxShape::Any => {
@ -158,7 +158,7 @@ pub struct ExpandContext<'context> {
#[get = "pub(crate)"] #[get = "pub(crate)"]
registry: &'context CommandRegistry, registry: &'context CommandRegistry,
#[get = "pub(crate)"] #[get = "pub(crate)"]
tag: Tag, span: Span,
#[get = "pub(crate)"] #[get = "pub(crate)"]
source: &'context Text, source: &'context Text,
homedir: Option<PathBuf>, homedir: Option<PathBuf>,
@ -179,7 +179,7 @@ impl<'context> ExpandContext<'context> {
callback(ExpandContext { callback(ExpandContext {
registry: &registry, registry: &registry,
tag: Tag::unknown(), span: Span::unknown(),
source, source,
homedir: None, homedir: None,
}) })
@ -211,7 +211,7 @@ pub trait FallibleColorSyntax: std::fmt::Debug + Copy {
input: &Self::Input, input: &Self::Input,
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<Self::Info, ShellError>; ) -> Result<Self::Info, ShellError>;
} }
@ -224,7 +224,7 @@ pub trait ColorSyntax: std::fmt::Debug + Copy {
input: &Self::Input, input: &Self::Input,
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Self::Info; ) -> Self::Info;
} }
@ -240,7 +240,7 @@ pub trait ColorSyntax: std::fmt::Debug + Copy {
// input: &Self::Input, // input: &Self::Input,
// token_nodes: &'b mut TokensIterator<'a>, // token_nodes: &'b mut TokensIterator<'a>,
// context: &ExpandContext, // context: &ExpandContext,
// shapes: &mut Vec<Tagged<FlatShape>>, // shapes: &mut Vec<Spanned<FlatShape>>,
// ) -> Result<T::Info, ShellError> { // ) -> Result<T::Info, ShellError> {
// FallibleColorSyntax::color_syntax(self, input, token_nodes, context, shapes) // FallibleColorSyntax::color_syntax(self, input, token_nodes, context, shapes)
// } // }
@ -282,7 +282,7 @@ pub fn color_syntax<'a, 'b, T: ColorSyntax<Info = U, Input = ()>, U>(
shape: &T, shape: &T,
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> ((), U) { ) -> ((), U) {
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source)); trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source));
@ -310,7 +310,7 @@ pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax<Info = U, Input = ()
shape: &T, shape: &T,
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<U, ShellError> { ) -> Result<U, ShellError> {
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source)); trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source));
@ -344,7 +344,7 @@ pub fn color_syntax_with<'a, 'b, T: ColorSyntax<Info = U, Input = I>, U, I>(
input: &I, input: &I,
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> ((), U) { ) -> ((), U) {
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source)); trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source));
@ -373,7 +373,7 @@ pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax<Info = U, Input
input: &I, input: &I,
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<U, ShellError> { ) -> Result<U, ShellError> {
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source)); trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source));
@ -446,15 +446,15 @@ pub trait SkipSyntax: std::fmt::Debug + Copy {
enum BarePathState { enum BarePathState {
Initial, Initial,
Seen(Tag, Tag), Seen(Span, Span),
Error(ShellError), Error(ShellError),
} }
impl BarePathState { impl BarePathState {
pub fn seen(self, tag: Tag) -> BarePathState { pub fn seen(self, span: Span) -> BarePathState {
match self { match self {
BarePathState::Initial => BarePathState::Seen(tag, tag), BarePathState::Initial => BarePathState::Seen(span, span),
BarePathState::Seen(start, _) => BarePathState::Seen(start, tag), BarePathState::Seen(start, _) => BarePathState::Seen(start, span),
BarePathState::Error(err) => BarePathState::Error(err), BarePathState::Error(err) => BarePathState::Error(err),
} }
} }
@ -467,7 +467,7 @@ impl BarePathState {
} }
} }
pub fn into_bare(self) -> Result<Tag, ShellError> { pub fn into_bare(self) -> Result<Span, ShellError> {
match self { match self {
BarePathState::Initial => unreachable!("into_bare in initial state"), BarePathState::Initial => unreachable!("into_bare in initial state"),
BarePathState::Seen(start, end) => Ok(start.until(end)), BarePathState::Seen(start, end) => Ok(start.until(end)),
@ -480,7 +480,7 @@ pub fn expand_bare<'a, 'b>(
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext, _context: &ExpandContext,
predicate: impl Fn(&TokenNode) -> bool, predicate: impl Fn(&TokenNode) -> bool,
) -> Result<Tag, ShellError> { ) -> Result<Span, ShellError> {
let mut state = BarePathState::Initial; let mut state = BarePathState::Initial;
loop { loop {
@ -494,7 +494,7 @@ pub fn expand_bare<'a, 'b>(
} }
Some(node) => { Some(node) => {
if predicate(node) { if predicate(node) {
state = state.seen(node.tag()); state = state.seen(node.span());
peeked.commit(); peeked.commit();
} else { } else {
state = state.end(peeked, "word"); state = state.end(peeked, "word");
@ -511,19 +511,19 @@ pub fn expand_bare<'a, 'b>(
pub struct BarePathShape; pub struct BarePathShape;
impl ExpandSyntax for BarePathShape { impl ExpandSyntax for BarePathShape {
type Output = Tag; type Output = Span;
fn expand_syntax<'a, 'b>( fn expand_syntax<'a, 'b>(
&self, &self,
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
) -> Result<Tag, ShellError> { ) -> Result<Span, ShellError> {
expand_bare(token_nodes, context, |token| match token { expand_bare(token_nodes, context, |token| match token {
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::Bare, item: RawToken::Bare,
.. ..
}) })
| TokenNode::Token(Tagged { | TokenNode::Token(Spanned {
item: RawToken::Operator(Operator::Dot), item: RawToken::Operator(Operator::Dot),
.. ..
}) => true, }) => true,
@ -545,15 +545,15 @@ impl FallibleColorSyntax for BareShape {
input: &FlatShape, input: &FlatShape,
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext, _context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
token_nodes.peek_any_token(|token| match token { token_nodes.peek_any_token(|token| match token {
// If it's a bare token, color it // If it's a bare token, color it
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::Bare, item: RawToken::Bare,
tag, span,
}) => { }) => {
shapes.push((*input).tagged(tag)); shapes.push((*input).spanned(*span));
Ok(()) Ok(())
} }
@ -564,7 +564,7 @@ impl FallibleColorSyntax for BareShape {
} }
impl ExpandSyntax for BareShape { impl ExpandSyntax for BareShape {
type Output = Tagged<String>; type Output = Spanned<String>;
fn expand_syntax<'a, 'b>( fn expand_syntax<'a, 'b>(
&self, &self,
@ -574,12 +574,12 @@ impl ExpandSyntax for BareShape {
let peeked = token_nodes.peek_any().not_eof("word")?; let peeked = token_nodes.peek_any().not_eof("word")?;
match peeked.node { match peeked.node {
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::Bare, item: RawToken::Bare,
tag, span,
}) => { }) => {
peeked.commit(); peeked.commit();
Ok(tag.tagged_string(context.source)) Ok(span.spanned_string(context.source))
} }
other => Err(ShellError::type_error("word", other.tagged_type_name())), other => Err(ShellError::type_error("word", other.tagged_type_name())),
@ -608,9 +608,9 @@ impl TestSyntax for BareShape {
#[derive(Debug)] #[derive(Debug)]
pub enum CommandSignature { pub enum CommandSignature {
Internal(Tagged<Arc<Command>>), Internal(Spanned<Arc<Command>>),
LiteralExternal { outer: Tag, inner: Tag }, LiteralExternal { outer: Span, inner: Span },
External(Tag), External(Span),
Expression(hir::Expression), Expression(hir::Expression),
} }
@ -618,14 +618,15 @@ impl CommandSignature {
pub fn to_expression(&self) -> hir::Expression { pub fn to_expression(&self) -> hir::Expression {
match self { match self {
CommandSignature::Internal(command) => { CommandSignature::Internal(command) => {
let tag = command.tag; let span = command.span;
hir::RawExpression::Command(tag).tagged(tag) hir::RawExpression::Command(span).spanned(span)
} }
CommandSignature::LiteralExternal { outer, inner } => { CommandSignature::LiteralExternal { outer, inner } => {
hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*inner)).tagged(outer) hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*inner))
.spanned(*outer)
} }
CommandSignature::External(tag) => { CommandSignature::External(span) => {
hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*tag)).tagged(tag) hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*span)).spanned(*span)
} }
CommandSignature::Expression(expr) => expr.clone(), CommandSignature::Expression(expr) => expr.clone(),
} }
@ -645,7 +646,7 @@ impl FallibleColorSyntax for PipelineShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
// Make sure we're looking at a pipeline // Make sure we're looking at a pipeline
let Pipeline { parts, .. } = token_nodes.peek_any_token(|node| node.as_pipeline())?; let Pipeline { parts, .. } = token_nodes.peek_any_token(|node| node.as_pipeline())?;
@ -654,11 +655,11 @@ impl FallibleColorSyntax for PipelineShape {
for part in parts { for part in parts {
// If the pipeline part has a prefix `|`, emit a pipe to color // If the pipeline part has a prefix `|`, emit a pipe to color
if let Some(pipe) = part.pipe { if let Some(pipe) = part.pipe {
shapes.push(FlatShape::Pipe.tagged(pipe)); shapes.push(FlatShape::Pipe.spanned(pipe));
} }
// Create a new iterator containing the tokens in the pipeline part to color // Create a new iterator containing the tokens in the pipeline part to color
let mut token_nodes = TokensIterator::new(&part.tokens.item, part.tag, false); let mut token_nodes = TokensIterator::new(&part.tokens.item, part.span, false);
color_syntax(&MaybeSpaceShape, &mut token_nodes, context, shapes); color_syntax(&MaybeSpaceShape, &mut token_nodes, context, shapes);
color_syntax(&CommandShape, &mut token_nodes, context, shapes); color_syntax(&CommandShape, &mut token_nodes, context, shapes);
@ -685,7 +686,7 @@ impl ExpandSyntax for PipelineShape {
let commands: Result<Vec<_>, ShellError> = parts let commands: Result<Vec<_>, ShellError> = parts
.iter() .iter()
.map(|item| classify_command(&item, context, &source)) .map(|item| classify_command(item, context, &source))
.collect(); .collect();
Ok(ClassifiedPipeline { Ok(ClassifiedPipeline {
@ -711,7 +712,7 @@ impl FallibleColorSyntax for CommandHeadShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<CommandHeadKind, ShellError> { ) -> Result<CommandHeadKind, ShellError> {
// If we don't ultimately find a token, roll back // If we don't ultimately find a token, roll back
token_nodes.atomic(|token_nodes| { token_nodes.atomic(|token_nodes| {
@ -726,7 +727,7 @@ impl FallibleColorSyntax for CommandHeadShape {
match atom.item { match atom.item {
// If the head is an explicit external command (^cmd), color it as an external command // If the head is an explicit external command (^cmd), color it as an external command
AtomicToken::ExternalCommand { command } => { AtomicToken::ExternalCommand { command } => {
shapes.push(FlatShape::ExternalCommand.tagged(command)); shapes.push(FlatShape::ExternalCommand.spanned(command));
Ok(CommandHeadKind::External) Ok(CommandHeadKind::External)
} }
@ -736,19 +737,19 @@ impl FallibleColorSyntax for CommandHeadShape {
if context.registry.has(name) { if context.registry.has(name) {
// If the registry has the command, color it as an internal command // If the registry has the command, color it as an internal command
shapes.push(FlatShape::InternalCommand.tagged(text)); shapes.push(FlatShape::InternalCommand.spanned(text));
let command = context.registry.expect_command(name); let command = context.registry.expect_command(name);
Ok(CommandHeadKind::Internal(command.signature())) Ok(CommandHeadKind::Internal(command.signature()))
} else { } else {
// Otherwise, color it as an external command // Otherwise, color it as an external command
shapes.push(FlatShape::ExternalCommand.tagged(text)); shapes.push(FlatShape::ExternalCommand.spanned(text));
Ok(CommandHeadKind::External) Ok(CommandHeadKind::External)
} }
} }
// Otherwise, we're not actually looking at a command // Otherwise, we're not actually looking at a command
_ => Err(ShellError::syntax_error( _ => Err(ShellError::syntax_error(
"No command at the head".tagged(atom.tag), "No command at the head".tagged(atom.span),
)), )),
} }
}) })
@ -764,25 +765,25 @@ impl ExpandSyntax for CommandHeadShape {
context: &ExpandContext, context: &ExpandContext,
) -> Result<CommandSignature, ShellError> { ) -> Result<CommandSignature, ShellError> {
let node = let node =
parse_single_node_skipping_ws(token_nodes, "command head1", |token, token_tag, _| { parse_single_node_skipping_ws(token_nodes, "command head1", |token, token_span, _| {
Ok(match token { Ok(match token {
RawToken::ExternalCommand(tag) => CommandSignature::LiteralExternal { RawToken::ExternalCommand(span) => CommandSignature::LiteralExternal {
outer: token_tag, outer: token_span,
inner: tag, inner: span,
}, },
RawToken::Bare => { RawToken::Bare => {
let name = token_tag.slice(context.source); let name = token_span.slice(context.source);
if context.registry.has(name) { if context.registry.has(name) {
let command = context.registry.expect_command(name); let command = context.registry.expect_command(name);
CommandSignature::Internal(command.tagged(token_tag)) CommandSignature::Internal(command.spanned(token_span))
} else { } else {
CommandSignature::External(token_tag) CommandSignature::External(token_span)
} }
} }
_ => { _ => {
return Err(ShellError::type_error( return Err(ShellError::type_error(
"command head2", "command head2",
token.type_name().tagged(token_tag), token.type_name().tagged(token_span),
)) ))
} }
}) })
@ -813,7 +814,7 @@ impl ExpandSyntax for ClassifiedCommandShape {
match &head { match &head {
CommandSignature::Expression(expr) => Err(ShellError::syntax_error( CommandSignature::Expression(expr) => Err(ShellError::syntax_error(
"Unexpected expression in command position".tagged(expr.tag), "Unexpected expression in command position".tagged(expr.span),
)), )),
// If the command starts with `^`, treat it as an external command no matter what // If the command starts with `^`, treat it as an external command no matter what
@ -831,7 +832,7 @@ impl ExpandSyntax for ClassifiedCommandShape {
CommandSignature::Internal(command) => { CommandSignature::Internal(command) => {
let tail = let tail =
parse_command_tail(&command.signature(), &context, iterator, command.tag)?; parse_command_tail(&command.signature(), &context, iterator, command.span)?;
let (positional, named) = match tail { let (positional, named) = match tail {
None => (None, None), None => (None, None),
@ -846,7 +847,10 @@ impl ExpandSyntax for ClassifiedCommandShape {
Ok(ClassifiedCommand::Internal(InternalCommand::new( Ok(ClassifiedCommand::Internal(InternalCommand::new(
command.item.name().to_string(), command.item.name().to_string(),
command.tag, Tag {
span: command.span,
anchor: None,
},
call, call,
))) )))
} }
@ -866,7 +870,7 @@ impl FallibleColorSyntax for InternalCommandHeadShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext, _context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
let peeked_head = token_nodes.peek_non_ws().not_eof("command head4"); let peeked_head = token_nodes.peek_non_ws().not_eof("command head4");
@ -876,17 +880,17 @@ impl FallibleColorSyntax for InternalCommandHeadShape {
}; };
let _expr = match peeked_head.node { let _expr = match peeked_head.node {
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::Bare, item: RawToken::Bare,
tag, span,
}) => shapes.push(FlatShape::Word.tagged(tag)), }) => shapes.push(FlatShape::Word.spanned(*span)),
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::String(_inner_tag), item: RawToken::String(_inner_tag),
tag, span,
}) => shapes.push(FlatShape::String.tagged(tag)), }) => shapes.push(FlatShape::String.spanned(*span)),
_node => shapes.push(FlatShape::Error.tagged(peeked_head.node.tag())), _node => shapes.push(FlatShape::Error.spanned(peeked_head.node.span())),
}; };
peeked_head.commit(); peeked_head.commit();
@ -905,16 +909,16 @@ impl ExpandExpression for InternalCommandHeadShape {
let expr = match peeked_head.node { let expr = match peeked_head.node {
TokenNode::Token( TokenNode::Token(
spanned @ Tagged { spanned @ Spanned {
item: RawToken::Bare, item: RawToken::Bare,
.. ..
}, },
) => spanned.map(|_| hir::RawExpression::Literal(hir::Literal::Bare)), ) => spanned.map(|_| hir::RawExpression::Literal(hir::Literal::Bare)),
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::String(inner_tag), item: RawToken::String(inner_span),
tag, span,
}) => hir::RawExpression::Literal(hir::Literal::String(*inner_tag)).tagged(*tag), }) => hir::RawExpression::Literal(hir::Literal::String(*inner_span)).spanned(*span),
node => { node => {
return Err(ShellError::type_error( return Err(ShellError::type_error(
@ -932,24 +936,24 @@ impl ExpandExpression for InternalCommandHeadShape {
pub(crate) struct SingleError<'token> { pub(crate) struct SingleError<'token> {
expected: &'static str, expected: &'static str,
node: &'token Tagged<RawToken>, node: &'token Spanned<RawToken>,
} }
impl<'token> SingleError<'token> { impl<'token> SingleError<'token> {
pub(crate) fn error(&self) -> ShellError { pub(crate) fn error(&self) -> ShellError {
ShellError::type_error(self.expected, self.node.type_name().tagged(self.node.tag)) ShellError::type_error(self.expected, self.node.type_name().tagged(self.node.span))
} }
} }
fn parse_single_node<'a, 'b, T>( fn parse_single_node<'a, 'b, T>(
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
expected: &'static str, expected: &'static str,
callback: impl FnOnce(RawToken, Tag, SingleError) -> Result<T, ShellError>, callback: impl FnOnce(RawToken, Span, SingleError) -> Result<T, ShellError>,
) -> Result<T, ShellError> { ) -> Result<T, ShellError> {
token_nodes.peek_any_token(|node| match node { token_nodes.peek_any_token(|node| match node {
TokenNode::Token(token) => callback( TokenNode::Token(token) => callback(
token.item, token.item,
token.tag(), token.span,
SingleError { SingleError {
expected, expected,
node: token, node: token,
@ -963,14 +967,14 @@ fn parse_single_node<'a, 'b, T>(
fn parse_single_node_skipping_ws<'a, 'b, T>( fn parse_single_node_skipping_ws<'a, 'b, T>(
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
expected: &'static str, expected: &'static str,
callback: impl FnOnce(RawToken, Tag, SingleError) -> Result<T, ShellError>, callback: impl FnOnce(RawToken, Span, SingleError) -> Result<T, ShellError>,
) -> Result<T, ShellError> { ) -> Result<T, ShellError> {
let peeked = token_nodes.peek_non_ws().not_eof(expected)?; let peeked = token_nodes.peek_non_ws().not_eof(expected)?;
let expr = match peeked.node { let expr = match peeked.node {
TokenNode::Token(token) => callback( TokenNode::Token(token) => callback(
token.item, token.item,
token.tag(), token.span,
SingleError { SingleError {
expected, expected,
node: token, node: token,
@ -997,7 +1001,7 @@ impl FallibleColorSyntax for WhitespaceShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext, _context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
let peeked = token_nodes.peek_any().not_eof("whitespace"); let peeked = token_nodes.peek_any().not_eof("whitespace");
@ -1007,7 +1011,7 @@ impl FallibleColorSyntax for WhitespaceShape {
}; };
let _tag = match peeked.node { let _tag = match peeked.node {
TokenNode::Whitespace(tag) => shapes.push(FlatShape::Whitespace.tagged(tag)), TokenNode::Whitespace(span) => shapes.push(FlatShape::Whitespace.spanned(*span)),
_other => return Ok(()), _other => return Ok(()),
}; };
@ -1019,7 +1023,7 @@ impl FallibleColorSyntax for WhitespaceShape {
} }
impl ExpandSyntax for WhitespaceShape { impl ExpandSyntax for WhitespaceShape {
type Output = Tag; type Output = Span;
fn expand_syntax<'a, 'b>( fn expand_syntax<'a, 'b>(
&self, &self,
@ -1028,7 +1032,7 @@ impl ExpandSyntax for WhitespaceShape {
) -> Result<Self::Output, ShellError> { ) -> Result<Self::Output, ShellError> {
let peeked = token_nodes.peek_any().not_eof("whitespace")?; let peeked = token_nodes.peek_any().not_eof("whitespace")?;
let tag = match peeked.node { let span = match peeked.node {
TokenNode::Whitespace(tag) => *tag, TokenNode::Whitespace(tag) => *tag,
other => { other => {
@ -1041,7 +1045,7 @@ impl ExpandSyntax for WhitespaceShape {
peeked.commit(); peeked.commit();
Ok(tag) Ok(span)
} }
} }
@ -1094,7 +1098,7 @@ impl ColorSyntax for MaybeSpaceShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext, _context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Self::Info { ) -> Self::Info {
let peeked = token_nodes.peek_any().not_eof("whitespace"); let peeked = token_nodes.peek_any().not_eof("whitespace");
@ -1103,9 +1107,9 @@ impl ColorSyntax for MaybeSpaceShape {
Ok(peeked) => peeked, Ok(peeked) => peeked,
}; };
if let TokenNode::Whitespace(tag) = peeked.node { if let TokenNode::Whitespace(span) = peeked.node {
peeked.commit(); peeked.commit();
shapes.push(FlatShape::Whitespace.tagged(tag)); shapes.push(FlatShape::Whitespace.spanned(*span));
} }
} }
} }
@ -1122,14 +1126,14 @@ impl FallibleColorSyntax for SpaceShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext, _context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
let peeked = token_nodes.peek_any().not_eof("whitespace")?; let peeked = token_nodes.peek_any().not_eof("whitespace")?;
match peeked.node { match peeked.node {
TokenNode::Whitespace(tag) => { TokenNode::Whitespace(span) => {
peeked.commit(); peeked.commit();
shapes.push(FlatShape::Whitespace.tagged(tag)); shapes.push(FlatShape::Whitespace.spanned(*span));
Ok(()) Ok(())
} }
@ -1168,26 +1172,26 @@ pub fn spaced<T: ExpandExpression>(inner: T) -> SpacedExpression<T> {
SpacedExpression { inner } SpacedExpression { inner }
} }
fn expand_variable(tag: Tag, token_tag: Tag, source: &Text) -> hir::Expression { fn expand_variable(span: Span, token_span: Span, source: &Text) -> hir::Expression {
if tag.slice(source) == "it" { if span.slice(source) == "it" {
hir::Expression::it_variable(tag, token_tag) hir::Expression::it_variable(span, token_span)
} else { } else {
hir::Expression::variable(tag, token_tag) hir::Expression::variable(span, token_span)
} }
} }
fn classify_command( fn classify_command(
command: &Tagged<PipelineElement>, command: &Spanned<PipelineElement>,
context: &ExpandContext, context: &ExpandContext,
source: &Text, source: &Text,
) -> Result<ClassifiedCommand, ShellError> { ) -> Result<ClassifiedCommand, ShellError> {
let mut iterator = TokensIterator::new(&command.tokens.item, command.tag, true); let mut iterator = TokensIterator::new(&command.tokens.item, command.span, true);
let head = CommandHeadShape.expand_syntax(&mut iterator, &context)?; let head = CommandHeadShape.expand_syntax(&mut iterator, &context)?;
match &head { match &head {
CommandSignature::Expression(_) => Err(ShellError::syntax_error( CommandSignature::Expression(_) => Err(ShellError::syntax_error(
"Unexpected expression in command position".tagged(command.tag), "Unexpected expression in command position".tagged(command.span),
)), )),
// If the command starts with `^`, treat it as an external command no matter what // If the command starts with `^`, treat it as an external command no matter what
@ -1205,7 +1209,7 @@ fn classify_command(
CommandSignature::Internal(command) => { CommandSignature::Internal(command) => {
let tail = let tail =
parse_command_tail(&command.signature(), &context, &mut iterator, command.tag)?; parse_command_tail(&command.signature(), &context, &mut iterator, command.span)?;
let (positional, named) = match tail { let (positional, named) = match tail {
None => (None, None), None => (None, None),
@ -1220,7 +1224,10 @@ fn classify_command(
Ok(ClassifiedCommand::Internal(InternalCommand::new( Ok(ClassifiedCommand::Internal(InternalCommand::new(
command.name().to_string(), command.name().to_string(),
command.tag, Tag {
span: command.span,
anchor: None,
},
call, call,
))) )))
} }
@ -1239,7 +1246,7 @@ impl ColorSyntax for CommandShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) { ) {
let kind = color_fallible_syntax(&CommandHeadShape, token_nodes, context, shapes); let kind = color_fallible_syntax(&CommandHeadShape, token_nodes, context, shapes);

View File

@ -11,7 +11,7 @@ use crate::parser::{
parse::token_tree::Delimiter, parse::token_tree::Delimiter,
RawToken, TokenNode, RawToken, TokenNode,
}; };
use crate::{Tag, Tagged, TaggedItem}; use crate::{Span, Spanned, SpannedItem};
#[derive(Debug, Copy, Clone)] #[derive(Debug, Copy, Clone)]
pub struct AnyBlockShape; pub struct AnyBlockShape;
@ -25,7 +25,7 @@ impl FallibleColorSyntax for AnyBlockShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
let block = token_nodes.peek_non_ws().not_eof("block"); let block = token_nodes.peek_non_ws().not_eof("block");
@ -39,11 +39,11 @@ impl FallibleColorSyntax for AnyBlockShape {
match block { match block {
// If so, color it as a block // If so, color it as a block
Some((children, tags)) => { Some((children, spans)) => {
let mut token_nodes = TokensIterator::new(children.item, context.tag, false); let mut token_nodes = TokensIterator::new(children.item, context.span, false);
color_syntax_with( color_syntax_with(
&DelimitedShape, &DelimitedShape,
&(Delimiter::Brace, tags.0, tags.1), &(Delimiter::Brace, spans.0, spans.1),
&mut token_nodes, &mut token_nodes,
context, context,
shapes, shapes,
@ -72,11 +72,11 @@ impl ExpandExpression for AnyBlockShape {
match block { match block {
Some((block, _tags)) => { Some((block, _tags)) => {
let mut iterator = TokensIterator::new(&block.item, context.tag, false); let mut iterator = TokensIterator::new(&block.item, context.span, false);
let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?; let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?;
return Ok(hir::RawExpression::Block(exprs).tagged(block.tag)); return Ok(hir::RawExpression::Block(exprs).spanned(block.span));
} }
_ => {} _ => {}
} }
@ -97,7 +97,7 @@ impl FallibleColorSyntax for ShorthandBlock {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
// Try to find a shorthand head. If none found, fail // Try to find a shorthand head. If none found, fail
color_fallible_syntax(&ShorthandPath, token_nodes, context, shapes)?; color_fallible_syntax(&ShorthandPath, token_nodes, context, shapes)?;
@ -126,10 +126,10 @@ impl ExpandExpression for ShorthandBlock {
context: &ExpandContext, context: &ExpandContext,
) -> Result<hir::Expression, ShellError> { ) -> Result<hir::Expression, ShellError> {
let path = expand_expr(&ShorthandPath, token_nodes, context)?; let path = expand_expr(&ShorthandPath, token_nodes, context)?;
let start = path.tag; let start = path.span;
let expr = continue_expression(path, token_nodes, context)?; let expr = continue_expression(path, token_nodes, context)?;
let end = expr.tag; let end = expr.span;
let block = hir::RawExpression::Block(vec![expr]).tagged(start.until(end)); let block = hir::RawExpression::Block(vec![expr]).spanned(start.until(end));
Ok(block) Ok(block)
} }
@ -148,7 +148,7 @@ impl FallibleColorSyntax for ShorthandPath {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
token_nodes.atomic(|token_nodes| { token_nodes.atomic(|token_nodes| {
let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context, shapes); let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context, shapes);
@ -232,29 +232,29 @@ impl FallibleColorSyntax for ShorthandHeadShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext, _context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
// A shorthand path must not be at EOF // A shorthand path must not be at EOF
let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?; let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?;
match peeked.node { match peeked.node {
// If the head of a shorthand path is a bare token, it expands to `$it.bare` // If the head of a shorthand path is a bare token, it expands to `$it.bare`
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::Bare, item: RawToken::Bare,
tag, span,
}) => { }) => {
peeked.commit(); peeked.commit();
shapes.push(FlatShape::BareMember.tagged(tag)); shapes.push(FlatShape::BareMember.spanned(*span));
Ok(()) Ok(())
} }
// If the head of a shorthand path is a string, it expands to `$it."some string"` // If the head of a shorthand path is a string, it expands to `$it."some string"`
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::String(_), item: RawToken::String(_),
tag: outer, span: outer,
}) => { }) => {
peeked.commit(); peeked.commit();
shapes.push(FlatShape::StringMember.tagged(outer)); shapes.push(FlatShape::StringMember.spanned(*outer));
Ok(()) Ok(())
} }
@ -277,40 +277,40 @@ impl ExpandExpression for ShorthandHeadShape {
match peeked.node { match peeked.node {
// If the head of a shorthand path is a bare token, it expands to `$it.bare` // If the head of a shorthand path is a bare token, it expands to `$it.bare`
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::Bare, item: RawToken::Bare,
tag, span,
}) => { }) => {
// Commit the peeked token // Commit the peeked token
peeked.commit(); peeked.commit();
// Synthesize an `$it` expression // Synthesize an `$it` expression
let it = synthetic_it(token_nodes.anchor()); let it = synthetic_it();
// Make a path out of `$it` and the bare token as a member // Make a path out of `$it` and the bare token as a member
Ok(hir::Expression::path( Ok(hir::Expression::path(
it, it,
vec![tag.tagged_string(context.source)], vec![span.spanned_string(context.source)],
tag, *span,
)) ))
} }
// If the head of a shorthand path is a string, it expands to `$it."some string"` // If the head of a shorthand path is a string, it expands to `$it."some string"`
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::String(inner), item: RawToken::String(inner),
tag: outer, span: outer,
}) => { }) => {
// Commit the peeked token // Commit the peeked token
peeked.commit(); peeked.commit();
// Synthesize an `$it` expression // Synthesize an `$it` expression
let it = synthetic_it(token_nodes.anchor()); let it = synthetic_it();
// Make a path out of `$it` and the bare token as a member // Make a path out of `$it` and the bare token as a member
Ok(hir::Expression::path( Ok(hir::Expression::path(
it, it,
vec![inner.string(context.source).tagged(outer)], vec![inner.string(context.source).spanned(*outer)],
outer, *outer,
)) ))
} }
@ -325,6 +325,6 @@ impl ExpandExpression for ShorthandHeadShape {
} }
} }
fn synthetic_it(origin: uuid::Uuid) -> hir::Expression { fn synthetic_it() -> hir::Expression {
hir::Expression::it_variable(Tag::unknown_span(origin), Tag::unknown_span(origin)) hir::Expression::it_variable(Span::unknown(), Span::unknown())
} }

View File

@ -46,7 +46,7 @@ impl FallibleColorSyntax for AnyExpressionShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
// Look for an expression at the cursor // Look for an expression at the cursor
color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context, shapes)?; color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context, shapes)?;
@ -94,7 +94,7 @@ pub(crate) fn continue_expression(
pub(crate) fn continue_coloring_expression( pub(crate) fn continue_coloring_expression(
token_nodes: &mut TokensIterator<'_>, token_nodes: &mut TokensIterator<'_>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
// if there's not even one expression continuation, fail // if there's not even one expression continuation, fail
color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes)?; color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes)?;
@ -131,20 +131,23 @@ impl ExpandExpression for AnyExpressionStartShape {
return Ok(hir::Expression::size( return Ok(hir::Expression::size(
number.to_number(context.source), number.to_number(context.source),
unit.item, unit.item,
atom.tag, Tag {
span: atom.span,
anchor: None,
},
)) ))
} }
AtomicToken::SquareDelimited { nodes, .. } => { AtomicToken::SquareDelimited { nodes, .. } => {
expand_delimited_square(&nodes, atom.tag, context) expand_delimited_square(&nodes, atom.span.into(), context)
} }
AtomicToken::Word { .. } | AtomicToken::Dot { .. } => { AtomicToken::Word { .. } | AtomicToken::Dot { .. } => {
let end = expand_syntax(&BareTailShape, token_nodes, context)?; let end = expand_syntax(&BareTailShape, token_nodes, context)?;
Ok(hir::Expression::bare(atom.tag.until_option(end))) Ok(hir::Expression::bare(atom.span.until_option(end)))
} }
other => return other.tagged(atom.tag).into_hir(context, "expression"), other => return other.spanned(atom.span).into_hir(context, "expression"),
} }
} }
} }
@ -158,7 +161,7 @@ impl FallibleColorSyntax for AnyExpressionStartShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
let atom = token_nodes.spanned(|token_nodes| { let atom = token_nodes.spanned(|token_nodes| {
expand_atom( expand_atom(
@ -170,15 +173,15 @@ impl FallibleColorSyntax for AnyExpressionStartShape {
}); });
let atom = match atom { let atom = match atom {
Tagged { Spanned {
item: Err(_err), item: Err(_err),
tag, span,
} => { } => {
shapes.push(FlatShape::Error.tagged(tag)); shapes.push(FlatShape::Error.spanned(span));
return Ok(()); return Ok(());
} }
Tagged { Spanned {
item: Ok(value), .. item: Ok(value), ..
} => value, } => value,
}; };
@ -186,18 +189,18 @@ impl FallibleColorSyntax for AnyExpressionStartShape {
match atom.item { match atom.item {
AtomicToken::Size { number, unit } => shapes.push( AtomicToken::Size { number, unit } => shapes.push(
FlatShape::Size { FlatShape::Size {
number: number.tag, number: number.span.into(),
unit: unit.tag, unit: unit.span.into(),
} }
.tagged(atom.tag), .spanned(atom.span),
), ),
AtomicToken::SquareDelimited { nodes, tags } => { AtomicToken::SquareDelimited { nodes, spans } => {
color_delimited_square(tags, &nodes, atom.tag, context, shapes) color_delimited_square(spans, &nodes, atom.span.into(), context, shapes)
} }
AtomicToken::Word { .. } | AtomicToken::Dot { .. } => { AtomicToken::Word { .. } | AtomicToken::Dot { .. } => {
shapes.push(FlatShape::Word.tagged(atom.tag)); shapes.push(FlatShape::Word.spanned(atom.span));
} }
_ => atom.color_tokens(shapes), _ => atom.color_tokens(shapes),
@ -219,7 +222,7 @@ impl FallibleColorSyntax for BareTailShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
let len = shapes.len(); let len = shapes.len();
@ -267,19 +270,19 @@ impl FallibleColorSyntax for BareTailShape {
} }
impl ExpandSyntax for BareTailShape { impl ExpandSyntax for BareTailShape {
type Output = Option<Tag>; type Output = Option<Span>;
fn expand_syntax<'a, 'b>( fn expand_syntax<'a, 'b>(
&self, &self,
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
) -> Result<Option<Tag>, ShellError> { ) -> Result<Option<Span>, ShellError> {
let mut end: Option<Tag> = None; let mut end: Option<Span> = None;
loop { loop {
match expand_syntax(&BareShape, token_nodes, context) { match expand_syntax(&BareShape, token_nodes, context) {
Ok(bare) => { Ok(bare) => {
end = Some(bare.tag); end = Some(bare.span);
continue; continue;
} }

View File

@ -9,82 +9,83 @@ use crate::parser::{
DelimitedNode, Delimiter, FlatShape, RawToken, TokenNode, Unit, DelimitedNode, Delimiter, FlatShape, RawToken, TokenNode, Unit,
}; };
use crate::prelude::*; use crate::prelude::*;
use crate::{Span, Spanned};
#[derive(Debug)] #[derive(Debug)]
pub enum AtomicToken<'tokens> { pub enum AtomicToken<'tokens> {
Eof { Eof {
tag: Tag, span: Span,
}, },
Error { Error {
error: Tagged<ShellError>, error: Spanned<ShellError>,
}, },
Number { Number {
number: RawNumber, number: RawNumber,
}, },
Size { Size {
number: Tagged<RawNumber>, number: Spanned<RawNumber>,
unit: Tagged<Unit>, unit: Spanned<Unit>,
}, },
String { String {
body: Tag, body: Span,
}, },
ItVariable { ItVariable {
name: Tag, name: Span,
}, },
Variable { Variable {
name: Tag, name: Span,
}, },
ExternalCommand { ExternalCommand {
command: Tag, command: Span,
}, },
ExternalWord { ExternalWord {
text: Tag, text: Span,
}, },
GlobPattern { GlobPattern {
pattern: Tag, pattern: Span,
}, },
FilePath { FilePath {
path: Tag, path: Span,
}, },
Word { Word {
text: Tag, text: Span,
}, },
SquareDelimited { SquareDelimited {
tags: (Tag, Tag), spans: (Span, Span),
nodes: &'tokens Vec<TokenNode>, nodes: &'tokens Vec<TokenNode>,
}, },
ParenDelimited { ParenDelimited {
tags: (Tag, Tag), span: (Span, Span),
nodes: &'tokens Vec<TokenNode>, nodes: &'tokens Vec<TokenNode>,
}, },
BraceDelimited { BraceDelimited {
tags: (Tag, Tag), spans: (Span, Span),
nodes: &'tokens Vec<TokenNode>, nodes: &'tokens Vec<TokenNode>,
}, },
Pipeline { Pipeline {
pipe: Option<Tag>, pipe: Option<Span>,
elements: Tagged<&'tokens Vec<TokenNode>>, elements: Spanned<&'tokens Vec<TokenNode>>,
}, },
ShorthandFlag { ShorthandFlag {
name: Tag, name: Span,
}, },
LonghandFlag { LonghandFlag {
name: Tag, name: Span,
}, },
Dot { Dot {
text: Tag, text: Span,
}, },
Operator { Operator {
text: Tag, text: Span,
}, },
Whitespace { Whitespace {
text: Tag, text: Span,
}, },
} }
pub type TaggedAtomicToken<'tokens> = Tagged<AtomicToken<'tokens>>; pub type SpannedAtomicToken<'tokens> = Spanned<AtomicToken<'tokens>>;
impl<'tokens> TaggedAtomicToken<'tokens> { impl<'tokens> SpannedAtomicToken<'tokens> {
pub fn into_hir( pub fn into_hir(
&self, &self,
context: &ExpandContext, context: &ExpandContext,
@ -94,55 +95,55 @@ impl<'tokens> TaggedAtomicToken<'tokens> {
AtomicToken::Eof { .. } => { AtomicToken::Eof { .. } => {
return Err(ShellError::type_error( return Err(ShellError::type_error(
expected, expected,
"eof atomic token".tagged(self.tag), "eof atomic token".tagged(self.span),
)) ))
} }
AtomicToken::Error { .. } => { AtomicToken::Error { .. } => {
return Err(ShellError::type_error( return Err(ShellError::type_error(
expected, expected,
"eof atomic token".tagged(self.tag), "eof atomic token".tagged(self.span),
)) ))
} }
AtomicToken::Operator { .. } => { AtomicToken::Operator { .. } => {
return Err(ShellError::type_error( return Err(ShellError::type_error(
expected, expected,
"operator".tagged(self.tag), "operator".tagged(self.span),
)) ))
} }
AtomicToken::ShorthandFlag { .. } => { AtomicToken::ShorthandFlag { .. } => {
return Err(ShellError::type_error( return Err(ShellError::type_error(
expected, expected,
"shorthand flag".tagged(self.tag), "shorthand flag".tagged(self.span),
)) ))
} }
AtomicToken::LonghandFlag { .. } => { AtomicToken::LonghandFlag { .. } => {
return Err(ShellError::type_error(expected, "flag".tagged(self.tag))) return Err(ShellError::type_error(expected, "flag".tagged(self.span)))
} }
AtomicToken::Whitespace { .. } => { AtomicToken::Whitespace { .. } => {
return Err(ShellError::unimplemented("whitespace in AtomicToken")) return Err(ShellError::unimplemented("whitespace in AtomicToken"))
} }
AtomicToken::Dot { .. } => { AtomicToken::Dot { .. } => {
return Err(ShellError::type_error(expected, "dot".tagged(self.tag))) return Err(ShellError::type_error(expected, "dot".tagged(self.span)))
} }
AtomicToken::Number { number } => { AtomicToken::Number { number } => {
Expression::number(number.to_number(context.source), self.tag) Expression::number(number.to_number(context.source), self.span)
} }
AtomicToken::FilePath { path } => Expression::file_path( AtomicToken::FilePath { path } => Expression::file_path(
expand_file_path(path.slice(context.source), context), expand_file_path(path.slice(context.source), context),
self.tag, self.span,
), ),
AtomicToken::Size { number, unit } => { AtomicToken::Size { number, unit } => {
Expression::size(number.to_number(context.source), **unit, self.tag) Expression::size(number.to_number(context.source), **unit, self.span)
} }
AtomicToken::String { body } => Expression::string(body, self.tag), AtomicToken::String { body } => Expression::string(*body, self.span),
AtomicToken::ItVariable { name } => Expression::it_variable(name, self.tag), AtomicToken::ItVariable { name } => Expression::it_variable(*name, self.span),
AtomicToken::Variable { name } => Expression::variable(name, self.tag), AtomicToken::Variable { name } => Expression::variable(*name, self.span),
AtomicToken::ExternalCommand { command } => { AtomicToken::ExternalCommand { command } => {
Expression::external_command(command, self.tag) Expression::external_command(*command, self.span)
} }
AtomicToken::ExternalWord { text } => Expression::string(text, self.tag), AtomicToken::ExternalWord { text } => Expression::string(*text, self.span),
AtomicToken::GlobPattern { pattern } => Expression::pattern(pattern), AtomicToken::GlobPattern { pattern } => Expression::pattern(*pattern),
AtomicToken::Word { text } => Expression::string(text, text), AtomicToken::Word { text } => Expression::string(*text, *text),
AtomicToken::SquareDelimited { .. } => unimplemented!("into_hir"), AtomicToken::SquareDelimited { .. } => unimplemented!("into_hir"),
AtomicToken::ParenDelimited { .. } => unimplemented!("into_hir"), AtomicToken::ParenDelimited { .. } => unimplemented!("into_hir"),
AtomicToken::BraceDelimited { .. } => unimplemented!("into_hir"), AtomicToken::BraceDelimited { .. } => unimplemented!("into_hir"),
@ -150,6 +151,33 @@ impl<'tokens> TaggedAtomicToken<'tokens> {
}) })
} }
pub fn spanned_type_name(&self) -> Spanned<&'static str> {
match &self.item {
AtomicToken::Eof { .. } => "eof",
AtomicToken::Error { .. } => "error",
AtomicToken::Operator { .. } => "operator",
AtomicToken::ShorthandFlag { .. } => "shorthand flag",
AtomicToken::LonghandFlag { .. } => "flag",
AtomicToken::Whitespace { .. } => "whitespace",
AtomicToken::Dot { .. } => "dot",
AtomicToken::Number { .. } => "number",
AtomicToken::FilePath { .. } => "file path",
AtomicToken::Size { .. } => "size",
AtomicToken::String { .. } => "string",
AtomicToken::ItVariable { .. } => "$it",
AtomicToken::Variable { .. } => "variable",
AtomicToken::ExternalCommand { .. } => "external command",
AtomicToken::ExternalWord { .. } => "external word",
AtomicToken::GlobPattern { .. } => "file pattern",
AtomicToken::Word { .. } => "word",
AtomicToken::SquareDelimited { .. } => "array literal",
AtomicToken::ParenDelimited { .. } => "parenthesized expression",
AtomicToken::BraceDelimited { .. } => "block",
AtomicToken::Pipeline { .. } => "pipeline",
}
.spanned(self.span)
}
pub fn tagged_type_name(&self) -> Tagged<&'static str> { pub fn tagged_type_name(&self) -> Tagged<&'static str> {
match &self.item { match &self.item {
AtomicToken::Eof { .. } => "eof", AtomicToken::Eof { .. } => "eof",
@ -174,64 +202,64 @@ impl<'tokens> TaggedAtomicToken<'tokens> {
AtomicToken::BraceDelimited { .. } => "block", AtomicToken::BraceDelimited { .. } => "block",
AtomicToken::Pipeline { .. } => "pipeline", AtomicToken::Pipeline { .. } => "pipeline",
} }
.tagged(self.tag) .tagged(self.span)
} }
pub(crate) fn color_tokens(&self, shapes: &mut Vec<Tagged<FlatShape>>) { pub(crate) fn color_tokens(&self, shapes: &mut Vec<Spanned<FlatShape>>) {
match &self.item { match &self.item {
AtomicToken::Eof { .. } => {} AtomicToken::Eof { .. } => {}
AtomicToken::Error { .. } => return shapes.push(FlatShape::Error.tagged(self.tag)), AtomicToken::Error { .. } => return shapes.push(FlatShape::Error.spanned(self.span)),
AtomicToken::Operator { .. } => { AtomicToken::Operator { .. } => {
return shapes.push(FlatShape::Operator.tagged(self.tag)); return shapes.push(FlatShape::Operator.spanned(self.span));
} }
AtomicToken::ShorthandFlag { .. } => { AtomicToken::ShorthandFlag { .. } => {
return shapes.push(FlatShape::ShorthandFlag.tagged(self.tag)); return shapes.push(FlatShape::ShorthandFlag.spanned(self.span));
} }
AtomicToken::LonghandFlag { .. } => { AtomicToken::LonghandFlag { .. } => {
return shapes.push(FlatShape::Flag.tagged(self.tag)); return shapes.push(FlatShape::Flag.spanned(self.span));
} }
AtomicToken::Whitespace { .. } => { AtomicToken::Whitespace { .. } => {
return shapes.push(FlatShape::Whitespace.tagged(self.tag)); return shapes.push(FlatShape::Whitespace.spanned(self.span));
} }
AtomicToken::FilePath { .. } => return shapes.push(FlatShape::Path.tagged(self.tag)), AtomicToken::FilePath { .. } => return shapes.push(FlatShape::Path.spanned(self.span)),
AtomicToken::Dot { .. } => return shapes.push(FlatShape::Dot.tagged(self.tag)), AtomicToken::Dot { .. } => return shapes.push(FlatShape::Dot.spanned(self.span)),
AtomicToken::Number { AtomicToken::Number {
number: RawNumber::Decimal(_), number: RawNumber::Decimal(_),
} => { } => {
return shapes.push(FlatShape::Decimal.tagged(self.tag)); return shapes.push(FlatShape::Decimal.spanned(self.span));
} }
AtomicToken::Number { AtomicToken::Number {
number: RawNumber::Int(_), number: RawNumber::Int(_),
} => { } => {
return shapes.push(FlatShape::Int.tagged(self.tag)); return shapes.push(FlatShape::Int.spanned(self.span));
} }
AtomicToken::Size { number, unit } => { AtomicToken::Size { number, unit } => {
return shapes.push( return shapes.push(
FlatShape::Size { FlatShape::Size {
number: number.tag, number: number.span,
unit: unit.tag, unit: unit.span,
} }
.tagged(self.tag), .spanned(self.span),
); );
} }
AtomicToken::String { .. } => return shapes.push(FlatShape::String.tagged(self.tag)), AtomicToken::String { .. } => return shapes.push(FlatShape::String.spanned(self.span)),
AtomicToken::ItVariable { .. } => { AtomicToken::ItVariable { .. } => {
return shapes.push(FlatShape::ItVariable.tagged(self.tag)) return shapes.push(FlatShape::ItVariable.spanned(self.span))
} }
AtomicToken::Variable { .. } => { AtomicToken::Variable { .. } => {
return shapes.push(FlatShape::Variable.tagged(self.tag)) return shapes.push(FlatShape::Variable.spanned(self.span))
} }
AtomicToken::ExternalCommand { .. } => { AtomicToken::ExternalCommand { .. } => {
return shapes.push(FlatShape::ExternalCommand.tagged(self.tag)); return shapes.push(FlatShape::ExternalCommand.spanned(self.span));
} }
AtomicToken::ExternalWord { .. } => { AtomicToken::ExternalWord { .. } => {
return shapes.push(FlatShape::ExternalWord.tagged(self.tag)) return shapes.push(FlatShape::ExternalWord.spanned(self.span))
} }
AtomicToken::GlobPattern { .. } => { AtomicToken::GlobPattern { .. } => {
return shapes.push(FlatShape::GlobPattern.tagged(self.tag)) return shapes.push(FlatShape::GlobPattern.spanned(self.span))
} }
AtomicToken::Word { .. } => return shapes.push(FlatShape::Word.tagged(self.tag)), AtomicToken::Word { .. } => return shapes.push(FlatShape::Word.spanned(self.span)),
_ => return shapes.push(FlatShape::Error.tagged(self.tag)), _ => return shapes.push(FlatShape::Error.spanned(self.span)),
} }
} }
} }
@ -350,14 +378,14 @@ pub fn expand_atom<'me, 'content>(
expected: &'static str, expected: &'static str,
context: &ExpandContext, context: &ExpandContext,
rule: ExpansionRule, rule: ExpansionRule,
) -> Result<TaggedAtomicToken<'content>, ShellError> { ) -> Result<SpannedAtomicToken<'content>, ShellError> {
if token_nodes.at_end() { if token_nodes.at_end() {
match rule.allow_eof { match rule.allow_eof {
true => { true => {
return Ok(AtomicToken::Eof { return Ok(AtomicToken::Eof {
tag: Tag::unknown(), span: Span::unknown(),
} }
.tagged_unknown()) .spanned(Span::unknown()))
} }
false => return Err(ShellError::unexpected_eof("anything", Tag::unknown())), false => return Err(ShellError::unexpected_eof("anything", Tag::unknown())),
} }
@ -376,10 +404,10 @@ pub fn expand_atom<'me, 'content>(
Err(_) => {} Err(_) => {}
// But if it was a valid unit, we're done here // But if it was a valid unit, we're done here
Ok(Tagged { Ok(Spanned {
item: (number, unit), item: (number, unit),
tag, span,
}) => return Ok(AtomicToken::Size { number, unit }.tagged(tag)), }) => return Ok(AtomicToken::Size { number, unit }.spanned(span)),
}, },
} }
@ -388,7 +416,7 @@ pub fn expand_atom<'me, 'content>(
match expand_syntax(&BarePathShape, token_nodes, context) { match expand_syntax(&BarePathShape, token_nodes, context) {
// If we didn't find a bare path // If we didn't find a bare path
Err(_) => {} Err(_) => {}
Ok(tag) => { Ok(span) => {
let next = token_nodes.peek_any(); let next = token_nodes.peek_any();
match next.node { match next.node {
@ -397,7 +425,7 @@ pub fn expand_atom<'me, 'content>(
// word, and we should try to parse it as a glob next // word, and we should try to parse it as a glob next
} }
_ => return Ok(AtomicToken::Word { text: tag }.tagged(tag)), _ => return Ok(AtomicToken::Word { text: span }.spanned(span)),
} }
} }
} }
@ -407,7 +435,7 @@ pub fn expand_atom<'me, 'content>(
match expand_syntax(&BarePatternShape, token_nodes, context) { match expand_syntax(&BarePatternShape, token_nodes, context) {
// If we didn't find a bare path // If we didn't find a bare path
Err(_) => {} Err(_) => {}
Ok(tag) => return Ok(AtomicToken::GlobPattern { pattern: tag }.tagged(tag)), Ok(span) => return Ok(AtomicToken::GlobPattern { pattern: span }.spanned(span)),
} }
// The next token corresponds to at most one atomic token // The next token corresponds to at most one atomic token
@ -427,80 +455,84 @@ pub fn expand_atom<'me, 'content>(
return Ok(AtomicToken::Error { return Ok(AtomicToken::Error {
error: error.clone(), error: error.clone(),
} }
.tagged(error.tag)); .spanned(error.span));
} }
// [ ... ] // [ ... ]
TokenNode::Delimited(Tagged { TokenNode::Delimited(Spanned {
item: item:
DelimitedNode { DelimitedNode {
delimiter: Delimiter::Square, delimiter: Delimiter::Square,
tags, spans,
children, children,
}, },
tag, span,
}) => { }) => {
peeked.commit(); peeked.commit();
let span = *span;
return Ok(AtomicToken::SquareDelimited { return Ok(AtomicToken::SquareDelimited {
nodes: children, nodes: children,
tags: *tags, spans: *spans,
} }
.tagged(tag)); .spanned(span));
} }
TokenNode::Flag(Tagged { TokenNode::Flag(Spanned {
item: item:
Flag { Flag {
kind: FlagKind::Shorthand, kind: FlagKind::Shorthand,
name, name,
}, },
tag, span,
}) => { }) => {
peeked.commit(); peeked.commit();
return Ok(AtomicToken::ShorthandFlag { name: *name }.tagged(tag)); return Ok(AtomicToken::ShorthandFlag { name: *name }.spanned(*span));
} }
TokenNode::Flag(Tagged { TokenNode::Flag(Spanned {
item: item:
Flag { Flag {
kind: FlagKind::Longhand, kind: FlagKind::Longhand,
name, name,
}, },
tag, span,
}) => { }) => {
peeked.commit(); peeked.commit();
return Ok(AtomicToken::ShorthandFlag { name: *name }.tagged(tag)); return Ok(AtomicToken::ShorthandFlag { name: *name }.spanned(*span));
} }
// If we see whitespace, process the whitespace according to the whitespace // If we see whitespace, process the whitespace according to the whitespace
// handling rules // handling rules
TokenNode::Whitespace(tag) => match rule.whitespace { TokenNode::Whitespace(span) => match rule.whitespace {
// if whitespace is allowed, return a whitespace token // if whitespace is allowed, return a whitespace token
WhitespaceHandling::AllowWhitespace => { WhitespaceHandling::AllowWhitespace => {
peeked.commit(); peeked.commit();
return Ok(AtomicToken::Whitespace { text: *tag }.tagged(tag)); return Ok(AtomicToken::Whitespace { text: *span }.spanned(*span));
} }
// if whitespace is disallowed, return an error // if whitespace is disallowed, return an error
WhitespaceHandling::RejectWhitespace => { WhitespaceHandling::RejectWhitespace => {
return Err(ShellError::syntax_error( return Err(ShellError::syntax_error("Unexpected whitespace".tagged(
"Unexpected whitespace".tagged(tag), Tag {
)) span: *span,
anchor: None,
},
)))
} }
}, },
other => { other => {
let tag = peeked.node.tag(); let span = peeked.node.span();
peeked.commit(); peeked.commit();
return Ok(AtomicToken::Error { return Ok(AtomicToken::Error {
error: ShellError::type_error("token", other.tagged_type_name()).tagged(tag), error: ShellError::type_error("token", other.tagged_type_name()).spanned(span),
} }
.tagged(tag)); .spanned(span));
} }
} }
parse_single_node(token_nodes, expected, |token, token_tag, err| { parse_single_node(token_nodes, expected, |token, token_span, err| {
Ok(match token { Ok(match token {
// First, the error cases. Each error case corresponds to a expansion rule // First, the error cases. Each error case corresponds to a expansion rule
// flag that can be used to allow the case // flag that can be used to allow the case
@ -511,31 +543,38 @@ pub fn expand_atom<'me, 'content>(
RawToken::ExternalCommand(_) if !rule.allow_external_command => { RawToken::ExternalCommand(_) if !rule.allow_external_command => {
return Err(ShellError::type_error( return Err(ShellError::type_error(
expected, expected,
token.type_name().tagged(token_tag), token.type_name().tagged(Tag {
span: token_span,
anchor: None,
}),
)) ))
} }
// rule.allow_external_word // rule.allow_external_word
RawToken::ExternalWord if !rule.allow_external_word => { RawToken::ExternalWord if !rule.allow_external_word => {
return Err(ShellError::invalid_external_word(token_tag)) return Err(ShellError::invalid_external_word(Tag {
span: token_span,
anchor: None,
}))
} }
RawToken::Number(number) => AtomicToken::Number { number }.tagged(token_tag), RawToken::Number(number) => AtomicToken::Number { number }.spanned(token_span),
RawToken::Operator(_) => AtomicToken::Operator { text: token_tag }.tagged(token_tag), RawToken::Operator(_) => AtomicToken::Operator { text: token_span }.spanned(token_span),
RawToken::String(body) => AtomicToken::String { body }.tagged(token_tag), RawToken::String(body) => AtomicToken::String { body }.spanned(token_span),
RawToken::Variable(name) if name.slice(context.source) == "it" => { RawToken::Variable(name) if name.slice(context.source) == "it" => {
AtomicToken::ItVariable { name }.tagged(token_tag) AtomicToken::ItVariable { name }.spanned(token_span)
} }
RawToken::Variable(name) => AtomicToken::Variable { name }.tagged(token_tag), RawToken::Variable(name) => AtomicToken::Variable { name }.spanned(token_span),
RawToken::ExternalCommand(command) => { RawToken::ExternalCommand(command) => {
AtomicToken::ExternalCommand { command }.tagged(token_tag) AtomicToken::ExternalCommand { command }.spanned(token_span)
} }
RawToken::ExternalWord => { RawToken::ExternalWord => {
AtomicToken::ExternalWord { text: token_tag }.tagged(token_tag) AtomicToken::ExternalWord { text: token_span }.spanned(token_span)
} }
RawToken::GlobPattern => { RawToken::GlobPattern => AtomicToken::GlobPattern {
AtomicToken::GlobPattern { pattern: token_tag }.tagged(token_tag) pattern: token_span,
} }
RawToken::Bare => AtomicToken::Word { text: token_tag }.tagged(token_tag), .spanned(token_span),
RawToken::Bare => AtomicToken::Word { text: token_span }.spanned(token_span),
}) })
}) })
} }

View File

@ -6,27 +6,27 @@ use crate::prelude::*;
pub fn expand_delimited_square( pub fn expand_delimited_square(
children: &Vec<TokenNode>, children: &Vec<TokenNode>,
tag: Tag, span: Span,
context: &ExpandContext, context: &ExpandContext,
) -> Result<hir::Expression, ShellError> { ) -> Result<hir::Expression, ShellError> {
let mut tokens = TokensIterator::new(&children, tag, false); let mut tokens = TokensIterator::new(&children, span, false);
let list = expand_syntax(&ExpressionListShape, &mut tokens, context); let list = expand_syntax(&ExpressionListShape, &mut tokens, context);
Ok(hir::Expression::list(list?, tag)) Ok(hir::Expression::list(list?, Tag { span, anchor: None }))
} }
pub fn color_delimited_square( pub fn color_delimited_square(
(open, close): (Tag, Tag), (open, close): (Span, Span),
children: &Vec<TokenNode>, children: &Vec<TokenNode>,
tag: Tag, span: Span,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) { ) {
shapes.push(FlatShape::OpenDelimiter(Delimiter::Square).tagged(open)); shapes.push(FlatShape::OpenDelimiter(Delimiter::Square).spanned(open));
let mut tokens = TokensIterator::new(&children, tag, false); let mut tokens = TokensIterator::new(&children, span, false);
let _list = color_syntax(&ExpressionListShape, &mut tokens, context, shapes); let _list = color_syntax(&ExpressionListShape, &mut tokens, context, shapes);
shapes.push(FlatShape::CloseDelimiter(Delimiter::Square).tagged(close)); shapes.push(FlatShape::CloseDelimiter(Delimiter::Square).spanned(close));
} }
#[derive(Debug, Copy, Clone)] #[derive(Debug, Copy, Clone)]
@ -34,16 +34,16 @@ pub struct DelimitedShape;
impl ColorSyntax for DelimitedShape { impl ColorSyntax for DelimitedShape {
type Info = (); type Info = ();
type Input = (Delimiter, Tag, Tag); type Input = (Delimiter, Span, Span);
fn color_syntax<'a, 'b>( fn color_syntax<'a, 'b>(
&self, &self,
(delimiter, open, close): &(Delimiter, Tag, Tag), (delimiter, open, close): &(Delimiter, Span, Span),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Self::Info { ) -> Self::Info {
shapes.push(FlatShape::OpenDelimiter(*delimiter).tagged(open)); shapes.push(FlatShape::OpenDelimiter(*delimiter).spanned(*open));
color_syntax(&ExpressionListShape, token_nodes, context, shapes); color_syntax(&ExpressionListShape, token_nodes, context, shapes);
shapes.push(FlatShape::CloseDelimiter(*delimiter).tagged(close)); shapes.push(FlatShape::CloseDelimiter(*delimiter).spanned(*close));
} }
} }

View File

@ -17,7 +17,7 @@ impl FallibleColorSyntax for FilePathShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
let atom = expand_atom( let atom = expand_atom(
token_nodes, token_nodes,
@ -36,7 +36,7 @@ impl FallibleColorSyntax for FilePathShape {
| AtomicToken::String { .. } | AtomicToken::String { .. }
| AtomicToken::Number { .. } | AtomicToken::Number { .. }
| AtomicToken::Size { .. } => { | AtomicToken::Size { .. } => {
shapes.push(FlatShape::Path.tagged(atom.tag)); shapes.push(FlatShape::Path.spanned(atom.span));
} }
_ => atom.color_tokens(shapes), _ => atom.color_tokens(shapes),
@ -57,12 +57,12 @@ impl ExpandExpression for FilePathShape {
match atom.item { match atom.item {
AtomicToken::Word { text: body } | AtomicToken::String { body } => { AtomicToken::Word { text: body } | AtomicToken::String { body } => {
let path = expand_file_path(body.slice(context.source), context); let path = expand_file_path(body.slice(context.source), context);
return Ok(hir::Expression::file_path(path, atom.tag)); return Ok(hir::Expression::file_path(path, atom.span));
} }
AtomicToken::Number { .. } | AtomicToken::Size { .. } => { AtomicToken::Number { .. } | AtomicToken::Size { .. } => {
let path = atom.tag.slice(context.source); let path = atom.span.slice(context.source);
return Ok(hir::Expression::file_path(path, atom.tag)); return Ok(hir::Expression::file_path(path, atom.span));
} }
_ => return atom.into_hir(context, "file path"), _ => return atom.into_hir(context, "file path"),

View File

@ -9,7 +9,7 @@ use crate::parser::{
hir::TokensIterator, hir::TokensIterator,
FlatShape, FlatShape,
}; };
use crate::Tagged; use crate::Spanned;
#[derive(Debug, Copy, Clone)] #[derive(Debug, Copy, Clone)]
pub struct ExpressionListShape; pub struct ExpressionListShape;
@ -60,7 +60,7 @@ impl ColorSyntax for ExpressionListShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) { ) {
// We encountered a parsing error and will continue with simpler coloring ("backoff // We encountered a parsing error and will continue with simpler coloring ("backoff
// coloring mode") // coloring mode")
@ -126,7 +126,7 @@ impl ColorSyntax for BackoffColoringMode {
_input: &Self::Input, _input: &Self::Input,
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Self::Info { ) -> Self::Info {
loop { loop {
if token_nodes.at_end() { if token_nodes.at_end() {
@ -159,7 +159,7 @@ impl ColorSyntax for SimplestExpression {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) { ) {
let atom = expand_atom( let atom = expand_atom(
token_nodes, token_nodes,

View File

@ -18,20 +18,27 @@ impl ExpandExpression for NumberShape {
token_nodes: &mut TokensIterator<'_>, token_nodes: &mut TokensIterator<'_>,
context: &ExpandContext, context: &ExpandContext,
) -> Result<hir::Expression, ShellError> { ) -> Result<hir::Expression, ShellError> {
parse_single_node(token_nodes, "Number", |token, token_tag, err| { parse_single_node(token_nodes, "Number", |token, token_span, err| {
Ok(match token { Ok(match token {
RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()), RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()),
RawToken::Variable(tag) if tag.slice(context.source) == "it" => { RawToken::Variable(tag) if tag.slice(context.source) == "it" => {
hir::Expression::it_variable(tag, token_tag) hir::Expression::it_variable(tag, token_span)
} }
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag), RawToken::ExternalCommand(tag) => {
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)), hir::Expression::external_command(tag, token_span)
RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag), }
RawToken::ExternalWord => {
return Err(ShellError::invalid_external_word(Tag {
span: token_span,
anchor: None,
}))
}
RawToken::Variable(tag) => hir::Expression::variable(tag, token_span),
RawToken::Number(number) => { RawToken::Number(number) => {
hir::Expression::number(number.to_number(context.source), token_tag) hir::Expression::number(number.to_number(context.source), token_span)
} }
RawToken::Bare => hir::Expression::bare(token_tag), RawToken::Bare => hir::Expression::bare(token_span),
RawToken::String(tag) => hir::Expression::string(tag, token_tag), RawToken::String(tag) => hir::Expression::string(tag, token_span),
}) })
}) })
} }
@ -46,18 +53,18 @@ impl FallibleColorSyntax for NumberShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
let atom = token_nodes.spanned(|token_nodes| { let atom = token_nodes.spanned(|token_nodes| {
expand_atom(token_nodes, "number", context, ExpansionRule::permissive()) expand_atom(token_nodes, "number", context, ExpansionRule::permissive())
}); });
let atom = match atom { let atom = match atom {
Tagged { item: Err(_), tag } => { Spanned { item: Err(_), span } => {
shapes.push(FlatShape::Error.tagged(tag)); shapes.push(FlatShape::Error.spanned(span));
return Ok(()); return Ok(());
} }
Tagged { item: Ok(atom), .. } => atom, Spanned { item: Ok(atom), .. } => atom,
}; };
atom.color_tokens(shapes); atom.color_tokens(shapes);
@ -75,21 +82,25 @@ impl ExpandExpression for IntShape {
token_nodes: &mut TokensIterator<'_>, token_nodes: &mut TokensIterator<'_>,
context: &ExpandContext, context: &ExpandContext,
) -> Result<hir::Expression, ShellError> { ) -> Result<hir::Expression, ShellError> {
parse_single_node(token_nodes, "Integer", |token, token_tag, err| { parse_single_node(token_nodes, "Integer", |token, token_span, err| {
Ok(match token { Ok(match token {
RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()), RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()),
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)), RawToken::ExternalWord => {
RawToken::Variable(tag) if tag.slice(context.source) == "it" => { return Err(ShellError::invalid_external_word(token_span))
hir::Expression::it_variable(tag, token_tag)
} }
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag), RawToken::Variable(span) if span.slice(context.source) == "it" => {
RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag), hir::Expression::it_variable(span, token_span)
}
RawToken::ExternalCommand(span) => {
hir::Expression::external_command(span, token_span)
}
RawToken::Variable(span) => hir::Expression::variable(span, token_span),
RawToken::Number(number @ RawNumber::Int(_)) => { RawToken::Number(number @ RawNumber::Int(_)) => {
hir::Expression::number(number.to_number(context.source), token_tag) hir::Expression::number(number.to_number(context.source), token_span)
} }
RawToken::Number(_) => return Err(err.error()), RawToken::Number(_) => return Err(err.error()),
RawToken::Bare => hir::Expression::bare(token_tag), RawToken::Bare => hir::Expression::bare(token_span),
RawToken::String(tag) => hir::Expression::string(tag, token_tag), RawToken::String(span) => hir::Expression::string(span, token_span),
}) })
}) })
} }
@ -104,18 +115,18 @@ impl FallibleColorSyntax for IntShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
let atom = token_nodes.spanned(|token_nodes| { let atom = token_nodes.spanned(|token_nodes| {
expand_atom(token_nodes, "integer", context, ExpansionRule::permissive()) expand_atom(token_nodes, "integer", context, ExpansionRule::permissive())
}); });
let atom = match atom { let atom = match atom {
Tagged { item: Err(_), tag } => { Spanned { item: Err(_), span } => {
shapes.push(FlatShape::Error.tagged(tag)); shapes.push(FlatShape::Error.spanned(span));
return Ok(()); return Ok(());
} }
Tagged { item: Ok(atom), .. } => atom, Spanned { item: Ok(atom), .. } => atom,
}; };
atom.color_tokens(shapes); atom.color_tokens(shapes);

View File

@ -18,14 +18,14 @@ impl FallibleColorSyntax for PatternShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
token_nodes.atomic(|token_nodes| { token_nodes.atomic(|token_nodes| {
let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?; let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?;
match &atom.item { match &atom.item {
AtomicToken::GlobPattern { .. } | AtomicToken::Word { .. } => { AtomicToken::GlobPattern { .. } | AtomicToken::Word { .. } => {
shapes.push(FlatShape::GlobPattern.tagged(atom.tag)); shapes.push(FlatShape::GlobPattern.spanned(atom.span));
Ok(()) Ok(())
} }
@ -85,23 +85,23 @@ impl ExpandExpression for PatternShape {
pub struct BarePatternShape; pub struct BarePatternShape;
impl ExpandSyntax for BarePatternShape { impl ExpandSyntax for BarePatternShape {
type Output = Tag; type Output = Span;
fn expand_syntax<'a, 'b>( fn expand_syntax<'a, 'b>(
&self, &self,
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
) -> Result<Tag, ShellError> { ) -> Result<Span, ShellError> {
expand_bare(token_nodes, context, |token| match token { expand_bare(token_nodes, context, |token| match token {
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::Bare, item: RawToken::Bare,
.. ..
}) })
| TokenNode::Token(Tagged { | TokenNode::Token(Spanned {
item: RawToken::Operator(Operator::Dot), item: RawToken::Operator(Operator::Dot),
.. ..
}) })
| TokenNode::Token(Tagged { | TokenNode::Token(Spanned {
item: RawToken::GlobPattern, item: RawToken::GlobPattern,
.. ..
}) => true, }) => true,

View File

@ -18,7 +18,7 @@ impl FallibleColorSyntax for StringShape {
input: &FlatShape, input: &FlatShape,
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive()); let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive());
@ -28,10 +28,10 @@ impl FallibleColorSyntax for StringShape {
}; };
match atom { match atom {
Tagged { Spanned {
item: AtomicToken::String { .. }, item: AtomicToken::String { .. },
tag, span,
} => shapes.push((*input).tagged(tag)), } => shapes.push((*input).spanned(span)),
other => other.color_tokens(shapes), other => other.color_tokens(shapes),
} }
@ -45,26 +45,30 @@ impl ExpandExpression for StringShape {
token_nodes: &mut TokensIterator<'_>, token_nodes: &mut TokensIterator<'_>,
context: &ExpandContext, context: &ExpandContext,
) -> Result<hir::Expression, ShellError> { ) -> Result<hir::Expression, ShellError> {
parse_single_node(token_nodes, "String", |token, token_tag, _| { parse_single_node(token_nodes, "String", |token, token_span, _| {
Ok(match token { Ok(match token {
RawToken::GlobPattern => { RawToken::GlobPattern => {
return Err(ShellError::type_error( return Err(ShellError::type_error(
"String", "String",
"glob pattern".tagged(token_tag), "glob pattern".tagged(token_span),
)) ))
} }
RawToken::Operator(..) => { RawToken::Operator(..) => {
return Err(ShellError::type_error( return Err(ShellError::type_error(
"String", "String",
"operator".tagged(token_tag), "operator".tagged(token_span),
)) ))
} }
RawToken::Variable(tag) => expand_variable(tag, token_tag, &context.source), RawToken::Variable(span) => expand_variable(span, token_span, &context.source),
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag), RawToken::ExternalCommand(span) => {
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)), hir::Expression::external_command(span, token_span)
RawToken::Number(_) => hir::Expression::bare(token_tag), }
RawToken::Bare => hir::Expression::bare(token_tag), RawToken::ExternalWord => {
RawToken::String(tag) => hir::Expression::string(tag, token_tag), return Err(ShellError::invalid_external_word(token_span))
}
RawToken::Number(_) => hir::Expression::bare(token_span),
RawToken::Bare => hir::Expression::bare(token_span),
RawToken::String(span) => hir::Expression::string(span, token_span),
}) })
}) })
} }

View File

@ -14,24 +14,24 @@ use nom::IResult;
pub struct UnitShape; pub struct UnitShape;
impl ExpandSyntax for UnitShape { impl ExpandSyntax for UnitShape {
type Output = Tagged<(Tagged<RawNumber>, Tagged<Unit>)>; type Output = Spanned<(Spanned<RawNumber>, Spanned<Unit>)>;
fn expand_syntax<'a, 'b>( fn expand_syntax<'a, 'b>(
&self, &self,
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
) -> Result<Tagged<(Tagged<RawNumber>, Tagged<Unit>)>, ShellError> { ) -> Result<Spanned<(Spanned<RawNumber>, Spanned<Unit>)>, ShellError> {
let peeked = token_nodes.peek_any().not_eof("unit")?; let peeked = token_nodes.peek_any().not_eof("unit")?;
let tag = match peeked.node { let span = match peeked.node {
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::Bare, item: RawToken::Bare,
tag, span,
}) => tag, }) => span,
_ => return Err(peeked.type_error("unit")), _ => return Err(peeked.type_error("unit")),
}; };
let unit = unit_size(tag.slice(context.source), *tag); let unit = unit_size(span.slice(context.source), *span);
let (_, (number, unit)) = match unit { let (_, (number, unit)) = match unit {
Err(_) => { Err(_) => {
@ -44,11 +44,11 @@ impl ExpandSyntax for UnitShape {
}; };
peeked.commit(); peeked.commit();
Ok((number, unit).tagged(tag)) Ok((number, unit).spanned(*span))
} }
} }
fn unit_size(input: &str, bare_tag: Tag) -> IResult<&str, (Tagged<RawNumber>, Tagged<Unit>)> { fn unit_size(input: &str, bare_span: Span) -> IResult<&str, (Spanned<RawNumber>, Spanned<Unit>)> {
let (input, digits) = digit1(input)?; let (input, digits) = digit1(input)?;
let (input, dot) = opt(tag("."))(input)?; let (input, dot) = opt(tag("."))(input)?;
@ -58,20 +58,18 @@ fn unit_size(input: &str, bare_tag: Tag) -> IResult<&str, (Tagged<RawNumber>, Ta
let (input, rest) = digit1(input)?; let (input, rest) = digit1(input)?;
( (
input, input,
RawNumber::decimal(( RawNumber::decimal(Span::new(
bare_tag.span.start(), bare_span.start(),
bare_tag.span.start() + digits.len() + dot.len() + rest.len(), bare_span.start() + digits.len() + dot.len() + rest.len(),
bare_tag.anchor,
)), )),
) )
} }
None => ( None => (
input, input,
RawNumber::int(( RawNumber::int(Span::new(
bare_tag.span.start(), bare_span.start(),
bare_tag.span.start() + digits.len(), bare_span.start() + digits.len(),
bare_tag.anchor,
)), )),
), ),
}; };
@ -85,12 +83,10 @@ fn unit_size(input: &str, bare_tag: Tag) -> IResult<&str, (Tagged<RawNumber>, Ta
value(Unit::MB, alt((tag("PB"), tag("pb"), tag("Pb")))), value(Unit::MB, alt((tag("PB"), tag("pb"), tag("Pb")))),
)))(input)?; )))(input)?;
let start_span = number.tag.span.end(); let start_span = number.span.end();
let unit_tag = Tag::new( Ok((
bare_tag.anchor, input,
Span::from((start_span, bare_tag.span.end())), (number, unit.spanned(Span::new(start_span, bare_span.end()))),
); ))
Ok((input, (number, unit.tagged(unit_tag))))
} }

View File

@ -23,9 +23,9 @@ impl ExpandExpression for VariablePathShape {
// 2. consume the next token as a member and push it onto tail // 2. consume the next token as a member and push it onto tail
let head = expand_expr(&VariableShape, token_nodes, context)?; let head = expand_expr(&VariableShape, token_nodes, context)?;
let start = head.tag(); let start = head.span;
let mut end = start; let mut end = start;
let mut tail: Vec<Tagged<String>> = vec![]; let mut tail: Vec<Spanned<String>> = vec![];
loop { loop {
match DotShape.skip(token_nodes, context) { match DotShape.skip(token_nodes, context) {
@ -34,9 +34,9 @@ impl ExpandExpression for VariablePathShape {
} }
let syntax = expand_syntax(&MemberShape, token_nodes, context)?; let syntax = expand_syntax(&MemberShape, token_nodes, context)?;
let member = syntax.to_tagged_string(context.source); let member = syntax.to_spanned_string(context.source);
end = member.tag(); end = member.span;
tail.push(member); tail.push(member);
} }
@ -53,7 +53,7 @@ impl FallibleColorSyntax for VariablePathShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
token_nodes.atomic(|token_nodes| { token_nodes.atomic(|token_nodes| {
// If the head of the token stream is not a variable, fail // If the head of the token stream is not a variable, fail
@ -97,7 +97,7 @@ impl FallibleColorSyntax for PathTailShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
token_nodes.atomic(|token_nodes| loop { token_nodes.atomic(|token_nodes| loop {
let result = color_fallible_syntax_with( let result = color_fallible_syntax_with(
@ -120,13 +120,13 @@ impl FallibleColorSyntax for PathTailShape {
} }
impl ExpandSyntax for PathTailShape { impl ExpandSyntax for PathTailShape {
type Output = (Vec<Tagged<String>>, Tag); type Output = (Vec<Spanned<String>>, Span);
fn expand_syntax<'a, 'b>( fn expand_syntax<'a, 'b>(
&self, &self,
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
) -> Result<Self::Output, ShellError> { ) -> Result<Self::Output, ShellError> {
let mut end: Option<Tag> = None; let mut end: Option<Span> = None;
let mut tail = vec![]; let mut tail = vec![];
loop { loop {
@ -136,17 +136,21 @@ impl ExpandSyntax for PathTailShape {
} }
let syntax = expand_syntax(&MemberShape, token_nodes, context)?; let syntax = expand_syntax(&MemberShape, token_nodes, context)?;
let member = syntax.to_tagged_string(context.source); let member = syntax.to_spanned_string(context.source);
end = Some(member.tag()); end = Some(member.span);
tail.push(member); tail.push(member);
} }
match end { match end {
None => { None => {
return Err(ShellError::type_error( return Err(ShellError::type_error("path tail", {
"path tail", let typed_span = token_nodes.typed_span_at_cursor();
token_nodes.typed_tag_at_cursor(),
)) Tagged {
tag: typed_span.span.into(),
item: typed_span.item,
}
}))
} }
Some(end) => Ok((tail, end)), Some(end) => Ok((tail, end)),
@ -156,8 +160,8 @@ impl ExpandSyntax for PathTailShape {
#[derive(Debug)] #[derive(Debug)]
pub enum ExpressionContinuation { pub enum ExpressionContinuation {
DotSuffix(Tag, Tagged<String>), DotSuffix(Span, Spanned<String>),
InfixSuffix(Tagged<Operator>, Expression), InfixSuffix(Spanned<Operator>, Expression),
} }
/// An expression continuation /// An expression continuation
@ -179,7 +183,7 @@ impl ExpandSyntax for ExpressionContinuationShape {
// If a `.` was matched, it's a `Path`, and we expect a `Member` next // If a `.` was matched, it's a `Path`, and we expect a `Member` next
Ok(dot) => { Ok(dot) => {
let syntax = expand_syntax(&MemberShape, token_nodes, context)?; let syntax = expand_syntax(&MemberShape, token_nodes, context)?;
let member = syntax.to_tagged_string(context.source); let member = syntax.to_spanned_string(context.source);
Ok(ExpressionContinuation::DotSuffix(dot, member)) Ok(ExpressionContinuation::DotSuffix(dot, member))
} }
@ -209,7 +213,7 @@ impl FallibleColorSyntax for ExpressionContinuationShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<ContinuationInfo, ShellError> { ) -> Result<ContinuationInfo, ShellError> {
token_nodes.atomic(|token_nodes| { token_nodes.atomic(|token_nodes| {
// Try to expand a `.` // Try to expand a `.`
@ -290,7 +294,7 @@ impl FallibleColorSyntax for VariableShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
let atom = expand_atom( let atom = expand_atom(
token_nodes, token_nodes,
@ -306,11 +310,11 @@ impl FallibleColorSyntax for VariableShape {
match &atom.item { match &atom.item {
AtomicToken::Variable { .. } => { AtomicToken::Variable { .. } => {
shapes.push(FlatShape::Variable.tagged(atom.tag)); shapes.push(FlatShape::Variable.spanned(atom.span));
Ok(()) Ok(())
} }
AtomicToken::ItVariable { .. } => { AtomicToken::ItVariable { .. } => {
shapes.push(FlatShape::ItVariable.tagged(atom.tag)); shapes.push(FlatShape::ItVariable.spanned(atom.span));
Ok(()) Ok(())
} }
_ => Err(ShellError::type_error("variable", atom.tagged_type_name())), _ => Err(ShellError::type_error("variable", atom.tagged_type_name())),
@ -320,50 +324,53 @@ impl FallibleColorSyntax for VariableShape {
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
pub enum Member { pub enum Member {
String(/* outer */ Tag, /* inner */ Tag), String(/* outer */ Span, /* inner */ Span),
Bare(Tag), Bare(Span),
} }
impl Member { impl Member {
pub(crate) fn to_expr(&self) -> hir::Expression { pub(crate) fn to_expr(&self) -> hir::Expression {
match self { match self {
Member::String(outer, inner) => hir::Expression::string(inner, outer), Member::String(outer, inner) => hir::Expression::string(*inner, *outer),
Member::Bare(tag) => hir::Expression::string(tag, tag), Member::Bare(span) => hir::Expression::string(*span, *span),
} }
} }
pub(crate) fn tag(&self) -> Tag { pub(crate) fn span(&self) -> Span {
match self { match self {
Member::String(outer, _inner) => *outer, Member::String(outer, _inner) => *outer,
Member::Bare(tag) => *tag, Member::Bare(span) => *span,
} }
} }
pub(crate) fn to_tagged_string(&self, source: &str) -> Tagged<String> { pub(crate) fn to_spanned_string(&self, source: &str) -> Spanned<String> {
match self { match self {
Member::String(outer, inner) => inner.string(source).tagged(outer), Member::String(outer, inner) => inner.string(source).spanned(*outer),
Member::Bare(tag) => tag.tagged_string(source), Member::Bare(span) => span.spanned_string(source),
} }
} }
pub(crate) fn tagged_type_name(&self) -> Tagged<&'static str> { pub(crate) fn tagged_type_name(&self) -> Tagged<&'static str> {
match self { match self {
Member::String(outer, _inner) => "string".tagged(outer), Member::String(outer, _inner) => "string".tagged(outer),
Member::Bare(tag) => "word".tagged(tag), Member::Bare(span) => "word".tagged(Tag {
span: *span,
anchor: None,
}),
} }
} }
} }
enum ColumnPathState { enum ColumnPathState {
Initial, Initial,
LeadingDot(Tag), LeadingDot(Span),
Dot(Tag, Vec<Member>, Tag), Dot(Span, Vec<Member>, Span),
Member(Tag, Vec<Member>), Member(Span, Vec<Member>),
Error(ShellError), Error(ShellError),
} }
impl ColumnPathState { impl ColumnPathState {
pub fn dot(self, dot: Tag) -> ColumnPathState { pub fn dot(self, dot: Span) -> ColumnPathState {
match self { match self {
ColumnPathState::Initial => ColumnPathState::LeadingDot(dot), ColumnPathState::Initial => ColumnPathState::LeadingDot(dot),
ColumnPathState::LeadingDot(_) => { ColumnPathState::LeadingDot(_) => {
@ -379,13 +386,13 @@ impl ColumnPathState {
pub fn member(self, member: Member) -> ColumnPathState { pub fn member(self, member: Member) -> ColumnPathState {
match self { match self {
ColumnPathState::Initial => ColumnPathState::Member(member.tag(), vec![member]), ColumnPathState::Initial => ColumnPathState::Member(member.span(), vec![member]),
ColumnPathState::LeadingDot(tag) => { ColumnPathState::LeadingDot(tag) => {
ColumnPathState::Member(tag.until(member.tag()), vec![member]) ColumnPathState::Member(tag.until(member.span()), vec![member])
} }
ColumnPathState::Dot(tag, mut tags, _) => { ColumnPathState::Dot(tag, mut tags, _) => {
ColumnPathState::Member(tag.until(member.tag()), { ColumnPathState::Member(tag.until(member.span()), {
tags.push(member); tags.push(member);
tags tags
}) })
@ -449,7 +456,7 @@ impl FallibleColorSyntax for ColumnPathShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
// If there's not even one member shape, fail // If there's not even one member shape, fail
color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?; color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
@ -513,7 +520,7 @@ impl FallibleColorSyntax for MemberShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
let bare = color_fallible_syntax_with( let bare = color_fallible_syntax_with(
&BareShape, &BareShape,
@ -552,7 +559,7 @@ impl ExpandSyntax for MemberShape {
let bare = BareShape.test(token_nodes, context); let bare = BareShape.test(token_nodes, context);
if let Some(peeked) = bare { if let Some(peeked) = bare {
let node = peeked.not_eof("column")?.commit(); let node = peeked.not_eof("column")?.commit();
return Ok(Member::Bare(node.tag())); return Ok(Member::Bare(node.span()));
} }
let string = StringShape.test(token_nodes, context); let string = StringShape.test(token_nodes, context);
@ -583,14 +590,14 @@ impl FallibleColorSyntax for ColorableDotShape {
input: &FlatShape, input: &FlatShape,
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext, _context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
let peeked = token_nodes.peek_any().not_eof("dot")?; let peeked = token_nodes.peek_any().not_eof("dot")?;
match peeked.node { match peeked.node {
node if node.is_dot() => { node if node.is_dot() => {
peeked.commit(); peeked.commit();
shapes.push((*input).tagged(node.tag())); shapes.push((*input).spanned(node.span()));
Ok(()) Ok(())
} }
@ -612,20 +619,20 @@ impl SkipSyntax for DotShape {
} }
impl ExpandSyntax for DotShape { impl ExpandSyntax for DotShape {
type Output = Tag; type Output = Span;
fn expand_syntax<'a, 'b>( fn expand_syntax<'a, 'b>(
&self, &self,
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext, _context: &ExpandContext,
) -> Result<Self::Output, ShellError> { ) -> Result<Self::Output, ShellError> {
parse_single_node(token_nodes, "dot", |token, token_tag, _| { parse_single_node(token_nodes, "dot", |token, token_span, _| {
Ok(match token { Ok(match token {
RawToken::Operator(Operator::Dot) => token_tag, RawToken::Operator(Operator::Dot) => token_span,
_ => { _ => {
return Err(ShellError::type_error( return Err(ShellError::type_error(
"dot", "dot",
token.type_name().tagged(token_tag), token.type_name().tagged(token_span),
)) ))
} }
}) })
@ -645,7 +652,7 @@ impl FallibleColorSyntax for InfixShape {
_input: &(), _input: &(),
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
outer_shapes: &mut Vec<Tagged<FlatShape>>, outer_shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> { ) -> Result<(), ShellError> {
let checkpoint = token_nodes.checkpoint(); let checkpoint = token_nodes.checkpoint();
let mut shapes = vec![]; let mut shapes = vec![];
@ -657,18 +664,18 @@ impl FallibleColorSyntax for InfixShape {
parse_single_node( parse_single_node(
checkpoint.iterator, checkpoint.iterator,
"infix operator", "infix operator",
|token, token_tag, _| { |token, token_span, _| {
match token { match token {
// If it's an operator (and not `.`), it's a match // If it's an operator (and not `.`), it's a match
RawToken::Operator(operator) if operator != Operator::Dot => { RawToken::Operator(operator) if operator != Operator::Dot => {
shapes.push(FlatShape::Operator.tagged(token_tag)); shapes.push(FlatShape::Operator.spanned(token_span));
Ok(()) Ok(())
} }
// Otherwise, it's not a match // Otherwise, it's not a match
_ => Err(ShellError::type_error( _ => Err(ShellError::type_error(
"infix operator", "infix operator",
token.type_name().tagged(token_tag), token.type_name().tagged(token_span),
)), )),
} }
}, },
@ -684,7 +691,7 @@ impl FallibleColorSyntax for InfixShape {
} }
impl ExpandSyntax for InfixShape { impl ExpandSyntax for InfixShape {
type Output = (Tag, Tagged<Operator>, Tag); type Output = (Span, Spanned<Operator>, Span);
fn expand_syntax<'a, 'b>( fn expand_syntax<'a, 'b>(
&self, &self,
@ -700,18 +707,18 @@ impl ExpandSyntax for InfixShape {
let operator = parse_single_node( let operator = parse_single_node(
checkpoint.iterator, checkpoint.iterator,
"infix operator", "infix operator",
|token, token_tag, _| { |token, token_span, _| {
Ok(match token { Ok(match token {
// If it's an operator (and not `.`), it's a match // If it's an operator (and not `.`), it's a match
RawToken::Operator(operator) if operator != Operator::Dot => { RawToken::Operator(operator) if operator != Operator::Dot => {
operator.tagged(token_tag) operator.spanned(token_span)
} }
// Otherwise, it's not a match // Otherwise, it's not a match
_ => { _ => {
return Err(ShellError::type_error( return Err(ShellError::type_error(
"infix operator", "infix operator",
token.type_name().tagged(token_tag), token.type_name().tagged(token_span),
)) ))
} }
}) })

View File

@ -1,5 +1,5 @@
use crate::parser::{Delimiter, Flag, FlagKind, Operator, RawNumber, RawToken, TokenNode}; use crate::parser::{Delimiter, Flag, FlagKind, Operator, RawNumber, RawToken, TokenNode};
use crate::{Tag, Tagged, TaggedItem, Text}; use crate::{Span, Spanned, SpannedItem, Text};
#[derive(Debug, Copy, Clone)] #[derive(Debug, Copy, Clone)]
pub enum FlatShape { pub enum FlatShape {
@ -25,32 +25,34 @@ pub enum FlatShape {
Decimal, Decimal,
Whitespace, Whitespace,
Error, Error,
Size { number: Tag, unit: Tag }, Size { number: Span, unit: Span },
} }
impl FlatShape { impl FlatShape {
pub fn from(token: &TokenNode, source: &Text, shapes: &mut Vec<Tagged<FlatShape>>) -> () { pub fn from(token: &TokenNode, source: &Text, shapes: &mut Vec<Spanned<FlatShape>>) -> () {
match token { match token {
TokenNode::Token(token) => match token.item { TokenNode::Token(token) => match token.item {
RawToken::Number(RawNumber::Int(_)) => { RawToken::Number(RawNumber::Int(_)) => {
shapes.push(FlatShape::Int.tagged(token.tag)) shapes.push(FlatShape::Int.spanned(token.span))
} }
RawToken::Number(RawNumber::Decimal(_)) => { RawToken::Number(RawNumber::Decimal(_)) => {
shapes.push(FlatShape::Decimal.tagged(token.tag)) shapes.push(FlatShape::Decimal.spanned(token.span))
} }
RawToken::Operator(Operator::Dot) => shapes.push(FlatShape::Dot.tagged(token.tag)), RawToken::Operator(Operator::Dot) => {
RawToken::Operator(_) => shapes.push(FlatShape::Operator.tagged(token.tag)), shapes.push(FlatShape::Dot.spanned(token.span))
RawToken::String(_) => shapes.push(FlatShape::String.tagged(token.tag)), }
RawToken::Operator(_) => shapes.push(FlatShape::Operator.spanned(token.span)),
RawToken::String(_) => shapes.push(FlatShape::String.spanned(token.span)),
RawToken::Variable(v) if v.slice(source) == "it" => { RawToken::Variable(v) if v.slice(source) == "it" => {
shapes.push(FlatShape::ItVariable.tagged(token.tag)) shapes.push(FlatShape::ItVariable.spanned(token.span))
} }
RawToken::Variable(_) => shapes.push(FlatShape::Variable.tagged(token.tag)), RawToken::Variable(_) => shapes.push(FlatShape::Variable.spanned(token.span)),
RawToken::ExternalCommand(_) => { RawToken::ExternalCommand(_) => {
shapes.push(FlatShape::ExternalCommand.tagged(token.tag)) shapes.push(FlatShape::ExternalCommand.spanned(token.span))
} }
RawToken::ExternalWord => shapes.push(FlatShape::ExternalWord.tagged(token.tag)), RawToken::ExternalWord => shapes.push(FlatShape::ExternalWord.spanned(token.span)),
RawToken::GlobPattern => shapes.push(FlatShape::GlobPattern.tagged(token.tag)), RawToken::GlobPattern => shapes.push(FlatShape::GlobPattern.spanned(token.span)),
RawToken::Bare => shapes.push(FlatShape::Word.tagged(token.tag)), RawToken::Bare => shapes.push(FlatShape::Word.spanned(token.span)),
}, },
TokenNode::Call(_) => unimplemented!(), TokenNode::Call(_) => unimplemented!(),
TokenNode::Nodes(nodes) => { TokenNode::Nodes(nodes) => {
@ -59,37 +61,37 @@ impl FlatShape {
} }
} }
TokenNode::Delimited(v) => { TokenNode::Delimited(v) => {
shapes.push(FlatShape::OpenDelimiter(v.item.delimiter).tagged(v.item.tags.0)); shapes.push(FlatShape::OpenDelimiter(v.item.delimiter).spanned(v.item.spans.0));
for token in &v.item.children { for token in &v.item.children {
FlatShape::from(token, source, shapes); FlatShape::from(token, source, shapes);
} }
shapes.push(FlatShape::CloseDelimiter(v.item.delimiter).tagged(v.item.tags.1)); shapes.push(FlatShape::CloseDelimiter(v.item.delimiter).spanned(v.item.spans.1));
} }
TokenNode::Pipeline(pipeline) => { TokenNode::Pipeline(pipeline) => {
for part in &pipeline.parts { for part in &pipeline.parts {
if let Some(_) = part.pipe { if let Some(_) = part.pipe {
shapes.push(FlatShape::Pipe.tagged(part.tag)); shapes.push(FlatShape::Pipe.spanned(part.span));
} }
} }
} }
TokenNode::Flag(Tagged { TokenNode::Flag(Spanned {
item: item:
Flag { Flag {
kind: FlagKind::Longhand, kind: FlagKind::Longhand,
.. ..
}, },
tag, span,
}) => shapes.push(FlatShape::Flag.tagged(tag)), }) => shapes.push(FlatShape::Flag.spanned(*span)),
TokenNode::Flag(Tagged { TokenNode::Flag(Spanned {
item: item:
Flag { Flag {
kind: FlagKind::Shorthand, kind: FlagKind::Shorthand,
.. ..
}, },
tag, span,
}) => shapes.push(FlatShape::ShorthandFlag.tagged(tag)), }) => shapes.push(FlatShape::ShorthandFlag.spanned(*span)),
TokenNode::Whitespace(_) => shapes.push(FlatShape::Whitespace.tagged(token.tag())), TokenNode::Whitespace(_) => shapes.push(FlatShape::Whitespace.spanned(token.span())),
TokenNode::Error(v) => shapes.push(FlatShape::Error.tagged(v.tag)), TokenNode::Error(v) => shapes.push(FlatShape::Error.spanned(v.span)),
} }
} }
} }

View File

@ -2,12 +2,12 @@ pub(crate) mod debug;
use crate::errors::ShellError; use crate::errors::ShellError;
use crate::parser::TokenNode; use crate::parser::TokenNode;
use crate::{Tag, Tagged, TaggedItem}; use crate::{Span, Spanned, SpannedItem};
#[derive(Debug)] #[derive(Debug)]
pub struct TokensIterator<'content> { pub struct TokensIterator<'content> {
tokens: &'content [TokenNode], tokens: &'content [TokenNode],
tag: Tag, span: Span,
skip_ws: bool, skip_ws: bool,
index: usize, index: usize,
seen: indexmap::IndexSet<usize>, seen: indexmap::IndexSet<usize>,
@ -65,7 +65,7 @@ impl<'content, 'me> Peeked<'content, 'me> {
match self.node { match self.node {
None => Err(ShellError::unexpected_eof( None => Err(ShellError::unexpected_eof(
expected, expected,
self.iterator.eof_tag(), self.iterator.eof_span(),
)), )),
Some(node) => Ok(PeekedNode { Some(node) => Ok(PeekedNode {
node, node,
@ -77,7 +77,7 @@ impl<'content, 'me> Peeked<'content, 'me> {
} }
pub fn type_error(&self, expected: impl Into<String>) -> ShellError { pub fn type_error(&self, expected: impl Into<String>) -> ShellError {
peek_error(&self.node, self.iterator.eof_tag(), expected) peek_error(&self.node, self.iterator.eof_span(), expected)
} }
} }
@ -105,38 +105,38 @@ impl<'content, 'me> PeekedNode<'content, 'me> {
pub fn rollback(self) {} pub fn rollback(self) {}
pub fn type_error(&self, expected: impl Into<String>) -> ShellError { pub fn type_error(&self, expected: impl Into<String>) -> ShellError {
peek_error(&Some(self.node), self.iterator.eof_tag(), expected) peek_error(&Some(self.node), self.iterator.eof_span(), expected)
} }
} }
pub fn peek_error( pub fn peek_error(
node: &Option<&TokenNode>, node: &Option<&TokenNode>,
eof_tag: Tag, eof_span: Span,
expected: impl Into<String>, expected: impl Into<String>,
) -> ShellError { ) -> ShellError {
match node { match node {
None => ShellError::unexpected_eof(expected, eof_tag), None => ShellError::unexpected_eof(expected, eof_span),
Some(node) => ShellError::type_error(expected, node.tagged_type_name()), Some(node) => ShellError::type_error(expected, node.tagged_type_name()),
} }
} }
impl<'content> TokensIterator<'content> { impl<'content> TokensIterator<'content> {
pub fn new(items: &'content [TokenNode], tag: Tag, skip_ws: bool) -> TokensIterator<'content> { pub fn new(
items: &'content [TokenNode],
span: Span,
skip_ws: bool,
) -> TokensIterator<'content> {
TokensIterator { TokensIterator {
tokens: items, tokens: items,
tag, span,
skip_ws, skip_ws,
index: 0, index: 0,
seen: indexmap::IndexSet::new(), seen: indexmap::IndexSet::new(),
} }
} }
pub fn anchor(&self) -> uuid::Uuid { pub fn all(tokens: &'content [TokenNode], span: Span) -> TokensIterator<'content> {
self.tag.anchor TokensIterator::new(tokens, span, false)
}
pub fn all(tokens: &'content [TokenNode], tag: Tag) -> TokensIterator<'content> {
TokensIterator::new(tokens, tag, false)
} }
pub fn len(&self) -> usize { pub fn len(&self) -> usize {
@ -146,14 +146,14 @@ impl<'content> TokensIterator<'content> {
pub fn spanned<T>( pub fn spanned<T>(
&mut self, &mut self,
block: impl FnOnce(&mut TokensIterator<'content>) -> T, block: impl FnOnce(&mut TokensIterator<'content>) -> T,
) -> Tagged<T> { ) -> Spanned<T> {
let start = self.tag_at_cursor(); let start = self.span_at_cursor();
let result = block(self); let result = block(self);
let end = self.tag_at_cursor(); let end = self.span_at_cursor();
result.tagged(start.until(end)) result.spanned(start.until(end))
} }
/// Use a checkpoint when you need to peek more than one token ahead, but can't be sure /// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
@ -192,25 +192,25 @@ impl<'content> TokensIterator<'content> {
return Ok(value); return Ok(value);
} }
fn eof_tag(&self) -> Tag { fn eof_span(&self) -> Span {
Tag::from((self.tag.span.end(), self.tag.span.end(), self.tag.anchor)) Span::new(self.span.end(), self.span.end())
} }
pub fn typed_tag_at_cursor(&mut self) -> Tagged<&'static str> { pub fn typed_span_at_cursor(&mut self) -> Spanned<&'static str> {
let next = self.peek_any(); let next = self.peek_any();
match next.node { match next.node {
None => "end".tagged(self.eof_tag()), None => "end".spanned(self.eof_span()),
Some(node) => node.tagged_type_name(), Some(node) => node.spanned_type_name(),
} }
} }
pub fn tag_at_cursor(&mut self) -> Tag { pub fn span_at_cursor(&mut self) -> Span {
let next = self.peek_any(); let next = self.peek_any();
match next.node { match next.node {
None => self.eof_tag(), None => self.eof_span(),
Some(node) => node.tag(), Some(node) => node.span(),
} }
} }
@ -262,7 +262,7 @@ impl<'content> TokensIterator<'content> {
pub fn clone(&self) -> TokensIterator<'content> { pub fn clone(&self) -> TokensIterator<'content> {
TokensIterator { TokensIterator {
tokens: self.tokens, tokens: self.tokens,
tag: self.tag, span: self.span,
index: self.index, index: self.index,
seen: self.seen.clone(), seen: self.seen.clone(),
skip_ws: self.skip_ws, skip_ws: self.skip_ws,

View File

@ -1,8 +1,7 @@
use crate::Tag; use crate::Span;
use derive_new::new; use derive_new::new;
use language_reporting::{FileName, Location}; use language_reporting::{FileName, Location};
use log::trace; use log::trace;
use uuid::Uuid;
#[derive(new, Debug, Clone)] #[derive(new, Debug, Clone)]
pub struct Files { pub struct Files {
@ -10,20 +9,20 @@ pub struct Files {
} }
impl language_reporting::ReportingFiles for Files { impl language_reporting::ReportingFiles for Files {
type Span = Tag; type Span = Span;
type FileId = Uuid; type FileId = usize;
fn byte_span( fn byte_span(
&self, &self,
file: Self::FileId, _file: Self::FileId,
from_index: usize, from_index: usize,
to_index: usize, to_index: usize,
) -> Option<Self::Span> { ) -> Option<Self::Span> {
Some(Tag::new(file, (from_index, to_index).into())) Some(Span::new(from_index, to_index))
} }
fn file_id(&self, tag: Self::Span) -> Self::FileId { fn file_id(&self, _tag: Self::Span) -> Self::FileId {
tag.anchor 0
} }
fn file_name(&self, _file: Self::FileId) -> FileName { fn file_name(&self, _file: Self::FileId) -> FileName {
@ -68,14 +67,14 @@ impl language_reporting::ReportingFiles for Files {
} }
} }
fn line_span(&self, file: Self::FileId, lineno: usize) -> Option<Self::Span> { fn line_span(&self, _file: Self::FileId, lineno: usize) -> Option<Self::Span> {
let source = &self.snippet; let source = &self.snippet;
let mut seen_lines = 0; let mut seen_lines = 0;
let mut seen_bytes = 0; let mut seen_bytes = 0;
for (pos, _) in source.match_indices('\n') { for (pos, _) in source.match_indices('\n') {
if seen_lines == lineno { if seen_lines == lineno {
return Some(Tag::new(file, (seen_bytes, pos + 1).into())); return Some(Span::new(seen_bytes, pos + 1));
} else { } else {
seen_lines += 1; seen_lines += 1;
seen_bytes = pos + 1; seen_bytes = pos + 1;
@ -83,20 +82,20 @@ impl language_reporting::ReportingFiles for Files {
} }
if seen_lines == 0 { if seen_lines == 0 {
Some(Tag::new(file, (0, self.snippet.len() - 1).into())) Some(Span::new(0, self.snippet.len() - 1))
} else { } else {
None None
} }
} }
fn source(&self, tag: Self::Span) -> Option<String> { fn source(&self, span: Self::Span) -> Option<String> {
trace!("source(tag={:?}) snippet={:?}", tag, self.snippet); trace!("source(tag={:?}) snippet={:?}", span, self.snippet);
if tag.span.start() > tag.span.end() { if span.start() > span.end() {
return None; return None;
} else if tag.span.end() > self.snippet.len() { } else if span.end() > self.snippet.len() {
return None; return None;
} }
Some(tag.slice(&self.snippet).to_string()) Some(span.slice(&self.snippet).to_string())
} }
} }

View File

@ -1,5 +1,5 @@
use crate::parser::hir::syntax_shape::flat_shape::FlatShape; use crate::parser::hir::syntax_shape::flat_shape::FlatShape;
use crate::{Tag, Tagged, TaggedItem}; use crate::{Span, Spanned, SpannedItem};
use derive_new::new; use derive_new::new;
use getset::Getters; use getset::Getters;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
@ -14,14 +14,14 @@ pub enum FlagKind {
#[get = "pub(crate)"] #[get = "pub(crate)"]
pub struct Flag { pub struct Flag {
pub(crate) kind: FlagKind, pub(crate) kind: FlagKind,
pub(crate) name: Tag, pub(crate) name: Span,
} }
impl Tagged<Flag> { impl Spanned<Flag> {
pub fn color(&self) -> Tagged<FlatShape> { pub fn color(&self) -> Spanned<FlatShape> {
match self.item.kind { match self.item.kind {
FlagKind::Longhand => FlatShape::Flag.tagged(self.tag), FlagKind::Longhand => FlatShape::Flag.spanned(self.span),
FlagKind::Shorthand => FlatShape::ShorthandFlag.tagged(self.tag), FlagKind::Shorthand => FlatShape::ShorthandFlag.spanned(self.span),
} }
} }
} }

View File

@ -24,13 +24,11 @@ use nom_tracable::{tracable_parser, HasTracableInfo, TracableInfo};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::fmt::Debug; use std::fmt::Debug;
use std::str::FromStr; use std::str::FromStr;
use uuid::Uuid;
pub type NomSpan<'a> = LocatedSpanEx<&'a str, TracableContext>; pub type NomSpan<'a> = LocatedSpanEx<&'a str, TracableContext>;
#[derive(Debug, Clone, Copy, PartialEq, new)] #[derive(Debug, Clone, Copy, PartialEq, new)]
pub struct TracableContext { pub struct TracableContext {
pub(crate) origin: Uuid,
pub(crate) info: TracableInfo, pub(crate) info: TracableInfo,
} }
@ -40,10 +38,7 @@ impl HasTracableInfo for TracableContext {
} }
fn set_tracable_info(mut self, info: TracableInfo) -> Self { fn set_tracable_info(mut self, info: TracableInfo) -> Self {
TracableContext { TracableContext { info }
origin: self.origin,
info,
}
} }
} }
@ -55,8 +50,8 @@ impl std::ops::Deref for TracableContext {
} }
} }
pub fn nom_input(s: &str, anchor: Uuid) -> NomSpan<'_> { pub fn nom_input(s: &str) -> NomSpan<'_> {
LocatedSpanEx::new_extra(s, TracableContext::new(anchor, TracableInfo::new())) LocatedSpanEx::new_extra(s, TracableContext::new(TracableInfo::new()))
} }
macro_rules! operator { macro_rules! operator {
@ -69,7 +64,7 @@ macro_rules! operator {
Ok(( Ok((
input, input,
TokenTreeBuilder::tagged_op(tag.fragment, (start, end, input.extra)), TokenTreeBuilder::spanned_op(tag.fragment, Span::new(start, end)),
)) ))
} }
}; };
@ -175,22 +170,22 @@ pub fn number(input: NomSpan) -> IResult<NomSpan, TokenNode> {
Ok(( Ok((
input, input,
TokenTreeBuilder::tagged_number(number.item, number.tag), TokenTreeBuilder::spanned_number(number.item, number.span),
)) ))
} }
#[tracable_parser] #[tracable_parser]
pub fn raw_number(input: NomSpan) -> IResult<NomSpan, Tagged<RawNumber>> { pub fn raw_number(input: NomSpan) -> IResult<NomSpan, Spanned<RawNumber>> {
let anchoral = input; let anchoral = input;
let start = input.offset; let start = input.offset;
let (input, neg) = opt(tag("-"))(input)?; let (input, neg) = opt(tag("-"))(input)?;
let (input, head) = digit1(input)?; let (input, head) = digit1(input)?;
match input.fragment.chars().next() { match input.fragment.chars().next() {
None => return Ok((input, RawNumber::int((start, input.offset, input.extra)))), None => return Ok((input, RawNumber::int(Span::new(start, input.offset)))),
Some('.') => (), Some('.') => (),
other if is_boundary(other) => { other if is_boundary(other) => {
return Ok((input, RawNumber::int((start, input.offset, input.extra)))) return Ok((input, RawNumber::int(Span::new(start, input.offset))))
} }
_ => { _ => {
return Err(nom::Err::Error(nom::error::make_error( return Err(nom::Err::Error(nom::error::make_error(
@ -206,7 +201,7 @@ pub fn raw_number(input: NomSpan) -> IResult<NomSpan, Tagged<RawNumber>> {
Ok((input, dot)) => input, Ok((input, dot)) => input,
// it's just an integer // it's just an integer
Err(_) => return Ok((input, RawNumber::int((start, input.offset, input.extra)))), Err(_) => return Ok((input, RawNumber::int(Span::new(start, input.offset)))),
}; };
let (input, tail) = digit1(input)?; let (input, tail) = digit1(input)?;
@ -216,7 +211,7 @@ pub fn raw_number(input: NomSpan) -> IResult<NomSpan, Tagged<RawNumber>> {
let next = input.fragment.chars().next(); let next = input.fragment.chars().next();
if is_boundary(next) { if is_boundary(next) {
Ok((input, RawNumber::decimal((start, end, input.extra)))) Ok((input, RawNumber::decimal(Span::new(start, end))))
} else { } else {
Err(nom::Err::Error(nom::error::make_error( Err(nom::Err::Error(nom::error::make_error(
input, input,
@ -243,7 +238,7 @@ pub fn dq_string(input: NomSpan) -> IResult<NomSpan, TokenNode> {
let end = input.offset; let end = input.offset;
Ok(( Ok((
input, input,
TokenTreeBuilder::tagged_string((start1, end1, input.extra), (start, end, input.extra)), TokenTreeBuilder::spanned_string(Span::new(start1, end1), Span::new(start, end)),
)) ))
} }
@ -259,7 +254,7 @@ pub fn sq_string(input: NomSpan) -> IResult<NomSpan, TokenNode> {
Ok(( Ok((
input, input,
TokenTreeBuilder::tagged_string((start1, end1, input.extra), (start, end, input.extra)), TokenTreeBuilder::spanned_string(Span::new(start1, end1), Span::new(start, end)),
)) ))
} }
@ -277,7 +272,7 @@ pub fn external(input: NomSpan) -> IResult<NomSpan, TokenNode> {
Ok(( Ok((
input, input,
TokenTreeBuilder::tagged_external_command(bare, (start, end, input.extra)), TokenTreeBuilder::spanned_external_command(bare, Span::new(start, end)),
)) ))
} }
@ -302,7 +297,7 @@ pub fn pattern(input: NomSpan) -> IResult<NomSpan, TokenNode> {
Ok(( Ok((
input, input,
TokenTreeBuilder::tagged_pattern((start, end, input.extra)), TokenTreeBuilder::spanned_pattern(Span::new(start, end)),
)) ))
} }
@ -335,10 +330,7 @@ pub fn bare(input: NomSpan) -> IResult<NomSpan, TokenNode> {
let end = input.offset; let end = input.offset;
Ok(( Ok((input, TokenTreeBuilder::spanned_bare(Span::new(start, end))))
input,
TokenTreeBuilder::tagged_bare((start, end, input.extra)),
))
} }
#[tracable_parser] #[tracable_parser]
@ -349,7 +341,7 @@ pub fn external_word(input: NomSpan) -> IResult<NomSpan, TokenNode> {
Ok(( Ok((
input, input,
TokenTreeBuilder::tagged_external_word((start, end, input.extra)), TokenTreeBuilder::spanned_external_word(Span::new(start, end)),
)) ))
} }
@ -362,7 +354,7 @@ pub fn var(input: NomSpan) -> IResult<NomSpan, TokenNode> {
Ok(( Ok((
input, input,
TokenTreeBuilder::tagged_var(bare, (start, end, input.extra)), TokenTreeBuilder::spanned_var(bare, Span::new(start, end)),
)) ))
} }
@ -373,7 +365,7 @@ pub fn ident(input: NomSpan) -> IResult<NomSpan, Tag> {
let (input, _) = take_while(is_bare_char)(input)?; let (input, _) = take_while(is_bare_char)(input)?;
let end = input.offset; let end = input.offset;
Ok((input, Tag::from((start, end, input.extra.origin)))) Ok((input, Tag::from((start, end, None))))
} }
#[tracable_parser] #[tracable_parser]
@ -385,7 +377,7 @@ pub fn flag(input: NomSpan) -> IResult<NomSpan, TokenNode> {
Ok(( Ok((
input, input,
TokenTreeBuilder::tagged_flag(bare.tag(), (start, end, input.extra)), TokenTreeBuilder::spanned_flag(bare.span(), Span::new(start, end)),
)) ))
} }
@ -398,7 +390,7 @@ pub fn shorthand(input: NomSpan) -> IResult<NomSpan, TokenNode> {
Ok(( Ok((
input, input,
TokenTreeBuilder::tagged_shorthand(bare.tag(), (start, end, input.extra)), TokenTreeBuilder::spanned_shorthand(bare.span(), Span::new(start, end)),
)) ))
} }
@ -420,12 +412,12 @@ pub fn token_list(input: NomSpan) -> IResult<NomSpan, Tagged<Vec<TokenNode>>> {
Ok(( Ok((
input, input,
make_token_list(first, list, None).tagged((start, end, input.extra.origin)), make_token_list(first, list, None).tagged((start, end, None)),
)) ))
} }
#[tracable_parser] #[tracable_parser]
pub fn spaced_token_list(input: NomSpan) -> IResult<NomSpan, Tagged<Vec<TokenNode>>> { pub fn spaced_token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<TokenNode>>> {
let start = input.offset; let start = input.offset;
let (input, pre_ws) = opt(whitespace)(input)?; let (input, pre_ws) = opt(whitespace)(input)?;
let (input, items) = token_list(input)?; let (input, items) = token_list(input)?;
@ -438,7 +430,7 @@ pub fn spaced_token_list(input: NomSpan) -> IResult<NomSpan, Tagged<Vec<TokenNod
out.extend(items.item); out.extend(items.item);
out.extend(post_ws); out.extend(post_ws);
Ok((input, out.tagged((start, end, input.extra.origin)))) Ok((input, out.spanned(Span::new(start, end))))
} }
fn make_token_list( fn make_token_list(
@ -468,20 +460,17 @@ pub fn whitespace(input: NomSpan) -> IResult<NomSpan, TokenNode> {
let (input, ws1) = space1(input)?; let (input, ws1) = space1(input)?;
let right = input.offset; let right = input.offset;
Ok(( Ok((input, TokenTreeBuilder::spanned_ws(Span::new(left, right))))
input,
TokenTreeBuilder::tagged_ws((left, right, input.extra)),
))
} }
pub fn delimited( pub fn delimited(
input: NomSpan, input: NomSpan,
delimiter: Delimiter, delimiter: Delimiter,
) -> IResult<NomSpan, (Tag, Tag, Tagged<Vec<TokenNode>>)> { ) -> IResult<NomSpan, (Span, Span, Spanned<Vec<TokenNode>>)> {
let left = input.offset; let left = input.offset;
let (input, open_tag) = tag(delimiter.open())(input)?; let (input, open_span) = tag(delimiter.open())(input)?;
let (input, inner_items) = opt(spaced_token_list)(input)?; let (input, inner_items) = opt(spaced_token_list)(input)?;
let (input, close_tag) = tag(delimiter.close())(input)?; let (input, close_span) = tag(delimiter.close())(input)?;
let right = input.offset; let right = input.offset;
let mut items = vec![]; let mut items = vec![];
@ -493,9 +482,9 @@ pub fn delimited(
Ok(( Ok((
input, input,
( (
Tag::from(open_tag), Span::from(open_span),
Tag::from(close_tag), Span::from(close_span),
items.tagged((left, right, input.extra.origin)), items.spanned(Span::new(left, right)),
), ),
)) ))
} }
@ -506,7 +495,7 @@ pub fn delimited_paren(input: NomSpan) -> IResult<NomSpan, TokenNode> {
Ok(( Ok((
input, input,
TokenTreeBuilder::tagged_parens(tokens.item, (left, right), tokens.tag), TokenTreeBuilder::spanned_parens(tokens.item, (left, right), tokens.span),
)) ))
} }
@ -516,7 +505,7 @@ pub fn delimited_square(input: NomSpan) -> IResult<NomSpan, TokenNode> {
Ok(( Ok((
input, input,
TokenTreeBuilder::tagged_square(tokens.item, (left, right), tokens.tag), TokenTreeBuilder::spanned_square(tokens.item, (left, right), tokens.span),
)) ))
} }
@ -526,7 +515,7 @@ pub fn delimited_brace(input: NomSpan) -> IResult<NomSpan, TokenNode> {
Ok(( Ok((
input, input,
TokenTreeBuilder::tagged_square(tokens.item, (left, right), tokens.tag), TokenTreeBuilder::spanned_square(tokens.item, (left, right), tokens.span),
)) ))
} }
@ -637,18 +626,19 @@ pub fn pipeline(input: NomSpan) -> IResult<NomSpan, TokenNode> {
let end = input.offset; let end = input.offset;
let head_tag = head.tag(); let head_span = head.span;
let mut all_items: Vec<Tagged<PipelineElement>> = let mut all_items: Vec<Spanned<PipelineElement>> =
vec![PipelineElement::new(None, head).tagged(head_tag)]; vec![PipelineElement::new(None, head).spanned(head_span)];
all_items.extend(items.into_iter().map(|(pipe, items)| { all_items.extend(items.into_iter().map(|(pipe, items)| {
let items_tag = items.tag(); let items_span = items.span;
PipelineElement::new(Some(Tag::from(pipe)), items).tagged(Tag::from(pipe).until(items_tag)) PipelineElement::new(Some(Span::from(pipe)), items)
.spanned(Span::from(pipe).until(items_span))
})); }));
Ok(( Ok((
input, input,
TokenTreeBuilder::tagged_pipeline(all_items, (start, end, input.extra)), TokenTreeBuilder::spanned_pipeline(all_items, Span::new(start, end)),
)) ))
} }
@ -757,7 +747,7 @@ mod tests {
macro_rules! equal_tokens { macro_rules! equal_tokens {
($source:tt -> $tokens:expr) => { ($source:tt -> $tokens:expr) => {
let result = apply(pipeline, "pipeline", $source); let result = apply(pipeline, "pipeline", $source);
let (expected_tree, expected_source) = TokenTreeBuilder::build(uuid::Uuid::nil(), $tokens); let (expected_tree, expected_source) = TokenTreeBuilder::build($tokens);
if result != expected_tree { if result != expected_tree {
let debug_result = format!("{}", result.debug($source)); let debug_result = format!("{}", result.debug($source));
@ -778,7 +768,7 @@ mod tests {
(<$parser:tt> $source:tt -> $tokens:expr) => { (<$parser:tt> $source:tt -> $tokens:expr) => {
let result = apply($parser, stringify!($parser), $source); let result = apply($parser, stringify!($parser), $source);
let (expected_tree, expected_source) = TokenTreeBuilder::build(uuid::Uuid::nil(), $tokens); let (expected_tree, expected_source) = TokenTreeBuilder::build($tokens);
if result != expected_tree { if result != expected_tree {
let debug_result = format!("{}", result.debug($source)); let debug_result = format!("{}", result.debug($source));
@ -1241,41 +1231,37 @@ mod tests {
desc: &str, desc: &str,
string: &str, string: &str,
) -> TokenNode { ) -> TokenNode {
f(nom_input(string, uuid::Uuid::nil())).unwrap().1 f(nom_input(string)).unwrap().1
} }
fn tag(left: usize, right: usize) -> Tag { fn span((left, right): (usize, usize)) -> Span {
Tag::from((left, right, uuid::Uuid::nil())) Span::new(left, right)
} }
fn delimited( fn delimited(
delimiter: Tagged<Delimiter>, delimiter: Spanned<Delimiter>,
children: Vec<TokenNode>, children: Vec<TokenNode>,
left: usize, left: usize,
right: usize, right: usize,
) -> TokenNode { ) -> TokenNode {
let start = Tag::for_char(left, delimiter.tag.anchor); let start = Span::for_char(left);
let end = Tag::for_char(right, delimiter.tag.anchor); let end = Span::for_char(right);
let node = DelimitedNode::new(delimiter.item, (start, end), children); let node = DelimitedNode::new(delimiter.item, (start, end), children);
let spanned = node.tagged((left, right, delimiter.tag.anchor)); let spanned = node.spanned(Span::new(left, right));
TokenNode::Delimited(spanned) TokenNode::Delimited(spanned)
} }
fn token(token: RawToken, left: usize, right: usize) -> TokenNode { fn token(token: RawToken, left: usize, right: usize) -> TokenNode {
TokenNode::Token(token.tagged((left, right, uuid::Uuid::nil()))) TokenNode::Token(token.spanned(Span::new(left, right)))
} }
fn build<T>(block: CurriedNode<T>) -> T { fn build<T>(block: CurriedNode<T>) -> T {
let mut builder = TokenTreeBuilder::new(uuid::Uuid::nil()); let mut builder = TokenTreeBuilder::new();
block(&mut builder) block(&mut builder)
} }
fn build_token(block: CurriedToken) -> TokenNode { fn build_token(block: CurriedToken) -> TokenNode {
TokenTreeBuilder::build(uuid::Uuid::nil(), block).0 TokenTreeBuilder::build(block).0
}
fn test_uuid() -> uuid::Uuid {
uuid::Uuid::nil()
} }
} }

View File

@ -1,13 +1,13 @@
use crate::parser::TokenNode; use crate::parser::TokenNode;
use crate::traits::ToDebug; use crate::traits::ToDebug;
use crate::{Tag, Tagged}; use crate::{Span, Spanned};
use derive_new::new; use derive_new::new;
use getset::Getters; use getset::Getters;
use std::fmt; use std::fmt;
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, new)] #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, new)]
pub struct Pipeline { pub struct Pipeline {
pub(crate) parts: Vec<Tagged<PipelineElement>>, pub(crate) parts: Vec<Spanned<PipelineElement>>,
// pub(crate) post_ws: Option<Tag>, // pub(crate) post_ws: Option<Tag>,
} }
@ -23,8 +23,8 @@ impl ToDebug for Pipeline {
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)] #[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
pub struct PipelineElement { pub struct PipelineElement {
pub pipe: Option<Tag>, pub pipe: Option<Span>,
pub tokens: Tagged<Vec<TokenNode>>, pub tokens: Spanned<Vec<TokenNode>>,
} }
impl ToDebug for PipelineElement { impl ToDebug for PipelineElement {

View File

@ -2,7 +2,7 @@ use crate::errors::ShellError;
use crate::parser::parse::{call_node::*, flag::*, operator::*, pipeline::*, tokens::*}; use crate::parser::parse::{call_node::*, flag::*, operator::*, pipeline::*, tokens::*};
use crate::prelude::*; use crate::prelude::*;
use crate::traits::ToDebug; use crate::traits::ToDebug;
use crate::{Tag, Tagged, Text}; use crate::{Tagged, Text};
use derive_new::new; use derive_new::new;
use enum_utils::FromStr; use enum_utils::FromStr;
use getset::Getters; use getset::Getters;
@ -12,14 +12,14 @@ use std::fmt;
pub enum TokenNode { pub enum TokenNode {
Token(Token), Token(Token),
Call(Tagged<CallNode>), Call(Spanned<CallNode>),
Nodes(Tagged<Vec<TokenNode>>), Nodes(Spanned<Vec<TokenNode>>),
Delimited(Tagged<DelimitedNode>), Delimited(Spanned<DelimitedNode>),
Pipeline(Tagged<Pipeline>), Pipeline(Spanned<Pipeline>),
Flag(Tagged<Flag>), Flag(Spanned<Flag>),
Whitespace(Tag), Whitespace(Span),
Error(Tagged<ShellError>), Error(Spanned<ShellError>),
} }
impl ToDebug for TokenNode { impl ToDebug for TokenNode {
@ -78,28 +78,28 @@ impl fmt::Debug for DebugTokenNode<'_> {
} }
TokenNode::Pipeline(pipeline) => write!(f, "{}", pipeline.debug(self.source)), TokenNode::Pipeline(pipeline) => write!(f, "{}", pipeline.debug(self.source)),
TokenNode::Error(_) => write!(f, "<error>"), TokenNode::Error(_) => write!(f, "<error>"),
rest => write!(f, "{}", rest.tag().slice(self.source)), rest => write!(f, "{}", rest.span().slice(self.source)),
} }
} }
} }
impl From<&TokenNode> for Tag { impl From<&TokenNode> for Span {
fn from(token: &TokenNode) -> Tag { fn from(token: &TokenNode) -> Span {
token.tag() token.span()
} }
} }
impl TokenNode { impl TokenNode {
pub fn tag(&self) -> Tag { pub fn span(&self) -> Span {
match self { match self {
TokenNode::Token(t) => t.tag(), TokenNode::Token(t) => t.span,
TokenNode::Nodes(t) => t.tag(), TokenNode::Nodes(t) => t.span,
TokenNode::Call(s) => s.tag(), TokenNode::Call(s) => s.span,
TokenNode::Delimited(s) => s.tag(), TokenNode::Delimited(s) => s.span,
TokenNode::Pipeline(s) => s.tag(), TokenNode::Pipeline(s) => s.span,
TokenNode::Flag(s) => s.tag(), TokenNode::Flag(s) => s.span,
TokenNode::Whitespace(s) => *s, TokenNode::Whitespace(s) => *s,
TokenNode::Error(s) => return s.tag, TokenNode::Error(s) => s.span,
} }
} }
@ -116,8 +116,12 @@ impl TokenNode {
} }
} }
pub fn spanned_type_name(&self) -> Spanned<&'static str> {
self.type_name().spanned(self.span())
}
pub fn tagged_type_name(&self) -> Tagged<&'static str> { pub fn tagged_type_name(&self) -> Tagged<&'static str> {
self.type_name().tagged(self.tag()) self.type_name().tagged(self.span())
} }
pub fn old_debug<'a>(&'a self, source: &'a Text) -> DebugTokenNode<'a> { pub fn old_debug<'a>(&'a self, source: &'a Text) -> DebugTokenNode<'a> {
@ -125,26 +129,26 @@ impl TokenNode {
} }
pub fn as_external_arg(&self, source: &Text) -> String { pub fn as_external_arg(&self, source: &Text) -> String {
self.tag().slice(source).to_string() self.span().slice(source).to_string()
} }
pub fn source<'a>(&self, source: &'a Text) -> &'a str { pub fn source<'a>(&self, source: &'a Text) -> &'a str {
self.tag().slice(source) self.span().slice(source)
} }
pub fn get_variable(&self) -> Result<(Tag, Tag), ShellError> { pub fn get_variable(&self) -> Result<(Span, Span), ShellError> {
match self { match self {
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::Variable(inner_tag), item: RawToken::Variable(inner_span),
tag: outer_tag, span: outer_span,
}) => Ok((*outer_tag, *inner_tag)), }) => Ok((*outer_span, *inner_span)),
_ => Err(ShellError::type_error("variable", self.tagged_type_name())), _ => Err(ShellError::type_error("variable", self.tagged_type_name())),
} }
} }
pub fn is_bare(&self) -> bool { pub fn is_bare(&self) -> bool {
match self { match self {
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::Bare, item: RawToken::Bare,
.. ..
}) => true, }) => true,
@ -154,7 +158,7 @@ impl TokenNode {
pub fn is_pattern(&self) -> bool { pub fn is_pattern(&self) -> bool {
match self { match self {
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::GlobPattern, item: RawToken::GlobPattern,
.. ..
}) => true, }) => true,
@ -164,7 +168,7 @@ impl TokenNode {
pub fn is_dot(&self) -> bool { pub fn is_dot(&self) -> bool {
match self { match self {
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::Operator(Operator::Dot), item: RawToken::Operator(Operator::Dot),
.. ..
}) => true, }) => true,
@ -172,24 +176,24 @@ impl TokenNode {
} }
} }
pub fn as_block(&self) -> Option<(Tagged<&[TokenNode]>, (Tag, Tag))> { pub fn as_block(&self) -> Option<(Spanned<&[TokenNode]>, (Span, Span))> {
match self { match self {
TokenNode::Delimited(Tagged { TokenNode::Delimited(Spanned {
item: item:
DelimitedNode { DelimitedNode {
delimiter, delimiter,
children, children,
tags, spans,
}, },
tag, span,
}) if *delimiter == Delimiter::Brace => Some(((&children[..]).tagged(tag), *tags)), }) if *delimiter == Delimiter::Brace => Some(((&children[..]).spanned(*span), *spans)),
_ => None, _ => None,
} }
} }
pub fn is_external(&self) -> bool { pub fn is_external(&self) -> bool {
match self { match self {
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::ExternalCommand(..), item: RawToken::ExternalCommand(..),
.. ..
}) => true, }) => true,
@ -197,20 +201,20 @@ impl TokenNode {
} }
} }
pub fn expect_external(&self) -> Tag { pub fn expect_external(&self) -> Span {
match self { match self {
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::ExternalCommand(tag), item: RawToken::ExternalCommand(span),
.. ..
}) => *tag, }) => *span,
_ => panic!("Only call expect_external if you checked is_external first"), _ => panic!("Only call expect_external if you checked is_external first"),
} }
} }
pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option<Tagged<Flag>> { pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option<Spanned<Flag>> {
match self { match self {
TokenNode::Flag( TokenNode::Flag(
flag @ Tagged { flag @ Spanned {
item: Flag { .. }, .. item: Flag { .. }, ..
}, },
) if value == flag.name().slice(source) => Some(*flag), ) if value == flag.name().slice(source) => Some(*flag),
@ -220,7 +224,7 @@ impl TokenNode {
pub fn as_pipeline(&self) -> Result<Pipeline, ShellError> { pub fn as_pipeline(&self) -> Result<Pipeline, ShellError> {
match self { match self {
TokenNode::Pipeline(Tagged { item, .. }) => Ok(item.clone()), TokenNode::Pipeline(Spanned { item, .. }) => Ok(item.clone()),
_ => Err(ShellError::unimplemented("unimplemented")), _ => Err(ShellError::unimplemented("unimplemented")),
} }
} }
@ -232,12 +236,12 @@ impl TokenNode {
} }
} }
pub fn expect_string(&self) -> (Tag, Tag) { pub fn expect_string(&self) -> (Span, Span) {
match self { match self {
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::String(inner_tag), item: RawToken::String(inner_span),
tag: outer_tag, span: outer_span,
}) => (*outer_tag, *inner_tag), }) => (*outer_span, *inner_span),
other => panic!("Expected string, found {:?}", other), other => panic!("Expected string, found {:?}", other),
} }
} }
@ -247,27 +251,30 @@ impl TokenNode {
impl TokenNode { impl TokenNode {
pub fn expect_list(&self) -> Tagged<&[TokenNode]> { pub fn expect_list(&self) -> Tagged<&[TokenNode]> {
match self { match self {
TokenNode::Nodes(Tagged { item, tag }) => (&item[..]).tagged(tag), TokenNode::Nodes(Spanned { item, span }) => (&item[..]).tagged(Tag {
span: *span,
anchor: None,
}),
other => panic!("Expected list, found {:?}", other), other => panic!("Expected list, found {:?}", other),
} }
} }
pub fn expect_var(&self) -> (Tag, Tag) { pub fn expect_var(&self) -> (Span, Span) {
match self { match self {
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::Variable(inner_tag), item: RawToken::Variable(inner_span),
tag: outer_tag, span: outer_span,
}) => (*outer_tag, *inner_tag), }) => (*outer_span, *inner_span),
other => panic!("Expected var, found {:?}", other), other => panic!("Expected var, found {:?}", other),
} }
} }
pub fn expect_bare(&self) -> Tag { pub fn expect_bare(&self) -> Span {
match self { match self {
TokenNode::Token(Tagged { TokenNode::Token(Spanned {
item: RawToken::Bare, item: RawToken::Bare,
tag, span,
}) => *tag, }) => *span,
other => panic!("Expected var, found {:?}", other), other => panic!("Expected var, found {:?}", other),
} }
} }
@ -277,7 +284,7 @@ impl TokenNode {
#[get = "pub(crate)"] #[get = "pub(crate)"]
pub struct DelimitedNode { pub struct DelimitedNode {
pub(crate) delimiter: Delimiter, pub(crate) delimiter: Delimiter,
pub(crate) tags: (Tag, Tag), pub(crate) spans: (Span, Span),
pub(crate) children: Vec<TokenNode>, pub(crate) children: Vec<TokenNode>,
} }

View File

@ -7,7 +7,6 @@ use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
use crate::parser::parse::tokens::{RawNumber, RawToken}; use crate::parser::parse::tokens::{RawNumber, RawToken};
use crate::parser::CallNode; use crate::parser::CallNode;
use derive_new::new; use derive_new::new;
use uuid::Uuid;
#[derive(new)] #[derive(new)]
pub struct TokenTreeBuilder { pub struct TokenTreeBuilder {
@ -16,33 +15,34 @@ pub struct TokenTreeBuilder {
#[new(default)] #[new(default)]
output: String, output: String,
anchor: Uuid,
} }
pub type CurriedToken = Box<dyn FnOnce(&mut TokenTreeBuilder) -> TokenNode + 'static>; pub type CurriedToken = Box<dyn FnOnce(&mut TokenTreeBuilder) -> TokenNode + 'static>;
pub type CurriedCall = Box<dyn FnOnce(&mut TokenTreeBuilder) -> Tagged<CallNode> + 'static>; pub type CurriedCall = Box<dyn FnOnce(&mut TokenTreeBuilder) -> Tagged<CallNode> + 'static>;
impl TokenTreeBuilder { impl TokenTreeBuilder {
pub fn build(anchor: Uuid, block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) { pub fn build(block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) {
let mut builder = TokenTreeBuilder::new(anchor); let mut builder = TokenTreeBuilder::new();
let node = block(&mut builder); let node = block(&mut builder);
(node, builder.output) (node, builder.output)
} }
fn build_tagged<T>(&mut self, callback: impl FnOnce(&mut TokenTreeBuilder) -> T) -> Tagged<T> { fn build_spanned<T>(
&mut self,
callback: impl FnOnce(&mut TokenTreeBuilder) -> T,
) -> Spanned<T> {
let start = self.pos; let start = self.pos;
let ret = callback(self); let ret = callback(self);
let end = self.pos; let end = self.pos;
ret.tagged((start, end, self.anchor)) ret.spanned(Span::new(start, end))
} }
pub fn pipeline(input: Vec<Vec<CurriedToken>>) -> CurriedToken { pub fn pipeline(input: Vec<Vec<CurriedToken>>) -> CurriedToken {
Box::new(move |b| { Box::new(move |b| {
let start = b.pos; let start = b.pos;
let mut out: Vec<Tagged<PipelineElement>> = vec![]; let mut out: Vec<Spanned<PipelineElement>> = vec![];
let mut input = input.into_iter().peekable(); let mut input = input.into_iter().peekable();
let head = input let head = input
@ -50,34 +50,37 @@ impl TokenTreeBuilder {
.expect("A pipeline must contain at least one element"); .expect("A pipeline must contain at least one element");
let pipe = None; let pipe = None;
let head = b.build_tagged(|b| head.into_iter().map(|node| node(b)).collect()); let head = b.build_spanned(|b| head.into_iter().map(|node| node(b)).collect());
let head_tag: Tag = head.tag; let head_span: Span = head.span;
out.push(PipelineElement::new(pipe, head).tagged(head_tag)); out.push(PipelineElement::new(pipe, head).spanned(head_span));
loop { loop {
match input.next() { match input.next() {
None => break, None => break,
Some(node) => { Some(node) => {
let start = b.pos; let start = b.pos;
let pipe = Some(b.consume_tag("|")); let pipe = Some(b.consume_span("|"));
let node = let node =
b.build_tagged(|b| node.into_iter().map(|node| node(b)).collect()); b.build_spanned(|b| node.into_iter().map(|node| node(b)).collect());
let end = b.pos; let end = b.pos;
out.push(PipelineElement::new(pipe, node).tagged((start, end, b.anchor))); out.push(PipelineElement::new(pipe, node).spanned(Span::new(start, end)));
} }
} }
} }
let end = b.pos; let end = b.pos;
TokenTreeBuilder::tagged_pipeline(out, (start, end, b.anchor)) TokenTreeBuilder::spanned_pipeline(out, Span::new(start, end))
}) })
} }
pub fn tagged_pipeline(input: Vec<Tagged<PipelineElement>>, tag: impl Into<Tag>) -> TokenNode { pub fn spanned_pipeline(
TokenNode::Pipeline(Pipeline::new(input).tagged(tag.into())) input: Vec<Spanned<PipelineElement>>,
span: impl Into<Span>,
) -> TokenNode {
TokenNode::Pipeline(Pipeline::new(input).spanned(span))
} }
pub fn token_list(input: Vec<CurriedToken>) -> CurriedToken { pub fn token_list(input: Vec<CurriedToken>) -> CurriedToken {
@ -86,12 +89,12 @@ impl TokenTreeBuilder {
let tokens = input.into_iter().map(|i| i(b)).collect(); let tokens = input.into_iter().map(|i| i(b)).collect();
let end = b.pos; let end = b.pos;
TokenTreeBuilder::tagged_token_list(tokens, (start, end, b.anchor)) TokenTreeBuilder::tagged_token_list(tokens, (start, end, None))
}) })
} }
pub fn tagged_token_list(input: Vec<TokenNode>, tag: impl Into<Tag>) -> TokenNode { pub fn tagged_token_list(input: Vec<TokenNode>, tag: impl Into<Tag>) -> TokenNode {
TokenNode::Nodes(input.tagged(tag)) TokenNode::Nodes(input.spanned(tag.into().span))
} }
pub fn op(input: impl Into<Operator>) -> CurriedToken { pub fn op(input: impl Into<Operator>) -> CurriedToken {
@ -102,12 +105,12 @@ impl TokenTreeBuilder {
b.pos = end; b.pos = end;
TokenTreeBuilder::tagged_op(input, (start, end, b.anchor)) TokenTreeBuilder::spanned_op(input, Span::new(start, end))
}) })
} }
pub fn tagged_op(input: impl Into<Operator>, tag: impl Into<Tag>) -> TokenNode { pub fn spanned_op(input: impl Into<Operator>, span: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::Operator(input.into()).tagged(tag.into())) TokenNode::Token(RawToken::Operator(input.into()).spanned(span.into()))
} }
pub fn string(input: impl Into<String>) -> CurriedToken { pub fn string(input: impl Into<String>) -> CurriedToken {
@ -119,15 +122,15 @@ impl TokenTreeBuilder {
let (_, end) = b.consume("\""); let (_, end) = b.consume("\"");
b.pos = end; b.pos = end;
TokenTreeBuilder::tagged_string( TokenTreeBuilder::spanned_string(
(inner_start, inner_end, b.anchor), Span::new(inner_start, inner_end),
(start, end, b.anchor), Span::new(start, end),
) )
}) })
} }
pub fn tagged_string(input: impl Into<Tag>, tag: impl Into<Tag>) -> TokenNode { pub fn spanned_string(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::String(input.into()).tagged(tag.into())) TokenNode::Token(RawToken::String(input.into()).spanned(span.into()))
} }
pub fn bare(input: impl Into<String>) -> CurriedToken { pub fn bare(input: impl Into<String>) -> CurriedToken {
@ -137,12 +140,12 @@ impl TokenTreeBuilder {
let (start, end) = b.consume(&input); let (start, end) = b.consume(&input);
b.pos = end; b.pos = end;
TokenTreeBuilder::tagged_bare((start, end, b.anchor)) TokenTreeBuilder::spanned_bare(Span::new(start, end))
}) })
} }
pub fn tagged_bare(tag: impl Into<Tag>) -> TokenNode { pub fn spanned_bare(span: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::Bare.tagged(tag.into())) TokenNode::Token(RawToken::Bare.spanned(span))
} }
pub fn pattern(input: impl Into<String>) -> CurriedToken { pub fn pattern(input: impl Into<String>) -> CurriedToken {
@ -152,12 +155,12 @@ impl TokenTreeBuilder {
let (start, end) = b.consume(&input); let (start, end) = b.consume(&input);
b.pos = end; b.pos = end;
TokenTreeBuilder::tagged_pattern((start, end, b.anchor)) TokenTreeBuilder::spanned_pattern(Span::new(start, end))
}) })
} }
pub fn tagged_pattern(input: impl Into<Tag>) -> TokenNode { pub fn spanned_pattern(input: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::GlobPattern.tagged(input.into())) TokenNode::Token(RawToken::GlobPattern.spanned(input.into()))
} }
pub fn external_word(input: impl Into<String>) -> CurriedToken { pub fn external_word(input: impl Into<String>) -> CurriedToken {
@ -167,12 +170,12 @@ impl TokenTreeBuilder {
let (start, end) = b.consume(&input); let (start, end) = b.consume(&input);
b.pos = end; b.pos = end;
TokenTreeBuilder::tagged_external_word((start, end, b.anchor)) TokenTreeBuilder::spanned_external_word(Span::new(start, end))
}) })
} }
pub fn tagged_external_word(input: impl Into<Tag>) -> TokenNode { pub fn spanned_external_word(input: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::ExternalWord.tagged(input.into())) TokenNode::Token(RawToken::ExternalWord.spanned(input.into()))
} }
pub fn external_command(input: impl Into<String>) -> CurriedToken { pub fn external_command(input: impl Into<String>) -> CurriedToken {
@ -183,15 +186,15 @@ impl TokenTreeBuilder {
let (inner_start, end) = b.consume(&input); let (inner_start, end) = b.consume(&input);
b.pos = end; b.pos = end;
TokenTreeBuilder::tagged_external_command( TokenTreeBuilder::spanned_external_command(
(inner_start, end, b.anchor), Span::new(inner_start, end),
(outer_start, end, b.anchor), Span::new(outer_start, end),
) )
}) })
} }
pub fn tagged_external_command(inner: impl Into<Tag>, outer: impl Into<Tag>) -> TokenNode { pub fn spanned_external_command(inner: impl Into<Span>, outer: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::ExternalCommand(inner.into()).tagged(outer.into())) TokenNode::Token(RawToken::ExternalCommand(inner.into()).spanned(outer.into()))
} }
pub fn int(input: impl Into<BigInt>) -> CurriedToken { pub fn int(input: impl Into<BigInt>) -> CurriedToken {
@ -201,9 +204,9 @@ impl TokenTreeBuilder {
let (start, end) = b.consume(&int.to_string()); let (start, end) = b.consume(&int.to_string());
b.pos = end; b.pos = end;
TokenTreeBuilder::tagged_number( TokenTreeBuilder::spanned_number(
RawNumber::Int((start, end, b.anchor).into()), RawNumber::Int(Span::new(start, end)),
(start, end, b.anchor), Span::new(start, end),
) )
}) })
} }
@ -215,15 +218,15 @@ impl TokenTreeBuilder {
let (start, end) = b.consume(&decimal.to_string()); let (start, end) = b.consume(&decimal.to_string());
b.pos = end; b.pos = end;
TokenTreeBuilder::tagged_number( TokenTreeBuilder::spanned_number(
RawNumber::Decimal((start, end, b.anchor).into()), RawNumber::Decimal(Span::new(start, end)),
(start, end, b.anchor), Span::new(start, end),
) )
}) })
} }
pub fn tagged_number(input: impl Into<RawNumber>, tag: impl Into<Tag>) -> TokenNode { pub fn spanned_number(input: impl Into<RawNumber>, span: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::Number(input.into()).tagged(tag.into())) TokenNode::Token(RawToken::Number(input.into()).spanned(span.into()))
} }
pub fn var(input: impl Into<String>) -> CurriedToken { pub fn var(input: impl Into<String>) -> CurriedToken {
@ -233,12 +236,12 @@ impl TokenTreeBuilder {
let (start, _) = b.consume("$"); let (start, _) = b.consume("$");
let (inner_start, end) = b.consume(&input); let (inner_start, end) = b.consume(&input);
TokenTreeBuilder::tagged_var((inner_start, end, b.anchor), (start, end, b.anchor)) TokenTreeBuilder::spanned_var(Span::new(inner_start, end), Span::new(start, end))
}) })
} }
pub fn tagged_var(input: impl Into<Tag>, tag: impl Into<Tag>) -> TokenNode { pub fn spanned_var(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::Variable(input.into()).tagged(tag.into())) TokenNode::Token(RawToken::Variable(input.into()).spanned(span.into()))
} }
pub fn flag(input: impl Into<String>) -> CurriedToken { pub fn flag(input: impl Into<String>) -> CurriedToken {
@ -248,12 +251,12 @@ impl TokenTreeBuilder {
let (start, _) = b.consume("--"); let (start, _) = b.consume("--");
let (inner_start, end) = b.consume(&input); let (inner_start, end) = b.consume(&input);
TokenTreeBuilder::tagged_flag((inner_start, end, b.anchor), (start, end, b.anchor)) TokenTreeBuilder::spanned_flag(Span::new(inner_start, end), Span::new(start, end))
}) })
} }
pub fn tagged_flag(input: impl Into<Tag>, tag: impl Into<Tag>) -> TokenNode { pub fn spanned_flag(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
TokenNode::Flag(Flag::new(FlagKind::Longhand, input.into()).tagged(tag.into())) TokenNode::Flag(Flag::new(FlagKind::Longhand, input.into()).spanned(span.into()))
} }
pub fn shorthand(input: impl Into<String>) -> CurriedToken { pub fn shorthand(input: impl Into<String>) -> CurriedToken {
@ -263,12 +266,12 @@ impl TokenTreeBuilder {
let (start, _) = b.consume("-"); let (start, _) = b.consume("-");
let (inner_start, end) = b.consume(&input); let (inner_start, end) = b.consume(&input);
TokenTreeBuilder::tagged_shorthand((inner_start, end, b.anchor), (start, end, b.anchor)) TokenTreeBuilder::spanned_shorthand((inner_start, end), (start, end))
}) })
} }
pub fn tagged_shorthand(input: impl Into<Tag>, tag: impl Into<Tag>) -> TokenNode { pub fn spanned_shorthand(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into()).tagged(tag.into())) TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into()).spanned(span.into()))
} }
pub fn call(head: CurriedToken, input: Vec<CurriedToken>) -> CurriedCall { pub fn call(head: CurriedToken, input: Vec<CurriedToken>) -> CurriedCall {
@ -284,7 +287,7 @@ impl TokenTreeBuilder {
let end = b.pos; let end = b.pos;
TokenTreeBuilder::tagged_call(nodes, (start, end, b.anchor)) TokenTreeBuilder::tagged_call(nodes, (start, end, None))
}) })
} }
@ -306,7 +309,7 @@ impl TokenTreeBuilder {
input: Vec<CurriedToken>, input: Vec<CurriedToken>,
_open: &str, _open: &str,
_close: &str, _close: &str,
) -> (Tag, Tag, Tag, Vec<TokenNode>) { ) -> (Span, Span, Span, Vec<TokenNode>) {
let (start_open_paren, end_open_paren) = self.consume("("); let (start_open_paren, end_open_paren) = self.consume("(");
let mut output = vec![]; let mut output = vec![];
for item in input { for item in input {
@ -315,9 +318,9 @@ impl TokenTreeBuilder {
let (start_close_paren, end_close_paren) = self.consume(")"); let (start_close_paren, end_close_paren) = self.consume(")");
let open = Tag::from((start_open_paren, end_open_paren, self.anchor)); let open = Span::new(start_open_paren, end_open_paren);
let close = Tag::from((start_close_paren, end_close_paren, self.anchor)); let close = Span::new(start_close_paren, end_close_paren);
let whole = Tag::from((start_open_paren, end_close_paren, self.anchor)); let whole = Span::new(start_open_paren, end_close_paren);
(open, close, whole, output) (open, close, whole, output)
} }
@ -326,17 +329,17 @@ impl TokenTreeBuilder {
Box::new(move |b| { Box::new(move |b| {
let (open, close, whole, output) = b.consume_delimiter(input, "(", ")"); let (open, close, whole, output) = b.consume_delimiter(input, "(", ")");
TokenTreeBuilder::tagged_parens(output, (open, close), whole) TokenTreeBuilder::spanned_parens(output, (open, close), whole)
}) })
} }
pub fn tagged_parens( pub fn spanned_parens(
input: impl Into<Vec<TokenNode>>, input: impl Into<Vec<TokenNode>>,
tags: (Tag, Tag), spans: (Span, Span),
tag: impl Into<Tag>, span: impl Into<Span>,
) -> TokenNode { ) -> TokenNode {
TokenNode::Delimited( TokenNode::Delimited(
DelimitedNode::new(Delimiter::Paren, tags, input.into()).tagged(tag.into()), DelimitedNode::new(Delimiter::Paren, spans, input.into()).spanned(span.into()),
) )
} }
@ -344,17 +347,17 @@ impl TokenTreeBuilder {
Box::new(move |b| { Box::new(move |b| {
let (open, close, whole, tokens) = b.consume_delimiter(input, "[", "]"); let (open, close, whole, tokens) = b.consume_delimiter(input, "[", "]");
TokenTreeBuilder::tagged_square(tokens, (open, close), whole) TokenTreeBuilder::spanned_square(tokens, (open, close), whole)
}) })
} }
pub fn tagged_square( pub fn spanned_square(
input: impl Into<Vec<TokenNode>>, input: impl Into<Vec<TokenNode>>,
tags: (Tag, Tag), spans: (Span, Span),
tag: impl Into<Tag>, span: impl Into<Span>,
) -> TokenNode { ) -> TokenNode {
TokenNode::Delimited( TokenNode::Delimited(
DelimitedNode::new(Delimiter::Square, tags, input.into()).tagged(tag.into()), DelimitedNode::new(Delimiter::Square, spans, input.into()).spanned(span.into()),
) )
} }
@ -362,24 +365,24 @@ impl TokenTreeBuilder {
Box::new(move |b| { Box::new(move |b| {
let (open, close, whole, tokens) = b.consume_delimiter(input, "{", "}"); let (open, close, whole, tokens) = b.consume_delimiter(input, "{", "}");
TokenTreeBuilder::tagged_brace(tokens, (open, close), whole) TokenTreeBuilder::spanned_brace(tokens, (open, close), whole)
}) })
} }
pub fn tagged_brace( pub fn spanned_brace(
input: impl Into<Vec<TokenNode>>, input: impl Into<Vec<TokenNode>>,
tags: (Tag, Tag), spans: (Span, Span),
tag: impl Into<Tag>, span: impl Into<Span>,
) -> TokenNode { ) -> TokenNode {
TokenNode::Delimited( TokenNode::Delimited(
DelimitedNode::new(Delimiter::Brace, tags, input.into()).tagged(tag.into()), DelimitedNode::new(Delimiter::Brace, spans, input.into()).spanned(span.into()),
) )
} }
pub fn sp() -> CurriedToken { pub fn sp() -> CurriedToken {
Box::new(|b| { Box::new(|b| {
let (start, end) = b.consume(" "); let (start, end) = b.consume(" ");
TokenNode::Whitespace(Tag::from((start, end, b.anchor))) TokenNode::Whitespace(Span::new(start, end))
}) })
} }
@ -388,12 +391,12 @@ impl TokenTreeBuilder {
Box::new(move |b| { Box::new(move |b| {
let (start, end) = b.consume(&input); let (start, end) = b.consume(&input);
TokenTreeBuilder::tagged_ws((start, end, b.anchor)) TokenTreeBuilder::spanned_ws(Span::new(start, end))
}) })
} }
pub fn tagged_ws(tag: impl Into<Tag>) -> TokenNode { pub fn spanned_ws(span: impl Into<Span>) -> TokenNode {
TokenNode::Whitespace(tag.into()) TokenNode::Whitespace(span.into())
} }
fn consume(&mut self, input: &str) -> (usize, usize) { fn consume(&mut self, input: &str) -> (usize, usize) {
@ -403,10 +406,10 @@ impl TokenTreeBuilder {
(start, self.pos) (start, self.pos)
} }
fn consume_tag(&mut self, input: &str) -> Tag { fn consume_span(&mut self, input: &str) -> Span {
let start = self.pos; let start = self.pos;
self.pos += input.len(); self.pos += input.len();
self.output.push_str(input); self.output.push_str(input);
(start, self.pos, self.anchor).into() Span::new(start, self.pos)
} }
} }

View File

@ -1,6 +1,6 @@
use crate::parser::Operator; use crate::parser::Operator;
use crate::prelude::*; use crate::prelude::*;
use crate::{Tagged, Text}; use crate::Text;
use std::fmt; use std::fmt;
use std::str::FromStr; use std::str::FromStr;
@ -8,9 +8,9 @@ use std::str::FromStr;
pub enum RawToken { pub enum RawToken {
Number(RawNumber), Number(RawNumber),
Operator(Operator), Operator(Operator),
String(Tag), String(Span),
Variable(Tag), Variable(Span),
ExternalCommand(Tag), ExternalCommand(Span),
ExternalWord, ExternalWord,
GlobPattern, GlobPattern,
Bare, Bare,
@ -33,21 +33,21 @@ impl RawToken {
#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)] #[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum RawNumber { pub enum RawNumber {
Int(Tag), Int(Span),
Decimal(Tag), Decimal(Span),
} }
impl RawNumber { impl RawNumber {
pub fn int(tag: impl Into<Tag>) -> Tagged<RawNumber> { pub fn int(span: impl Into<Span>) -> Spanned<RawNumber> {
let tag = tag.into(); let span = span.into();
RawNumber::Int(tag).tagged(tag) RawNumber::Int(span).spanned(span)
} }
pub fn decimal(tag: impl Into<Tag>) -> Tagged<RawNumber> { pub fn decimal(span: impl Into<Span>) -> Spanned<RawNumber> {
let tag = tag.into(); let span = span.into();
RawNumber::Decimal(tag).tagged(tag) RawNumber::Decimal(span).spanned(span)
} }
pub(crate) fn to_number(self, source: &Text) -> Number { pub(crate) fn to_number(self, source: &Text) -> Number {
@ -60,7 +60,7 @@ impl RawNumber {
} }
} }
pub type Token = Tagged<RawToken>; pub type Token = Spanned<RawToken>;
impl Token { impl Token {
pub fn debug<'a>(&self, source: &'a Text) -> DebugToken<'a> { pub fn debug<'a>(&self, source: &'a Text) -> DebugToken<'a> {
@ -70,72 +70,72 @@ impl Token {
} }
} }
pub fn extract_number(&self) -> Option<Tagged<RawNumber>> { pub fn extract_number(&self) -> Option<Spanned<RawNumber>> {
match self.item { match self.item {
RawToken::Number(number) => Some((number).tagged(self.tag)), RawToken::Number(number) => Some((number).spanned(self.span)),
_ => None, _ => None,
} }
} }
pub fn extract_int(&self) -> Option<(Tag, Tag)> { pub fn extract_int(&self) -> Option<(Span, Span)> {
match self.item { match self.item {
RawToken::Number(RawNumber::Int(int)) => Some((int, self.tag)), RawToken::Number(RawNumber::Int(int)) => Some((int, self.span)),
_ => None, _ => None,
} }
} }
pub fn extract_decimal(&self) -> Option<(Tag, Tag)> { pub fn extract_decimal(&self) -> Option<(Span, Span)> {
match self.item { match self.item {
RawToken::Number(RawNumber::Decimal(decimal)) => Some((decimal, self.tag)), RawToken::Number(RawNumber::Decimal(decimal)) => Some((decimal, self.span)),
_ => None, _ => None,
} }
} }
pub fn extract_operator(&self) -> Option<Tagged<Operator>> { pub fn extract_operator(&self) -> Option<Spanned<Operator>> {
match self.item { match self.item {
RawToken::Operator(operator) => Some(operator.tagged(self.tag)), RawToken::Operator(operator) => Some(operator.spanned(self.span)),
_ => None, _ => None,
} }
} }
pub fn extract_string(&self) -> Option<(Tag, Tag)> { pub fn extract_string(&self) -> Option<(Span, Span)> {
match self.item { match self.item {
RawToken::String(tag) => Some((tag, self.tag)), RawToken::String(span) => Some((span, self.span)),
_ => None, _ => None,
} }
} }
pub fn extract_variable(&self) -> Option<(Tag, Tag)> { pub fn extract_variable(&self) -> Option<(Span, Span)> {
match self.item { match self.item {
RawToken::Variable(tag) => Some((tag, self.tag)), RawToken::Variable(span) => Some((span, self.span)),
_ => None, _ => None,
} }
} }
pub fn extract_external_command(&self) -> Option<(Tag, Tag)> { pub fn extract_external_command(&self) -> Option<(Span, Span)> {
match self.item { match self.item {
RawToken::ExternalCommand(tag) => Some((tag, self.tag)), RawToken::ExternalCommand(span) => Some((span, self.span)),
_ => None, _ => None,
} }
} }
pub fn extract_external_word(&self) -> Option<Tag> { pub fn extract_external_word(&self) -> Option<Span> {
match self.item { match self.item {
RawToken::ExternalWord => Some(self.tag), RawToken::ExternalWord => Some(self.span),
_ => None, _ => None,
} }
} }
pub fn extract_glob_pattern(&self) -> Option<Tag> { pub fn extract_glob_pattern(&self) -> Option<Span> {
match self.item { match self.item {
RawToken::GlobPattern => Some(self.tag), RawToken::GlobPattern => Some(self.span),
_ => None, _ => None,
} }
} }
pub fn extract_bare(&self) -> Option<Tag> { pub fn extract_bare(&self) -> Option<Span> {
match self.item { match self.item {
RawToken::Bare => Some(self.tag), RawToken::Bare => Some(self.span),
_ => None, _ => None,
} }
} }
@ -148,6 +148,6 @@ pub struct DebugToken<'a> {
impl fmt::Debug for DebugToken<'_> { impl fmt::Debug for DebugToken<'_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.node.tag().slice(self.source)) write!(f, "{}", self.node.span.slice(self.source))
} }
} }

View File

@ -10,14 +10,14 @@ use crate::parser::{
Flag, Flag,
}; };
use crate::traits::ToDebug; use crate::traits::ToDebug;
use crate::{Tag, Tagged, Text}; use crate::{Span, Spanned, Tag, Text};
use log::trace; use log::trace;
pub fn parse_command_tail( pub fn parse_command_tail(
config: &Signature, config: &Signature,
context: &ExpandContext, context: &ExpandContext,
tail: &mut TokensIterator, tail: &mut TokensIterator,
command_tag: Tag, command_span: Span,
) -> Result<Option<(Option<Vec<hir::Expression>>, Option<NamedArguments>)>, ShellError> { ) -> Result<Option<(Option<Vec<hir::Expression>>, Option<NamedArguments>)>, ShellError> {
let mut named = NamedArguments::new(); let mut named = NamedArguments::new();
trace_remaining("nodes", tail.clone(), context.source()); trace_remaining("nodes", tail.clone(), context.source());
@ -32,7 +32,7 @@ pub fn parse_command_tail(
named.insert_switch(name, flag); named.insert_switch(name, flag);
} }
NamedType::Mandatory(syntax_type) => { NamedType::Mandatory(syntax_type) => {
match extract_mandatory(config, name, tail, context.source(), command_tag) { match extract_mandatory(config, name, tail, context.source(), command_span) {
Err(err) => return Err(err), // produce a correct diagnostic Err(err) => return Err(err), // produce a correct diagnostic
Ok((pos, flag)) => { Ok((pos, flag)) => {
tail.move_to(pos); tail.move_to(pos);
@ -41,7 +41,7 @@ pub fn parse_command_tail(
return Err(ShellError::argument_error( return Err(ShellError::argument_error(
config.name.clone(), config.name.clone(),
ArgumentError::MissingValueForName(name.to_string()), ArgumentError::MissingValueForName(name.to_string()),
flag.tag(), flag.span,
)); ));
} }
@ -62,7 +62,7 @@ pub fn parse_command_tail(
return Err(ShellError::argument_error( return Err(ShellError::argument_error(
config.name.clone(), config.name.clone(),
ArgumentError::MissingValueForName(name.to_string()), ArgumentError::MissingValueForName(name.to_string()),
flag.tag(), flag.span,
)); ));
} }
@ -98,7 +98,10 @@ pub fn parse_command_tail(
return Err(ShellError::argument_error( return Err(ShellError::argument_error(
config.name.clone(), config.name.clone(),
ArgumentError::MissingMandatoryPositional(arg.name().to_string()), ArgumentError::MissingMandatoryPositional(arg.name().to_string()),
command_tag, Tag {
span: command_span,
anchor: None,
},
)); ));
} }
} }
@ -158,7 +161,7 @@ pub fn parse_command_tail(
#[derive(Debug)] #[derive(Debug)]
struct ColoringArgs { struct ColoringArgs {
vec: Vec<Option<Vec<Tagged<FlatShape>>>>, vec: Vec<Option<Vec<Spanned<FlatShape>>>>,
} }
impl ColoringArgs { impl ColoringArgs {
@ -167,11 +170,11 @@ impl ColoringArgs {
ColoringArgs { vec } ColoringArgs { vec }
} }
fn insert(&mut self, pos: usize, shapes: Vec<Tagged<FlatShape>>) { fn insert(&mut self, pos: usize, shapes: Vec<Spanned<FlatShape>>) {
self.vec[pos] = Some(shapes); self.vec[pos] = Some(shapes);
} }
fn spread_shapes(self, shapes: &mut Vec<Tagged<FlatShape>>) { fn spread_shapes(self, shapes: &mut Vec<Spanned<FlatShape>>) {
for item in self.vec { for item in self.vec {
match item { match item {
None => {} None => {}
@ -195,7 +198,7 @@ impl ColorSyntax for CommandTailShape {
signature: &Signature, signature: &Signature,
token_nodes: &'b mut TokensIterator<'a>, token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext, context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>, shapes: &mut Vec<Spanned<FlatShape>>,
) -> Self::Info { ) -> Self::Info {
let mut args = ColoringArgs::new(token_nodes.len()); let mut args = ColoringArgs::new(token_nodes.len());
trace_remaining("nodes", token_nodes.clone(), context.source()); trace_remaining("nodes", token_nodes.clone(), context.source());
@ -216,7 +219,7 @@ impl ColorSyntax for CommandTailShape {
name, name,
token_nodes, token_nodes,
context.source(), context.source(),
Tag::unknown(), Span::unknown(),
) { ) {
Err(_) => { Err(_) => {
// The mandatory flag didn't exist at all, so there's nothing to color // The mandatory flag didn't exist at all, so there's nothing to color
@ -378,7 +381,7 @@ impl ColorSyntax for CommandTailShape {
// Consume any remaining tokens with backoff coloring mode // Consume any remaining tokens with backoff coloring mode
color_syntax(&BackoffColoringMode, token_nodes, context, shapes); color_syntax(&BackoffColoringMode, token_nodes, context, shapes);
shapes.sort_by(|a, b| a.tag.span.start().cmp(&b.tag.span.start())); shapes.sort_by(|a, b| a.span.start().cmp(&b.span.start()));
} }
} }
@ -393,15 +396,15 @@ fn extract_mandatory(
name: &str, name: &str,
tokens: &mut hir::TokensIterator<'_>, tokens: &mut hir::TokensIterator<'_>,
source: &Text, source: &Text,
tag: Tag, span: Span,
) -> Result<(usize, Tagged<Flag>), ShellError> { ) -> Result<(usize, Spanned<Flag>), ShellError> {
let flag = tokens.extract(|t| t.as_flag(name, source)); let flag = tokens.extract(|t| t.as_flag(name, source));
match flag { match flag {
None => Err(ShellError::argument_error( None => Err(ShellError::argument_error(
config.name.clone(), config.name.clone(),
ArgumentError::MissingMandatoryFlag(name.to_string()), ArgumentError::MissingMandatoryFlag(name.to_string()),
tag, span,
)), )),
Some((pos, flag)) => { Some((pos, flag)) => {
@ -415,7 +418,7 @@ fn extract_optional(
name: &str, name: &str,
tokens: &mut hir::TokensIterator<'_>, tokens: &mut hir::TokensIterator<'_>,
source: &Text, source: &Text,
) -> Result<(Option<(usize, Tagged<Flag>)>), ShellError> { ) -> Result<(Option<(usize, Spanned<Flag>)>), ShellError> {
let flag = tokens.extract(|t| t.as_flag(name, source)); let flag = tokens.extract(|t| t.as_flag(name, source));
match flag { match flag {

View File

@ -298,7 +298,7 @@ pub(crate) fn evaluate_args(
for (name, value) in n.named.iter() { for (name, value) in n.named.iter() {
match value { match value {
hir::named::NamedValue::PresentSwitch(tag) => { hir::named::NamedValue::PresentSwitch(tag) => {
results.insert(name.clone(), Value::boolean(true).tagged(*tag)); results.insert(name.clone(), Value::boolean(true).tagged(tag));
} }
hir::named::NamedValue::Value(expr) => { hir::named::NamedValue::Value(expr) => {
results.insert( results.insert(

View File

@ -22,7 +22,7 @@ impl Add {
let value_tag = value.tag(); let value_tag = value.tag();
match (value.item, self.value.clone()) { match (value.item, self.value.clone()) {
(obj @ Value::Row(_), Some(v)) => match &self.field { (obj @ Value::Row(_), Some(v)) => match &self.field {
Some(f) => match obj.insert_data_at_column_path(value_tag, &f, v) { Some(f) => match obj.insert_data_at_column_path(value_tag.clone(), &f, v) {
Some(v) => return Ok(v), Some(v) => return Ok(v),
None => { None => {
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
@ -32,7 +32,7 @@ impl Add {
f.iter().map(|i| &i.item).join(".") f.iter().map(|i| &i.item).join(".")
), ),
"column name", "column name",
value_tag, &value_tag,
)) ))
} }
}, },

View File

@ -24,8 +24,7 @@ impl Plugin for BinaryView {
let value_anchor = v.anchor(); let value_anchor = v.anchor();
match v.item { match v.item {
Value::Primitive(Primitive::Binary(b)) => { Value::Primitive(Primitive::Binary(b)) => {
let source = call_info.source_map.get(&value_anchor); let _ = view_binary(&b, value_anchor.as_ref(), call_info.args.has("lores"));
let _ = view_binary(&b, source, call_info.args.has("lores"));
} }
_ => {} _ => {}
} }

View File

@ -27,7 +27,7 @@ impl Edit {
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
"edit could not find place to insert column", "edit could not find place to insert column",
"column name", "column name",
f.tag, &f.tag,
)) ))
} }
}, },

View File

@ -28,7 +28,7 @@ impl Embed {
None => Err(ShellError::labeled_error( None => Err(ShellError::labeled_error(
"embed needs a field when embedding a value", "embed needs a field when embedding a value",
"original value", "original value",
value.tag, &tag,
)), )),
}, },
} }

View File

@ -82,9 +82,7 @@ impl Inc {
Value::Primitive(Primitive::Bytes(b)) => { Value::Primitive(Primitive::Bytes(b)) => {
Ok(Value::bytes(b + 1 as u64).tagged(value.tag())) Ok(Value::bytes(b + 1 as u64).tagged(value.tag()))
} }
Value::Primitive(Primitive::String(ref s)) => { Value::Primitive(Primitive::String(ref s)) => Ok(self.apply(&s)?.tagged(value.tag())),
Ok(Tagged::from_item(self.apply(&s)?, value.tag()))
}
Value::Row(_) => match self.field { Value::Row(_) => match self.field {
Some(ref f) => { Some(ref f) => {
let replacement = match value.item.get_data_by_column_path(value.tag(), f) { let replacement = match value.item.get_data_by_column_path(value.tag(), f) {
@ -93,7 +91,7 @@ impl Inc {
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
"inc could not find field to replace", "inc could not find field to replace",
"column name", "column name",
f.tag, &f.tag,
)) ))
} }
}; };
@ -107,7 +105,7 @@ impl Inc {
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
"inc could not find field to replace", "inc could not find field to replace",
"column name", "column name",
f.tag, &f.tag,
)) ))
} }
} }
@ -191,20 +189,18 @@ mod tests {
use super::{Inc, SemVerAction}; use super::{Inc, SemVerAction};
use indexmap::IndexMap; use indexmap::IndexMap;
use nu::{ use nu::{
CallInfo, EvaluatedArgs, Plugin, ReturnSuccess, SourceMap, Tag, Tagged, TaggedDictBuilder, CallInfo, EvaluatedArgs, Plugin, ReturnSuccess, Tag, Tagged, TaggedDictBuilder, TaggedItem,
TaggedItem, Value, Value,
}; };
struct CallStub { struct CallStub {
anchor: uuid::Uuid,
positionals: Vec<Tagged<Value>>, positionals: Vec<Tagged<Value>>,
flags: IndexMap<String, Tagged<Value>>, flags: IndexMap<String, Tagged<Value>>,
} }
impl CallStub { impl CallStub {
fn new(anchor: uuid::Uuid) -> CallStub { fn new() -> CallStub {
CallStub { CallStub {
anchor,
positionals: vec![], positionals: vec![],
flags: indexmap::IndexMap::new(), flags: indexmap::IndexMap::new(),
} }
@ -221,19 +217,18 @@ mod tests {
fn with_parameter(&mut self, name: &str) -> &mut Self { fn with_parameter(&mut self, name: &str) -> &mut Self {
let fields: Vec<Tagged<Value>> = name let fields: Vec<Tagged<Value>> = name
.split(".") .split(".")
.map(|s| Value::string(s.to_string()).tagged(Tag::unknown_span(self.anchor))) .map(|s| Value::string(s.to_string()).tagged(Tag::unknown()))
.collect(); .collect();
self.positionals self.positionals
.push(Value::Table(fields).tagged(Tag::unknown_span(self.anchor))); .push(Value::Table(fields).tagged(Tag::unknown()));
self self
} }
fn create(&self) -> CallInfo { fn create(&self) -> CallInfo {
CallInfo { CallInfo {
args: EvaluatedArgs::new(Some(self.positionals.clone()), Some(self.flags.clone())), args: EvaluatedArgs::new(Some(self.positionals.clone()), Some(self.flags.clone())),
source_map: SourceMap::new(), name_tag: Tag::unknown(),
name_tag: Tag::unknown_span(self.anchor),
} }
} }
} }
@ -260,7 +255,7 @@ mod tests {
let mut plugin = Inc::new(); let mut plugin = Inc::new();
assert!(plugin assert!(plugin
.begin_filter(CallStub::new(test_uuid()).with_long_flag("major").create()) .begin_filter(CallStub::new().with_long_flag("major").create())
.is_ok()); .is_ok());
assert!(plugin.action.is_some()); assert!(plugin.action.is_some());
} }
@ -270,7 +265,7 @@ mod tests {
let mut plugin = Inc::new(); let mut plugin = Inc::new();
assert!(plugin assert!(plugin
.begin_filter(CallStub::new(test_uuid()).with_long_flag("minor").create()) .begin_filter(CallStub::new().with_long_flag("minor").create())
.is_ok()); .is_ok());
assert!(plugin.action.is_some()); assert!(plugin.action.is_some());
} }
@ -280,7 +275,7 @@ mod tests {
let mut plugin = Inc::new(); let mut plugin = Inc::new();
assert!(plugin assert!(plugin
.begin_filter(CallStub::new(test_uuid()).with_long_flag("patch").create()) .begin_filter(CallStub::new().with_long_flag("patch").create())
.is_ok()); .is_ok());
assert!(plugin.action.is_some()); assert!(plugin.action.is_some());
} }
@ -291,7 +286,7 @@ mod tests {
assert!(plugin assert!(plugin
.begin_filter( .begin_filter(
CallStub::new(test_uuid()) CallStub::new()
.with_long_flag("major") .with_long_flag("major")
.with_long_flag("minor") .with_long_flag("minor")
.create(), .create(),
@ -305,11 +300,7 @@ mod tests {
let mut plugin = Inc::new(); let mut plugin = Inc::new();
assert!(plugin assert!(plugin
.begin_filter( .begin_filter(CallStub::new().with_parameter("package.version").create())
CallStub::new(test_uuid())
.with_parameter("package.version")
.create()
)
.is_ok()); .is_ok());
assert_eq!( assert_eq!(
@ -347,7 +338,7 @@ mod tests {
assert!(plugin assert!(plugin
.begin_filter( .begin_filter(
CallStub::new(test_uuid()) CallStub::new()
.with_long_flag("major") .with_long_flag("major")
.with_parameter("version") .with_parameter("version")
.create() .create()
@ -375,7 +366,7 @@ mod tests {
assert!(plugin assert!(plugin
.begin_filter( .begin_filter(
CallStub::new(test_uuid()) CallStub::new()
.with_long_flag("minor") .with_long_flag("minor")
.with_parameter("version") .with_parameter("version")
.create() .create()
@ -404,7 +395,7 @@ mod tests {
assert!(plugin assert!(plugin
.begin_filter( .begin_filter(
CallStub::new(test_uuid()) CallStub::new()
.with_long_flag("patch") .with_long_flag("patch")
.with_parameter(&field) .with_parameter(&field)
.create() .create()
@ -425,8 +416,4 @@ mod tests {
_ => {} _ => {}
} }
} }
fn test_uuid() -> uuid::Uuid {
uuid::Uuid::nil()
}
} }

View File

@ -40,7 +40,7 @@ async fn ps(tag: Tag) -> Vec<Tagged<Value>> {
let mut output = vec![]; let mut output = vec![];
while let Some(res) = processes.next().await { while let Some(res) = processes.next().await {
if let Ok((process, usage)) = res { if let Ok((process, usage)) = res {
let mut dict = TaggedDictBuilder::new(tag); let mut dict = TaggedDictBuilder::new(&tag);
dict.insert("pid", Value::int(process.pid())); dict.insert("pid", Value::int(process.pid()));
if let Ok(name) = process.name().await { if let Ok(name) = process.name().await {
dict.insert("name", Value::string(name)); dict.insert("name", Value::string(name));

View File

@ -89,14 +89,12 @@ impl Str {
impl Str { impl Str {
fn strutils(&self, value: Tagged<Value>) -> Result<Tagged<Value>, ShellError> { fn strutils(&self, value: Tagged<Value>) -> Result<Tagged<Value>, ShellError> {
match value.item { match value.item {
Value::Primitive(Primitive::String(ref s)) => { Value::Primitive(Primitive::String(ref s)) => Ok(self.apply(&s)?.tagged(value.tag())),
Ok(Tagged::from_item(self.apply(&s)?, value.tag()))
}
Value::Row(_) => match self.field { Value::Row(_) => match self.field {
Some(ref f) => { Some(ref f) => {
let replacement = match value.item.get_data_by_column_path(value.tag(), f) { let replacement = match value.item.get_data_by_column_path(value.tag(), f) {
Some(result) => self.strutils(result.map(|x| x.clone()))?, Some(result) => self.strutils(result.map(|x| x.clone()))?,
None => return Ok(Tagged::from_item(Value::nothing(), value.tag)), None => return Ok(Value::nothing().tagged(value.tag)),
}; };
match value.item.replace_data_at_column_path( match value.item.replace_data_at_column_path(
value.tag(), value.tag(),
@ -174,7 +172,7 @@ impl Plugin for Str {
return Err(ShellError::labeled_error( return Err(ShellError::labeled_error(
"Unrecognized type in params", "Unrecognized type in params",
possible_field.type_name(), possible_field.type_name(),
possible_field.tag, &possible_field.tag,
)) ))
} }
} }
@ -216,13 +214,12 @@ mod tests {
use super::{Action, Str}; use super::{Action, Str};
use indexmap::IndexMap; use indexmap::IndexMap;
use nu::{ use nu::{
CallInfo, EvaluatedArgs, Plugin, Primitive, ReturnSuccess, SourceMap, Tag, Tagged, CallInfo, EvaluatedArgs, Plugin, Primitive, ReturnSuccess, Tag, Tagged, TaggedDictBuilder,
TaggedDictBuilder, TaggedItem, Value, TaggedItem, Value,
}; };
use num_bigint::BigInt; use num_bigint::BigInt;
struct CallStub { struct CallStub {
anchor: uuid::Uuid,
positionals: Vec<Tagged<Value>>, positionals: Vec<Tagged<Value>>,
flags: IndexMap<String, Tagged<Value>>, flags: IndexMap<String, Tagged<Value>>,
} }
@ -230,7 +227,6 @@ mod tests {
impl CallStub { impl CallStub {
fn new() -> CallStub { fn new() -> CallStub {
CallStub { CallStub {
anchor: uuid::Uuid::nil(),
positionals: vec![], positionals: vec![],
flags: indexmap::IndexMap::new(), flags: indexmap::IndexMap::new(),
} }
@ -247,19 +243,18 @@ mod tests {
fn with_parameter(&mut self, name: &str) -> &mut Self { fn with_parameter(&mut self, name: &str) -> &mut Self {
let fields: Vec<Tagged<Value>> = name let fields: Vec<Tagged<Value>> = name
.split(".") .split(".")
.map(|s| Value::string(s.to_string()).tagged(Tag::unknown_span(self.anchor))) .map(|s| Value::string(s.to_string()).tagged(Tag::unknown()))
.collect(); .collect();
self.positionals self.positionals
.push(Value::Table(fields).tagged(Tag::unknown_span(self.anchor))); .push(Value::Table(fields).tagged(Tag::unknown()));
self self
} }
fn create(&self) -> CallInfo { fn create(&self) -> CallInfo {
CallInfo { CallInfo {
args: EvaluatedArgs::new(Some(self.positionals.clone()), Some(self.flags.clone())), args: EvaluatedArgs::new(Some(self.positionals.clone()), Some(self.flags.clone())),
source_map: SourceMap::new(), name_tag: Tag::unknown(),
name_tag: Tag::unknown_span(self.anchor),
} }
} }
} }
@ -271,7 +266,7 @@ mod tests {
} }
fn unstructured_sample_record(value: &str) -> Tagged<Value> { fn unstructured_sample_record(value: &str) -> Tagged<Value> {
Tagged::from_item(Value::string(value), Tag::unknown()) Value::string(value).tagged(Tag::unknown())
} }
#[test] #[test]

View File

@ -21,7 +21,7 @@ impl Sum {
tag, tag,
}) => { }) => {
//TODO: handle overflow //TODO: handle overflow
self.total = Some(Value::int(i + j).tagged(*tag)); self.total = Some(Value::int(i + j).tagged(tag));
Ok(()) Ok(())
} }
None => { None => {
@ -36,7 +36,7 @@ impl Sum {
} }
} }
Value::Primitive(Primitive::Bytes(b)) => { Value::Primitive(Primitive::Bytes(b)) => {
match self.total { match &self.total {
Some(Tagged { Some(Tagged {
item: Value::Primitive(Primitive::Bytes(j)), item: Value::Primitive(Primitive::Bytes(j)),
tag, tag,

View File

@ -80,7 +80,7 @@ async fn mem(tag: Tag) -> Tagged<Value> {
} }
async fn host(tag: Tag) -> Tagged<Value> { async fn host(tag: Tag) -> Tagged<Value> {
let mut dict = TaggedDictBuilder::with_capacity(tag, 6); let mut dict = TaggedDictBuilder::with_capacity(&tag, 6);
let (platform_result, uptime_result) = let (platform_result, uptime_result) =
futures::future::join(host::platform(), host::uptime()).await; futures::future::join(host::platform(), host::uptime()).await;
@ -95,7 +95,7 @@ async fn host(tag: Tag) -> Tagged<Value> {
// Uptime // Uptime
if let Ok(uptime) = uptime_result { if let Ok(uptime) = uptime_result {
let mut uptime_dict = TaggedDictBuilder::with_capacity(tag, 4); let mut uptime_dict = TaggedDictBuilder::with_capacity(&tag, 4);
let uptime = uptime.get::<time::second>().round() as i64; let uptime = uptime.get::<time::second>().round() as i64;
let days = uptime / (60 * 60 * 24); let days = uptime / (60 * 60 * 24);
@ -116,7 +116,10 @@ async fn host(tag: Tag) -> Tagged<Value> {
let mut user_vec = vec![]; let mut user_vec = vec![];
while let Some(user) = users.next().await { while let Some(user) = users.next().await {
if let Ok(user) = user { if let Ok(user) = user {
user_vec.push(Tagged::from_item(Value::string(user.username()), tag)); user_vec.push(Tagged {
item: Value::string(user.username()),
tag: tag.clone(),
});
} }
} }
let user_list = Value::Table(user_vec); let user_list = Value::Table(user_vec);
@ -130,7 +133,7 @@ async fn disks(tag: Tag) -> Option<Value> {
let mut partitions = disk::partitions_physical(); let mut partitions = disk::partitions_physical();
while let Some(part) = partitions.next().await { while let Some(part) = partitions.next().await {
if let Ok(part) = part { if let Ok(part) = part {
let mut dict = TaggedDictBuilder::with_capacity(tag, 6); let mut dict = TaggedDictBuilder::with_capacity(&tag, 6);
dict.insert( dict.insert(
"device", "device",
Value::string( Value::string(
@ -176,7 +179,7 @@ async fn battery(tag: Tag) -> Option<Value> {
if let Ok(batteries) = manager.batteries() { if let Ok(batteries) = manager.batteries() {
for battery in batteries { for battery in batteries {
if let Ok(battery) = battery { if let Ok(battery) = battery {
let mut dict = TaggedDictBuilder::new(tag); let mut dict = TaggedDictBuilder::new(&tag);
if let Some(vendor) = battery.vendor() { if let Some(vendor) = battery.vendor() {
dict.insert("vendor", Value::string(vendor)); dict.insert("vendor", Value::string(vendor));
} }
@ -217,7 +220,7 @@ async fn temp(tag: Tag) -> Option<Value> {
let mut sensors = sensors::temperatures(); let mut sensors = sensors::temperatures();
while let Some(sensor) = sensors.next().await { while let Some(sensor) = sensors.next().await {
if let Ok(sensor) = sensor { if let Ok(sensor) = sensor {
let mut dict = TaggedDictBuilder::new(tag); let mut dict = TaggedDictBuilder::new(&tag);
dict.insert("unit", Value::string(sensor.unit())); dict.insert("unit", Value::string(sensor.unit()));
if let Some(label) = sensor.label() { if let Some(label) = sensor.label() {
dict.insert("label", Value::string(label)); dict.insert("label", Value::string(label));
@ -259,7 +262,7 @@ async fn net(tag: Tag) -> Option<Value> {
let mut io_counters = net::io_counters(); let mut io_counters = net::io_counters();
while let Some(nic) = io_counters.next().await { while let Some(nic) = io_counters.next().await {
if let Ok(nic) = nic { if let Ok(nic) = nic {
let mut network_idx = TaggedDictBuilder::with_capacity(tag, 3); let mut network_idx = TaggedDictBuilder::with_capacity(&tag, 3);
network_idx.insert("name", Value::string(nic.interface())); network_idx.insert("name", Value::string(nic.interface()));
network_idx.insert( network_idx.insert(
"sent", "sent",
@ -280,11 +283,17 @@ async fn net(tag: Tag) -> Option<Value> {
} }
async fn sysinfo(tag: Tag) -> Vec<Tagged<Value>> { async fn sysinfo(tag: Tag) -> Vec<Tagged<Value>> {
let mut sysinfo = TaggedDictBuilder::with_capacity(tag, 7); let mut sysinfo = TaggedDictBuilder::with_capacity(&tag, 7);
let (host, cpu, disks, memory, temp) = let (host, cpu, disks, memory, temp) = futures::future::join5(
futures::future::join5(host(tag), cpu(tag), disks(tag), mem(tag), temp(tag)).await; host(tag.clone()),
let (net, battery) = futures::future::join(net(tag), battery(tag)).await; cpu(tag.clone()),
disks(tag.clone()),
mem(tag.clone()),
temp(tag.clone()),
)
.await;
let (net, battery) = futures::future::join(net(tag.clone()), battery(tag.clone())).await;
sysinfo.insert_tagged("host", host); sysinfo.insert_tagged("host", host);
if let Some(cpu) = cpu { if let Some(cpu) = cpu {

View File

@ -1,8 +1,7 @@
use crossterm::{cursor, terminal, RawScreen}; use crossterm::{cursor, terminal, RawScreen};
use crossterm::{InputEvent, KeyEvent}; use crossterm::{InputEvent, KeyEvent};
use nu::{ use nu::{
serve_plugin, AnchorLocation, CallInfo, Plugin, Primitive, ShellError, Signature, SourceMap, serve_plugin, AnchorLocation, CallInfo, Plugin, Primitive, ShellError, Signature, Tagged, Value,
Tagged, Value,
}; };
use syntect::easy::HighlightLines; use syntect::easy::HighlightLines;
@ -29,8 +28,8 @@ impl Plugin for TextView {
Ok(Signature::build("textview").desc("Autoview of text data.")) Ok(Signature::build("textview").desc("Autoview of text data."))
} }
fn sink(&mut self, call_info: CallInfo, input: Vec<Tagged<Value>>) { fn sink(&mut self, _call_info: CallInfo, input: Vec<Tagged<Value>>) {
view_text_value(&input[0], &call_info.source_map); view_text_value(&input[0]);
} }
} }
@ -215,20 +214,18 @@ fn scroll_view(s: &str) {
scroll_view_lines_if_needed(v, false); scroll_view_lines_if_needed(v, false);
} }
fn view_text_value(value: &Tagged<Value>, source_map: &SourceMap) { fn view_text_value(value: &Tagged<Value>) {
let value_anchor = value.anchor(); let value_anchor = value.anchor();
match value.item { match value.item {
Value::Primitive(Primitive::String(ref s)) => { Value::Primitive(Primitive::String(ref s)) => {
let source = source_map.get(&value_anchor); if let Some(source) = value_anchor {
if let Some(source) = source {
let extension: Option<String> = match source { let extension: Option<String> = match source {
AnchorLocation::File(file) => { AnchorLocation::File(file) => {
let path = Path::new(file); let path = Path::new(&file);
path.extension().map(|x| x.to_string_lossy().to_string()) path.extension().map(|x| x.to_string_lossy().to_string())
} }
AnchorLocation::Url(url) => { AnchorLocation::Url(url) => {
let url = url::Url::parse(url); let url = url::Url::parse(&url);
if let Ok(url) = url { if let Ok(url) = url {
let url = url.clone(); let url = url.clone();
if let Some(mut segments) = url.path_segments() { if let Some(mut segments) = url.path_segments() {

Some files were not shown because too many files have changed in this diff Show More