Mirror of https://github.com/nushell/nushell.git (synced 2024-11-22 16:33:37 +01:00)
Stream support (#812)
* Moves off of draining between filters. Instead, the sink pulls on the stream and drains it element-wise. This makes the whole stream lazy (sketched below).
* Adds ctrl-c support and connects it into some of the key points where we pull on the stream. If a ctrl-c is detected, we immediately halt pulling on the stream and return to the prompt.
* Moves away from having a SourceMap where anchor locations are stored. AnchorLocation is now kept directly in the Tag.
* To make this possible, splits Tag and Span. Span is largely used in the parser and is copyable; Tag is no longer copyable.
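For illustration, a minimal sketch of the pull-based draining pattern described above. This is not nushell's actual code: `produce_values`, the printing sink, and the direct use of the `futures` and `ctrlc` crates here are assumptions made for the example; they only mirror the shape of the change (a shared AtomicBool flipped by the ctrl-c handler, checked each time the sink pulls one element off a lazy stream).

use std::sync::{
    atomic::{AtomicBool, Ordering},
    Arc,
};

use futures::{executor::block_on, stream, Stream, StreamExt};

// Stand-in for an upstream pipeline of filters: nothing runs until the sink polls it.
fn produce_values() -> impl Stream<Item = u64> {
    stream::iter(0u64..1_000_000)
}

fn main() {
    // Shared flag, set from the ctrl-c handler and read by the draining loop.
    let ctrl_c = Arc::new(AtomicBool::new(false));
    let handler_flag = ctrl_c.clone();
    ctrlc::set_handler(move || handler_flag.store(true, Ordering::SeqCst))
        .expect("Error setting Ctrl-C handler");

    block_on(async {
        let mut stream = Box::pin(produce_values());
        // Drain element-wise: each iteration pulls exactly one item from the stream.
        while let Some(item) = stream.next().await {
            if ctrl_c.load(Ordering::SeqCst) {
                // Stop pulling immediately and return control to the caller (the prompt).
                break;
            }
            println!("{}", item);
        }
    });
}

And a rough sketch of the Tag/Span split. The exact field shapes are assumptions (only `AnchorLocation::Url` and the `span`/`anchor` fields appear in the diff below); the point is that Span stays a small Copy value used by the parser, while Tag carries the Span plus the optional AnchorLocation and is only Clone.

/// Parser-facing location: cheap to copy around.
#[derive(Debug, Clone, Copy, PartialEq)]
pub struct Span {
    pub start: usize,
    pub end: usize,
}

/// Where a value was loaded from; stored directly on the Tag instead of in a SourceMap.
#[derive(Debug, Clone, PartialEq)]
pub enum AnchorLocation {
    Url(String),
    File(String),
}

/// Tag = Span + optional anchor; Clone but no longer Copy.
#[derive(Debug, Clone, PartialEq)]
pub struct Tag {
    pub anchor: Option<AnchorLocation>,
    pub span: Span,
}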
This commit is contained in:
parent 8ca678440a
commit 193b00764b

88  Cargo.lock (generated)
@ -1604,7 +1604,6 @@ dependencies = [
|
||||
"toml 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"which 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
@ -1920,24 +1919,6 @@ dependencies = [
|
||||
"proc-macro2 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand"
|
||||
version = "0.6.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand"
|
||||
version = "0.7.0"
|
||||
@ -1950,15 +1931,6 @@ dependencies = [
|
||||
"rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_chacha"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_chacha"
|
||||
version = "0.2.1"
|
||||
@ -1989,14 +1961,6 @@ dependencies = [
|
||||
"getrandom 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_hc"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_hc"
|
||||
version = "0.2.0"
|
||||
@ -2005,24 +1969,6 @@ dependencies = [
|
||||
"rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_isaac"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_jitter"
|
||||
version = "0.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_os"
|
||||
version = "0.1.3"
|
||||
@ -2036,23 +1982,6 @@ dependencies = [
|
||||
"winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_pcg"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"autocfg 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rand_xorshift"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "raw-cpuid"
|
||||
version = "7.0.3"
|
||||
@ -2752,15 +2681,6 @@ name = "utf8parse"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "uuid"
|
||||
version = "0.7.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
dependencies = [
|
||||
"rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"serde 1.0.101 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "vcpkg"
|
||||
version = "0.2.7"
|
||||
@ -3201,20 +3121,13 @@ dependencies = [
|
||||
"checksum quick-error 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9274b940887ce9addde99c4eee6b5c44cc494b182b97e73dc8ffdcb3397fd3f0"
|
||||
"checksum quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1"
|
||||
"checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe"
|
||||
"checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca"
|
||||
"checksum rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d47eab0e83d9693d40f825f86948aa16eff6750ead4bdffc4ab95b8b3a7f052c"
|
||||
"checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef"
|
||||
"checksum rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853"
|
||||
"checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b"
|
||||
"checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc"
|
||||
"checksum rand_core 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "615e683324e75af5d43d8f7a39ffe3ee4a9dc42c5c701167a71dc59c3a493aca"
|
||||
"checksum rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4"
|
||||
"checksum rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c"
|
||||
"checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08"
|
||||
"checksum rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b"
|
||||
"checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071"
|
||||
"checksum rand_pcg 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "abf9b09b01790cfe0364f52bf32995ea3c39f4d2dd011eac241d2914146d0b44"
|
||||
"checksum rand_xorshift 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "cbf7e9e623549b0e21f6e97cf8ecf247c1a8fd2e8a992ae265314300b2455d5c"
|
||||
"checksum raw-cpuid 7.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "b4a349ca83373cfa5d6dbb66fd76e58b2cca08da71a5f6400de0a0a6a9bceeaf"
|
||||
"checksum rawkey 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "33ec17a493dcb820725c002bc253f6f3ba4e4dc635e72c238540691b05e43897"
|
||||
"checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2"
|
||||
@ -3297,7 +3210,6 @@ dependencies = [
|
||||
"checksum url 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "75b414f6c464c879d7f9babf951f23bc3743fb7313c081b2e6ca719067ea9d61"
|
||||
"checksum user32-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "4ef4711d107b21b410a3a974b1204d9accc8b10dad75d8324b5d755de1617d47"
|
||||
"checksum utf8parse 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "8772a4ccbb4e89959023bc5b7cb8623a795caa7092d99f3aa9501b9484d4557d"
|
||||
"checksum uuid 0.7.4 (registry+https://github.com/rust-lang/crates.io-index)" = "90dbc611eb48397705a6b0f6e917da23ae517e4d127123d2cf7674206627d32a"
|
||||
"checksum vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "33dd455d0f96e90a75803cfeb7f948768c08d70a6de9a8d2362461935698bf95"
|
||||
"checksum vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a"
|
||||
"checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd"
|
||||
|
@@ -66,7 +66,6 @@ hex = "0.3.2"
tempfile = "3.1.0"
semver = "0.9.0"
which = "2.0.1"
uuid = {version = "0.7.4", features = [ "v4", "serde" ]}
textwrap = {version = "0.11.0", features = ["term_size"]}
shellexpand = "1.0.0"
futures-timer = "0.4.0"

65  src/cli.rs
@@ -28,8 +28,7 @@ use std::error::Error;
use std::io::{BufRead, BufReader, Write};
use std::iter::Iterator;
use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::sync::atomic::Ordering;

#[derive(Debug)]
pub enum MaybeOwned<'a, T> {
@@ -339,16 +338,15 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
// we are ok if history does not exist
let _ = rl.load_history(&History::path());

let ctrl_c = Arc::new(AtomicBool::new(false));
let cc = ctrl_c.clone();
let cc = context.ctrl_c.clone();
ctrlc::set_handler(move || {
cc.store(true, Ordering::SeqCst);
})
.expect("Error setting Ctrl-C handler");
let mut ctrlcbreak = false;
loop {
if ctrl_c.load(Ordering::SeqCst) {
ctrl_c.store(false, Ordering::SeqCst);
if context.ctrl_c.load(Ordering::SeqCst) {
context.ctrl_c.store(false, Ordering::SeqCst);
continue;
}

@@ -481,7 +479,7 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
Ok(line) => {
let line = chomp_newline(line);

let result = match crate::parser::parse(&line, uuid::Uuid::nil()) {
let result = match crate::parser::parse(&line) {
Err(err) => {
return LineResult::Error(line.to_string(), err);
}
@@ -549,30 +547,45 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
(
Some(ClassifiedCommand::Internal(left)),
Some(ClassifiedCommand::External(_)),
) => match left
.run(ctx, input, Text::from(line), is_first_command)
.await
{
) => match left.run(ctx, input, Text::from(line), is_first_command) {
Ok(val) => ClassifiedInputStream::from_input_stream(val),
Err(err) => return LineResult::Error(line.to_string(), err),
},

(Some(ClassifiedCommand::Internal(left)), Some(_)) => {
match left
.run(ctx, input, Text::from(line), is_first_command)
.await
{
match left.run(ctx, input, Text::from(line), is_first_command) {
Ok(val) => ClassifiedInputStream::from_input_stream(val),
Err(err) => return LineResult::Error(line.to_string(), err),
}
}

(Some(ClassifiedCommand::Internal(left)), None) => {
match left
.run(ctx, input, Text::from(line), is_first_command)
.await
{
Ok(val) => ClassifiedInputStream::from_input_stream(val),
match left.run(ctx, input, Text::from(line), is_first_command) {
Ok(val) => {
use futures::stream::TryStreamExt;

let mut output_stream: OutputStream = val.into();
loop {
match output_stream.try_next().await {
Ok(Some(ReturnSuccess::Value(Tagged {
item: Value::Error(e),
..
}))) => {
return LineResult::Error(line.to_string(), e);
}
Ok(Some(_item)) => {
if ctx.ctrl_c.load(Ordering::SeqCst) {
break;
}
}
_ => {
break;
}
}
}

return LineResult::Success(line.to_string());
}
Err(err) => return LineResult::Error(line.to_string(), err),
}
}

@@ -620,12 +633,12 @@ fn classify_pipeline(
source: &Text,
) -> Result<ClassifiedPipeline, ShellError> {
let mut pipeline_list = vec![pipeline.clone()];
let mut iterator = TokensIterator::all(&mut pipeline_list, pipeline.tag());
let mut iterator = TokensIterator::all(&mut pipeline_list, pipeline.span());

expand_syntax(
&PipelineShape,
&mut iterator,
&context.expand_context(source, pipeline.tag()),
&context.expand_context(source, pipeline.span()),
)
}

@@ -642,7 +655,13 @@ pub(crate) fn external_command(
Ok(ClassifiedCommand::External(ExternalCommand {
name: name.to_string(),
name_tag: name.tag(),
args: arg_list_strings,
args: arg_list_strings
.iter()
.map(|x| Tagged {
tag: x.span.into(),
item: x.item.clone(),
})
.collect(),
}))
}

@@ -1,9 +1,14 @@
use crate::commands::{RawCommandArgs, WholeStreamCommand};
use crate::errors::ShellError;
use crate::parser::hir::{Expression, NamedArguments};
use crate::prelude::*;
use futures::stream::TryStreamExt;
use std::sync::atomic::Ordering;

pub struct Autoview;

const STREAM_PAGE_SIZE: u64 = 50;

#[derive(Deserialize)]
pub struct AutoviewArgs {}

@ -31,61 +36,132 @@ impl WholeStreamCommand for Autoview {
|
||||
|
||||
pub fn autoview(
|
||||
AutoviewArgs {}: AutoviewArgs,
|
||||
mut context: RunnableContext,
|
||||
context: RunnableContext,
|
||||
raw: RawCommandArgs,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
Ok(OutputStream::new(async_stream! {
|
||||
let input = context.input.drain_vec().await;
|
||||
|
||||
if input.len() > 0 {
|
||||
if let Tagged {
|
||||
item: Value::Primitive(Primitive::Binary(_)),
|
||||
..
|
||||
} = input[0usize]
|
||||
{
|
||||
let binary = context.get_command("binaryview");
|
||||
if let Some(binary) = binary {
|
||||
let result = binary.run(raw.with_input(input), &context.commands, false);
|
||||
result.collect::<Vec<_>>().await;
|
||||
} else {
|
||||
for i in input {
|
||||
match i.item {
|
||||
Value::Primitive(Primitive::Binary(b)) => {
|
||||
use pretty_hex::*;
|
||||
println!("{:?}", b.hex_dump());
|
||||
let text = context.get_command("textview");
|
||||
let table = context.get_command("table");
|
||||
|
||||
Ok(OutputStream::new(async_stream! {
|
||||
let mut output_stream: OutputStream = context.input.into();
|
||||
|
||||
match output_stream.try_next().await {
|
||||
Ok(Some(x)) => {
|
||||
match output_stream.try_next().await {
|
||||
Ok(Some(y)) => {
|
||||
let ctrl_c = context.ctrl_c.clone();
|
||||
let stream = async_stream! {
|
||||
yield Ok(x);
|
||||
yield Ok(y);
|
||||
|
||||
loop {
|
||||
match output_stream.try_next().await {
|
||||
Ok(Some(z)) => {
|
||||
if ctrl_c.load(Ordering::SeqCst) {
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
yield Ok(z);
|
||||
}
|
||||
_ => break,
|
||||
}
|
||||
}
|
||||
};
|
||||
} else if is_single_anchored_text_value(&input) {
|
||||
let text = context.get_command("textview");
|
||||
if let Some(table) = table {
|
||||
let mut new_output_stream: OutputStream = stream.to_output_stream();
|
||||
let mut finished = false;
|
||||
let mut current_idx = 0;
|
||||
loop {
|
||||
let mut new_input = VecDeque::new();
|
||||
|
||||
for _ in 0..STREAM_PAGE_SIZE {
|
||||
match new_output_stream.try_next().await {
|
||||
|
||||
Ok(Some(a)) => {
|
||||
if let ReturnSuccess::Value(v) = a {
|
||||
new_input.push_back(v);
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
finished = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let raw = raw.clone();
|
||||
|
||||
let mut command_args = raw.with_input(new_input.into());
|
||||
let mut named_args = NamedArguments::new();
|
||||
named_args.insert_optional("start_number", Some(Expression::number(current_idx, Tag::unknown())));
|
||||
command_args.call_info.args.named = Some(named_args);
|
||||
|
||||
let result = table.run(command_args, &context.commands, false);
|
||||
result.collect::<Vec<_>>().await;
|
||||
|
||||
if finished {
|
||||
break;
|
||||
} else {
|
||||
current_idx += STREAM_PAGE_SIZE;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
if let ReturnSuccess::Value(x) = x {
|
||||
match x {
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::String(ref s)),
|
||||
tag: Tag { anchor, span },
|
||||
} if anchor.is_some() => {
|
||||
if let Some(text) = text {
|
||||
let result = text.run(raw.with_input(input), &context.commands, false);
|
||||
let mut stream = VecDeque::new();
|
||||
stream.push_back(Value::string(s).tagged(Tag { anchor, span }));
|
||||
let result = text.run(raw.with_input(stream.into()), &context.commands, false);
|
||||
result.collect::<Vec<_>>().await;
|
||||
} else {
|
||||
for i in input {
|
||||
match i.item {
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
println!("{}", s);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if is_single_text_value(&input) {
|
||||
for i in input {
|
||||
match i.item {
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
Tagged {
|
||||
item: Value::Primitive(Primitive::String(s)),
|
||||
..
|
||||
} => {
|
||||
println!("{}", s);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let table = context.expect_command("table");
|
||||
let result = table.run(raw.with_input(input), &context.commands, false);
|
||||
|
||||
Tagged { item: Value::Primitive(Primitive::Binary(ref b)), .. } => {
|
||||
if let Some(binary) = binary {
|
||||
let mut stream = VecDeque::new();
|
||||
stream.push_back(x.clone());
|
||||
let result = binary.run(raw.with_input(stream.into()), &context.commands, false);
|
||||
result.collect::<Vec<_>>().await;
|
||||
} else {
|
||||
use pretty_hex::*;
|
||||
println!("{:?}", b.hex_dump());
|
||||
}
|
||||
}
|
||||
|
||||
Tagged { item: Value::Error(e), .. } => {
|
||||
yield Err(e);
|
||||
}
|
||||
Tagged { item: ref item, .. } => {
|
||||
if let Some(table) = table {
|
||||
let mut stream = VecDeque::new();
|
||||
stream.push_back(x.clone());
|
||||
let result = table.run(raw.with_input(stream.into()), &context.commands, false);
|
||||
result.collect::<Vec<_>>().await;
|
||||
} else {
|
||||
println!("{:?}", item);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
//println!("<no results>");
|
||||
}
|
||||
}
|
||||
|
||||
@ -95,35 +171,3 @@ pub fn autoview(
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
||||
fn is_single_text_value(input: &Vec<Tagged<Value>>) -> bool {
|
||||
if input.len() != 1 {
|
||||
return false;
|
||||
}
|
||||
if let Tagged {
|
||||
item: Value::Primitive(Primitive::String(_)),
|
||||
..
|
||||
} = input[0]
|
||||
{
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
fn is_single_anchored_text_value(input: &Vec<Tagged<Value>>) -> bool {
|
||||
if input.len() != 1 {
|
||||
return false;
|
||||
}
|
||||
|
||||
if let Tagged {
|
||||
item: Value::Primitive(Primitive::String(_)),
|
||||
tag: Tag { anchor, .. },
|
||||
} = input[0]
|
||||
{
|
||||
anchor != uuid::Uuid::nil()
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
@ -100,7 +100,7 @@ pub(crate) struct DynamicCommand {
|
||||
}
|
||||
|
||||
impl InternalCommand {
|
||||
pub(crate) async fn run(
|
||||
pub(crate) fn run(
|
||||
self,
|
||||
context: &mut Context,
|
||||
input: ClassifiedInputStream,
|
||||
@ -119,12 +119,9 @@ impl InternalCommand {
|
||||
let command = context.expect_command(&self.name);
|
||||
|
||||
let result = {
|
||||
let source_map = context.source_map.lock().unwrap().clone();
|
||||
|
||||
context.run_command(
|
||||
command,
|
||||
self.name_tag.clone(),
|
||||
source_map,
|
||||
self.args,
|
||||
&source,
|
||||
objects,
|
||||
@ -134,17 +131,15 @@ impl InternalCommand {
|
||||
|
||||
let result = trace_out_stream!(target: "nu::trace_stream::internal", source: &source, "output" = result);
|
||||
let mut result = result.values;
|
||||
let mut context = context.clone();
|
||||
|
||||
let mut stream = VecDeque::new();
|
||||
let stream = async_stream! {
|
||||
while let Some(item) = result.next().await {
|
||||
match item? {
|
||||
ReturnSuccess::Action(action) => match action {
|
||||
match item {
|
||||
Ok(ReturnSuccess::Action(action)) => match action {
|
||||
CommandAction::ChangePath(path) => {
|
||||
context.shell_manager.set_path(path);
|
||||
}
|
||||
CommandAction::AddAnchorLocation(uuid, anchor_location) => {
|
||||
context.add_anchor_location(uuid, anchor_location);
|
||||
}
|
||||
CommandAction::Exit => std::process::exit(0), // TODO: save history.txt
|
||||
CommandAction::EnterHelpShell(value) => {
|
||||
match value {
|
||||
@ -156,12 +151,12 @@ impl InternalCommand {
|
||||
HelpShell::for_command(
|
||||
Value::string(cmd).tagged(tag),
|
||||
&context.registry(),
|
||||
)?,
|
||||
).unwrap(),
|
||||
));
|
||||
}
|
||||
_ => {
|
||||
context.shell_manager.insert_at_current(Box::new(
|
||||
HelpShell::index(&context.registry())?,
|
||||
HelpShell::index(&context.registry()).unwrap(),
|
||||
));
|
||||
}
|
||||
}
|
||||
@ -173,7 +168,7 @@ impl InternalCommand {
|
||||
}
|
||||
CommandAction::EnterShell(location) => {
|
||||
context.shell_manager.insert_at_current(Box::new(
|
||||
FilesystemShell::with_location(location, context.registry().clone())?,
|
||||
FilesystemShell::with_location(location, context.registry().clone()).unwrap(),
|
||||
));
|
||||
}
|
||||
CommandAction::PreviousShell => {
|
||||
@ -190,13 +185,19 @@ impl InternalCommand {
|
||||
}
|
||||
},
|
||||
|
||||
ReturnSuccess::Value(v) => {
|
||||
stream.push_back(v);
|
||||
}
|
||||
}
|
||||
Ok(ReturnSuccess::Value(v)) => {
|
||||
yield Ok(v);
|
||||
}
|
||||
|
||||
Ok(stream.into())
|
||||
Err(x) => {
|
||||
yield Ok(Value::Error(x).tagged_unknown());
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(stream.to_input_stream())
|
||||
}
|
||||
}
|
||||
|
||||
@ -346,7 +347,7 @@ impl ExternalCommand {
|
||||
let stdout = popen.stdout.take().unwrap();
|
||||
let file = futures::io::AllowStdIo::new(stdout);
|
||||
let stream = Framed::new(file, LinesCodec {});
|
||||
let stream = stream.map(move |line| Value::string(line.unwrap()).tagged(name_tag));
|
||||
let stream = stream.map(move |line| Value::string(line.unwrap()).tagged(&name_tag));
|
||||
Ok(ClassifiedInputStream::from_input_stream(
|
||||
stream.boxed() as BoxStream<'static, Tagged<Value>>
|
||||
))
|
||||
|
@ -1,4 +1,3 @@
|
||||
use crate::context::{AnchorLocation, SourceMap};
|
||||
use crate::data::Value;
|
||||
use crate::errors::ShellError;
|
||||
use crate::evaluate::Scope;
|
||||
@ -11,13 +10,12 @@ use serde::{Deserialize, Serialize};
|
||||
use std::fmt;
|
||||
use std::ops::Deref;
|
||||
use std::path::PathBuf;
|
||||
use uuid::Uuid;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
|
||||
#[derive(Deserialize, Serialize, Debug, Clone)]
|
||||
pub struct UnevaluatedCallInfo {
|
||||
pub args: hir::Call,
|
||||
pub source: Text,
|
||||
pub source_map: SourceMap,
|
||||
pub name_tag: Tag,
|
||||
}
|
||||
|
||||
@ -37,7 +35,6 @@ impl UnevaluatedCallInfo {
|
||||
|
||||
Ok(CallInfo {
|
||||
args,
|
||||
source_map: self.source_map,
|
||||
name_tag: self.name_tag,
|
||||
})
|
||||
}
|
||||
@ -46,7 +43,6 @@ impl UnevaluatedCallInfo {
|
||||
#[derive(Deserialize, Serialize, Debug, Clone)]
|
||||
pub struct CallInfo {
|
||||
pub args: registry::EvaluatedArgs,
|
||||
pub source_map: SourceMap,
|
||||
pub name_tag: Tag,
|
||||
}
|
||||
|
||||
@ -62,7 +58,7 @@ impl CallInfo {
|
||||
args: T::deserialize(&mut deserializer)?,
|
||||
context: RunnablePerItemContext {
|
||||
shell_manager: shell_manager.clone(),
|
||||
name: self.name_tag,
|
||||
name: self.name_tag.clone(),
|
||||
},
|
||||
callback,
|
||||
})
|
||||
@ -73,6 +69,7 @@ impl CallInfo {
|
||||
#[get = "pub(crate)"]
|
||||
pub struct CommandArgs {
|
||||
pub host: Arc<Mutex<dyn Host>>,
|
||||
pub ctrl_c: Arc<AtomicBool>,
|
||||
pub shell_manager: ShellManager,
|
||||
pub call_info: UnevaluatedCallInfo,
|
||||
pub input: InputStream,
|
||||
@ -82,6 +79,7 @@ pub struct CommandArgs {
|
||||
#[get = "pub(crate)"]
|
||||
pub struct RawCommandArgs {
|
||||
pub host: Arc<Mutex<dyn Host>>,
|
||||
pub ctrl_c: Arc<AtomicBool>,
|
||||
pub shell_manager: ShellManager,
|
||||
pub call_info: UnevaluatedCallInfo,
|
||||
}
|
||||
@ -90,6 +88,7 @@ impl RawCommandArgs {
|
||||
pub fn with_input(self, input: Vec<Tagged<Value>>) -> CommandArgs {
|
||||
CommandArgs {
|
||||
host: self.host,
|
||||
ctrl_c: self.ctrl_c,
|
||||
shell_manager: self.shell_manager,
|
||||
call_info: self.call_info,
|
||||
input: input.into(),
|
||||
@ -109,12 +108,14 @@ impl CommandArgs {
|
||||
registry: ®istry::CommandRegistry,
|
||||
) -> Result<EvaluatedWholeStreamCommandArgs, ShellError> {
|
||||
let host = self.host.clone();
|
||||
let ctrl_c = self.ctrl_c.clone();
|
||||
let shell_manager = self.shell_manager.clone();
|
||||
let input = self.input;
|
||||
let call_info = self.call_info.evaluate(registry, &Scope::empty())?;
|
||||
|
||||
Ok(EvaluatedWholeStreamCommandArgs::new(
|
||||
host,
|
||||
ctrl_c,
|
||||
shell_manager,
|
||||
call_info,
|
||||
input,
|
||||
@ -127,12 +128,13 @@ impl CommandArgs {
|
||||
callback: fn(T, RunnableContext) -> Result<OutputStream, ShellError>,
|
||||
) -> Result<RunnableArgs<T>, ShellError> {
|
||||
let shell_manager = self.shell_manager.clone();
|
||||
let source_map = self.call_info.source_map.clone();
|
||||
let host = self.host.clone();
|
||||
let ctrl_c = self.ctrl_c.clone();
|
||||
let args = self.evaluate_once(registry)?;
|
||||
let call_info = args.call_info.clone();
|
||||
let (input, args) = args.split();
|
||||
let name_tag = args.call_info.name_tag;
|
||||
let mut deserializer = ConfigDeserializer::from_call_info(args.call_info);
|
||||
let mut deserializer = ConfigDeserializer::from_call_info(call_info);
|
||||
|
||||
Ok(RunnableArgs {
|
||||
args: T::deserialize(&mut deserializer)?,
|
||||
@ -141,8 +143,8 @@ impl CommandArgs {
|
||||
commands: registry.clone(),
|
||||
shell_manager,
|
||||
name: name_tag,
|
||||
source_map,
|
||||
host,
|
||||
ctrl_c,
|
||||
},
|
||||
callback,
|
||||
})
|
||||
@ -155,17 +157,20 @@ impl CommandArgs {
|
||||
) -> Result<RunnableRawArgs<T>, ShellError> {
|
||||
let raw_args = RawCommandArgs {
|
||||
host: self.host.clone(),
|
||||
ctrl_c: self.ctrl_c.clone(),
|
||||
shell_manager: self.shell_manager.clone(),
|
||||
call_info: self.call_info.clone(),
|
||||
};
|
||||
|
||||
let shell_manager = self.shell_manager.clone();
|
||||
let source_map = self.call_info.source_map.clone();
|
||||
let host = self.host.clone();
|
||||
let ctrl_c = self.ctrl_c.clone();
|
||||
let args = self.evaluate_once(registry)?;
|
||||
let call_info = args.call_info.clone();
|
||||
|
||||
let (input, args) = args.split();
|
||||
let name_tag = args.call_info.name_tag;
|
||||
let mut deserializer = ConfigDeserializer::from_call_info(args.call_info);
|
||||
let mut deserializer = ConfigDeserializer::from_call_info(call_info.clone());
|
||||
|
||||
Ok(RunnableRawArgs {
|
||||
args: T::deserialize(&mut deserializer)?,
|
||||
@ -174,8 +179,8 @@ impl CommandArgs {
|
||||
commands: registry.clone(),
|
||||
shell_manager,
|
||||
name: name_tag,
|
||||
source_map,
|
||||
host,
|
||||
ctrl_c,
|
||||
},
|
||||
raw_args,
|
||||
callback,
|
||||
@ -198,18 +203,12 @@ pub struct RunnableContext {
|
||||
pub input: InputStream,
|
||||
pub shell_manager: ShellManager,
|
||||
pub host: Arc<Mutex<dyn Host>>,
|
||||
pub ctrl_c: Arc<AtomicBool>,
|
||||
pub commands: CommandRegistry,
|
||||
pub source_map: SourceMap,
|
||||
pub name: Tag,
|
||||
}
|
||||
|
||||
impl RunnableContext {
|
||||
pub fn expect_command(&self, name: &str) -> Arc<Command> {
|
||||
self.commands
|
||||
.get_command(name)
|
||||
.expect(&format!("Expected command {}", name))
|
||||
}
|
||||
|
||||
pub fn get_command(&self, name: &str) -> Option<Arc<Command>> {
|
||||
self.commands.get_command(name)
|
||||
}
|
||||
@ -270,6 +269,7 @@ impl Deref for EvaluatedWholeStreamCommandArgs {
|
||||
impl EvaluatedWholeStreamCommandArgs {
|
||||
pub fn new(
|
||||
host: Arc<Mutex<dyn Host>>,
|
||||
ctrl_c: Arc<AtomicBool>,
|
||||
shell_manager: ShellManager,
|
||||
call_info: CallInfo,
|
||||
input: impl Into<InputStream>,
|
||||
@ -277,6 +277,7 @@ impl EvaluatedWholeStreamCommandArgs {
|
||||
EvaluatedWholeStreamCommandArgs {
|
||||
args: EvaluatedCommandArgs {
|
||||
host,
|
||||
ctrl_c,
|
||||
shell_manager,
|
||||
call_info,
|
||||
},
|
||||
@ -285,7 +286,7 @@ impl EvaluatedWholeStreamCommandArgs {
|
||||
}
|
||||
|
||||
pub fn name_tag(&self) -> Tag {
|
||||
self.args.call_info.name_tag
|
||||
self.args.call_info.name_tag.clone()
|
||||
}
|
||||
|
||||
pub fn parts(self) -> (InputStream, registry::EvaluatedArgs) {
|
||||
@ -317,12 +318,14 @@ impl Deref for EvaluatedFilterCommandArgs {
|
||||
impl EvaluatedFilterCommandArgs {
|
||||
pub fn new(
|
||||
host: Arc<Mutex<dyn Host>>,
|
||||
ctrl_c: Arc<AtomicBool>,
|
||||
shell_manager: ShellManager,
|
||||
call_info: CallInfo,
|
||||
) -> EvaluatedFilterCommandArgs {
|
||||
EvaluatedFilterCommandArgs {
|
||||
args: EvaluatedCommandArgs {
|
||||
host,
|
||||
ctrl_c,
|
||||
shell_manager,
|
||||
call_info,
|
||||
},
|
||||
@ -334,6 +337,7 @@ impl EvaluatedFilterCommandArgs {
|
||||
#[get = "pub(crate)"]
|
||||
pub struct EvaluatedCommandArgs {
|
||||
pub host: Arc<Mutex<dyn Host>>,
|
||||
pub ctrl_c: Arc<AtomicBool>,
|
||||
pub shell_manager: ShellManager,
|
||||
pub call_info: CallInfo,
|
||||
}
|
||||
@ -376,7 +380,6 @@ impl EvaluatedCommandArgs {
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub enum CommandAction {
|
||||
ChangePath(String),
|
||||
AddAnchorLocation(Uuid, AnchorLocation),
|
||||
Exit,
|
||||
EnterShell(String),
|
||||
EnterValueShell(Tagged<Value>),
|
||||
@ -390,9 +393,6 @@ impl ToDebug for CommandAction {
|
||||
fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result {
|
||||
match self {
|
||||
CommandAction::ChangePath(s) => write!(f, "action:change-path={}", s),
|
||||
CommandAction::AddAnchorLocation(u, source) => {
|
||||
write!(f, "action:add-span-source={}@{:?}", u, source)
|
||||
}
|
||||
CommandAction::Exit => write!(f, "action:exit"),
|
||||
CommandAction::EnterShell(s) => write!(f, "action:enter-shell={}", s),
|
||||
CommandAction::EnterValueShell(t) => {
|
||||
@ -564,6 +564,7 @@ impl Command {
|
||||
) -> OutputStream {
|
||||
let raw_args = RawCommandArgs {
|
||||
host: args.host,
|
||||
ctrl_c: args.ctrl_c,
|
||||
shell_manager: args.shell_manager,
|
||||
call_info: args.call_info,
|
||||
};
|
||||
@ -633,6 +634,7 @@ impl WholeStreamCommand for FnFilterCommand {
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let CommandArgs {
|
||||
host,
|
||||
ctrl_c,
|
||||
shell_manager,
|
||||
call_info,
|
||||
input,
|
||||
@ -650,8 +652,12 @@ impl WholeStreamCommand for FnFilterCommand {
|
||||
Ok(args) => args,
|
||||
};
|
||||
|
||||
let args =
|
||||
EvaluatedFilterCommandArgs::new(host.clone(), shell_manager.clone(), call_info);
|
||||
let args = EvaluatedFilterCommandArgs::new(
|
||||
host.clone(),
|
||||
ctrl_c.clone(),
|
||||
shell_manager.clone(),
|
||||
call_info,
|
||||
);
|
||||
|
||||
match func(args) {
|
||||
Err(err) => return OutputStream::from(vec![Err(err)]).values,
|
||||
|
@ -58,7 +58,7 @@ pub fn config(
|
||||
}: ConfigArgs,
|
||||
RunnableContext { name, .. }: RunnableContext,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let name_span = name;
|
||||
let name_span = name.clone();
|
||||
|
||||
let configuration = if let Some(supplied) = load {
|
||||
Some(supplied.item().clone())
|
||||
|
@ -39,27 +39,27 @@ where
|
||||
{
|
||||
let mut indexmap = IndexMap::new();
|
||||
|
||||
indexmap.insert("year".to_string(), Value::int(dt.year()).tagged(tag));
|
||||
indexmap.insert("month".to_string(), Value::int(dt.month()).tagged(tag));
|
||||
indexmap.insert("day".to_string(), Value::int(dt.day()).tagged(tag));
|
||||
indexmap.insert("hour".to_string(), Value::int(dt.hour()).tagged(tag));
|
||||
indexmap.insert("minute".to_string(), Value::int(dt.minute()).tagged(tag));
|
||||
indexmap.insert("second".to_string(), Value::int(dt.second()).tagged(tag));
|
||||
indexmap.insert("year".to_string(), Value::int(dt.year()).tagged(&tag));
|
||||
indexmap.insert("month".to_string(), Value::int(dt.month()).tagged(&tag));
|
||||
indexmap.insert("day".to_string(), Value::int(dt.day()).tagged(&tag));
|
||||
indexmap.insert("hour".to_string(), Value::int(dt.hour()).tagged(&tag));
|
||||
indexmap.insert("minute".to_string(), Value::int(dt.minute()).tagged(&tag));
|
||||
indexmap.insert("second".to_string(), Value::int(dt.second()).tagged(&tag));
|
||||
|
||||
let tz = dt.offset();
|
||||
indexmap.insert(
|
||||
"timezone".to_string(),
|
||||
Value::string(format!("{}", tz)).tagged(tag),
|
||||
Value::string(format!("{}", tz)).tagged(&tag),
|
||||
);
|
||||
|
||||
Value::Row(Dictionary::from(indexmap)).tagged(tag)
|
||||
Value::Row(Dictionary::from(indexmap)).tagged(&tag)
|
||||
}
|
||||
|
||||
pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
|
||||
let mut date_out = VecDeque::new();
|
||||
let tag = args.call_info.name_tag;
|
||||
let tag = args.call_info.name_tag.clone();
|
||||
|
||||
let value = if args.has("utc") {
|
||||
let utc: DateTime<Utc> = Utc::now();
|
||||
|
@ -35,7 +35,7 @@ fn run(
|
||||
_registry: &CommandRegistry,
|
||||
_raw_args: &RawCommandArgs,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let name = call_info.name_tag;
|
||||
let name = call_info.name_tag.clone();
|
||||
|
||||
let mut output = String::new();
|
||||
|
||||
|
@ -67,7 +67,7 @@ impl PerItemCommand for Enter {
|
||||
|
||||
let full_path = std::path::PathBuf::from(cwd);
|
||||
|
||||
let (file_extension, contents, contents_tag, anchor_location) =
|
||||
let (file_extension, contents, contents_tag) =
|
||||
crate::commands::open::fetch(
|
||||
&full_path,
|
||||
&location_clone,
|
||||
@ -75,18 +75,9 @@ impl PerItemCommand for Enter {
|
||||
)
|
||||
.await.unwrap();
|
||||
|
||||
if contents_tag.anchor != uuid::Uuid::nil() {
|
||||
// If we have loaded something, track its source
|
||||
yield ReturnSuccess::action(CommandAction::AddAnchorLocation(
|
||||
contents_tag.anchor,
|
||||
anchor_location,
|
||||
));
|
||||
}
|
||||
|
||||
|
||||
match contents {
|
||||
Value::Primitive(Primitive::String(_)) => {
|
||||
let tagged_contents = contents.tagged(contents_tag);
|
||||
let tagged_contents = contents.tagged(&contents_tag);
|
||||
|
||||
if let Some(extension) = file_extension {
|
||||
let command_name = format!("from-{}", extension);
|
||||
@ -95,6 +86,7 @@ impl PerItemCommand for Enter {
|
||||
{
|
||||
let new_args = RawCommandArgs {
|
||||
host: raw_args.host,
|
||||
ctrl_c: raw_args.ctrl_c,
|
||||
shell_manager: raw_args.shell_manager,
|
||||
call_info: UnevaluatedCallInfo {
|
||||
args: crate::parser::hir::Call {
|
||||
@ -103,7 +95,6 @@ impl PerItemCommand for Enter {
|
||||
named: None,
|
||||
},
|
||||
source: raw_args.call_info.source,
|
||||
source_map: raw_args.call_info.source_map,
|
||||
name_tag: raw_args.call_info.name_tag,
|
||||
},
|
||||
};
|
||||
@ -123,7 +114,7 @@ impl PerItemCommand for Enter {
|
||||
yield Ok(ReturnSuccess::Action(CommandAction::EnterValueShell(
|
||||
Tagged {
|
||||
item,
|
||||
tag: contents_tag,
|
||||
tag: contents_tag.clone(),
|
||||
})));
|
||||
}
|
||||
x => yield x,
|
||||
|
@ -37,22 +37,22 @@ pub fn get_environment(tag: Tag) -> Result<Tagged<Value>, Box<dyn std::error::Er
|
||||
let mut indexmap = IndexMap::new();
|
||||
|
||||
let path = std::env::current_dir()?;
|
||||
indexmap.insert("cwd".to_string(), Value::path(path).tagged(tag));
|
||||
indexmap.insert("cwd".to_string(), Value::path(path).tagged(&tag));
|
||||
|
||||
if let Some(home) = dirs::home_dir() {
|
||||
indexmap.insert("home".to_string(), Value::path(home).tagged(tag));
|
||||
indexmap.insert("home".to_string(), Value::path(home).tagged(&tag));
|
||||
}
|
||||
|
||||
let config = config::default_path()?;
|
||||
indexmap.insert("config".to_string(), Value::path(config).tagged(tag));
|
||||
indexmap.insert("config".to_string(), Value::path(config).tagged(&tag));
|
||||
|
||||
let history = History::path();
|
||||
indexmap.insert("history".to_string(), Value::path(history).tagged(tag));
|
||||
indexmap.insert("history".to_string(), Value::path(history).tagged(&tag));
|
||||
|
||||
let temp = std::env::temp_dir();
|
||||
indexmap.insert("temp".to_string(), Value::path(temp).tagged(tag));
|
||||
indexmap.insert("temp".to_string(), Value::path(temp).tagged(&tag));
|
||||
|
||||
let mut dict = TaggedDictBuilder::new(tag);
|
||||
let mut dict = TaggedDictBuilder::new(&tag);
|
||||
for v in std::env::vars() {
|
||||
dict.insert(v.0, Value::string(v.1));
|
||||
}
|
||||
@ -60,14 +60,14 @@ pub fn get_environment(tag: Tag) -> Result<Tagged<Value>, Box<dyn std::error::Er
|
||||
indexmap.insert("vars".to_string(), dict.into_tagged_value());
|
||||
}
|
||||
|
||||
Ok(Value::Row(Dictionary::from(indexmap)).tagged(tag))
|
||||
Ok(Value::Row(Dictionary::from(indexmap)).tagged(&tag))
|
||||
}
|
||||
|
||||
pub fn env(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
|
||||
let mut env_out = VecDeque::new();
|
||||
let tag = args.call_info.name_tag;
|
||||
let tag = args.call_info.name_tag.clone();
|
||||
|
||||
let value = get_environment(tag)?;
|
||||
env_out.push_back(value);
|
||||
|
@ -10,7 +10,6 @@ use mime::Mime;
|
||||
use std::path::PathBuf;
|
||||
use std::str::FromStr;
|
||||
use surf::mime;
|
||||
use uuid::Uuid;
|
||||
pub struct Fetch;
|
||||
|
||||
impl PerItemCommand for Fetch {
|
||||
@ -48,7 +47,7 @@ fn run(
|
||||
ShellError::labeled_error(
|
||||
"No file or directory specified",
|
||||
"for command",
|
||||
call_info.name_tag,
|
||||
&call_info.name_tag,
|
||||
)
|
||||
})? {
|
||||
file => file,
|
||||
@ -68,7 +67,7 @@ fn run(
|
||||
yield Err(e);
|
||||
return;
|
||||
}
|
||||
let (file_extension, contents, contents_tag, anchor_location) = result.unwrap();
|
||||
let (file_extension, contents, contents_tag) = result.unwrap();
|
||||
|
||||
let file_extension = if has_raw {
|
||||
None
|
||||
@ -78,21 +77,14 @@ fn run(
|
||||
file_extension.or(path_str.split('.').last().map(String::from))
|
||||
};
|
||||
|
||||
if contents_tag.anchor != uuid::Uuid::nil() {
|
||||
// If we have loaded something, track its source
|
||||
yield ReturnSuccess::action(CommandAction::AddAnchorLocation(
|
||||
contents_tag.anchor,
|
||||
anchor_location,
|
||||
));
|
||||
}
|
||||
|
||||
let tagged_contents = contents.tagged(contents_tag);
|
||||
let tagged_contents = contents.tagged(&contents_tag);
|
||||
|
||||
if let Some(extension) = file_extension {
|
||||
let command_name = format!("from-{}", extension);
|
||||
if let Some(converter) = registry.get_command(&command_name) {
|
||||
let new_args = RawCommandArgs {
|
||||
host: raw_args.host,
|
||||
ctrl_c: raw_args.ctrl_c,
|
||||
shell_manager: raw_args.shell_manager,
|
||||
call_info: UnevaluatedCallInfo {
|
||||
args: crate::parser::hir::Call {
|
||||
@ -101,7 +93,6 @@ fn run(
|
||||
named: None
|
||||
},
|
||||
source: raw_args.call_info.source,
|
||||
source_map: raw_args.call_info.source_map,
|
||||
name_tag: raw_args.call_info.name_tag,
|
||||
}
|
||||
};
|
||||
@ -115,7 +106,7 @@ fn run(
|
||||
}
|
||||
}
|
||||
Ok(ReturnSuccess::Value(Tagged { item, .. })) => {
|
||||
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag }));
|
||||
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() }));
|
||||
}
|
||||
x => yield x,
|
||||
}
|
||||
@ -131,10 +122,7 @@ fn run(
|
||||
Ok(stream.to_output_stream())
|
||||
}
|
||||
|
||||
pub async fn fetch(
|
||||
location: &str,
|
||||
span: Span,
|
||||
) -> Result<(Option<String>, Value, Tag, AnchorLocation), ShellError> {
|
||||
pub async fn fetch(location: &str, span: Span) -> Result<(Option<String>, Value, Tag), ShellError> {
|
||||
if let Err(_) = url::Url::parse(location) {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Incomplete or incorrect url",
|
||||
@ -160,9 +148,8 @@ pub async fn fetch(
|
||||
})?),
|
||||
Tag {
|
||||
span,
|
||||
anchor: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
},
|
||||
AnchorLocation::Url(location.to_string()),
|
||||
)),
|
||||
(mime::APPLICATION, mime::JSON) => Ok((
|
||||
Some("json".to_string()),
|
||||
@ -175,9 +162,8 @@ pub async fn fetch(
|
||||
})?),
|
||||
Tag {
|
||||
span,
|
||||
anchor: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
},
|
||||
AnchorLocation::Url(location.to_string()),
|
||||
)),
|
||||
(mime::APPLICATION, mime::OCTET_STREAM) => {
|
||||
let buf: Vec<u8> = r.body_bytes().await.map_err(|_| {
|
||||
@ -192,9 +178,8 @@ pub async fn fetch(
|
||||
Value::binary(buf),
|
||||
Tag {
|
||||
span,
|
||||
anchor: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
},
|
||||
AnchorLocation::Url(location.to_string()),
|
||||
))
|
||||
}
|
||||
(mime::IMAGE, mime::SVG) => Ok((
|
||||
@ -208,9 +193,8 @@ pub async fn fetch(
|
||||
})?),
|
||||
Tag {
|
||||
span,
|
||||
anchor: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
},
|
||||
AnchorLocation::Url(location.to_string()),
|
||||
)),
|
||||
(mime::IMAGE, image_ty) => {
|
||||
let buf: Vec<u8> = r.body_bytes().await.map_err(|_| {
|
||||
@ -225,9 +209,8 @@ pub async fn fetch(
|
||||
Value::binary(buf),
|
||||
Tag {
|
||||
span,
|
||||
anchor: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
},
|
||||
AnchorLocation::Url(location.to_string()),
|
||||
))
|
||||
}
|
||||
(mime::TEXT, mime::HTML) => Ok((
|
||||
@ -241,9 +224,8 @@ pub async fn fetch(
|
||||
})?),
|
||||
Tag {
|
||||
span,
|
||||
anchor: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
},
|
||||
AnchorLocation::Url(location.to_string()),
|
||||
)),
|
||||
(mime::TEXT, mime::PLAIN) => {
|
||||
let path_extension = url::Url::parse(location)
|
||||
@ -268,9 +250,8 @@ pub async fn fetch(
|
||||
})?),
|
||||
Tag {
|
||||
span,
|
||||
anchor: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
},
|
||||
AnchorLocation::Url(location.to_string()),
|
||||
))
|
||||
}
|
||||
(ty, sub_ty) => Ok((
|
||||
@ -278,9 +259,8 @@ pub async fn fetch(
|
||||
Value::string(format!("Not yet supported MIME type: {} {}", ty, sub_ty)),
|
||||
Tag {
|
||||
span,
|
||||
anchor: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
},
|
||||
AnchorLocation::Url(location.to_string()),
|
||||
)),
|
||||
}
|
||||
}
|
||||
@ -289,9 +269,8 @@ pub async fn fetch(
|
||||
Value::string(format!("No content type found")),
|
||||
Tag {
|
||||
span,
|
||||
anchor: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
},
|
||||
AnchorLocation::Url(location.to_string()),
|
||||
)),
|
||||
},
|
||||
Err(_) => {
|
||||
|
@ -33,7 +33,7 @@ fn bson_array(input: &Vec<Bson>, tag: Tag) -> Result<Vec<Tagged<Value>>, ShellEr
|
||||
let mut out = vec![];
|
||||
|
||||
for value in input {
|
||||
out.push(convert_bson_value_to_nu_value(value, tag)?);
|
||||
out.push(convert_bson_value_to_nu_value(value, &tag)?);
|
||||
}
|
||||
|
||||
Ok(out)
|
||||
@ -46,100 +46,100 @@ fn convert_bson_value_to_nu_value(
|
||||
let tag = tag.into();
|
||||
|
||||
Ok(match v {
|
||||
Bson::FloatingPoint(n) => Value::Primitive(Primitive::from(*n)).tagged(tag),
|
||||
Bson::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag),
|
||||
Bson::Array(a) => Value::Table(bson_array(a, tag)?).tagged(tag),
|
||||
Bson::FloatingPoint(n) => Value::Primitive(Primitive::from(*n)).tagged(&tag),
|
||||
Bson::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(&tag),
|
||||
Bson::Array(a) => Value::Table(bson_array(a, tag.clone())?).tagged(&tag),
|
||||
Bson::Document(doc) => {
|
||||
let mut collected = TaggedDictBuilder::new(tag);
|
||||
let mut collected = TaggedDictBuilder::new(tag.clone());
|
||||
for (k, v) in doc.iter() {
|
||||
collected.insert_tagged(k.clone(), convert_bson_value_to_nu_value(v, tag)?);
|
||||
collected.insert_tagged(k.clone(), convert_bson_value_to_nu_value(v, &tag)?);
|
||||
}
|
||||
|
||||
collected.into_tagged_value()
|
||||
}
|
||||
Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(tag),
|
||||
Bson::Null => Value::Primitive(Primitive::Nothing).tagged(tag),
|
||||
Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(&tag),
|
||||
Bson::Null => Value::Primitive(Primitive::Nothing).tagged(&tag),
|
||||
Bson::RegExp(r, opts) => {
|
||||
let mut collected = TaggedDictBuilder::new(tag);
|
||||
let mut collected = TaggedDictBuilder::new(tag.clone());
|
||||
collected.insert_tagged(
|
||||
"$regex".to_string(),
|
||||
Value::Primitive(Primitive::String(String::from(r))).tagged(tag),
|
||||
Value::Primitive(Primitive::String(String::from(r))).tagged(&tag),
|
||||
);
|
||||
collected.insert_tagged(
|
||||
"$options".to_string(),
|
||||
Value::Primitive(Primitive::String(String::from(opts))).tagged(tag),
|
||||
Value::Primitive(Primitive::String(String::from(opts))).tagged(&tag),
|
||||
);
|
||||
collected.into_tagged_value()
|
||||
}
|
||||
Bson::I32(n) => Value::number(n).tagged(tag),
|
||||
Bson::I64(n) => Value::number(n).tagged(tag),
|
||||
Bson::I32(n) => Value::number(n).tagged(&tag),
|
||||
Bson::I64(n) => Value::number(n).tagged(&tag),
|
||||
Bson::Decimal128(n) => {
|
||||
// TODO: this really isn't great, and we should update this to do a higher
|
||||
// fidelity translation
|
||||
let decimal = BigDecimal::from_str(&format!("{}", n)).map_err(|_| {
|
||||
ShellError::range_error(
|
||||
ExpectedRange::BigDecimal,
|
||||
&n.tagged(tag),
|
||||
&n.tagged(&tag),
|
||||
format!("converting BSON Decimal128 to BigDecimal"),
|
||||
)
|
||||
})?;
|
||||
Value::Primitive(Primitive::Decimal(decimal)).tagged(tag)
|
||||
Value::Primitive(Primitive::Decimal(decimal)).tagged(&tag)
|
||||
}
|
||||
Bson::JavaScriptCode(js) => {
|
||||
let mut collected = TaggedDictBuilder::new(tag);
|
||||
let mut collected = TaggedDictBuilder::new(tag.clone());
|
||||
collected.insert_tagged(
|
||||
"$javascript".to_string(),
|
||||
Value::Primitive(Primitive::String(String::from(js))).tagged(tag),
|
||||
Value::Primitive(Primitive::String(String::from(js))).tagged(&tag),
|
||||
);
|
||||
collected.into_tagged_value()
|
||||
}
|
||||
Bson::JavaScriptCodeWithScope(js, doc) => {
|
||||
let mut collected = TaggedDictBuilder::new(tag);
|
||||
let mut collected = TaggedDictBuilder::new(tag.clone());
|
||||
collected.insert_tagged(
|
||||
"$javascript".to_string(),
|
||||
Value::Primitive(Primitive::String(String::from(js))).tagged(tag),
|
||||
Value::Primitive(Primitive::String(String::from(js))).tagged(&tag),
|
||||
);
|
||||
collected.insert_tagged(
|
||||
"$scope".to_string(),
|
||||
convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag)?,
|
||||
convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag.clone())?,
|
||||
);
|
||||
collected.into_tagged_value()
|
||||
}
|
||||
Bson::TimeStamp(ts) => {
|
||||
let mut collected = TaggedDictBuilder::new(tag);
|
||||
collected.insert_tagged("$timestamp".to_string(), Value::number(ts).tagged(tag));
|
||||
let mut collected = TaggedDictBuilder::new(tag.clone());
|
||||
collected.insert_tagged("$timestamp".to_string(), Value::number(ts).tagged(&tag));
|
||||
collected.into_tagged_value()
|
||||
}
|
||||
Bson::Binary(bst, bytes) => {
|
||||
let mut collected = TaggedDictBuilder::new(tag);
|
||||
let mut collected = TaggedDictBuilder::new(tag.clone());
|
||||
collected.insert_tagged(
|
||||
"$binary_subtype".to_string(),
|
||||
match bst {
|
||||
BinarySubtype::UserDefined(u) => Value::number(u),
|
||||
_ => Value::Primitive(Primitive::String(binary_subtype_to_string(*bst))),
|
||||
}
|
||||
.tagged(tag),
|
||||
.tagged(&tag),
|
||||
);
|
||||
collected.insert_tagged(
|
||||
"$binary".to_string(),
|
||||
Value::Primitive(Primitive::Binary(bytes.to_owned())).tagged(tag),
|
||||
Value::Primitive(Primitive::Binary(bytes.to_owned())).tagged(&tag),
|
||||
);
|
||||
collected.into_tagged_value()
|
||||
}
|
||||
Bson::ObjectId(obj_id) => {
|
||||
let mut collected = TaggedDictBuilder::new(tag);
|
||||
let mut collected = TaggedDictBuilder::new(tag.clone());
|
||||
collected.insert_tagged(
|
||||
"$object_id".to_string(),
|
||||
Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(tag),
|
||||
Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(&tag),
|
||||
);
|
||||
collected.into_tagged_value()
|
||||
}
|
||||
Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(tag),
|
||||
Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(&tag),
|
||||
Bson::Symbol(s) => {
|
||||
let mut collected = TaggedDictBuilder::new(tag);
|
||||
let mut collected = TaggedDictBuilder::new(tag.clone());
|
||||
collected.insert_tagged(
|
||||
"$symbol".to_string(),
|
||||
Value::Primitive(Primitive::String(String::from(s))).tagged(tag),
|
||||
Value::Primitive(Primitive::String(String::from(s))).tagged(&tag),
|
||||
);
|
||||
collected.into_tagged_value()
|
||||
}
|
||||
@ -208,13 +208,13 @@ fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
|
||||
let value_tag = value.tag();
|
||||
match value.item {
|
||||
Value::Primitive(Primitive::Binary(vb)) =>
|
||||
match from_bson_bytes_to_value(vb, tag) {
|
||||
match from_bson_bytes_to_value(vb, tag.clone()) {
|
||||
Ok(x) => yield ReturnSuccess::value(x),
|
||||
Err(_) => {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Could not parse as BSON",
|
||||
"input cannot be parsed as BSON",
|
||||
tag,
|
||||
tag.clone(),
|
||||
"value originates from here",
|
||||
value_tag,
|
||||
))
|
||||
@ -223,7 +223,7 @@ fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
tag,
|
||||
tag.clone(),
|
||||
"value originates from here",
|
||||
value_tag,
|
||||
)),
|
||||
|
@ -62,12 +62,12 @@ pub fn from_csv_string_to_value(
|
||||
if let Some(row_values) = iter.next() {
|
||||
let row_values = row_values?;
|
||||
|
||||
let mut row = TaggedDictBuilder::new(tag);
|
||||
let mut row = TaggedDictBuilder::new(tag.clone());
|
||||
|
||||
for (idx, entry) in row_values.iter().enumerate() {
|
||||
row.insert_tagged(
|
||||
fields.get(idx).unwrap(),
|
||||
Value::Primitive(Primitive::String(String::from(entry))).tagged(tag),
|
||||
Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag),
|
||||
);
|
||||
}
|
||||
|
||||
@ -77,7 +77,7 @@ pub fn from_csv_string_to_value(
|
||||
}
|
||||
}
|
||||
|
||||
Ok(Tagged::from_item(Value::Table(rows), tag))
|
||||
Ok(Value::Table(rows).tagged(&tag))
|
||||
}
|
||||
|
||||
fn from_csv(
|
||||
@ -96,7 +96,7 @@ fn from_csv(
|
||||
|
||||
for value in values {
|
||||
let value_tag = value.tag();
|
||||
latest_tag = Some(value_tag);
|
||||
latest_tag = Some(value_tag.clone());
|
||||
match value.item {
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
concat_string.push_str(&s);
|
||||
@ -105,15 +105,15 @@ fn from_csv(
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
name_tag,
|
||||
name_tag.clone(),
|
||||
"value originates from here",
|
||||
value_tag,
|
||||
value_tag.clone(),
|
||||
)),
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
match from_csv_string_to_value(concat_string, skip_headers, name_tag) {
|
||||
match from_csv_string_to_value(concat_string, skip_headers, name_tag.clone()) {
|
||||
Ok(x) => match x {
|
||||
Tagged { item: Value::Table(list), .. } => {
|
||||
for l in list {
|
||||
@ -126,9 +126,9 @@ fn from_csv(
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Could not parse as CSV",
|
||||
"input cannot be parsed as CSV",
|
||||
name_tag,
|
||||
name_tag.clone(),
|
||||
"value originates from here",
|
||||
last_tag,
|
||||
last_tag.clone(),
|
||||
))
|
||||
} ,
|
||||
}
|
||||
|
@ -45,10 +45,13 @@ fn convert_ini_top_to_nu_value(
|
||||
tag: impl Into<Tag>,
|
||||
) -> Tagged<Value> {
|
||||
let tag = tag.into();
|
||||
let mut top_level = TaggedDictBuilder::new(tag);
|
||||
let mut top_level = TaggedDictBuilder::new(tag.clone());
|
||||
|
||||
for (key, value) in v.iter() {
|
||||
top_level.insert_tagged(key.clone(), convert_ini_second_to_nu_value(value, tag));
|
||||
top_level.insert_tagged(
|
||||
key.clone(),
|
||||
convert_ini_second_to_nu_value(value, tag.clone()),
|
||||
);
|
||||
}
|
||||
|
||||
top_level.into_tagged_value()
|
||||
@ -75,7 +78,7 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
|
||||
|
||||
for value in values {
|
||||
let value_tag = value.tag();
|
||||
latest_tag = Some(value_tag);
|
||||
latest_tag = Some(value_tag.clone());
|
||||
match value.item {
|
||||
Value::Primitive(Primitive::String(s)) => {
|
||||
concat_string.push_str(&s);
|
||||
@ -84,15 +87,15 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
tag,
|
||||
&tag,
|
||||
"value originates from here",
|
||||
value_tag,
|
||||
&value_tag,
|
||||
)),
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
match from_ini_string_to_value(concat_string, tag) {
|
||||
match from_ini_string_to_value(concat_string, tag.clone()) {
|
||||
Ok(x) => match x {
|
||||
Tagged { item: Value::Table(list), .. } => {
|
||||
for l in list {
|
||||
@ -105,7 +108,7 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Could not parse as INI",
|
||||
"input cannot be parsed as INI",
|
||||
tag,
|
||||
&tag,
|
||||
"value originates from here",
|
||||
last_tag,
|
||||
))
|
||||
|
@ -35,24 +35,24 @@ fn convert_json_value_to_nu_value(v: &serde_hjson::Value, tag: impl Into<Tag>) -
let tag = tag.into();

match v {
serde_hjson::Value::Null => Value::Primitive(Primitive::Nothing).tagged(tag),
serde_hjson::Value::Bool(b) => Value::boolean(*b).tagged(tag),
serde_hjson::Value::F64(n) => Value::number(n).tagged(tag),
serde_hjson::Value::U64(n) => Value::number(n).tagged(tag),
serde_hjson::Value::I64(n) => Value::number(n).tagged(tag),
serde_hjson::Value::Null => Value::Primitive(Primitive::Nothing).tagged(&tag),
serde_hjson::Value::Bool(b) => Value::boolean(*b).tagged(&tag),
serde_hjson::Value::F64(n) => Value::number(n).tagged(&tag),
serde_hjson::Value::U64(n) => Value::number(n).tagged(&tag),
serde_hjson::Value::I64(n) => Value::number(n).tagged(&tag),
serde_hjson::Value::String(s) => {
Value::Primitive(Primitive::String(String::from(s))).tagged(tag)
Value::Primitive(Primitive::String(String::from(s))).tagged(&tag)
}
serde_hjson::Value::Array(a) => Value::Table(
a.iter()
.map(|x| convert_json_value_to_nu_value(x, tag))
.map(|x| convert_json_value_to_nu_value(x, &tag))
.collect(),
)
.tagged(tag),
serde_hjson::Value::Object(o) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(&tag);
for (k, v) in o.iter() {
collected.insert_tagged(k.clone(), convert_json_value_to_nu_value(v, tag));
collected.insert_tagged(k.clone(), convert_json_value_to_nu_value(v, &tag));
}

collected.into_tagged_value()
@ -82,7 +82,7 @@ fn from_json(

for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@ -91,9 +91,9 @@ fn from_json(
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
name_tag,
&name_tag,
"value originates from here",
value_tag,
&value_tag,
)),

}
@ -106,15 +106,15 @@ fn from_json(
continue;
}

match from_json_string_to_value(json_str.to_string(), name_tag) {
match from_json_string_to_value(json_str.to_string(), &name_tag) {
Ok(x) =>
yield ReturnSuccess::value(x),
Err(_) => {
if let Some(last_tag) = latest_tag {
if let Some(ref last_tag) = latest_tag {
yield Err(ShellError::labeled_error_with_secondary(
"Could nnot parse as JSON",
"input cannot be parsed as JSON",
name_tag,
&name_tag,
"value originates from here",
last_tag))
}
@ -122,7 +122,7 @@ fn from_json(
}
}
} else {
match from_json_string_to_value(concat_string, name_tag) {
match from_json_string_to_value(concat_string, name_tag.clone()) {
Ok(x) =>
match x {
Tagged { item: Value::Table(list), .. } => {

@ -138,7 +138,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
let value_tag = value.tag();
match value.item {
Value::Primitive(Primitive::Binary(vb)) =>
match from_sqlite_bytes_to_value(vb, tag) {
match from_sqlite_bytes_to_value(vb, tag.clone()) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
@ -151,7 +151,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as SQLite",
"input cannot be parsed as SQLite",
tag,
&tag,
"value originates from here",
value_tag,
))
@ -160,7 +160,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
value_tag,
)),

@ -36,7 +36,7 @@ pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into<Tag>) -> T
toml::Value::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag),
toml::Value::Array(a) => Value::Table(
a.iter()
.map(|x| convert_toml_value_to_nu_value(x, tag))
.map(|x| convert_toml_value_to_nu_value(x, &tag))
.collect(),
)
.tagged(tag),
@ -44,10 +44,10 @@ pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into<Tag>) -> T
Value::Primitive(Primitive::String(dt.to_string())).tagged(tag)
}
toml::Value::Table(t) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(&tag);

for (k, v) in t.iter() {
collected.insert_tagged(k.clone(), convert_toml_value_to_nu_value(v, tag));
collected.insert_tagged(k.clone(), convert_toml_value_to_nu_value(v, &tag));
}

collected.into_tagged_value()
@ -79,7 +79,7 @@ pub fn from_toml(

for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@ -88,15 +88,15 @@ pub fn from_toml(
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
value_tag,
&value_tag,
)),

}
}

match from_toml_string_to_value(concat_string, tag) {
match from_toml_string_to_value(concat_string, tag.clone()) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
@ -109,7 +109,7 @@ pub fn from_toml(
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as TOML",
"input cannot be parsed as TOML",
tag,
&tag,
"value originates from here",
last_tag,
))

@ -63,12 +63,12 @@ pub fn from_tsv_string_to_value(
if let Some(row_values) = iter.next() {
let row_values = row_values?;

let mut row = TaggedDictBuilder::new(tag);
let mut row = TaggedDictBuilder::new(&tag);

for (idx, entry) in row_values.iter().enumerate() {
row.insert_tagged(
fields.get(idx).unwrap(),
Value::Primitive(Primitive::String(String::from(entry))).tagged(tag),
Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag),
);
}

@ -78,7 +78,7 @@ pub fn from_tsv_string_to_value(
}
}

Ok(Tagged::from_item(Value::Table(rows), tag))
Ok(Value::Table(rows).tagged(&tag))
}

fn from_tsv(
@ -97,7 +97,7 @@ fn from_tsv(

for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@ -106,15 +106,15 @@ fn from_tsv(
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
name_tag,
&name_tag,
"value originates from here",
value_tag,
&value_tag,
)),

}
}

match from_tsv_string_to_value(concat_string, skip_headers, name_tag) {
match from_tsv_string_to_value(concat_string, skip_headers, name_tag.clone()) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
@ -127,9 +127,9 @@ fn from_tsv(
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as TSV",
"input cannot be parsed as TSV",
name_tag,
&name_tag,
"value originates from here",
last_tag,
&last_tag,
))
} ,
}

@ -39,7 +39,7 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea

for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@ -47,9 +47,9 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
value_tag,
&value_tag,
)),

}

@ -34,7 +34,7 @@ fn from_node_to_value<'a, 'd>(n: &roxmltree::Node<'a, 'd>, tag: impl Into<Tag>)

let mut children_values = vec![];
for c in n.children() {
children_values.push(from_node_to_value(&c, tag));
children_values.push(from_node_to_value(&c, &tag));
}

let children_values: Vec<Tagged<Value>> = children_values
@ -94,7 +94,7 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea

for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@ -103,15 +103,15 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
value_tag,
&value_tag,
)),

}
}

match from_xml_string_to_value(concat_string, tag) {
match from_xml_string_to_value(concat_string, tag.clone()) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
@ -124,9 +124,9 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as XML",
"input cannot be parsed as XML",
tag,
&tag,
"value originates from here",
last_tag,
&last_tag,
))
} ,
}

@ -64,17 +64,17 @@ fn convert_yaml_value_to_nu_value(v: &serde_yaml::Value, tag: impl Into<Tag>) ->
serde_yaml::Value::String(s) => Value::string(s).tagged(tag),
serde_yaml::Value::Sequence(a) => Value::Table(
a.iter()
.map(|x| convert_yaml_value_to_nu_value(x, tag))
.map(|x| convert_yaml_value_to_nu_value(x, &tag))
.collect(),
)
.tagged(tag),
serde_yaml::Value::Mapping(t) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(&tag);

for (k, v) in t.iter() {
match k {
serde_yaml::Value::String(k) => {
collected.insert_tagged(k.clone(), convert_yaml_value_to_nu_value(v, tag));
collected.insert_tagged(k.clone(), convert_yaml_value_to_nu_value(v, &tag));
}
_ => unimplemented!("Unknown key type"),
}
@ -108,7 +108,7 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre

for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@ -117,15 +117,15 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
value_tag,
&value_tag,
)),

}
}

match from_yaml_string_to_value(concat_string, tag) {
match from_yaml_string_to_value(concat_string, tag.clone()) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
@ -138,9 +138,9 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as YAML",
"input cannot be parsed as YAML",
tag,
&tag,
"value originates from here",
last_tag,
&last_tag,
))
} ,
}

@ -82,7 +82,7 @@ pub fn get_column_path(
item: Value::Primitive(Primitive::Path(_)),
..
} => Ok(obj.clone()),
_ => Ok(Value::nothing().tagged(obj.tag)),
_ => Ok(Value::nothing().tagged(&obj.tag)),
},
}
}

@ -26,7 +26,7 @@ impl PerItemCommand for Help {
_raw_args: &RawCommandArgs,
_input: Tagged<Value>,
) -> Result<OutputStream, ShellError> {
let tag = call_info.name_tag;
let tag = &call_info.name_tag;

match call_info.args.nth(0) {
Some(Tagged {

@ -58,7 +58,7 @@ fn lines(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
result.push_back(Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
v.tag(),
)));

@ -34,5 +34,5 @@ impl WholeStreamCommand for LS {
}

fn ls(LsArgs { path }: LsArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
context.shell_manager.ls(path, context.name)
context.shell_manager.ls(path, &context)
}

@ -7,7 +7,6 @@ use crate::parser::hir::SyntaxShape;
|
||||
use crate::parser::registry::Signature;
|
||||
use crate::prelude::*;
|
||||
use std::path::{Path, PathBuf};
|
||||
use uuid::Uuid;
|
||||
pub struct Open;
|
||||
|
||||
impl PerItemCommand for Open {
|
||||
@ -49,7 +48,7 @@ fn run(
|
||||
ShellError::labeled_error(
|
||||
"No file or directory specified",
|
||||
"for command",
|
||||
call_info.name_tag,
|
||||
&call_info.name_tag,
|
||||
)
|
||||
})? {
|
||||
file => file,
|
||||
@ -69,7 +68,7 @@ fn run(
|
||||
yield Err(e);
|
||||
return;
|
||||
}
|
||||
let (file_extension, contents, contents_tag, anchor_location) = result.unwrap();
|
||||
let (file_extension, contents, contents_tag) = result.unwrap();
|
||||
|
||||
let file_extension = if has_raw {
|
||||
None
|
||||
@ -79,21 +78,14 @@ fn run(
|
||||
file_extension.or(path_str.split('.').last().map(String::from))
|
||||
};
|
||||
|
||||
if contents_tag.anchor != uuid::Uuid::nil() {
|
||||
// If we have loaded something, track its source
|
||||
yield ReturnSuccess::action(CommandAction::AddAnchorLocation(
|
||||
contents_tag.anchor,
|
||||
anchor_location,
|
||||
));
|
||||
}
|
||||
|
||||
let tagged_contents = contents.tagged(contents_tag);
|
||||
let tagged_contents = contents.tagged(&contents_tag);
|
||||
|
||||
if let Some(extension) = file_extension {
|
||||
let command_name = format!("from-{}", extension);
|
||||
if let Some(converter) = registry.get_command(&command_name) {
|
||||
let new_args = RawCommandArgs {
|
||||
host: raw_args.host,
|
||||
ctrl_c: raw_args.ctrl_c,
|
||||
shell_manager: raw_args.shell_manager,
|
||||
call_info: UnevaluatedCallInfo {
|
||||
args: crate::parser::hir::Call {
|
||||
@ -102,7 +94,6 @@ fn run(
|
||||
named: None
|
||||
},
|
||||
source: raw_args.call_info.source,
|
||||
source_map: raw_args.call_info.source_map,
|
||||
name_tag: raw_args.call_info.name_tag,
|
||||
}
|
||||
};
|
||||
@ -116,7 +107,7 @@ fn run(
|
||||
}
|
||||
}
|
||||
Ok(ReturnSuccess::Value(Tagged { item, .. })) => {
|
||||
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag }));
|
||||
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() }));
|
||||
}
|
||||
x => yield x,
|
||||
}
|
||||
@ -136,7 +127,7 @@ pub async fn fetch(
|
||||
cwd: &PathBuf,
|
||||
location: &str,
|
||||
span: Span,
|
||||
) -> Result<(Option<String>, Value, Tag, AnchorLocation), ShellError> {
|
||||
) -> Result<(Option<String>, Value, Tag), ShellError> {
|
||||
let mut cwd = cwd.clone();
|
||||
|
||||
cwd.push(Path::new(location));
|
||||
@ -149,9 +140,8 @@ pub async fn fetch(
|
||||
Value::string(s),
|
||||
Tag {
|
||||
span,
|
||||
anchor: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::File(cwd.to_string_lossy().to_string())),
|
||||
},
|
||||
AnchorLocation::File(cwd.to_string_lossy().to_string()),
|
||||
)),
|
||||
Err(_) => {
|
||||
//Non utf8 data.
|
||||
@ -168,18 +158,20 @@ pub async fn fetch(
|
||||
Value::string(s),
|
||||
Tag {
|
||||
span,
|
||||
anchor: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::File(
|
||||
cwd.to_string_lossy().to_string(),
|
||||
)),
|
||||
},
|
||||
AnchorLocation::File(cwd.to_string_lossy().to_string()),
|
||||
)),
|
||||
Err(_) => Ok((
|
||||
None,
|
||||
Value::binary(bytes),
|
||||
Tag {
|
||||
span,
|
||||
anchor: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::File(
|
||||
cwd.to_string_lossy().to_string(),
|
||||
)),
|
||||
},
|
||||
AnchorLocation::File(cwd.to_string_lossy().to_string()),
|
||||
)),
|
||||
}
|
||||
} else {
|
||||
@ -188,9 +180,10 @@ pub async fn fetch(
|
||||
Value::binary(bytes),
|
||||
Tag {
|
||||
span,
|
||||
anchor: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::File(
|
||||
cwd.to_string_lossy().to_string(),
|
||||
)),
|
||||
},
|
||||
AnchorLocation::File(cwd.to_string_lossy().to_string()),
|
||||
))
|
||||
}
|
||||
}
|
||||
@ -206,18 +199,20 @@ pub async fn fetch(
|
||||
Value::string(s),
|
||||
Tag {
|
||||
span,
|
||||
anchor: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::File(
|
||||
cwd.to_string_lossy().to_string(),
|
||||
)),
|
||||
},
|
||||
AnchorLocation::File(cwd.to_string_lossy().to_string()),
|
||||
)),
|
||||
Err(_) => Ok((
|
||||
None,
|
||||
Value::binary(bytes),
|
||||
Tag {
|
||||
span,
|
||||
anchor: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::File(
|
||||
cwd.to_string_lossy().to_string(),
|
||||
)),
|
||||
},
|
||||
AnchorLocation::File(cwd.to_string_lossy().to_string()),
|
||||
)),
|
||||
}
|
||||
} else {
|
||||
@ -226,9 +221,10 @@ pub async fn fetch(
|
||||
Value::binary(bytes),
|
||||
Tag {
|
||||
span,
|
||||
anchor: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::File(
|
||||
cwd.to_string_lossy().to_string(),
|
||||
)),
|
||||
},
|
||||
AnchorLocation::File(cwd.to_string_lossy().to_string()),
|
||||
))
|
||||
}
|
||||
}
|
||||
@ -237,9 +233,10 @@ pub async fn fetch(
|
||||
Value::binary(bytes),
|
||||
Tag {
|
||||
span,
|
||||
anchor: Uuid::new_v4(),
|
||||
anchor: Some(AnchorLocation::File(
|
||||
cwd.to_string_lossy().to_string(),
|
||||
)),
|
||||
},
|
||||
AnchorLocation::File(cwd.to_string_lossy().to_string()),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
@ -104,7 +104,7 @@ pub fn pivot(args: PivotArgs, context: RunnableContext) -> Result<OutputStream,
|
||||
|
||||
for desc in descs {
|
||||
let mut column_num: usize = 0;
|
||||
let mut dict = TaggedDictBuilder::new(context.name);
|
||||
let mut dict = TaggedDictBuilder::new(&context.name);
|
||||
|
||||
if !args.ignore_titles && !args.header_row {
|
||||
dict.insert(headers[column_num].clone(), Value::string(desc.clone()));
|
||||
|
@ -54,14 +54,17 @@ fn run(
|
||||
registry: &CommandRegistry,
|
||||
raw_args: &RawCommandArgs,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let name_tag = call_info.name_tag.clone();
|
||||
let call_info = call_info.clone();
|
||||
let path = match call_info.args.nth(0).ok_or_else(|| {
|
||||
ShellError::labeled_error("No url specified", "for command", call_info.name_tag)
|
||||
let path =
|
||||
match call_info.args.nth(0).ok_or_else(|| {
|
||||
ShellError::labeled_error("No url specified", "for command", &name_tag)
|
||||
})? {
|
||||
file => file.clone(),
|
||||
};
|
||||
let body = match call_info.args.nth(1).ok_or_else(|| {
|
||||
ShellError::labeled_error("No body specified", "for command", call_info.name_tag)
|
||||
let body =
|
||||
match call_info.args.nth(1).ok_or_else(|| {
|
||||
ShellError::labeled_error("No body specified", "for command", &name_tag)
|
||||
})? {
|
||||
file => file.clone(),
|
||||
};
|
||||
@ -79,7 +82,7 @@ fn run(
|
||||
let headers = get_headers(&call_info)?;
|
||||
|
||||
let stream = async_stream! {
|
||||
let (file_extension, contents, contents_tag, anchor_location) =
|
||||
let (file_extension, contents, contents_tag) =
|
||||
post(&path_str, &body, user, password, &headers, path_span, ®istry, &raw_args).await.unwrap();
|
||||
|
||||
let file_extension = if has_raw {
|
||||
@ -90,21 +93,14 @@ fn run(
|
||||
file_extension.or(path_str.split('.').last().map(String::from))
|
||||
};
|
||||
|
||||
if contents_tag.anchor != uuid::Uuid::nil() {
|
||||
// If we have loaded something, track its source
|
||||
yield ReturnSuccess::action(CommandAction::AddAnchorLocation(
|
||||
contents_tag.anchor,
|
||||
anchor_location,
|
||||
));
|
||||
}
|
||||
|
||||
let tagged_contents = contents.tagged(contents_tag);
|
||||
let tagged_contents = contents.tagged(&contents_tag);
|
||||
|
||||
if let Some(extension) = file_extension {
|
||||
let command_name = format!("from-{}", extension);
|
||||
if let Some(converter) = registry.get_command(&command_name) {
|
||||
let new_args = RawCommandArgs {
|
||||
host: raw_args.host,
|
||||
ctrl_c: raw_args.ctrl_c,
|
||||
shell_manager: raw_args.shell_manager,
|
||||
call_info: UnevaluatedCallInfo {
|
||||
args: crate::parser::hir::Call {
|
||||
@ -113,7 +109,6 @@ fn run(
|
||||
named: None
|
||||
},
|
||||
source: raw_args.call_info.source,
|
||||
source_map: raw_args.call_info.source_map,
|
||||
name_tag: raw_args.call_info.name_tag,
|
||||
}
|
||||
};
|
||||
@ -127,7 +122,7 @@ fn run(
|
||||
}
|
||||
}
|
||||
Ok(ReturnSuccess::Value(Tagged { item, .. })) => {
|
||||
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag }));
|
||||
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() }));
|
||||
}
|
||||
x => yield x,
|
||||
}
|
||||
@ -207,7 +202,7 @@ pub async fn post(
|
||||
tag: Tag,
|
||||
registry: &CommandRegistry,
|
||||
raw_args: &RawCommandArgs,
|
||||
) -> Result<(Option<String>, Value, Tag, AnchorLocation), ShellError> {
|
||||
) -> Result<(Option<String>, Value, Tag), ShellError> {
|
||||
let registry = registry.clone();
|
||||
let raw_args = raw_args.clone();
|
||||
if location.starts_with("http:") || location.starts_with("https:") {
|
||||
@ -248,6 +243,7 @@ pub async fn post(
|
||||
if let Some(converter) = registry.get_command("to-json") {
|
||||
let new_args = RawCommandArgs {
|
||||
host: raw_args.host,
|
||||
ctrl_c: raw_args.ctrl_c,
|
||||
shell_manager: raw_args.shell_manager,
|
||||
call_info: UnevaluatedCallInfo {
|
||||
args: crate::parser::hir::Call {
|
||||
@ -256,7 +252,6 @@ pub async fn post(
|
||||
named: None,
|
||||
},
|
||||
source: raw_args.call_info.source,
|
||||
source_map: raw_args.call_info.source_map,
|
||||
name_tag: raw_args.call_info.name_tag,
|
||||
},
|
||||
};
|
||||
@ -280,7 +275,7 @@ pub async fn post(
|
||||
return Err(ShellError::labeled_error(
|
||||
"Save could not successfully save",
|
||||
"unexpected data during save",
|
||||
*tag,
|
||||
tag,
|
||||
));
|
||||
}
|
||||
}
|
||||
@ -296,7 +291,7 @@ pub async fn post(
|
||||
return Err(ShellError::labeled_error(
|
||||
"Could not automatically convert table",
|
||||
"needs manual conversion",
|
||||
*tag,
|
||||
tag,
|
||||
));
|
||||
}
|
||||
}
|
||||
@ -312,11 +307,13 @@ pub async fn post(
|
||||
ShellError::labeled_error(
|
||||
"Could not load text from remote url",
|
||||
"could not load",
|
||||
tag,
|
||||
&tag,
|
||||
)
|
||||
})?),
|
||||
tag,
|
||||
AnchorLocation::Url(location.to_string()),
|
||||
Tag {
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
span: tag.span,
|
||||
},
|
||||
)),
|
||||
(mime::APPLICATION, mime::JSON) => Ok((
|
||||
Some("json".to_string()),
|
||||
@ -324,25 +321,29 @@ pub async fn post(
|
||||
ShellError::labeled_error(
|
||||
"Could not load text from remote url",
|
||||
"could not load",
|
||||
tag,
|
||||
&tag,
|
||||
)
|
||||
})?),
|
||||
tag,
|
||||
AnchorLocation::Url(location.to_string()),
|
||||
Tag {
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
span: tag.span,
|
||||
},
|
||||
)),
|
||||
(mime::APPLICATION, mime::OCTET_STREAM) => {
|
||||
let buf: Vec<u8> = r.body_bytes().await.map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not load binary file",
|
||||
"could not load",
|
||||
tag,
|
||||
&tag,
|
||||
)
|
||||
})?;
|
||||
Ok((
|
||||
None,
|
||||
Value::binary(buf),
|
||||
tag,
|
||||
AnchorLocation::Url(location.to_string()),
|
||||
Tag {
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
span: tag.span,
|
||||
},
|
||||
))
|
||||
}
|
||||
(mime::IMAGE, image_ty) => {
|
||||
@ -350,14 +351,16 @@ pub async fn post(
|
||||
ShellError::labeled_error(
|
||||
"Could not load image file",
|
||||
"could not load",
|
||||
tag,
|
||||
&tag,
|
||||
)
|
||||
})?;
|
||||
Ok((
|
||||
Some(image_ty.to_string()),
|
||||
Value::binary(buf),
|
||||
tag,
|
||||
AnchorLocation::Url(location.to_string()),
|
||||
Tag {
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
span: tag.span,
|
||||
},
|
||||
))
|
||||
}
|
||||
(mime::TEXT, mime::HTML) => Ok((
|
||||
@ -366,11 +369,13 @@ pub async fn post(
|
||||
ShellError::labeled_error(
|
||||
"Could not load text from remote url",
|
||||
"could not load",
|
||||
tag,
|
||||
&tag,
|
||||
)
|
||||
})?),
|
||||
tag,
|
||||
AnchorLocation::Url(location.to_string()),
|
||||
Tag {
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
span: tag.span,
|
||||
},
|
||||
)),
|
||||
(mime::TEXT, mime::PLAIN) => {
|
||||
let path_extension = url::Url::parse(location)
|
||||
@ -390,11 +395,13 @@ pub async fn post(
|
||||
ShellError::labeled_error(
|
||||
"Could not load text from remote url",
|
||||
"could not load",
|
||||
tag,
|
||||
&tag,
|
||||
)
|
||||
})?),
|
||||
tag,
|
||||
AnchorLocation::Url(location.to_string()),
|
||||
Tag {
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
span: tag.span,
|
||||
},
|
||||
))
|
||||
}
|
||||
(ty, sub_ty) => Ok((
|
||||
@ -403,16 +410,20 @@ pub async fn post(
|
||||
"Not yet supported MIME type: {} {}",
|
||||
ty, sub_ty
|
||||
)),
|
||||
tag,
|
||||
AnchorLocation::Url(location.to_string()),
|
||||
Tag {
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
span: tag.span,
|
||||
},
|
||||
)),
|
||||
}
|
||||
}
|
||||
None => Ok((
|
||||
None,
|
||||
Value::string(format!("No content type found")),
|
||||
tag,
|
||||
AnchorLocation::Url(location.to_string()),
|
||||
Tag {
|
||||
anchor: Some(AnchorLocation::Url(location.to_string())),
|
||||
span: tag.span,
|
||||
},
|
||||
)),
|
||||
},
|
||||
Err(_) => {
|
||||
|
@ -119,33 +119,32 @@ fn save(
|
||||
input,
|
||||
name,
|
||||
shell_manager,
|
||||
source_map,
|
||||
host,
|
||||
ctrl_c,
|
||||
commands: registry,
|
||||
..
|
||||
}: RunnableContext,
|
||||
raw_args: RawCommandArgs,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let mut full_path = PathBuf::from(shell_manager.path());
|
||||
let name_tag = name;
|
||||
let name_tag = name.clone();
|
||||
|
||||
let source_map = source_map.clone();
|
||||
let stream = async_stream! {
|
||||
let input: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
if path.is_none() {
|
||||
// If there is no filename, check the metadata for the anchor filename
|
||||
if input.len() > 0 {
|
||||
let anchor = input[0].anchor();
|
||||
match source_map.get(&anchor) {
|
||||
match anchor {
|
||||
Some(path) => match path {
|
||||
AnchorLocation::File(file) => {
|
||||
full_path.push(Path::new(file));
|
||||
full_path.push(Path::new(&file));
|
||||
}
|
||||
_ => {
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Save requires a filepath (1)",
|
||||
"needs path",
|
||||
name_tag,
|
||||
name_tag.clone(),
|
||||
));
|
||||
}
|
||||
},
|
||||
@ -153,7 +152,7 @@ fn save(
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Save requires a filepath (2)",
|
||||
"needs path",
|
||||
name_tag,
|
||||
name_tag.clone(),
|
||||
));
|
||||
}
|
||||
}
|
||||
@ -161,7 +160,7 @@ fn save(
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Save requires a filepath (3)",
|
||||
"needs path",
|
||||
name_tag,
|
||||
name_tag.clone(),
|
||||
));
|
||||
}
|
||||
} else {
|
||||
@ -179,6 +178,7 @@ fn save(
|
||||
if let Some(converter) = registry.get_command(&command_name) {
|
||||
let new_args = RawCommandArgs {
|
||||
host,
|
||||
ctrl_c,
|
||||
shell_manager,
|
||||
call_info: UnevaluatedCallInfo {
|
||||
args: crate::parser::hir::Call {
|
||||
@ -187,7 +187,6 @@ fn save(
|
||||
named: None
|
||||
},
|
||||
source: raw_args.call_info.source,
|
||||
source_map: raw_args.call_info.source_map,
|
||||
name_tag: raw_args.call_info.name_tag,
|
||||
}
|
||||
};
|
||||
|
@ -2,6 +2,7 @@ use crate::commands::WholeStreamCommand;
|
||||
use crate::data::TaggedDictBuilder;
|
||||
use crate::errors::ShellError;
|
||||
use crate::prelude::*;
|
||||
use std::sync::atomic::Ordering;
|
||||
|
||||
pub struct Shells;
|
||||
|
||||
@ -32,14 +33,14 @@ fn shells(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream
|
||||
let tag = args.call_info.name_tag;
|
||||
|
||||
for (index, shell) in args.shell_manager.shells.lock().unwrap().iter().enumerate() {
|
||||
let mut dict = TaggedDictBuilder::new(tag);
|
||||
let mut dict = TaggedDictBuilder::new(&tag);
|
||||
|
||||
if index == args.shell_manager.current_shell {
|
||||
if index == (*args.shell_manager.current_shell).load(Ordering::SeqCst) {
|
||||
dict.insert(" ", "X".to_string());
|
||||
} else {
|
||||
dict.insert(" ", " ".to_string());
|
||||
}
|
||||
dict.insert("name", shell.name(&args.call_info.source_map));
|
||||
dict.insert("name", shell.name());
|
||||
dict.insert("path", shell.path());
|
||||
|
||||
shells_out.push_back(dict.into_tagged_value());
|
||||
|
@ -37,7 +37,7 @@ fn size(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream,
|
||||
_ => Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
tag,
|
||||
&tag,
|
||||
"value originates from here",
|
||||
v.tag(),
|
||||
)),
|
||||
|
@ -94,7 +94,7 @@ fn split_column(
|
||||
_ => Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
name,
|
||||
&name,
|
||||
"value originates from here",
|
||||
v.tag(),
|
||||
)),
|
||||
|
@ -60,7 +60,7 @@ fn split_row(
|
||||
result.push_back(Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a string from pipeline",
|
||||
"requires string input",
|
||||
name,
|
||||
&name,
|
||||
"value originates from here",
|
||||
v.tag(),
|
||||
)));
|
||||
|
@ -5,16 +5,13 @@ use crate::prelude::*;
|
||||
|
||||
pub struct Table;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct TableArgs {}
|
||||
|
||||
impl WholeStreamCommand for Table {
|
||||
fn name(&self) -> &str {
|
||||
"table"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("table")
|
||||
Signature::build("table").named("start_number", SyntaxShape::Number)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -26,16 +23,29 @@ impl WholeStreamCommand for Table {
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
args.process(registry, table)?.run()
|
||||
table(args, registry)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn table(_args: TableArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
|
||||
fn table(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
|
||||
let stream = async_stream! {
|
||||
let input: Vec<Tagged<Value>> = context.input.into_vec().await;
|
||||
let host = args.host.clone();
|
||||
let start_number = match args.get("start_number") {
|
||||
Some(Tagged { item: Value::Primitive(Primitive::Int(i)), .. }) => {
|
||||
i.to_usize().unwrap()
|
||||
}
|
||||
_ => {
|
||||
0
|
||||
}
|
||||
};
|
||||
|
||||
let input: Vec<Tagged<Value>> = args.input.into_vec().await;
|
||||
if input.len() > 0 {
|
||||
let mut host = context.host.lock().unwrap();
|
||||
let view = TableView::from_list(&input);
|
||||
let mut host = host.lock().unwrap();
|
||||
let view = TableView::from_list(&input, start_number);
|
||||
|
||||
if let Some(view) = view {
|
||||
handle_unexpected(&mut *host, |host| crate::format::print_view(&view, host));
|
||||
}
|
||||
|
@ -28,7 +28,6 @@ impl WholeStreamCommand for Tags {
|
||||
}
|
||||
|
||||
fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let source_map = args.call_info.source_map.clone();
|
||||
Ok(args
|
||||
.input
|
||||
.values
|
||||
@ -42,7 +41,7 @@ fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream,
|
||||
dict.insert("end", Value::int(span.end() as i64));
|
||||
tags.insert_tagged("span", dict.into_tagged_value());
|
||||
|
||||
match source_map.get(&anchor) {
|
||||
match anchor {
|
||||
Some(AnchorLocation::File(source)) => {
|
||||
tags.insert("anchor", Value::string(source));
|
||||
}
|
||||
|
@ -46,7 +46,7 @@ pub fn value_to_bson_value(v: &Tagged<Value>) -> Result<Bson, ShellError> {
|
||||
Value::Primitive(Primitive::BeginningOfStream) => Bson::Null,
|
||||
Value::Primitive(Primitive::Decimal(d)) => Bson::FloatingPoint(d.to_f64().unwrap()),
|
||||
Value::Primitive(Primitive::Int(i)) => {
|
||||
Bson::I64(i.tagged(v.tag).coerce_into("converting to BSON")?)
|
||||
Bson::I64(i.tagged(&v.tag).coerce_into("converting to BSON")?)
|
||||
}
|
||||
Value::Primitive(Primitive::Nothing) => Bson::Null,
|
||||
Value::Primitive(Primitive::String(s)) => Bson::String(s.clone()),
|
||||
@ -58,6 +58,7 @@ pub fn value_to_bson_value(v: &Tagged<Value>) -> Result<Bson, ShellError> {
|
||||
.collect::<Result<_, _>>()?,
|
||||
),
|
||||
Value::Block(_) => Bson::Null,
|
||||
Value::Error(e) => return Err(e.clone()),
|
||||
Value::Primitive(Primitive::Binary(b)) => Bson::Binary(BinarySubtype::Generic, b.clone()),
|
||||
Value::Row(o) => object_value_to_bson(o)?,
|
||||
})
|
||||
@ -170,7 +171,7 @@ fn get_binary_subtype<'a>(tagged_value: &'a Tagged<Value>) -> Result<BinarySubty
|
||||
_ => unreachable!(),
|
||||
}),
|
||||
Value::Primitive(Primitive::Int(i)) => Ok(BinarySubtype::UserDefined(
|
||||
i.tagged(tagged_value.tag)
|
||||
i.tagged(&tagged_value.tag)
|
||||
.coerce_into("converting to BSON binary subtype")?,
|
||||
)),
|
||||
_ => Err(ShellError::type_error(
|
||||
@ -207,12 +208,12 @@ fn bson_value_to_bytes(bson: Bson, tag: Tag) -> Result<Vec<u8>, ShellError> {
|
||||
Bson::Array(a) => {
|
||||
for v in a.into_iter() {
|
||||
match v {
|
||||
Bson::Document(d) => shell_encode_document(&mut out, d, tag)?,
|
||||
Bson::Document(d) => shell_encode_document(&mut out, d, tag.clone())?,
|
||||
_ => {
|
||||
return Err(ShellError::labeled_error(
|
||||
format!("All top level values must be Documents, got {:?}", v),
|
||||
"requires BSON-compatible document",
|
||||
tag,
|
||||
&tag,
|
||||
))
|
||||
}
|
||||
}
|
||||
@ -237,7 +238,7 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
|
||||
let input: Vec<Tagged<Value>> = args.input.values.collect().await;
|
||||
|
||||
let to_process_input = if input.len() > 1 {
|
||||
let tag = input[0].tag;
|
||||
let tag = input[0].tag.clone();
|
||||
vec![Tagged { item: Value::Table(input), tag } ]
|
||||
} else if input.len() == 1 {
|
||||
input
|
||||
@ -248,14 +249,14 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
|
||||
for value in to_process_input {
|
||||
match value_to_bson_value(&value) {
|
||||
Ok(bson_value) => {
|
||||
match bson_value_to_bytes(bson_value, name_tag) {
|
||||
match bson_value_to_bytes(bson_value, name_tag.clone()) {
|
||||
Ok(x) => yield ReturnSuccess::value(
|
||||
Value::binary(x).tagged(name_tag),
|
||||
Value::binary(x).tagged(&name_tag),
|
||||
),
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a table with BSON-compatible structure.tag() from pipeline",
|
||||
"requires BSON-compatible input",
|
||||
name_tag,
|
||||
&name_tag,
|
||||
"originates from here".to_string(),
|
||||
value.tag(),
|
||||
)),
|
||||
@ -264,7 +265,7 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
|
||||
_ => yield Err(ShellError::labeled_error(
|
||||
"Expected a table with BSON-compatible structure from pipeline",
|
||||
"requires BSON-compatible input",
|
||||
name_tag))
|
||||
&name_tag))
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -47,7 +47,7 @@ pub fn value_to_csv_value(v: &Tagged<Value>) -> Tagged<Value> {
|
||||
Value::Block(_) => Value::Primitive(Primitive::Nothing),
|
||||
_ => Value::Primitive(Primitive::Nothing),
|
||||
}
|
||||
.tagged(v.tag)
|
||||
.tagged(v.tag.clone())
|
||||
}
|
||||
|
||||
fn to_string_helper(v: &Tagged<Value>) -> Result<String, ShellError> {
|
||||
@ -61,7 +61,13 @@ fn to_string_helper(v: &Tagged<Value>) -> Result<String, ShellError> {
|
||||
Value::Table(_) => return Ok(String::from("[Table]")),
|
||||
Value::Row(_) => return Ok(String::from("[Row]")),
|
||||
Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()),
|
||||
_ => return Err(ShellError::labeled_error("Unexpected value", "", v.tag)),
|
||||
_ => {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Unexpected value",
|
||||
"",
|
||||
v.tag.clone(),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -99,14 +105,14 @@ pub fn to_string(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
tagged_value.tag,
|
||||
&tagged_value.tag,
|
||||
)
|
||||
})?)
|
||||
.map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
tagged_value.tag,
|
||||
&tagged_value.tag,
|
||||
)
|
||||
})?);
|
||||
}
|
||||
@ -136,14 +142,14 @@ pub fn to_string(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
tagged_value.tag,
|
||||
&tagged_value.tag,
|
||||
)
|
||||
})?)
|
||||
.map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
tagged_value.tag,
|
||||
&tagged_value.tag,
|
||||
)
|
||||
})?);
|
||||
}
|
||||
@ -160,7 +166,7 @@ fn to_csv(
|
||||
let input: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
|
||||
let to_process_input = if input.len() > 1 {
|
||||
let tag = input[0].tag;
|
||||
let tag = input[0].tag.clone();
|
||||
vec![Tagged { item: Value::Table(input), tag } ]
|
||||
} else if input.len() == 1 {
|
||||
input
|
||||
@ -176,13 +182,13 @@ fn to_csv(
|
||||
} else {
|
||||
x
|
||||
};
|
||||
yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(name_tag))
|
||||
yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(&name_tag))
|
||||
}
|
||||
_ => {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a table with CSV-compatible structure.tag() from pipeline",
|
||||
"requires CSV-compatible input",
|
||||
name_tag,
|
||||
&name_tag,
|
||||
"originates from here".to_string(),
|
||||
value.tag(),
|
||||
))
|
||||
|
@ -42,7 +42,7 @@ pub fn value_to_json_value(v: &Tagged<Value>) -> Result<serde_json::Value, Shell
|
||||
.unwrap(),
|
||||
),
|
||||
Value::Primitive(Primitive::Int(i)) => serde_json::Value::Number(serde_json::Number::from(
|
||||
CoerceInto::<i64>::coerce_into(i.tagged(v.tag), "converting to JSON number")?,
|
||||
CoerceInto::<i64>::coerce_into(i.tagged(&v.tag), "converting to JSON number")?,
|
||||
)),
|
||||
Value::Primitive(Primitive::Nothing) => serde_json::Value::Null,
|
||||
Value::Primitive(Primitive::Pattern(s)) => serde_json::Value::String(s.clone()),
|
||||
@ -50,6 +50,7 @@ pub fn value_to_json_value(v: &Tagged<Value>) -> Result<serde_json::Value, Shell
|
||||
Value::Primitive(Primitive::Path(s)) => serde_json::Value::String(s.display().to_string()),
|
||||
|
||||
Value::Table(l) => serde_json::Value::Array(json_list(l)?),
|
||||
Value::Error(e) => return Err(e.clone()),
|
||||
Value::Block(_) => serde_json::Value::Null,
|
||||
Value::Primitive(Primitive::Binary(b)) => serde_json::Value::Array(
|
||||
b.iter()
|
||||
@ -85,7 +86,7 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
|
||||
let input: Vec<Tagged<Value>> = args.input.values.collect().await;
|
||||
|
||||
let to_process_input = if input.len() > 1 {
|
||||
let tag = input[0].tag;
|
||||
let tag = input[0].tag.clone();
|
||||
vec![Tagged { item: Value::Table(input), tag } ]
|
||||
} else if input.len() == 1 {
|
||||
input
|
||||
@ -98,12 +99,12 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
|
||||
Ok(json_value) => {
|
||||
match serde_json::to_string(&json_value) {
|
||||
Ok(x) => yield ReturnSuccess::value(
|
||||
Value::Primitive(Primitive::String(x)).tagged(name_tag),
|
||||
Value::Primitive(Primitive::String(x)).tagged(&name_tag),
|
||||
),
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a table with JSON-compatible structure.tag() from pipeline",
|
||||
"requires JSON-compatible input",
|
||||
name_tag,
|
||||
&name_tag,
|
||||
"originates from here".to_string(),
|
||||
value.tag(),
|
||||
)),
|
||||
@ -112,7 +113,7 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
|
||||
_ => yield Err(ShellError::labeled_error(
|
||||
"Expected a table with JSON-compatible structure from pipeline",
|
||||
"requires JSON-compatible input",
|
||||
name_tag))
|
||||
&name_tag))
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -38,10 +38,10 @@ pub fn value_to_toml_value(v: &Tagged<Value>) -> Result<toml::Value, ShellError>
|
||||
toml::Value::String("<Beginning of Stream>".to_string())
|
||||
}
|
||||
Value::Primitive(Primitive::Decimal(f)) => {
|
||||
toml::Value::Float(f.tagged(v.tag).coerce_into("converting to TOML float")?)
|
||||
toml::Value::Float(f.tagged(&v.tag).coerce_into("converting to TOML float")?)
|
||||
}
|
||||
Value::Primitive(Primitive::Int(i)) => {
|
||||
toml::Value::Integer(i.tagged(v.tag).coerce_into("converting to TOML integer")?)
|
||||
toml::Value::Integer(i.tagged(&v.tag).coerce_into("converting to TOML integer")?)
|
||||
}
|
||||
Value::Primitive(Primitive::Nothing) => toml::Value::String("<Nothing>".to_string()),
|
||||
Value::Primitive(Primitive::Pattern(s)) => toml::Value::String(s.clone()),
|
||||
@ -49,6 +49,7 @@ pub fn value_to_toml_value(v: &Tagged<Value>) -> Result<toml::Value, ShellError>
|
||||
Value::Primitive(Primitive::Path(s)) => toml::Value::String(s.display().to_string()),
|
||||
|
||||
Value::Table(l) => toml::Value::Array(collect_values(l)?),
|
||||
Value::Error(e) => return Err(e.clone()),
|
||||
Value::Block(_) => toml::Value::String("<Block>".to_string()),
|
||||
Value::Primitive(Primitive::Binary(b)) => {
|
||||
toml::Value::Array(b.iter().map(|x| toml::Value::Integer(*x as i64)).collect())
|
||||
@ -80,7 +81,7 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
|
||||
let input: Vec<Tagged<Value>> = args.input.values.collect().await;
|
||||
|
||||
let to_process_input = if input.len() > 1 {
|
||||
let tag = input[0].tag;
|
||||
let tag = input[0].tag.clone();
|
||||
vec![Tagged { item: Value::Table(input), tag } ]
|
||||
} else if input.len() == 1 {
|
||||
input
|
||||
@ -93,12 +94,12 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
|
||||
Ok(toml_value) => {
|
||||
match toml::to_string(&toml_value) {
|
||||
Ok(x) => yield ReturnSuccess::value(
|
||||
Value::Primitive(Primitive::String(x)).tagged(name_tag),
|
||||
Value::Primitive(Primitive::String(x)).tagged(&name_tag),
|
||||
),
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a table with TOML-compatible structure.tag() from pipeline",
|
||||
"requires TOML-compatible input",
|
||||
name_tag,
|
||||
&name_tag,
|
||||
"originates from here".to_string(),
|
||||
value.tag(),
|
||||
)),
|
||||
@ -107,7 +108,7 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
|
||||
_ => yield Err(ShellError::labeled_error(
|
||||
"Expected a table with TOML-compatible structure from pipeline",
|
||||
"requires TOML-compatible input",
|
||||
name_tag))
|
||||
&name_tag))
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -49,7 +49,7 @@ pub fn value_to_tsv_value(tagged_value: &Tagged<Value>) -> Tagged<Value> {
|
||||
Value::Block(_) => Value::Primitive(Primitive::Nothing),
|
||||
_ => Value::Primitive(Primitive::Nothing),
|
||||
}
|
||||
.tagged(tagged_value.tag)
|
||||
.tagged(&tagged_value.tag)
|
||||
}
|
||||
|
||||
fn to_string_helper(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
|
||||
@ -68,7 +68,7 @@ fn to_string_helper(tagged_value: &Tagged<Value>) -> Result<String, ShellError>
|
||||
return Err(ShellError::labeled_error(
|
||||
"Unexpected value",
|
||||
"original value",
|
||||
tagged_value.tag,
|
||||
&tagged_value.tag,
|
||||
))
|
||||
}
|
||||
}
|
||||
@ -107,14 +107,14 @@ pub fn to_string(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
tagged_value.tag,
|
||||
&tagged_value.tag,
|
||||
)
|
||||
})?)
|
||||
.map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
tagged_value.tag,
|
||||
&tagged_value.tag,
|
||||
)
|
||||
})?);
|
||||
}
|
||||
@ -144,14 +144,14 @@ pub fn to_string(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
tagged_value.tag,
|
||||
&tagged_value.tag,
|
||||
)
|
||||
})?)
|
||||
.map_err(|_| {
|
||||
ShellError::labeled_error(
|
||||
"Could not convert record",
|
||||
"original value",
|
||||
tagged_value.tag,
|
||||
&tagged_value.tag,
|
||||
)
|
||||
})?);
|
||||
}
|
||||
@ -168,7 +168,7 @@ fn to_tsv(
|
||||
let input: Vec<Tagged<Value>> = input.values.collect().await;
|
||||
|
||||
let to_process_input = if input.len() > 1 {
|
||||
let tag = input[0].tag;
|
||||
let tag = input[0].tag.clone();
|
||||
vec![Tagged { item: Value::Table(input), tag } ]
|
||||
} else if input.len() == 1 {
|
||||
input
|
||||
@ -184,13 +184,13 @@ fn to_tsv(
|
||||
} else {
|
||||
x
|
||||
};
|
||||
yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(name_tag))
|
||||
yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(&name_tag))
|
||||
}
|
||||
_ => {
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a table with TSV-compatible structure.tag() from pipeline",
|
||||
"requires TSV-compatible input",
|
||||
name_tag,
|
||||
&name_tag,
|
||||
"originates from here".to_string(),
|
||||
value.tag(),
|
||||
))
|
||||
|
@ -47,7 +47,7 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected table with string values",
|
||||
"requires table with strings",
|
||||
tag,
|
||||
&tag,
|
||||
"value originates from here",
|
||||
v.tag,
|
||||
))
|
||||
@ -57,13 +57,13 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
|
||||
|
||||
match serde_urlencoded::to_string(row_vec) {
|
||||
Ok(s) => {
|
||||
yield ReturnSuccess::value(Value::string(s).tagged(tag));
|
||||
yield ReturnSuccess::value(Value::string(s).tagged(&tag));
|
||||
}
|
||||
_ => {
|
||||
yield Err(ShellError::labeled_error(
|
||||
"Failed to convert to url-encoded",
|
||||
"cannot url-encode",
|
||||
tag,
|
||||
&tag,
|
||||
))
|
||||
}
|
||||
}
|
||||
@ -72,7 +72,7 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
|
||||
yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a table from pipeline",
|
||||
"requires table input",
|
||||
tag,
|
||||
&tag,
|
||||
"value originates from here",
|
||||
value_tag,
|
||||
))
|
||||
|
@ -39,7 +39,7 @@ pub fn value_to_yaml_value(v: &Tagged<Value>) -> Result<serde_yaml::Value, Shell
|
||||
serde_yaml::Value::Number(serde_yaml::Number::from(f.to_f64().unwrap()))
|
||||
}
|
||||
Value::Primitive(Primitive::Int(i)) => serde_yaml::Value::Number(serde_yaml::Number::from(
|
||||
CoerceInto::<i64>::coerce_into(i.tagged(v.tag), "converting to YAML number")?,
|
||||
CoerceInto::<i64>::coerce_into(i.tagged(&v.tag), "converting to YAML number")?,
|
||||
)),
|
||||
Value::Primitive(Primitive::Nothing) => serde_yaml::Value::Null,
|
||||
Value::Primitive(Primitive::Pattern(s)) => serde_yaml::Value::String(s.clone()),
|
||||
@ -55,6 +55,7 @@ pub fn value_to_yaml_value(v: &Tagged<Value>) -> Result<serde_yaml::Value, Shell
|
||||
|
||||
serde_yaml::Value::Sequence(out)
|
||||
}
|
||||
Value::Error(e) => return Err(e.clone()),
|
||||
Value::Block(_) => serde_yaml::Value::Null,
|
||||
Value::Primitive(Primitive::Binary(b)) => serde_yaml::Value::Sequence(
|
||||
b.iter()
|
||||
@ -81,7 +82,7 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
|
||||
let input: Vec<Tagged<Value>> = args.input.values.collect().await;
|
||||
|
||||
let to_process_input = if input.len() > 1 {
|
||||
let tag = input[0].tag;
|
||||
let tag = input[0].tag.clone();
|
||||
vec![Tagged { item: Value::Table(input), tag } ]
|
||||
} else if input.len() == 1 {
|
||||
input
|
||||
@ -94,12 +95,12 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
|
||||
Ok(yaml_value) => {
|
||||
match serde_yaml::to_string(&yaml_value) {
|
||||
Ok(x) => yield ReturnSuccess::value(
|
||||
Value::Primitive(Primitive::String(x)).tagged(name_tag),
|
||||
Value::Primitive(Primitive::String(x)).tagged(&name_tag),
|
||||
),
|
||||
_ => yield Err(ShellError::labeled_error_with_secondary(
|
||||
"Expected a table with YAML-compatible structure.tag() from pipeline",
|
||||
"requires YAML-compatible input",
|
||||
name_tag,
|
||||
&name_tag,
|
||||
"originates from here".to_string(),
|
||||
value.tag(),
|
||||
)),
|
||||
@ -108,7 +109,7 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
|
||||
_ => yield Err(ShellError::labeled_error(
|
||||
"Expected a table with YAML-compatible structure from pipeline",
|
||||
"requires YAML-compatible input",
|
||||
name_tag))
|
||||
&name_tag))
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@ -31,14 +31,14 @@ impl WholeStreamCommand for Version {
|
||||
|
||||
pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
let args = args.evaluate_once(registry)?;
|
||||
let tag = args.call_info.name_tag;
|
||||
let tag = args.call_info.name_tag.clone();
|
||||
|
||||
let mut indexmap = IndexMap::new();
|
||||
indexmap.insert(
|
||||
"version".to_string(),
|
||||
Value::string(clap::crate_version!()).tagged(tag),
|
||||
Value::string(clap::crate_version!()).tagged(&tag),
|
||||
);
|
||||
|
||||
let value = Value::Row(Dictionary::from(indexmap)).tagged(tag);
|
||||
let value = Value::Row(Dictionary::from(indexmap)).tagged(&tag);
|
||||
Ok(OutputStream::one(value))
|
||||
}
|
||||
|
@ -49,7 +49,7 @@ impl PerItemCommand for Where {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Expected a condition",
|
||||
"where needs a condition",
|
||||
*tag,
|
||||
tag,
|
||||
))
|
||||
}
|
||||
};
|
||||
|
@ -33,7 +33,7 @@ pub fn which(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
|
||||
let args = args.evaluate_once(registry)?;
|
||||
|
||||
let mut which_out = VecDeque::new();
|
||||
let tag = args.call_info.name_tag;
|
||||
let tag = args.call_info.name_tag.clone();
|
||||
|
||||
if let Some(v) = &args.call_info.args.positional {
|
||||
if v.len() > 0 {
|
||||
@ -52,7 +52,7 @@ pub fn which(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
|
||||
return Err(ShellError::labeled_error(
|
||||
"Expected a filename to find",
|
||||
"needs a filename",
|
||||
*tag,
|
||||
tag,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
@ -1,39 +1,20 @@
|
||||
use crate::commands::{Command, UnevaluatedCallInfo};
|
||||
use crate::parser::{hir, hir::syntax_shape::ExpandContext};
|
||||
use crate::prelude::*;
|
||||
|
||||
use derive_new::new;
|
||||
use indexmap::IndexMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::collections::HashMap;
|
||||
use std::error::Error;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum AnchorLocation {
    Url(String),
    File(String),
    Source(Text),
}

#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct SourceMap(HashMap<Uuid, AnchorLocation>);

impl SourceMap {
    pub fn insert(&mut self, uuid: Uuid, anchor_location: AnchorLocation) {
        self.0.insert(uuid, anchor_location);
    }

    pub fn get(&self, uuid: &Uuid) -> Option<&AnchorLocation> {
        self.0.get(uuid)
    }

    pub fn new() -> SourceMap {
        SourceMap(HashMap::new())
    }
}

#[derive(Clone, new)]
pub struct CommandRegistry {
    #[new(value = "Arc::new(Mutex::new(IndexMap::default()))")]
@@ -77,8 +58,8 @@ impl CommandRegistry {
#[derive(Clone)]
pub struct Context {
    registry: CommandRegistry,
    pub(crate) source_map: Arc<Mutex<SourceMap>>,
    host: Arc<Mutex<dyn Host + Send>>,
    pub ctrl_c: Arc<AtomicBool>,
    pub(crate) shell_manager: ShellManager,
}

@@ -90,17 +71,17 @@ impl Context {
    pub(crate) fn expand_context<'context>(
        &'context self,
        source: &'context Text,
        tag: Tag,
        span: Span,
    ) -> ExpandContext<'context> {
        ExpandContext::new(&self.registry, tag, source, self.shell_manager.homedir())
        ExpandContext::new(&self.registry, span, source, self.shell_manager.homedir())
    }

    pub(crate) fn basic() -> Result<Context, Box<dyn Error>> {
        let registry = CommandRegistry::new();
        Ok(Context {
            registry: registry.clone(),
            source_map: Arc::new(Mutex::new(SourceMap::new())),
            host: Arc::new(Mutex::new(crate::env::host::BasicHost)),
            ctrl_c: Arc::new(AtomicBool::new(false)),
            shell_manager: ShellManager::basic(registry)?,
        })
    }
@@ -117,12 +98,6 @@ impl Context {
        }
    }

    pub fn add_anchor_location(&mut self, uuid: Uuid, anchor_location: AnchorLocation) {
        let mut source_map = self.source_map.lock().unwrap();

        source_map.insert(uuid, anchor_location);
    }

    pub(crate) fn get_command(&self, name: &str) -> Option<Arc<Command>> {
        self.registry.get_command(name)
    }
@@ -135,27 +110,19 @@ impl Context {
        &mut self,
        command: Arc<Command>,
        name_tag: Tag,
        source_map: SourceMap,
        args: hir::Call,
        source: &Text,
        input: InputStream,
        is_first_command: bool,
    ) -> OutputStream {
        let command_args = self.command_args(args, input, source, source_map, name_tag);
        let command_args = self.command_args(args, input, source, name_tag);
        command.run(command_args, self.registry(), is_first_command)
    }

    fn call_info(
        &self,
        args: hir::Call,
        source: &Text,
        source_map: SourceMap,
        name_tag: Tag,
    ) -> UnevaluatedCallInfo {
    fn call_info(&self, args: hir::Call, source: &Text, name_tag: Tag) -> UnevaluatedCallInfo {
        UnevaluatedCallInfo {
            args,
            source: source.clone(),
            source_map,
            name_tag,
        }
    }
@@ -165,13 +132,13 @@ impl Context {
        args: hir::Call,
        input: InputStream,
        source: &Text,
        source_map: SourceMap,
        name_tag: Tag,
    ) -> CommandArgs {
        CommandArgs {
            host: self.host.clone(),
            ctrl_c: self.ctrl_c.clone(),
            shell_manager: self.shell_manager.clone(),
            call_info: self.call_info(args, source, source_map, name_tag),
            call_info: self.call_info(args, source, name_tag),
            input,
        }
    }
@ -213,7 +213,7 @@ impl Block {
|
||||
let scope = Scope::new(value.clone());
|
||||
|
||||
if self.expressions.len() == 0 {
|
||||
return Ok(Value::nothing().tagged(self.tag));
|
||||
return Ok(Value::nothing().tagged(&self.tag));
|
||||
}
|
||||
|
||||
let mut last = None;
|
||||
@ -245,6 +245,9 @@ pub enum Value {
|
||||
Row(crate::data::Dictionary),
|
||||
Table(Vec<Tagged<Value>>),
|
||||
|
||||
// Errors are a type of value too
|
||||
Error(ShellError),
|
||||
|
||||
Block(Block),
|
||||
}
|
||||
|
||||
@ -293,6 +296,7 @@ impl fmt::Debug for ValueDebug<'_> {
|
||||
Value::Row(o) => o.debug(f),
|
||||
Value::Table(l) => debug_list(l).fmt(f),
|
||||
Value::Block(_) => write!(f, "[[block]]"),
|
||||
Value::Error(_) => write!(f, "[[error]]"),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -300,7 +304,7 @@ impl fmt::Debug for ValueDebug<'_> {
|
||||
impl Tagged<Value> {
|
||||
pub fn tagged_type_name(&self) -> Tagged<String> {
|
||||
let name = self.type_name();
|
||||
Tagged::from_item(name, self.tag())
|
||||
name.tagged(self.tag())
|
||||
}
|
||||
}
|
||||
|
||||
@ -312,7 +316,7 @@ impl std::convert::TryFrom<&Tagged<Value>> for Block {
|
||||
Value::Block(block) => Ok(block.clone()),
|
||||
v => Err(ShellError::type_error(
|
||||
"Block",
|
||||
value.copy_tag(v.type_name()),
|
||||
v.type_name().tagged(value.tag()),
|
||||
)),
|
||||
}
|
||||
}
|
||||
@ -324,11 +328,11 @@ impl std::convert::TryFrom<&Tagged<Value>> for i64 {
|
||||
fn try_from(value: &Tagged<Value>) -> Result<i64, ShellError> {
|
||||
match value.item() {
|
||||
Value::Primitive(Primitive::Int(int)) => {
|
||||
int.tagged(value.tag).coerce_into("converting to i64")
|
||||
int.tagged(&value.tag).coerce_into("converting to i64")
|
||||
}
|
||||
v => Err(ShellError::type_error(
|
||||
"Integer",
|
||||
value.copy_tag(v.type_name()),
|
||||
v.type_name().tagged(value.tag()),
|
||||
)),
|
||||
}
|
||||
}
|
||||
@ -342,7 +346,7 @@ impl std::convert::TryFrom<&Tagged<Value>> for String {
|
||||
Value::Primitive(Primitive::String(s)) => Ok(s.clone()),
|
||||
v => Err(ShellError::type_error(
|
||||
"String",
|
||||
value.copy_tag(v.type_name()),
|
||||
v.type_name().tagged(value.tag()),
|
||||
)),
|
||||
}
|
||||
}
|
||||
@ -356,7 +360,7 @@ impl std::convert::TryFrom<&Tagged<Value>> for Vec<u8> {
|
||||
Value::Primitive(Primitive::Binary(b)) => Ok(b.clone()),
|
||||
v => Err(ShellError::type_error(
|
||||
"Binary",
|
||||
value.copy_tag(v.type_name()),
|
||||
v.type_name().tagged(value.tag()),
|
||||
)),
|
||||
}
|
||||
}
|
||||
@ -370,7 +374,7 @@ impl<'a> std::convert::TryFrom<&'a Tagged<Value>> for &'a crate::data::Dictionar
|
||||
Value::Row(d) => Ok(d),
|
||||
v => Err(ShellError::type_error(
|
||||
"Dictionary",
|
||||
value.copy_tag(v.type_name()),
|
||||
v.type_name().tagged(value.tag()),
|
||||
)),
|
||||
}
|
||||
}
|
||||
@ -392,7 +396,7 @@ impl std::convert::TryFrom<Option<&Tagged<Value>>> for Switch {
|
||||
Value::Primitive(Primitive::Boolean(true)) => Ok(Switch::Present),
|
||||
v => Err(ShellError::type_error(
|
||||
"Boolean",
|
||||
value.copy_tag(v.type_name()),
|
||||
v.type_name().tagged(value.tag()),
|
||||
)),
|
||||
},
|
||||
}
|
||||
@ -410,19 +414,19 @@ impl Tagged<Value> {
|
||||
match &self.item {
|
||||
Value::Table(table) => {
|
||||
for item in table {
|
||||
out.push(item.as_string()?.tagged(item.tag));
|
||||
out.push(item.as_string()?.tagged(&item.tag));
|
||||
}
|
||||
}
|
||||
|
||||
other => {
|
||||
return Err(ShellError::type_error(
|
||||
"column name",
|
||||
other.type_name().tagged(self.tag),
|
||||
other.type_name().tagged(&self.tag),
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
Ok(out.tagged(self.tag))
|
||||
Ok(out.tagged(&self.tag))
|
||||
}
|
||||
|
||||
pub(crate) fn as_string(&self) -> Result<String, ShellError> {
|
||||
@ -437,7 +441,7 @@ impl Tagged<Value> {
|
||||
other => Err(ShellError::labeled_error(
|
||||
"Expected string",
|
||||
other.type_name(),
|
||||
self.tag,
|
||||
&self.tag,
|
||||
)),
|
||||
}
|
||||
}
|
||||
@ -450,6 +454,7 @@ impl Value {
|
||||
Value::Row(_) => format!("row"),
|
||||
Value::Table(_) => format!("list"),
|
||||
Value::Block(_) => format!("block"),
|
||||
Value::Error(_) => format!("error"),
|
||||
}
|
||||
}
|
||||
|
||||
@ -465,6 +470,7 @@ impl Value {
|
||||
.collect(),
|
||||
Value::Block(_) => vec![],
|
||||
Value::Table(_) => vec![],
|
||||
Value::Error(_) => vec![],
|
||||
}
|
||||
}
|
||||
|
||||
@ -503,7 +509,7 @@ impl Value {
|
||||
}
|
||||
}
|
||||
|
||||
Some(Tagged::from_item(current, tag))
|
||||
Some(current.tagged(tag))
|
||||
}
|
||||
|
||||
pub fn get_data_by_path(&self, tag: Tag, path: &str) -> Option<Tagged<&Value>> {
|
||||
@ -515,7 +521,7 @@ impl Value {
|
||||
}
|
||||
}
|
||||
|
||||
Some(Tagged::from_item(current, tag))
|
||||
Some(current.tagged(tag))
|
||||
}
|
||||
|
||||
pub fn insert_data_at_path(
|
||||
@ -535,8 +541,8 @@ impl Value {
|
||||
// Special case for inserting at the top level
|
||||
current
|
||||
.entries
|
||||
.insert(path.to_string(), Tagged::from_item(new_value, tag));
|
||||
return Some(Tagged::from_item(new_obj, tag));
|
||||
.insert(path.to_string(), new_value.tagged(&tag));
|
||||
return Some(new_obj.tagged(&tag));
|
||||
}
|
||||
|
||||
for idx in 0..split_path.len() {
|
||||
@ -547,13 +553,13 @@ impl Value {
|
||||
Value::Row(o) => {
|
||||
o.entries.insert(
|
||||
split_path[idx + 1].to_string(),
|
||||
Tagged::from_item(new_value, tag),
|
||||
new_value.tagged(&tag),
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
return Some(Tagged::from_item(new_obj, tag));
|
||||
return Some(new_obj.tagged(&tag));
|
||||
} else {
|
||||
match next.item {
|
||||
Value::Row(ref mut o) => {
|
||||
@ -584,11 +590,10 @@ impl Value {
|
||||
|
||||
if split_path.len() == 1 {
|
||||
// Special case for inserting at the top level
|
||||
current.entries.insert(
|
||||
split_path[0].item.clone(),
|
||||
Tagged::from_item(new_value, tag),
|
||||
);
|
||||
return Some(Tagged::from_item(new_obj, tag));
|
||||
current
|
||||
.entries
|
||||
.insert(split_path[0].item.clone(), new_value.tagged(&tag));
|
||||
return Some(new_obj.tagged(&tag));
|
||||
}
|
||||
|
||||
for idx in 0..split_path.len() {
|
||||
@ -599,13 +604,13 @@ impl Value {
|
||||
Value::Row(o) => {
|
||||
o.entries.insert(
|
||||
split_path[idx + 1].to_string(),
|
||||
Tagged::from_item(new_value, tag),
|
||||
new_value.tagged(&tag),
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
return Some(Tagged::from_item(new_obj, tag));
|
||||
return Some(new_obj.tagged(&tag));
|
||||
} else {
|
||||
match next.item {
|
||||
Value::Row(ref mut o) => {
|
||||
@ -639,8 +644,8 @@ impl Value {
|
||||
match current.entries.get_mut(split_path[idx]) {
|
||||
Some(next) => {
|
||||
if idx == (split_path.len() - 1) {
|
||||
*next = Tagged::from_item(replaced_value, tag);
|
||||
return Some(Tagged::from_item(new_obj, tag));
|
||||
*next = replaced_value.tagged(&tag);
|
||||
return Some(new_obj.tagged(&tag));
|
||||
} else {
|
||||
match next.item {
|
||||
Value::Row(ref mut o) => {
|
||||
@ -672,8 +677,8 @@ impl Value {
|
||||
match current.entries.get_mut(&split_path[idx].item) {
|
||||
Some(next) => {
|
||||
if idx == (split_path.len() - 1) {
|
||||
*next = Tagged::from_item(replaced_value, tag);
|
||||
return Some(Tagged::from_item(new_obj, tag));
|
||||
*next = replaced_value.tagged(&tag);
|
||||
return Some(new_obj.tagged(&tag));
|
||||
} else {
|
||||
match next.item {
|
||||
Value::Row(ref mut o) => {
|
||||
@ -697,6 +702,7 @@ impl Value {
|
||||
Value::Row(o) => o.get_data(desc),
|
||||
Value::Block(_) => MaybeOwned::Owned(Value::nothing()),
|
||||
Value::Table(_) => MaybeOwned::Owned(Value::nothing()),
|
||||
Value::Error(_) => MaybeOwned::Owned(Value::nothing()),
|
||||
}
|
||||
}
|
||||
|
||||
@ -706,7 +712,7 @@ impl Value {
|
||||
Value::Block(b) => itertools::join(
|
||||
b.expressions
|
||||
.iter()
|
||||
.map(|e| e.source(&b.source).to_string()),
|
||||
.map(|e| e.span.slice(&b.source).to_string()),
|
||||
"; ",
|
||||
),
|
||||
Value::Row(_) => format!("[table: 1 row]"),
|
||||
@ -715,6 +721,7 @@ impl Value {
|
||||
l.len(),
|
||||
if l.len() == 1 { "row" } else { "rows" }
|
||||
),
|
||||
Value::Error(_) => format!("[error]"),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -7,7 +7,7 @@ use std::ops::Deref;
|
||||
pub(crate) fn command_dict(command: Arc<Command>, tag: impl Into<Tag>) -> Tagged<Value> {
|
||||
let tag = tag.into();
|
||||
|
||||
let mut cmd_dict = TaggedDictBuilder::new(tag);
|
||||
let mut cmd_dict = TaggedDictBuilder::new(&tag);
|
||||
|
||||
cmd_dict.insert("name", Value::string(command.name()));
|
||||
|
||||
@ -42,7 +42,7 @@ fn for_spec(name: &str, ty: &str, required: bool, tag: impl Into<Tag>) -> Tagged
|
||||
|
||||
fn signature_dict(signature: Signature, tag: impl Into<Tag>) -> Tagged<Value> {
|
||||
let tag = tag.into();
|
||||
let mut sig = TaggedListBuilder::new(tag);
|
||||
let mut sig = TaggedListBuilder::new(&tag);
|
||||
|
||||
for arg in signature.positional.iter() {
|
||||
let is_required = match arg {
|
||||
@ -50,19 +50,19 @@ fn signature_dict(signature: Signature, tag: impl Into<Tag>) -> Tagged<Value> {
|
||||
PositionalType::Optional(_, _) => false,
|
||||
};
|
||||
|
||||
sig.insert_tagged(for_spec(arg.name(), "argument", is_required, tag));
|
||||
sig.insert_tagged(for_spec(arg.name(), "argument", is_required, &tag));
|
||||
}
|
||||
|
||||
if let Some(_) = signature.rest_positional {
|
||||
let is_required = false;
|
||||
sig.insert_tagged(for_spec("rest", "argument", is_required, tag));
|
||||
sig.insert_tagged(for_spec("rest", "argument", is_required, &tag));
|
||||
}
|
||||
|
||||
for (name, ty) in signature.named.iter() {
|
||||
match ty {
|
||||
NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, tag)),
|
||||
NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, tag)),
|
||||
NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, tag)),
|
||||
NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, &tag)),
|
||||
NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, &tag)),
|
||||
NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, &tag)),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -75,12 +75,12 @@ pub fn read(
|
||||
|
||||
let tag = tag.into();
|
||||
let contents = fs::read_to_string(filename)
|
||||
.map(|v| v.tagged(tag))
|
||||
.map(|v| v.tagged(&tag))
|
||||
.map_err(|err| {
|
||||
ShellError::labeled_error(
|
||||
&format!("Couldn't read config file:\n{}", err),
|
||||
"file name",
|
||||
tag,
|
||||
&tag,
|
||||
)
|
||||
})?;
|
||||
|
||||
@ -88,7 +88,7 @@ pub fn read(
|
||||
ShellError::labeled_error(
|
||||
&format!("Couldn't parse config file:\n{}", err),
|
||||
"file name",
|
||||
tag,
|
||||
&tag,
|
||||
)
|
||||
})?;
|
||||
|
||||
@ -98,7 +98,7 @@ pub fn read(
|
||||
Value::Row(Dictionary { entries }) => Ok(entries),
|
||||
other => Err(ShellError::type_error(
|
||||
"Dictionary",
|
||||
other.type_name().tagged(tag),
|
||||
other.type_name().tagged(&tag),
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
@ -115,7 +115,7 @@ impl TaggedListBuilder {
|
||||
}
|
||||
|
||||
pub fn push(&mut self, value: impl Into<Value>) {
|
||||
self.list.push(value.into().tagged(self.tag));
|
||||
self.list.push(value.into().tagged(&self.tag));
|
||||
}
|
||||
|
||||
pub fn insert_tagged(&mut self, value: impl Into<Tagged<Value>>) {
|
||||
@ -155,7 +155,7 @@ impl TaggedDictBuilder {
|
||||
}
|
||||
|
||||
pub fn insert(&mut self, key: impl Into<String>, value: impl Into<Value>) {
|
||||
self.dict.insert(key.into(), value.into().tagged(self.tag));
|
||||
self.dict.insert(key.into(), value.into().tagged(&self.tag));
|
||||
}
|
||||
|
||||
pub fn insert_tagged(&mut self, key: impl Into<String>, value: impl Into<Tagged<Value>>) {
|
||||
|
235 src/data/meta.rs
@@ -1,15 +1,52 @@
use crate::context::{AnchorLocation, SourceMap};
use crate::context::AnchorLocation;
use crate::parser::parse::parser::TracableContext;
use crate::prelude::*;
use crate::Text;
use derive_new::new;
use getset::Getters;
use serde::Deserialize;
use serde::Serialize;
use std::path::{Path, PathBuf};
use uuid::Uuid;

#[derive(new, Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub struct Spanned<T> {
    pub span: Span,
    pub item: T,
}

impl<T> Spanned<T> {
    pub fn map<U>(self, input: impl FnOnce(T) -> U) -> Spanned<U> {
        let span = self.span;

        let mapped = input(self.item);
        mapped.spanned(span)
    }
}

pub trait SpannedItem: Sized {
    fn spanned(self, span: impl Into<Span>) -> Spanned<Self> {
        Spanned {
            item: self,
            span: span.into(),
        }
    }

    fn spanned_unknown(self) -> Spanned<Self> {
        Spanned {
            item: self,
            span: Span::unknown(),
        }
    }
}
impl<T> SpannedItem for T {}

impl<T> std::ops::Deref for Spanned<T> {
    type Target = T;

    fn deref(&self) -> &T {
        &self.item
    }
}
#[derive(new, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub struct Tagged<T> {
    pub tag: Tag,
    pub item: T,
@@ -17,7 +54,7 @@ pub struct Tagged<T> {

impl<T> HasTag for Tagged<T> {
    fn tag(&self) -> Tag {
        self.tag
        self.tag.clone()
    }
}

@@ -29,20 +66,23 @@ impl AsRef<Path> for Tagged<PathBuf> {

pub trait TaggedItem: Sized {
    fn tagged(self, tag: impl Into<Tag>) -> Tagged<Self> {
        Tagged::from_item(self, tag.into())
        Tagged {
            item: self,
            tag: tag.into(),
        }
    }

    // For now, this is a temporary facility. In many cases, there are other useful spans that we
    // could be using, such as the original source spans of JSON or Toml files, but we don't yet
    // have the infrastructure to make that work.
    fn tagged_unknown(self) -> Tagged<Self> {
        Tagged::from_item(
            self,
            Tag {
        Tagged {
            item: self,
            tag: Tag {
                span: Span::unknown(),
                anchor: uuid::Uuid::nil(),
                anchor: None,
            },
        )
        }
    }
}

@ -57,48 +97,29 @@ impl<T> std::ops::Deref for Tagged<T> {
|
||||
}
|
||||
|
||||
impl<T> Tagged<T> {
|
||||
pub fn with_tag(self, tag: impl Into<Tag>) -> Tagged<T> {
|
||||
Tagged::from_item(self.item, tag)
|
||||
}
|
||||
|
||||
pub fn from_item(item: T, tag: impl Into<Tag>) -> Tagged<T> {
|
||||
Tagged {
|
||||
item,
|
||||
tag: tag.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn map<U>(self, input: impl FnOnce(T) -> U) -> Tagged<U> {
|
||||
let tag = self.tag();
|
||||
|
||||
let mapped = input(self.item);
|
||||
Tagged::from_item(mapped, tag)
|
||||
}
|
||||
|
||||
pub(crate) fn copy_tag<U>(&self, output: U) -> Tagged<U> {
|
||||
Tagged::from_item(output, self.tag())
|
||||
}
|
||||
|
||||
pub fn source(&self, source: &Text) -> Text {
|
||||
Text::from(self.tag().slice(source))
|
||||
mapped.tagged(tag)
|
||||
}
|
||||
|
||||
pub fn tag(&self) -> Tag {
|
||||
self.tag
|
||||
self.tag.clone()
|
||||
}
|
||||
|
||||
pub fn span(&self) -> Span {
|
||||
self.tag.span
|
||||
}
|
||||
|
||||
pub fn anchor(&self) -> uuid::Uuid {
|
||||
self.tag.anchor
|
||||
pub fn anchor(&self) -> Option<AnchorLocation> {
|
||||
self.tag.anchor.clone()
|
||||
}
|
||||
|
||||
pub fn anchor_name(&self, source_map: &SourceMap) -> Option<String> {
|
||||
match source_map.get(&self.tag.anchor) {
|
||||
Some(AnchorLocation::File(file)) => Some(file.clone()),
|
||||
Some(AnchorLocation::Url(url)) => Some(url.clone()),
|
||||
pub fn anchor_name(&self) -> Option<String> {
|
||||
match self.tag.anchor {
|
||||
Some(AnchorLocation::File(ref file)) => Some(file.clone()),
|
||||
Some(AnchorLocation::Url(ref url)) => Some(url.clone()),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
@ -114,26 +135,32 @@ impl<T> Tagged<T> {
|
||||
|
||||
impl From<&Tag> for Tag {
|
||||
fn from(input: &Tag) -> Tag {
|
||||
*input
|
||||
input.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<nom_locate::LocatedSpanEx<&str, Uuid>> for Span {
|
||||
fn from(input: nom_locate::LocatedSpanEx<&str, Uuid>) -> Span {
|
||||
impl From<nom_locate::LocatedSpanEx<&str, TracableContext>> for Span {
|
||||
fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Span {
|
||||
Span::new(input.offset, input.offset + input.fragment.len())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<nom_locate::LocatedSpanEx<&str, u64>> for Span {
|
||||
fn from(input: nom_locate::LocatedSpanEx<&str, u64>) -> Span {
|
||||
Span::new(input.offset, input.offset + input.fragment.len())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T>
|
||||
From<(
|
||||
nom_locate::LocatedSpanEx<T, Uuid>,
|
||||
nom_locate::LocatedSpanEx<T, Uuid>,
|
||||
nom_locate::LocatedSpanEx<T, u64>,
|
||||
nom_locate::LocatedSpanEx<T, u64>,
|
||||
)> for Span
|
||||
{
|
||||
fn from(
|
||||
input: (
|
||||
nom_locate::LocatedSpanEx<T, Uuid>,
|
||||
nom_locate::LocatedSpanEx<T, Uuid>,
|
||||
nom_locate::LocatedSpanEx<T, u64>,
|
||||
nom_locate::LocatedSpanEx<T, u64>,
|
||||
),
|
||||
) -> Span {
|
||||
Span {
|
||||
@@ -159,42 +186,48 @@ impl From<&std::ops::Range<usize>> for Span {
}

#[derive(
    Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters, new,
    Debug, Clone, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters, new,
)]
pub struct Tag {
    pub anchor: Uuid,
    pub anchor: Option<AnchorLocation>,
    pub span: Span,
}

impl From<Span> for Tag {
    fn from(span: Span) -> Self {
        Tag {
            anchor: uuid::Uuid::nil(),
            span,
        }
        Tag { anchor: None, span }
    }
}

impl From<&Span> for Tag {
    fn from(span: &Span) -> Self {
        Tag {
            anchor: uuid::Uuid::nil(),
            anchor: None,
            span: *span,
        }
    }
}

impl From<(usize, usize, TracableContext)> for Tag {
    fn from((start, end, context): (usize, usize, TracableContext)) -> Self {
    fn from((start, end, _context): (usize, usize, TracableContext)) -> Self {
        Tag {
            anchor: context.origin,
            anchor: None,
            span: Span::new(start, end),
        }
    }
}

impl From<(usize, usize, Uuid)> for Tag {
|
||||
fn from((start, end, anchor): (usize, usize, Uuid)) -> Self {
|
||||
impl From<(usize, usize, AnchorLocation)> for Tag {
|
||||
fn from((start, end, anchor): (usize, usize, AnchorLocation)) -> Self {
|
||||
Tag {
|
||||
anchor: Some(anchor),
|
||||
span: Span::new(start, end),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<(usize, usize, Option<AnchorLocation>)> for Tag {
|
||||
fn from((start, end, anchor): (usize, usize, Option<AnchorLocation>)) -> Self {
|
||||
Tag {
|
||||
anchor,
|
||||
span: Span::new(start, end),
|
||||
@ -202,19 +235,10 @@ impl From<(usize, usize, Uuid)> for Tag {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<(usize, usize, Option<Uuid>)> for Tag {
|
||||
fn from((start, end, anchor): (usize, usize, Option<Uuid>)) -> Self {
|
||||
Tag {
|
||||
anchor: anchor.unwrap_or(uuid::Uuid::nil()),
|
||||
span: Span::new(start, end),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<nom_locate::LocatedSpanEx<&str, TracableContext>> for Tag {
|
||||
fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Tag {
|
||||
Tag {
|
||||
anchor: input.extra.origin,
|
||||
anchor: None,
|
||||
span: Span::new(input.offset, input.offset + input.fragment.len()),
|
||||
}
|
||||
}
|
||||
@ -234,15 +258,12 @@ impl From<&Tag> for Span {
|
||||
|
||||
impl Tag {
|
||||
pub fn unknown_anchor(span: Span) -> Tag {
|
||||
Tag {
|
||||
anchor: uuid::Uuid::nil(),
|
||||
span,
|
||||
}
|
||||
Tag { anchor: None, span }
|
||||
}
|
||||
|
||||
pub fn for_char(pos: usize, anchor: Uuid) -> Tag {
|
||||
pub fn for_char(pos: usize, anchor: AnchorLocation) -> Tag {
|
||||
Tag {
|
||||
anchor,
|
||||
anchor: Some(anchor),
|
||||
span: Span {
|
||||
start: pos,
|
||||
end: pos + 1,
|
||||
@ -250,16 +271,16 @@ impl Tag {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unknown_span(anchor: Uuid) -> Tag {
|
||||
pub fn unknown_span(anchor: AnchorLocation) -> Tag {
|
||||
Tag {
|
||||
anchor,
|
||||
anchor: Some(anchor),
|
||||
span: Span::unknown(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn unknown() -> Tag {
|
||||
Tag {
|
||||
anchor: uuid::Uuid::nil(),
|
||||
anchor: None,
|
||||
span: Span::unknown(),
|
||||
}
|
||||
}
|
||||
@ -273,7 +294,7 @@ impl Tag {
|
||||
|
||||
Tag {
|
||||
span: Span::new(self.span.start, other.span.end),
|
||||
anchor: self.anchor,
|
||||
anchor: self.anchor.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
@ -288,10 +309,10 @@ impl Tag {
|
||||
|
||||
Tag {
|
||||
span: Span::new(self.span.start, other.span.end),
|
||||
anchor: self.anchor,
|
||||
anchor: self.anchor.clone(),
|
||||
}
|
||||
}
|
||||
None => *self,
|
||||
None => self.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
@ -360,6 +381,42 @@ impl Span {
|
||||
Span { start, end }
|
||||
}
|
||||
|
||||
pub fn for_char(pos: usize) -> Span {
|
||||
Span {
|
||||
start: pos,
|
||||
end: pos + 1,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn until(&self, other: impl Into<Span>) -> Span {
|
||||
let other = other.into();
|
||||
|
||||
Span::new(self.start, other.end)
|
||||
}
|
||||
|
||||
pub fn until_option(&self, other: Option<impl Into<Span>>) -> Span {
|
||||
match other {
|
||||
Some(other) => {
|
||||
let other = other.into();
|
||||
|
||||
Span::new(self.start, other.end)
|
||||
}
|
||||
None => *self,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn string<'a>(&self, source: &'a str) -> String {
|
||||
self.slice(source).to_string()
|
||||
}
|
||||
|
||||
pub fn spanned_slice<'a>(&self, source: &'a str) -> Spanned<&'a str> {
|
||||
self.slice(source).spanned(*self)
|
||||
}
|
||||
|
||||
pub fn spanned_string<'a>(&self, source: &'a str) -> Spanned<String> {
|
||||
self.slice(source).to_string().spanned(*self)
|
||||
}
|
||||
|
||||
/*
|
||||
pub fn unknown_with_uuid(uuid: Uuid) -> Span {
|
||||
Span {
|
||||
@ -404,27 +461,3 @@ impl language_reporting::ReportingSpan for Span {
|
||||
self.end
|
||||
}
|
||||
}
|
||||
|
||||
impl language_reporting::ReportingSpan for Tag {
|
||||
fn with_start(&self, start: usize) -> Self {
|
||||
Tag {
|
||||
span: Span::new(start, self.span.end),
|
||||
anchor: self.anchor,
|
||||
}
|
||||
}
|
||||
|
||||
fn with_end(&self, end: usize) -> Self {
|
||||
Tag {
|
||||
span: Span::new(self.span.start, end),
|
||||
anchor: self.anchor,
|
||||
}
|
||||
}
|
||||
|
||||
fn start(&self) -> usize {
|
||||
self.span.start
|
||||
}
|
||||
|
||||
fn end(&self) -> usize {
|
||||
self.span.end
|
||||
}
|
||||
}
|
||||
|
@@ -54,7 +54,7 @@ impl ExtractType for i64 {
            &Tagged {
                item: Value::Primitive(Primitive::Int(int)),
                ..
            } => Ok(int.tagged(value.tag).coerce_into("converting to i64")?),
            } => Ok(int.tagged(&value.tag).coerce_into("converting to i64")?),
            other => Err(ShellError::type_error("Integer", other.tagged_type_name())),
        }
    }
@@ -68,7 +68,7 @@ impl ExtractType for u64 {
            &Tagged {
                item: Value::Primitive(Primitive::Int(int)),
                ..
            } => Ok(int.tagged(value.tag).coerce_into("converting to u64")?),
            } => Ok(int.tagged(&value.tag).coerce_into("converting to u64")?),
            other => Err(ShellError::type_error("Integer", other.tagged_type_name())),
        }
    }
@ -14,9 +14,9 @@ pub enum Description {
|
||||
}
|
||||
|
||||
impl Description {
|
||||
fn into_label(self) -> Result<Label<Tag>, String> {
|
||||
fn into_label(self) -> Result<Label<Span>, String> {
|
||||
match self {
|
||||
Description::Source(s) => Ok(Label::new_primary(s.tag()).with_message(s.item)),
|
||||
Description::Source(s) => Ok(Label::new_primary(s.span()).with_message(s.item)),
|
||||
Description::Synthetic(s) => Err(s),
|
||||
}
|
||||
}
|
||||
@ -24,7 +24,7 @@ impl Description {
|
||||
#[allow(unused)]
|
||||
fn tag(&self) -> Tag {
|
||||
match self {
|
||||
Description::Source(tagged) => tagged.tag,
|
||||
Description::Source(tagged) => tagged.tag.clone(),
|
||||
Description::Synthetic(_) => Tag::unknown(),
|
||||
}
|
||||
}
|
||||
@ -85,10 +85,10 @@ impl ShellError {
|
||||
.start()
|
||||
}
|
||||
|
||||
pub(crate) fn unexpected_eof(expected: impl Into<String>, tag: Tag) -> ShellError {
|
||||
pub(crate) fn unexpected_eof(expected: impl Into<String>, tag: impl Into<Tag>) -> ShellError {
|
||||
ProximateShellError::UnexpectedEof {
|
||||
expected: expected.into(),
|
||||
tag,
|
||||
tag: tag.into(),
|
||||
}
|
||||
.start()
|
||||
}
|
||||
@ -100,7 +100,7 @@ impl ShellError {
|
||||
) -> ShellError {
|
||||
ProximateShellError::RangeError {
|
||||
kind: expected.into(),
|
||||
actual_kind: actual.copy_tag(format!("{:?}", actual.item)),
|
||||
actual_kind: format!("{:?}", actual.item).tagged(actual.tag()),
|
||||
operation,
|
||||
}
|
||||
.start()
|
||||
@ -143,22 +143,22 @@ impl ShellError {
|
||||
pub(crate) fn argument_error(
|
||||
command: impl Into<String>,
|
||||
kind: ArgumentError,
|
||||
tag: Tag,
|
||||
tag: impl Into<Tag>,
|
||||
) -> ShellError {
|
||||
ProximateShellError::ArgumentError {
|
||||
command: command.into(),
|
||||
error: kind,
|
||||
tag,
|
||||
tag: tag.into(),
|
||||
}
|
||||
.start()
|
||||
}
|
||||
|
||||
pub(crate) fn invalid_external_word(tag: Tag) -> ShellError {
|
||||
pub(crate) fn invalid_external_word(tag: impl Into<Tag>) -> ShellError {
|
||||
ProximateShellError::ArgumentError {
|
||||
command: "Invalid argument to Nu command (did you mean to call an external command?)"
|
||||
.into(),
|
||||
error: ArgumentError::InvalidExternalWord,
|
||||
tag,
|
||||
tag: tag.into(),
|
||||
}
|
||||
.start()
|
||||
}
|
||||
@ -183,22 +183,22 @@ impl ShellError {
|
||||
}
|
||||
nom::Err::Failure(span) | nom::Err::Error(span) => {
|
||||
let diagnostic = Diagnostic::new(Severity::Error, format!("Parse Error"))
|
||||
.with_label(Label::new_primary(Tag::from(span.0)));
|
||||
.with_label(Label::new_primary(Span::from(span.0)));
|
||||
|
||||
ShellError::diagnostic(diagnostic)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn diagnostic(diagnostic: Diagnostic<Tag>) -> ShellError {
|
||||
pub(crate) fn diagnostic(diagnostic: Diagnostic<Span>) -> ShellError {
|
||||
ProximateShellError::Diagnostic(ShellDiagnostic { diagnostic }).start()
|
||||
}
|
||||
|
||||
pub(crate) fn to_diagnostic(self) -> Diagnostic<Tag> {
|
||||
pub(crate) fn to_diagnostic(self) -> Diagnostic<Span> {
|
||||
match self.error {
|
||||
ProximateShellError::InvalidCommand { command } => {
|
||||
Diagnostic::new(Severity::Error, "Invalid command")
|
||||
.with_label(Label::new_primary(command))
|
||||
.with_label(Label::new_primary(command.span))
|
||||
}
|
||||
ProximateShellError::MissingValue { tag, reason } => {
|
||||
let mut d = Diagnostic::new(
|
||||
@ -207,7 +207,7 @@ impl ShellError {
|
||||
);
|
||||
|
||||
if let Some(tag) = tag {
|
||||
d = d.with_label(Label::new_primary(tag));
|
||||
d = d.with_label(Label::new_primary(tag.span));
|
||||
}
|
||||
|
||||
d
|
||||
@ -220,7 +220,7 @@ impl ShellError {
|
||||
ArgumentError::InvalidExternalWord => Diagnostic::new(
|
||||
Severity::Error,
|
||||
format!("Invalid bare word for Nu command (did you intend to invoke an external command?)"))
|
||||
.with_label(Label::new_primary(tag)),
|
||||
.with_label(Label::new_primary(tag.span)),
|
||||
ArgumentError::MissingMandatoryFlag(name) => Diagnostic::new(
|
||||
Severity::Error,
|
||||
format!(
|
||||
@ -230,7 +230,7 @@ impl ShellError {
|
||||
Color::Black.bold().paint(name)
|
||||
),
|
||||
)
|
||||
.with_label(Label::new_primary(tag)),
|
||||
.with_label(Label::new_primary(tag.span)),
|
||||
ArgumentError::MissingMandatoryPositional(name) => Diagnostic::new(
|
||||
Severity::Error,
|
||||
format!(
|
||||
@ -240,7 +240,7 @@ impl ShellError {
|
||||
),
|
||||
)
|
||||
.with_label(
|
||||
Label::new_primary(tag).with_message(format!("requires {} parameter", name)),
|
||||
Label::new_primary(tag.span).with_message(format!("requires {} parameter", name)),
|
||||
),
|
||||
ArgumentError::MissingValueForName(name) => Diagnostic::new(
|
||||
Severity::Error,
|
||||
@ -251,7 +251,7 @@ impl ShellError {
|
||||
Color::Black.bold().paint(name)
|
||||
),
|
||||
)
|
||||
.with_label(Label::new_primary(tag)),
|
||||
.with_label(Label::new_primary(tag.span)),
|
||||
},
|
||||
ProximateShellError::TypeError {
|
||||
expected,
|
||||
@ -261,7 +261,7 @@ impl ShellError {
|
||||
tag,
|
||||
},
|
||||
} => Diagnostic::new(Severity::Error, "Type Error").with_label(
|
||||
Label::new_primary(tag)
|
||||
Label::new_primary(tag.span)
|
||||
.with_message(format!("Expected {}, found {}", expected, actual)),
|
||||
),
|
||||
ProximateShellError::TypeError {
|
||||
@ -272,12 +272,12 @@ impl ShellError {
|
||||
tag
|
||||
},
|
||||
} => Diagnostic::new(Severity::Error, "Type Error")
|
||||
.with_label(Label::new_primary(tag).with_message(expected)),
|
||||
.with_label(Label::new_primary(tag.span).with_message(expected)),
|
||||
|
||||
ProximateShellError::UnexpectedEof {
|
||||
expected, tag
|
||||
} => Diagnostic::new(Severity::Error, format!("Unexpected end of input"))
|
||||
.with_label(Label::new_primary(tag).with_message(format!("Expected {}", expected))),
|
||||
.with_label(Label::new_primary(tag.span).with_message(format!("Expected {}", expected))),
|
||||
|
||||
ProximateShellError::RangeError {
|
||||
kind,
|
||||
@ -288,7 +288,7 @@ impl ShellError {
|
||||
tag
|
||||
},
|
||||
} => Diagnostic::new(Severity::Error, "Range Error").with_label(
|
||||
Label::new_primary(tag).with_message(format!(
|
||||
Label::new_primary(tag.span).with_message(format!(
|
||||
"Expected to convert {} to {} while {}, but it was out of range",
|
||||
item,
|
||||
kind.desc(),
|
||||
@ -303,7 +303,7 @@ impl ShellError {
|
||||
item
|
||||
},
|
||||
} => Diagnostic::new(Severity::Error, "Syntax Error")
|
||||
.with_label(Label::new_primary(tag).with_message(item)),
|
||||
.with_label(Label::new_primary(tag.span).with_message(item)),
|
||||
|
||||
ProximateShellError::MissingProperty { subpath, expr, .. } => {
|
||||
let subpath = subpath.into_label();
|
||||
@ -326,8 +326,8 @@ impl ShellError {
|
||||
ProximateShellError::Diagnostic(diag) => diag.diagnostic,
|
||||
ProximateShellError::CoerceError { left, right } => {
|
||||
Diagnostic::new(Severity::Error, "Coercion error")
|
||||
.with_label(Label::new_primary(left.tag()).with_message(left.item))
|
||||
.with_label(Label::new_secondary(right.tag()).with_message(right.item))
|
||||
.with_label(Label::new_primary(left.tag().span).with_message(left.item))
|
||||
.with_label(Label::new_secondary(right.tag().span).with_message(right.item))
|
||||
}
|
||||
|
||||
ProximateShellError::UntaggedRuntimeError { reason } => Diagnostic::new(Severity::Error, format!("Error: {}", reason))
|
||||
@ -341,7 +341,7 @@ impl ShellError {
|
||||
) -> ShellError {
|
||||
ShellError::diagnostic(
|
||||
Diagnostic::new(Severity::Error, msg.into())
|
||||
.with_label(Label::new_primary(tag.into()).with_message(label.into())),
|
||||
.with_label(Label::new_primary(tag.into().span).with_message(label.into())),
|
||||
)
|
||||
}
|
||||
|
||||
@ -355,15 +355,19 @@ impl ShellError {
|
||||
ShellError::diagnostic(
|
||||
Diagnostic::new_error(msg.into())
|
||||
.with_label(
|
||||
Label::new_primary(primary_span.into()).with_message(primary_label.into()),
|
||||
Label::new_primary(primary_span.into().span).with_message(primary_label.into()),
|
||||
)
|
||||
.with_label(
|
||||
Label::new_secondary(secondary_span.into())
|
||||
Label::new_secondary(secondary_span.into().span)
|
||||
.with_message(secondary_label.into()),
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
// pub fn string(title: impl Into<String>) -> ShellError {
|
||||
// ProximateShellError::String(StringError::new(title.into(), String::new())).start()
|
||||
// }
|
||||
|
||||
pub(crate) fn unimplemented(title: impl Into<String>) -> ShellError {
|
||||
ShellError::untagged_runtime_error(&format!("Unimplemented: {}", title.into()))
|
||||
}
|
||||
@ -472,16 +476,16 @@ impl ProximateShellError {
|
||||
pub(crate) fn tag(&self) -> Option<Tag> {
|
||||
Some(match self {
|
||||
ProximateShellError::SyntaxError { problem } => problem.tag(),
|
||||
ProximateShellError::UnexpectedEof { tag, .. } => *tag,
|
||||
ProximateShellError::InvalidCommand { command } => *command,
|
||||
ProximateShellError::TypeError { actual, .. } => actual.tag,
|
||||
ProximateShellError::MissingProperty { tag, .. } => *tag,
|
||||
ProximateShellError::MissingValue { tag, .. } => return *tag,
|
||||
ProximateShellError::ArgumentError { tag, .. } => *tag,
|
||||
ProximateShellError::RangeError { actual_kind, .. } => actual_kind.tag,
|
||||
ProximateShellError::UnexpectedEof { tag, .. } => tag.clone(),
|
||||
ProximateShellError::InvalidCommand { command } => command.clone(),
|
||||
ProximateShellError::TypeError { actual, .. } => actual.tag.clone(),
|
||||
ProximateShellError::MissingProperty { tag, .. } => tag.clone(),
|
||||
ProximateShellError::MissingValue { tag, .. } => return tag.clone(),
|
||||
ProximateShellError::ArgumentError { tag, .. } => tag.clone(),
|
||||
ProximateShellError::RangeError { actual_kind, .. } => actual_kind.tag.clone(),
|
||||
ProximateShellError::Diagnostic(..) => return None,
|
||||
ProximateShellError::UntaggedRuntimeError { .. } => return None,
|
||||
ProximateShellError::CoerceError { left, right } => left.tag.until(right.tag),
|
||||
ProximateShellError::CoerceError { left, right } => left.tag.until(&right.tag),
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -495,7 +499,7 @@ impl ToDebug for ProximateShellError {
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ShellDiagnostic {
|
||||
pub(crate) diagnostic: Diagnostic<Tag>,
|
||||
pub(crate) diagnostic: Diagnostic<Span>,
|
||||
}
|
||||
|
||||
impl PartialEq for ShellDiagnostic {
|
||||
@ -521,7 +525,7 @@ impl std::cmp::Ord for ShellDiagnostic {
|
||||
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, new, Clone, Serialize, Deserialize)]
|
||||
pub struct StringError {
|
||||
title: String,
|
||||
error: Value,
|
||||
error: String,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ShellError {
|
||||
@ -598,7 +602,6 @@ impl<T> ShellErrorUtils<Tagged<T>> for Option<Tagged<T>> {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub trait CoerceInto<U> {
|
||||
fn coerce_into(self, operation: impl Into<String>) -> Result<U, ShellError>;
|
||||
}
|
||||
|
@ -48,19 +48,23 @@ pub(crate) fn evaluate_baseline_expr(
|
||||
scope: &Scope,
|
||||
source: &Text,
|
||||
) -> Result<Tagged<Value>, ShellError> {
|
||||
let tag = Tag {
|
||||
span: expr.span,
|
||||
anchor: None,
|
||||
};
|
||||
match &expr.item {
|
||||
RawExpression::Literal(literal) => Ok(evaluate_literal(expr.copy_tag(literal), source)),
|
||||
RawExpression::Literal(literal) => Ok(evaluate_literal(literal.tagged(tag), source)),
|
||||
RawExpression::ExternalWord => Err(ShellError::argument_error(
|
||||
"Invalid external word",
|
||||
ArgumentError::InvalidExternalWord,
|
||||
expr.tag(),
|
||||
tag,
|
||||
)),
|
||||
RawExpression::FilePath(path) => Ok(Value::path(path.clone()).tagged(expr.tag())),
|
||||
RawExpression::FilePath(path) => Ok(Value::path(path.clone()).tagged(tag)),
|
||||
RawExpression::Synthetic(hir::Synthetic::String(s)) => {
|
||||
Ok(Value::string(s).tagged_unknown())
|
||||
}
|
||||
RawExpression::Variable(var) => evaluate_reference(var, scope, source, expr.tag()),
|
||||
RawExpression::Command(_) => evaluate_command(expr.tag(), scope, source),
|
||||
RawExpression::Variable(var) => evaluate_reference(var, scope, source, tag),
|
||||
RawExpression::Command(_) => evaluate_command(tag, scope, source),
|
||||
RawExpression::ExternalCommand(external) => evaluate_external(external, scope, source),
|
||||
RawExpression::Binary(binary) => {
|
||||
let left = evaluate_baseline_expr(binary.left(), registry, scope, source)?;
|
||||
@ -69,10 +73,16 @@ pub(crate) fn evaluate_baseline_expr(
|
||||
trace!("left={:?} right={:?}", left.item, right.item);
|
||||
|
||||
match left.compare(binary.op(), &*right) {
|
||||
Ok(result) => Ok(Value::boolean(result).tagged(expr.tag())),
|
||||
Ok(result) => Ok(Value::boolean(result).tagged(tag)),
|
||||
Err((left_type, right_type)) => Err(ShellError::coerce_error(
|
||||
binary.left().copy_tag(left_type),
|
||||
binary.right().copy_tag(right_type),
|
||||
left_type.tagged(Tag {
|
||||
span: binary.left().span,
|
||||
anchor: None,
|
||||
}),
|
||||
right_type.tagged(Tag {
|
||||
span: binary.right().span,
|
||||
anchor: None,
|
||||
}),
|
||||
)),
|
||||
}
|
||||
}
|
||||
@ -84,13 +94,10 @@ pub(crate) fn evaluate_baseline_expr(
|
||||
exprs.push(expr);
|
||||
}
|
||||
|
||||
Ok(Value::Table(exprs).tagged(expr.tag()))
|
||||
Ok(Value::Table(exprs).tagged(tag))
|
||||
}
|
||||
RawExpression::Block(block) => {
|
||||
Ok(
|
||||
Value::Block(Block::new(block.clone(), source.clone(), expr.tag()))
|
||||
.tagged(expr.tag()),
|
||||
)
|
||||
Ok(Value::Block(Block::new(block.clone(), source.clone(), tag.clone())).tagged(&tag))
|
||||
}
|
||||
RawExpression::Path(path) => {
|
||||
let value = evaluate_baseline_expr(path.head(), registry, scope, source)?;
|
||||
@ -113,16 +120,16 @@ pub(crate) fn evaluate_baseline_expr(
|
||||
return Err(ShellError::labeled_error(
|
||||
"Unknown column",
|
||||
format!("did you mean '{}'?", possible_matches[0].1),
|
||||
expr.tag(),
|
||||
&tag,
|
||||
));
|
||||
}
|
||||
Some(next) => {
|
||||
item = next.clone().item.tagged(expr.tag());
|
||||
item = next.clone().item.tagged(&tag);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
Ok(item.item().clone().tagged(expr.tag()))
|
||||
Ok(item.item().clone().tagged(tag))
|
||||
}
|
||||
RawExpression::Boolean(_boolean) => unimplemented!(),
|
||||
}
|
||||
|
@@ -14,7 +14,7 @@ impl RenderView for GenericView<'_> {
        match self.value {
            Value::Primitive(p) => Ok(host.stdout(&p.format(None))),
            Value::Table(l) => {
                let view = TableView::from_list(l);
                let view = TableView::from_list(l, 0);

                if let Some(view) = view {
                    view.render_view(host)?;
@@ -35,6 +35,8 @@ impl RenderView for GenericView<'_> {
                view.render_view(host)?;
                Ok(())
            }

            Value::Error(e) => Err(e.clone()),
        }
    }
}

@@ -34,7 +34,7 @@ impl TableView {
        ret
    }

    pub fn from_list(values: &[Tagged<Value>]) -> Option<TableView> {
    pub fn from_list(values: &[Tagged<Value>], starting_idx: usize) -> Option<TableView> {
        if values.len() == 0 {
            return None;
        }
@@ -68,7 +68,7 @@ impl TableView {

        if values.len() > 1 {
            // Indices are black, bold, right-aligned:
            row.insert(0, (format!("{}", idx.to_string()), "Fdbr"));
            row.insert(0, (format!("{}", (starting_idx + idx).to_string()), "Fdbr"));
        }

        entries.push(row);
@@ -1,4 +1,4 @@
#![recursion_limit = "512"]
#![recursion_limit = "1024"]

#[macro_use]
mod prelude;
@@ -21,7 +21,7 @@ mod traits;
mod utils;

pub use crate::commands::command::{CallInfo, ReturnSuccess, ReturnValue};
pub use crate::context::{AnchorLocation, SourceMap};
pub use crate::context::AnchorLocation;
pub use crate::env::host::BasicHost;
pub use crate::parser::hir::SyntaxShape;
pub use crate::parser::parse::token_tree_builder::TokenTreeBuilder;
@@ -31,7 +31,7 @@ pub use cli::cli;
pub use data::base::{Primitive, Value};
pub use data::config::{config_path, APP_INFO};
pub use data::dict::{Dictionary, TaggedDictBuilder};
pub use data::meta::{Span, Tag, Tagged, TaggedItem};
pub use data::meta::{Span, Spanned, SpannedItem, Tag, Tagged, TaggedItem};
pub use errors::{CoerceInto, ShellError};
pub use num_traits::cast::ToPrimitive;
pub use parser::parse::text::Text;

@@ -21,10 +21,10 @@ pub(crate) use parse::tokens::{RawNumber, RawToken};
pub(crate) use parse::unit::Unit;
pub(crate) use registry::CommandRegistry;

pub fn parse(input: &str, anchor: uuid::Uuid) -> Result<TokenNode, ShellError> {
pub fn parse(input: &str) -> Result<TokenNode, ShellError> {
    let _ = pretty_env_logger::try_init();

    match pipeline(nom_input(input, anchor)) {
    match pipeline(nom_input(input)) {
        Ok((_rest, val)) => Ok(val),
        Err(err) => Err(ShellError::parse_error(err)),
    }

@@ -52,7 +52,7 @@ impl<'de> ConfigDeserializer<'de> {

        self.stack.push(DeserializerItem {
            key_struct_field: Some((name.to_string(), name)),
            val: value.unwrap_or_else(|| Value::nothing().tagged(self.call.name_tag)),
            val: value.unwrap_or_else(|| Value::nothing().tagged(&self.call.name_tag)),
        });

        Ok(())
@@ -86,7 +86,7 @@ pub enum RawExpression {

    FilePath(PathBuf),
    ExternalCommand(ExternalCommand),
    Command(Tag),
    Command(Span),

    Boolean(bool),
}
@@ -123,14 +123,14 @@ impl RawExpression {
    }
}

pub type Expression = Tagged<RawExpression>;
pub type Expression = Spanned<RawExpression>;

impl std::fmt::Display for Expression {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let span = self.tag.span;
        let span = self.span;

        match &self.item {
            RawExpression::Literal(literal) => write!(f, "{}", literal.tagged(self.tag)),
            RawExpression::Literal(literal) => write!(f, "{}", literal.tagged(self.span)),
            RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{}", s),
            RawExpression::Command(_) => write!(f, "Command{{ {}..{} }}", span.start(), span.end()),
            RawExpression::ExternalWord => {
@ -159,97 +159,97 @@ impl std::fmt::Display for Expression {
|
||||
}
|
||||
|
||||
impl Expression {
|
||||
pub(crate) fn number(i: impl Into<Number>, tag: impl Into<Tag>) -> Expression {
|
||||
RawExpression::Literal(Literal::Number(i.into())).tagged(tag.into())
|
||||
pub(crate) fn number(i: impl Into<Number>, span: impl Into<Span>) -> Expression {
|
||||
RawExpression::Literal(Literal::Number(i.into())).spanned(span.into())
|
||||
}
|
||||
|
||||
pub(crate) fn size(
|
||||
i: impl Into<Number>,
|
||||
unit: impl Into<Unit>,
|
||||
tag: impl Into<Tag>,
|
||||
span: impl Into<Span>,
|
||||
) -> Expression {
|
||||
RawExpression::Literal(Literal::Size(i.into(), unit.into())).tagged(tag.into())
|
||||
RawExpression::Literal(Literal::Size(i.into(), unit.into())).spanned(span.into())
|
||||
}
|
||||
|
||||
pub(crate) fn synthetic_string(s: impl Into<String>) -> Expression {
|
||||
RawExpression::Synthetic(Synthetic::String(s.into())).tagged_unknown()
|
||||
RawExpression::Synthetic(Synthetic::String(s.into())).spanned_unknown()
|
||||
}
|
||||
|
||||
pub(crate) fn string(inner: impl Into<Tag>, outer: impl Into<Tag>) -> Expression {
|
||||
RawExpression::Literal(Literal::String(inner.into())).tagged(outer.into())
|
||||
pub(crate) fn string(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
|
||||
RawExpression::Literal(Literal::String(inner.into())).spanned(outer.into())
|
||||
}
|
||||
|
||||
pub(crate) fn path(
|
||||
head: Expression,
|
||||
tail: Vec<Tagged<impl Into<String>>>,
|
||||
tag: impl Into<Tag>,
|
||||
tail: Vec<Spanned<impl Into<String>>>,
|
||||
span: impl Into<Span>,
|
||||
) -> Expression {
|
||||
let tail = tail.into_iter().map(|t| t.map(|s| s.into())).collect();
|
||||
RawExpression::Path(Box::new(Path::new(head, tail))).tagged(tag.into())
|
||||
RawExpression::Path(Box::new(Path::new(head, tail))).spanned(span.into())
|
||||
}
|
||||
|
||||
pub(crate) fn dot_member(head: Expression, next: Tagged<impl Into<String>>) -> Expression {
|
||||
let Tagged { item, tag } = head;
|
||||
let new_tag = head.tag.until(next.tag);
|
||||
pub(crate) fn dot_member(head: Expression, next: Spanned<impl Into<String>>) -> Expression {
|
||||
let Spanned { item, span } = head;
|
||||
let new_span = head.span.until(next.span);
|
||||
|
||||
match item {
|
||||
RawExpression::Path(path) => {
|
||||
let (head, mut tail) = path.parts();
|
||||
|
||||
tail.push(next.map(|i| i.into()));
|
||||
Expression::path(head, tail, new_tag)
|
||||
Expression::path(head, tail, new_span)
|
||||
}
|
||||
|
||||
other => Expression::path(other.tagged(tag), vec![next], new_tag),
|
||||
other => Expression::path(other.spanned(span), vec![next], new_span),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn infix(
|
||||
left: Expression,
|
||||
op: Tagged<impl Into<Operator>>,
|
||||
op: Spanned<impl Into<Operator>>,
|
||||
right: Expression,
|
||||
) -> Expression {
|
||||
let new_tag = left.tag.until(right.tag);
|
||||
let new_span = left.span.until(right.span);
|
||||
|
||||
RawExpression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right)))
|
||||
.tagged(new_tag)
|
||||
.spanned(new_span)
|
||||
}
|
||||
|
||||
pub(crate) fn file_path(path: impl Into<PathBuf>, outer: impl Into<Tag>) -> Expression {
|
||||
RawExpression::FilePath(path.into()).tagged(outer)
|
||||
pub(crate) fn file_path(path: impl Into<PathBuf>, outer: impl Into<Span>) -> Expression {
|
||||
RawExpression::FilePath(path.into()).spanned(outer)
|
||||
}
|
||||
|
||||
pub(crate) fn list(list: Vec<Expression>, tag: impl Into<Tag>) -> Expression {
|
||||
RawExpression::List(list).tagged(tag)
|
||||
pub(crate) fn list(list: Vec<Expression>, span: impl Into<Span>) -> Expression {
|
||||
RawExpression::List(list).spanned(span)
|
||||
}
|
||||
|
||||
pub(crate) fn bare(tag: impl Into<Tag>) -> Expression {
|
||||
RawExpression::Literal(Literal::Bare).tagged(tag)
|
||||
pub(crate) fn bare(span: impl Into<Span>) -> Expression {
|
||||
RawExpression::Literal(Literal::Bare).spanned(span)
|
||||
}
|
||||
|
||||
pub(crate) fn pattern(tag: impl Into<Tag>) -> Expression {
|
||||
RawExpression::Literal(Literal::GlobPattern).tagged(tag.into())
|
||||
pub(crate) fn pattern(span: impl Into<Span>) -> Expression {
|
||||
RawExpression::Literal(Literal::GlobPattern).spanned(span.into())
|
||||
}
|
||||
|
||||
pub(crate) fn variable(inner: impl Into<Tag>, outer: impl Into<Tag>) -> Expression {
|
||||
RawExpression::Variable(Variable::Other(inner.into())).tagged(outer)
|
||||
pub(crate) fn variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
|
||||
RawExpression::Variable(Variable::Other(inner.into())).spanned(outer)
|
||||
}
|
||||
|
||||
pub(crate) fn external_command(inner: impl Into<Tag>, outer: impl Into<Tag>) -> Expression {
|
||||
RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).tagged(outer)
|
||||
pub(crate) fn external_command(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
|
||||
RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).spanned(outer)
|
||||
}
|
||||
|
||||
pub(crate) fn it_variable(inner: impl Into<Tag>, outer: impl Into<Tag>) -> Expression {
|
||||
RawExpression::Variable(Variable::It(inner.into())).tagged(outer)
|
||||
pub(crate) fn it_variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
|
||||
RawExpression::Variable(Variable::It(inner.into())).spanned(outer)
|
||||
}
|
||||
}
|
||||
|
||||
impl ToDebug for Expression {
|
||||
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
||||
match self.item() {
|
||||
RawExpression::Literal(l) => l.tagged(self.tag()).fmt_debug(f, source),
|
||||
match &self.item {
|
||||
RawExpression::Literal(l) => l.spanned(self.span).fmt_debug(f, source),
|
||||
RawExpression::FilePath(p) => write!(f, "{}", p.display()),
|
||||
RawExpression::ExternalWord => write!(f, "{}", self.tag().slice(source)),
|
||||
RawExpression::ExternalWord => write!(f, "{}", self.span.slice(source)),
|
||||
RawExpression::Command(tag) => write!(f, "{}", tag.slice(source)),
|
||||
RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{:?}", s),
|
||||
RawExpression::Variable(Variable::It(_)) => write!(f, "$it"),
|
||||
@ -281,8 +281,8 @@ impl ToDebug for Expression {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Tagged<Path>> for Expression {
|
||||
fn from(path: Tagged<Path>) -> Expression {
|
||||
impl From<Spanned<Path>> for Expression {
|
||||
fn from(path: Spanned<Path>) -> Expression {
|
||||
path.map(|p| RawExpression::Path(Box::new(p)))
|
||||
}
|
||||
}
|
||||
@ -296,14 +296,14 @@ impl From<Tagged<Path>> for Expression {
|
||||
pub enum Literal {
|
||||
Number(Number),
|
||||
Size(Number, Unit),
|
||||
String(Tag),
|
||||
String(Span),
|
||||
GlobPattern,
|
||||
Bare,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Tagged<Literal> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", Tagged::new(self.tag, &self.item))
|
||||
write!(f, "{}", Tagged::new(self.tag.clone(), &self.item))
|
||||
}
|
||||
}
|
||||
|
||||
@ -321,14 +321,14 @@ impl std::fmt::Display for Tagged<&Literal> {
|
||||
}
|
||||
}
|
||||
|
||||
impl ToDebug for Tagged<&Literal> {
|
||||
impl ToDebug for Spanned<&Literal> {
|
||||
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
|
||||
match self.item() {
|
||||
Literal::Number(number) => write!(f, "{:?}", *number),
|
||||
match self.item {
|
||||
Literal::Number(number) => write!(f, "{:?}", number),
|
||||
Literal::Size(number, unit) => write!(f, "{:?}{:?}", *number, unit),
|
||||
Literal::String(tag) => write!(f, "{}", tag.slice(source)),
|
||||
Literal::GlobPattern => write!(f, "{}", self.tag().slice(source)),
|
||||
Literal::Bare => write!(f, "{}", self.tag().slice(source)),
|
||||
Literal::GlobPattern => write!(f, "{}", self.span.slice(source)),
|
||||
Literal::Bare => write!(f, "{}", self.span.slice(source)),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -347,15 +347,15 @@ impl Literal {
|
||||
|
||||
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
|
||||
pub enum Variable {
|
||||
It(Tag),
|
||||
Other(Tag),
|
||||
It(Span),
|
||||
Other(Span),
|
||||
}
|
||||
|
||||
impl std::fmt::Display for Variable {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
Variable::It(_) => write!(f, "$it"),
|
||||
Variable::Other(tag) => write!(f, "${{ {}..{} }}", tag.span.start(), tag.span.end()),
|
||||
Variable::Other(span) => write!(f, "${{ {}..{} }}", span.start(), span.end()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -6,15 +6,14 @@ use crate::parser::hir::syntax_shape::*;
|
||||
use crate::parser::hir::TokensIterator;
|
||||
use crate::parser::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b};
|
||||
use crate::parser::TokenNode;
|
||||
use crate::{Span, Tag, Tagged, TaggedItem, Text};
|
||||
use crate::{Span, SpannedItem, Tag, Tagged, Text};
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::fmt::Debug;
|
||||
use uuid::Uuid;
|
||||
|
||||
#[test]
|
||||
fn test_parse_string() {
|
||||
parse_tokens(StringShape, vec![b::string("hello")], |tokens| {
|
||||
hir::Expression::string(inner_string_tag(tokens[0].tag()), tokens[0].tag())
|
||||
hir::Expression::string(inner_string_span(tokens[0].span()), tokens[0].span())
|
||||
});
|
||||
}
|
||||
|
||||
@ -28,7 +27,7 @@ fn test_parse_path() {
|
||||
let bare = tokens[2].expect_bare();
|
||||
hir::Expression::path(
|
||||
hir::Expression::it_variable(inner_var, outer_var),
|
||||
vec!["cpu".tagged(bare)],
|
||||
vec!["cpu".spanned(bare)],
|
||||
outer_var.until(bare),
|
||||
)
|
||||
},
|
||||
@ -50,7 +49,7 @@ fn test_parse_path() {
|
||||
|
||||
hir::Expression::path(
|
||||
hir::Expression::variable(inner_var, outer_var),
|
||||
vec!["amount".tagged(amount), "max ghz".tagged(outer_max_ghz)],
|
||||
vec!["amount".spanned(amount), "max ghz".spanned(outer_max_ghz)],
|
||||
outer_var.until(outer_max_ghz),
|
||||
)
|
||||
},
|
||||
@ -64,13 +63,16 @@ fn test_parse_command() {
|
||||
vec![b::bare("ls"), b::sp(), b::pattern("*.txt")],
|
||||
|tokens| {
|
||||
let bare = tokens[0].expect_bare();
|
||||
let pat = tokens[2].tag();
|
||||
let pat = tokens[2].span();
|
||||
|
||||
ClassifiedCommand::Internal(InternalCommand::new(
|
||||
"ls".to_string(),
|
||||
bare,
|
||||
Tag {
|
||||
span: bare,
|
||||
anchor: None,
|
||||
},
|
||||
hir::Call {
|
||||
head: Box::new(hir::RawExpression::Command(bare).tagged(bare)),
|
||||
head: Box::new(hir::RawExpression::Command(bare).spanned(bare)),
|
||||
positional: Some(vec![hir::Expression::pattern(pat)]),
|
||||
named: None,
|
||||
},
|
||||
@ -99,7 +101,7 @@ fn test_parse_command() {
|
||||
|
||||
hir::Expression::path(
|
||||
hir::Expression::variable(inner_var, outer_var),
|
||||
vec!["amount".tagged(amount), "max ghz".tagged(outer_max_ghz)],
|
||||
vec!["amount".spanned(amount), "max ghz".spanned(outer_max_ghz)],
|
||||
outer_var.until(outer_max_ghz),
|
||||
)
|
||||
},
|
||||
@ -112,11 +114,11 @@ fn parse_tokens<T: Eq + Debug>(
|
||||
expected: impl FnOnce(Tagged<&[TokenNode]>) -> T,
|
||||
) {
|
||||
let tokens = b::token_list(tokens);
|
||||
let (tokens, source) = b::build(test_origin(), tokens);
|
||||
let (tokens, source) = b::build(tokens);
|
||||
|
||||
ExpandContext::with_empty(&Text::from(source), |context| {
|
||||
let tokens = tokens.expect_list();
|
||||
let mut iterator = TokensIterator::all(tokens.item, *context.tag());
|
||||
let mut iterator = TokensIterator::all(tokens.item, *context.span());
|
||||
|
||||
let expr = expand_syntax(&shape, &mut iterator, &context);
|
||||
|
||||
@ -132,13 +134,6 @@ fn parse_tokens<T: Eq + Debug>(
|
||||
})
|
||||
}
|
||||
|
||||
fn test_origin() -> Uuid {
|
||||
Uuid::nil()
|
||||
}
|
||||
|
||||
fn inner_string_tag(tag: Tag) -> Tag {
|
||||
Tag {
|
||||
span: Span::new(tag.span.start() + 1, tag.span.end() - 1),
|
||||
anchor: tag.anchor,
|
||||
}
|
||||
fn inner_string_span(span: Span) -> Span {
|
||||
Span::new(span.start() + 1, span.end() - 1)
|
||||
}
|
||||
|
@@ -1,6 +1,6 @@
use crate::parser::{hir::Expression, Operator};
use crate::prelude::*;
use crate::Tagged;

use derive_new::new;
use getset::Getters;
use serde::{Deserialize, Serialize};
@@ -12,7 +12,7 @@ use std::fmt;
#[get = "pub(crate)"]
pub struct Binary {
    left: Expression,
    op: Tagged<Operator>,
    op: Spanned<Operator>,
    right: Expression,
}

|
@ -6,17 +6,17 @@ use crate::parser::{
|
||||
},
|
||||
FlatShape, TokenNode, TokensIterator,
|
||||
};
|
||||
use crate::{Tag, Tagged, Text};
|
||||
use crate::{Span, Spanned, Text};
|
||||
|
||||
pub fn expand_external_tokens(
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
source: &Text,
|
||||
) -> Result<Vec<Tagged<String>>, ShellError> {
|
||||
let mut out: Vec<Tagged<String>> = vec![];
|
||||
) -> Result<Vec<Spanned<String>>, ShellError> {
|
||||
let mut out: Vec<Spanned<String>> = vec![];
|
||||
|
||||
loop {
|
||||
if let Some(tag) = expand_next_expression(token_nodes)? {
|
||||
out.push(tag.tagged_string(source));
|
||||
if let Some(span) = expand_next_expression(token_nodes)? {
|
||||
out.push(span.spanned_string(source));
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
@ -37,7 +37,7 @@ impl ColorSyntax for ExternalTokensShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Self::Info {
|
||||
loop {
|
||||
// Allow a space
|
||||
@ -55,7 +55,7 @@ impl ColorSyntax for ExternalTokensShape {
|
||||
|
||||
pub fn expand_next_expression(
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
) -> Result<Option<Tag>, ShellError> {
|
||||
) -> Result<Option<Span>, ShellError> {
|
||||
let first = token_nodes.next_non_ws();
|
||||
|
||||
let first = match first {
|
||||
@ -79,14 +79,14 @@ pub fn expand_next_expression(
|
||||
Ok(Some(first.until(last)))
|
||||
}
|
||||
|
||||
fn triage_external_head(node: &TokenNode) -> Result<Tag, ShellError> {
|
||||
fn triage_external_head(node: &TokenNode) -> Result<Span, ShellError> {
|
||||
Ok(match node {
|
||||
TokenNode::Token(token) => token.tag(),
|
||||
TokenNode::Token(token) => token.span,
|
||||
TokenNode::Call(_call) => unimplemented!("TODO: OMG"),
|
||||
TokenNode::Nodes(_nodes) => unimplemented!("TODO: OMG"),
|
||||
TokenNode::Delimited(_delimited) => unimplemented!("TODO: OMG"),
|
||||
TokenNode::Pipeline(_pipeline) => unimplemented!("TODO: OMG"),
|
||||
TokenNode::Flag(flag) => flag.tag(),
|
||||
TokenNode::Flag(flag) => flag.span,
|
||||
TokenNode::Whitespace(_whitespace) => {
|
||||
unreachable!("This function should be called after next_non_ws()")
|
||||
}
|
||||
@ -96,7 +96,7 @@ fn triage_external_head(node: &TokenNode) -> Result<Tag, ShellError> {
|
||||
|
||||
fn triage_continuation<'a, 'b>(
|
||||
nodes: &'a mut TokensIterator<'b>,
|
||||
) -> Result<Option<Tag>, ShellError> {
|
||||
) -> Result<Option<Span>, ShellError> {
|
||||
let mut peeked = nodes.peek_any();
|
||||
|
||||
let node = match peeked.node {
|
||||
@ -116,7 +116,7 @@ fn triage_continuation<'a, 'b>(
|
||||
}
|
||||
|
||||
peeked.commit();
|
||||
Ok(Some(node.tag()))
|
||||
Ok(Some(node.span()))
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
@ -137,7 +137,7 @@ impl ColorSyntax for ExternalExpression {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> ExternalExpressionResult {
|
||||
let atom = match expand_atom(
|
||||
token_nodes,
|
||||
@ -146,7 +146,7 @@ impl ColorSyntax for ExternalExpression {
|
||||
ExpansionRule::permissive(),
|
||||
) {
|
||||
Err(_) => unreachable!("TODO: separate infallible expand_atom"),
|
||||
Ok(Tagged {
|
||||
Ok(Spanned {
|
||||
item: AtomicToken::Eof { .. },
|
||||
..
|
||||
}) => return ExternalExpressionResult::Eof,
|
||||
|
@ -9,7 +9,7 @@ use std::fmt;
|
||||
)]
|
||||
#[get = "pub(crate)"]
|
||||
pub struct ExternalCommand {
|
||||
pub(crate) name: Tag,
|
||||
pub(crate) name: Span,
|
||||
}
|
||||
|
||||
impl ToDebug for ExternalCommand {
|
||||
|
@ -43,9 +43,13 @@ impl NamedArguments {

match switch {
None => self.named.insert(name.into(), NamedValue::AbsentSwitch),
Some(flag) => self
.named
.insert(name, NamedValue::PresentSwitch(*flag.name())),
Some(flag) => self.named.insert(
name,
NamedValue::PresentSwitch(Tag {
span: *flag.name(),
anchor: None,
}),
),
};
}

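// --- Illustrative sketch (not part of this commit's diff) ---
// The PresentSwitch arm above shows the recurring pattern in this diff: where
// an API still wants a Tag but only a Span is at hand, a Tag with no anchor is
// built on the spot. A hypothetical helper capturing that pattern:
fn tag_for(span: Span) -> Tag {
    Tag { span, anchor: None }
}
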
@ -1,6 +1,5 @@
|
||||
use crate::parser::hir::Expression;
|
||||
use crate::prelude::*;
|
||||
use crate::Tagged;
|
||||
use derive_new::new;
|
||||
use getset::{Getters, MutGetters};
|
||||
use serde::{Deserialize, Serialize};
|
||||
@ -24,7 +23,7 @@ use std::fmt;
|
||||
pub struct Path {
|
||||
head: Expression,
|
||||
#[get_mut = "pub(crate)"]
|
||||
tail: Vec<Tagged<String>>,
|
||||
tail: Vec<Spanned<String>>,
|
||||
}
|
||||
|
||||
impl fmt::Display for Path {
|
||||
@ -40,7 +39,7 @@ impl fmt::Display for Path {
|
||||
}
|
||||
|
||||
impl Path {
|
||||
pub(crate) fn parts(self) -> (Expression, Vec<Tagged<String>>) {
|
||||
pub(crate) fn parts(self) -> (Expression, Vec<Spanned<String>>) {
|
||||
(self.head, self.tail)
|
||||
}
|
||||
}
|
||||
@ -50,7 +49,7 @@ impl ToDebug for Path {
|
||||
write!(f, "{}", self.head.debug(source))?;
|
||||
|
||||
for part in &self.tail {
|
||||
write!(f, ".{}", part.item())?;
|
||||
write!(f, ".{}", part.item)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
@ -64,7 +64,7 @@ impl FallibleColorSyntax for SyntaxShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
match self {
|
||||
SyntaxShape::Any => {
|
||||
@ -158,7 +158,7 @@ pub struct ExpandContext<'context> {
|
||||
#[get = "pub(crate)"]
|
||||
registry: &'context CommandRegistry,
|
||||
#[get = "pub(crate)"]
|
||||
tag: Tag,
|
||||
span: Span,
|
||||
#[get = "pub(crate)"]
|
||||
source: &'context Text,
|
||||
homedir: Option<PathBuf>,
|
||||
@ -179,7 +179,7 @@ impl<'context> ExpandContext<'context> {
|
||||
|
||||
callback(ExpandContext {
|
||||
registry: ®istry,
|
||||
tag: Tag::unknown(),
|
||||
span: Span::unknown(),
|
||||
source,
|
||||
homedir: None,
|
||||
})
|
||||
@ -211,7 +211,7 @@ pub trait FallibleColorSyntax: std::fmt::Debug + Copy {
|
||||
input: &Self::Input,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<Self::Info, ShellError>;
|
||||
}
|
||||
|
||||
@ -224,7 +224,7 @@ pub trait ColorSyntax: std::fmt::Debug + Copy {
|
||||
input: &Self::Input,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Self::Info;
|
||||
}
|
||||
|
||||
@ -240,7 +240,7 @@ pub trait ColorSyntax: std::fmt::Debug + Copy {
|
||||
// input: &Self::Input,
|
||||
// token_nodes: &'b mut TokensIterator<'a>,
|
||||
// context: &ExpandContext,
|
||||
// shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
// shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
// ) -> Result<T::Info, ShellError> {
|
||||
// FallibleColorSyntax::color_syntax(self, input, token_nodes, context, shapes)
|
||||
// }
|
||||
@ -282,7 +282,7 @@ pub fn color_syntax<'a, 'b, T: ColorSyntax<Info = U, Input = ()>, U>(
|
||||
shape: &T,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> ((), U) {
|
||||
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source));
|
||||
|
||||
@ -310,7 +310,7 @@ pub fn color_fallible_syntax<'a, 'b, T: FallibleColorSyntax<Info = U, Input = ()
|
||||
shape: &T,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<U, ShellError> {
|
||||
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source));
|
||||
|
||||
@ -344,7 +344,7 @@ pub fn color_syntax_with<'a, 'b, T: ColorSyntax<Info = U, Input = I>, U, I>(
|
||||
input: &I,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> ((), U) {
|
||||
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source));
|
||||
|
||||
@ -373,7 +373,7 @@ pub fn color_fallible_syntax_with<'a, 'b, T: FallibleColorSyntax<Info = U, Input
|
||||
input: &I,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<U, ShellError> {
|
||||
trace!(target: "nu::color_syntax", "before {} :: {:?}", std::any::type_name::<T>(), debug_tokens(token_nodes, context.source));
|
||||
|
||||
@ -446,15 +446,15 @@ pub trait SkipSyntax: std::fmt::Debug + Copy {
|
||||
|
||||
enum BarePathState {
|
||||
Initial,
|
||||
Seen(Tag, Tag),
|
||||
Seen(Span, Span),
|
||||
Error(ShellError),
|
||||
}
|
||||
|
||||
impl BarePathState {
|
||||
pub fn seen(self, tag: Tag) -> BarePathState {
|
||||
pub fn seen(self, span: Span) -> BarePathState {
|
||||
match self {
|
||||
BarePathState::Initial => BarePathState::Seen(tag, tag),
|
||||
BarePathState::Seen(start, _) => BarePathState::Seen(start, tag),
|
||||
BarePathState::Initial => BarePathState::Seen(span, span),
|
||||
BarePathState::Seen(start, _) => BarePathState::Seen(start, span),
|
||||
BarePathState::Error(err) => BarePathState::Error(err),
|
||||
}
|
||||
}
|
||||
@ -467,7 +467,7 @@ impl BarePathState {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_bare(self) -> Result<Tag, ShellError> {
|
||||
pub fn into_bare(self) -> Result<Span, ShellError> {
|
||||
match self {
|
||||
BarePathState::Initial => unreachable!("into_bare in initial state"),
|
||||
BarePathState::Seen(start, end) => Ok(start.until(end)),
|
||||
@ -480,7 +480,7 @@ pub fn expand_bare<'a, 'b>(
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
_context: &ExpandContext,
|
||||
predicate: impl Fn(&TokenNode) -> bool,
|
||||
) -> Result<Tag, ShellError> {
|
||||
) -> Result<Span, ShellError> {
|
||||
let mut state = BarePathState::Initial;
|
||||
|
||||
loop {
|
||||
@ -494,7 +494,7 @@ pub fn expand_bare<'a, 'b>(
|
||||
}
|
||||
Some(node) => {
|
||||
if predicate(node) {
|
||||
state = state.seen(node.tag());
|
||||
state = state.seen(node.span());
|
||||
peeked.commit();
|
||||
} else {
|
||||
state = state.end(peeked, "word");
|
||||
@ -511,19 +511,19 @@ pub fn expand_bare<'a, 'b>(
|
||||
pub struct BarePathShape;
|
||||
|
||||
impl ExpandSyntax for BarePathShape {
|
||||
type Output = Tag;
|
||||
type Output = Span;
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Tag, ShellError> {
|
||||
) -> Result<Span, ShellError> {
|
||||
expand_bare(token_nodes, context, |token| match token {
|
||||
TokenNode::Token(Tagged {
|
||||
TokenNode::Token(Spanned {
|
||||
item: RawToken::Bare,
|
||||
..
|
||||
})
|
||||
| TokenNode::Token(Tagged {
|
||||
| TokenNode::Token(Spanned {
|
||||
item: RawToken::Operator(Operator::Dot),
|
||||
..
|
||||
}) => true,
|
||||
@ -545,15 +545,15 @@ impl FallibleColorSyntax for BareShape {
|
||||
input: &FlatShape,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
_context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
token_nodes.peek_any_token(|token| match token {
|
||||
// If it's a bare token, color it
|
||||
TokenNode::Token(Tagged {
|
||||
TokenNode::Token(Spanned {
|
||||
item: RawToken::Bare,
|
||||
tag,
|
||||
span,
|
||||
}) => {
|
||||
shapes.push((*input).tagged(tag));
|
||||
shapes.push((*input).spanned(*span));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@ -564,7 +564,7 @@ impl FallibleColorSyntax for BareShape {
|
||||
}
|
||||
|
||||
impl ExpandSyntax for BareShape {
|
||||
type Output = Tagged<String>;
|
||||
type Output = Spanned<String>;
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
@ -574,12 +574,12 @@ impl ExpandSyntax for BareShape {
|
||||
let peeked = token_nodes.peek_any().not_eof("word")?;
|
||||
|
||||
match peeked.node {
|
||||
TokenNode::Token(Tagged {
|
||||
TokenNode::Token(Spanned {
|
||||
item: RawToken::Bare,
|
||||
tag,
|
||||
span,
|
||||
}) => {
|
||||
peeked.commit();
|
||||
Ok(tag.tagged_string(context.source))
|
||||
Ok(span.spanned_string(context.source))
|
||||
}
|
||||
|
||||
other => Err(ShellError::type_error("word", other.tagged_type_name())),
|
||||
@ -608,9 +608,9 @@ impl TestSyntax for BareShape {
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum CommandSignature {
|
||||
Internal(Tagged<Arc<Command>>),
|
||||
LiteralExternal { outer: Tag, inner: Tag },
|
||||
External(Tag),
|
||||
Internal(Spanned<Arc<Command>>),
|
||||
LiteralExternal { outer: Span, inner: Span },
|
||||
External(Span),
|
||||
Expression(hir::Expression),
|
||||
}
|
||||
|
||||
@ -618,14 +618,15 @@ impl CommandSignature {
|
||||
pub fn to_expression(&self) -> hir::Expression {
|
||||
match self {
|
||||
CommandSignature::Internal(command) => {
|
||||
let tag = command.tag;
|
||||
hir::RawExpression::Command(tag).tagged(tag)
|
||||
let span = command.span;
|
||||
hir::RawExpression::Command(span).spanned(span)
|
||||
}
|
||||
CommandSignature::LiteralExternal { outer, inner } => {
|
||||
hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*inner)).tagged(outer)
|
||||
hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*inner))
|
||||
.spanned(*outer)
|
||||
}
|
||||
CommandSignature::External(tag) => {
|
||||
hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*tag)).tagged(tag)
|
||||
CommandSignature::External(span) => {
|
||||
hir::RawExpression::ExternalCommand(hir::ExternalCommand::new(*span)).spanned(*span)
|
||||
}
|
||||
CommandSignature::Expression(expr) => expr.clone(),
|
||||
}
|
||||
@ -645,7 +646,7 @@ impl FallibleColorSyntax for PipelineShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
// Make sure we're looking at a pipeline
|
||||
let Pipeline { parts, .. } = token_nodes.peek_any_token(|node| node.as_pipeline())?;
|
||||
@ -654,11 +655,11 @@ impl FallibleColorSyntax for PipelineShape {
|
||||
for part in parts {
|
||||
// If the pipeline part has a prefix `|`, emit a pipe to color
|
||||
if let Some(pipe) = part.pipe {
|
||||
shapes.push(FlatShape::Pipe.tagged(pipe));
|
||||
shapes.push(FlatShape::Pipe.spanned(pipe));
|
||||
}
|
||||
|
||||
// Create a new iterator containing the tokens in the pipeline part to color
|
||||
let mut token_nodes = TokensIterator::new(&part.tokens.item, part.tag, false);
|
||||
let mut token_nodes = TokensIterator::new(&part.tokens.item, part.span, false);
|
||||
|
||||
color_syntax(&MaybeSpaceShape, &mut token_nodes, context, shapes);
|
||||
color_syntax(&CommandShape, &mut token_nodes, context, shapes);
|
||||
@ -685,7 +686,7 @@ impl ExpandSyntax for PipelineShape {
|
||||
|
||||
let commands: Result<Vec<_>, ShellError> = parts
|
||||
.iter()
|
||||
.map(|item| classify_command(&item, context, &source))
|
||||
.map(|item| classify_command(item, context, &source))
|
||||
.collect();
|
||||
|
||||
Ok(ClassifiedPipeline {
|
||||
@ -711,7 +712,7 @@ impl FallibleColorSyntax for CommandHeadShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<CommandHeadKind, ShellError> {
|
||||
// If we don't ultimately find a token, roll back
|
||||
token_nodes.atomic(|token_nodes| {
|
||||
@ -726,7 +727,7 @@ impl FallibleColorSyntax for CommandHeadShape {
|
||||
match atom.item {
|
||||
// If the head is an explicit external command (^cmd), color it as an external command
|
||||
AtomicToken::ExternalCommand { command } => {
|
||||
shapes.push(FlatShape::ExternalCommand.tagged(command));
|
||||
shapes.push(FlatShape::ExternalCommand.spanned(command));
|
||||
Ok(CommandHeadKind::External)
|
||||
}
|
||||
|
||||
@ -736,19 +737,19 @@ impl FallibleColorSyntax for CommandHeadShape {
|
||||
|
||||
if context.registry.has(name) {
|
||||
// If the registry has the command, color it as an internal command
|
||||
shapes.push(FlatShape::InternalCommand.tagged(text));
|
||||
shapes.push(FlatShape::InternalCommand.spanned(text));
|
||||
let command = context.registry.expect_command(name);
|
||||
Ok(CommandHeadKind::Internal(command.signature()))
|
||||
} else {
|
||||
// Otherwise, color it as an external command
|
||||
shapes.push(FlatShape::ExternalCommand.tagged(text));
|
||||
shapes.push(FlatShape::ExternalCommand.spanned(text));
|
||||
Ok(CommandHeadKind::External)
|
||||
}
|
||||
}
|
||||
|
||||
// Otherwise, we're not actually looking at a command
|
||||
_ => Err(ShellError::syntax_error(
|
||||
"No command at the head".tagged(atom.tag),
|
||||
"No command at the head".tagged(atom.span),
|
||||
)),
|
||||
}
|
||||
})
|
||||
@ -764,25 +765,25 @@ impl ExpandSyntax for CommandHeadShape {
|
||||
context: &ExpandContext,
|
||||
) -> Result<CommandSignature, ShellError> {
|
||||
let node =
|
||||
parse_single_node_skipping_ws(token_nodes, "command head1", |token, token_tag, _| {
|
||||
parse_single_node_skipping_ws(token_nodes, "command head1", |token, token_span, _| {
|
||||
Ok(match token {
|
||||
RawToken::ExternalCommand(tag) => CommandSignature::LiteralExternal {
|
||||
outer: token_tag,
|
||||
inner: tag,
|
||||
RawToken::ExternalCommand(span) => CommandSignature::LiteralExternal {
|
||||
outer: token_span,
|
||||
inner: span,
|
||||
},
|
||||
RawToken::Bare => {
|
||||
let name = token_tag.slice(context.source);
|
||||
let name = token_span.slice(context.source);
|
||||
if context.registry.has(name) {
|
||||
let command = context.registry.expect_command(name);
|
||||
CommandSignature::Internal(command.tagged(token_tag))
|
||||
CommandSignature::Internal(command.spanned(token_span))
|
||||
} else {
|
||||
CommandSignature::External(token_tag)
|
||||
CommandSignature::External(token_span)
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
return Err(ShellError::type_error(
|
||||
"command head2",
|
||||
token.type_name().tagged(token_tag),
|
||||
token.type_name().tagged(token_span),
|
||||
))
|
||||
}
|
||||
})
|
||||
@ -813,7 +814,7 @@ impl ExpandSyntax for ClassifiedCommandShape {
|
||||
|
||||
match &head {
|
||||
CommandSignature::Expression(expr) => Err(ShellError::syntax_error(
|
||||
"Unexpected expression in command position".tagged(expr.tag),
|
||||
"Unexpected expression in command position".tagged(expr.span),
|
||||
)),
|
||||
|
||||
// If the command starts with `^`, treat it as an external command no matter what
|
||||
@ -831,7 +832,7 @@ impl ExpandSyntax for ClassifiedCommandShape {
|
||||
|
||||
CommandSignature::Internal(command) => {
|
||||
let tail =
|
||||
parse_command_tail(&command.signature(), &context, iterator, command.tag)?;
|
||||
parse_command_tail(&command.signature(), &context, iterator, command.span)?;
|
||||
|
||||
let (positional, named) = match tail {
|
||||
None => (None, None),
|
||||
@ -846,7 +847,10 @@ impl ExpandSyntax for ClassifiedCommandShape {
|
||||
|
||||
Ok(ClassifiedCommand::Internal(InternalCommand::new(
|
||||
command.item.name().to_string(),
|
||||
command.tag,
|
||||
Tag {
|
||||
span: command.span,
|
||||
anchor: None,
|
||||
},
|
||||
call,
|
||||
)))
|
||||
}
|
||||
@ -866,7 +870,7 @@ impl FallibleColorSyntax for InternalCommandHeadShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
_context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let peeked_head = token_nodes.peek_non_ws().not_eof("command head4");
|
||||
|
||||
@ -876,17 +880,17 @@ impl FallibleColorSyntax for InternalCommandHeadShape {
|
||||
};
|
||||
|
||||
let _expr = match peeked_head.node {
|
||||
TokenNode::Token(Tagged {
|
||||
TokenNode::Token(Spanned {
|
||||
item: RawToken::Bare,
|
||||
tag,
|
||||
}) => shapes.push(FlatShape::Word.tagged(tag)),
|
||||
span,
|
||||
}) => shapes.push(FlatShape::Word.spanned(*span)),
|
||||
|
||||
TokenNode::Token(Tagged {
|
||||
TokenNode::Token(Spanned {
|
||||
item: RawToken::String(_inner_tag),
|
||||
tag,
|
||||
}) => shapes.push(FlatShape::String.tagged(tag)),
|
||||
span,
|
||||
}) => shapes.push(FlatShape::String.spanned(*span)),
|
||||
|
||||
_node => shapes.push(FlatShape::Error.tagged(peeked_head.node.tag())),
|
||||
_node => shapes.push(FlatShape::Error.spanned(peeked_head.node.span())),
|
||||
};
|
||||
|
||||
peeked_head.commit();
|
||||
@ -905,16 +909,16 @@ impl ExpandExpression for InternalCommandHeadShape {
|
||||
|
||||
let expr = match peeked_head.node {
|
||||
TokenNode::Token(
|
||||
spanned @ Tagged {
|
||||
spanned @ Spanned {
|
||||
item: RawToken::Bare,
|
||||
..
|
||||
},
|
||||
) => spanned.map(|_| hir::RawExpression::Literal(hir::Literal::Bare)),
|
||||
|
||||
TokenNode::Token(Tagged {
|
||||
item: RawToken::String(inner_tag),
|
||||
tag,
|
||||
}) => hir::RawExpression::Literal(hir::Literal::String(*inner_tag)).tagged(*tag),
|
||||
TokenNode::Token(Spanned {
|
||||
item: RawToken::String(inner_span),
|
||||
span,
|
||||
}) => hir::RawExpression::Literal(hir::Literal::String(*inner_span)).spanned(*span),
|
||||
|
||||
node => {
|
||||
return Err(ShellError::type_error(
|
||||
@ -932,24 +936,24 @@ impl ExpandExpression for InternalCommandHeadShape {
|
||||
|
||||
pub(crate) struct SingleError<'token> {
|
||||
expected: &'static str,
|
||||
node: &'token Tagged<RawToken>,
|
||||
node: &'token Spanned<RawToken>,
|
||||
}
|
||||
|
||||
impl<'token> SingleError<'token> {
|
||||
pub(crate) fn error(&self) -> ShellError {
|
||||
ShellError::type_error(self.expected, self.node.type_name().tagged(self.node.tag))
|
||||
ShellError::type_error(self.expected, self.node.type_name().tagged(self.node.span))
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_single_node<'a, 'b, T>(
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
expected: &'static str,
|
||||
callback: impl FnOnce(RawToken, Tag, SingleError) -> Result<T, ShellError>,
|
||||
callback: impl FnOnce(RawToken, Span, SingleError) -> Result<T, ShellError>,
|
||||
) -> Result<T, ShellError> {
|
||||
token_nodes.peek_any_token(|node| match node {
|
||||
TokenNode::Token(token) => callback(
|
||||
token.item,
|
||||
token.tag(),
|
||||
token.span,
|
||||
SingleError {
|
||||
expected,
|
||||
node: token,
|
||||
@ -963,14 +967,14 @@ fn parse_single_node<'a, 'b, T>(
|
||||
fn parse_single_node_skipping_ws<'a, 'b, T>(
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
expected: &'static str,
|
||||
callback: impl FnOnce(RawToken, Tag, SingleError) -> Result<T, ShellError>,
|
||||
callback: impl FnOnce(RawToken, Span, SingleError) -> Result<T, ShellError>,
|
||||
) -> Result<T, ShellError> {
|
||||
let peeked = token_nodes.peek_non_ws().not_eof(expected)?;
|
||||
|
||||
let expr = match peeked.node {
|
||||
TokenNode::Token(token) => callback(
|
||||
token.item,
|
||||
token.tag(),
|
||||
token.span,
|
||||
SingleError {
|
||||
expected,
|
||||
node: token,
|
||||
@ -997,7 +1001,7 @@ impl FallibleColorSyntax for WhitespaceShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
_context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let peeked = token_nodes.peek_any().not_eof("whitespace");
|
||||
|
||||
@ -1007,7 +1011,7 @@ impl FallibleColorSyntax for WhitespaceShape {
|
||||
};
|
||||
|
||||
let _tag = match peeked.node {
|
||||
TokenNode::Whitespace(tag) => shapes.push(FlatShape::Whitespace.tagged(tag)),
|
||||
TokenNode::Whitespace(span) => shapes.push(FlatShape::Whitespace.spanned(*span)),
|
||||
|
||||
_other => return Ok(()),
|
||||
};
|
||||
@ -1019,7 +1023,7 @@ impl FallibleColorSyntax for WhitespaceShape {
|
||||
}
|
||||
|
||||
impl ExpandSyntax for WhitespaceShape {
|
||||
type Output = Tag;
|
||||
type Output = Span;
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
@ -1028,7 +1032,7 @@ impl ExpandSyntax for WhitespaceShape {
|
||||
) -> Result<Self::Output, ShellError> {
|
||||
let peeked = token_nodes.peek_any().not_eof("whitespace")?;
|
||||
|
||||
let tag = match peeked.node {
|
||||
let span = match peeked.node {
|
||||
TokenNode::Whitespace(tag) => *tag,
|
||||
|
||||
other => {
|
||||
@ -1041,7 +1045,7 @@ impl ExpandSyntax for WhitespaceShape {
|
||||
|
||||
peeked.commit();
|
||||
|
||||
Ok(tag)
|
||||
Ok(span)
|
||||
}
|
||||
}
|
||||
|
||||
@ -1094,7 +1098,7 @@ impl ColorSyntax for MaybeSpaceShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
_context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Self::Info {
|
||||
let peeked = token_nodes.peek_any().not_eof("whitespace");
|
||||
|
||||
@ -1103,9 +1107,9 @@ impl ColorSyntax for MaybeSpaceShape {
|
||||
Ok(peeked) => peeked,
|
||||
};
|
||||
|
||||
if let TokenNode::Whitespace(tag) = peeked.node {
|
||||
if let TokenNode::Whitespace(span) = peeked.node {
|
||||
peeked.commit();
|
||||
shapes.push(FlatShape::Whitespace.tagged(tag));
|
||||
shapes.push(FlatShape::Whitespace.spanned(*span));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1122,14 +1126,14 @@ impl FallibleColorSyntax for SpaceShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
_context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let peeked = token_nodes.peek_any().not_eof("whitespace")?;
|
||||
|
||||
match peeked.node {
|
||||
TokenNode::Whitespace(tag) => {
|
||||
TokenNode::Whitespace(span) => {
|
||||
peeked.commit();
|
||||
shapes.push(FlatShape::Whitespace.tagged(tag));
|
||||
shapes.push(FlatShape::Whitespace.spanned(*span));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@ -1168,26 +1172,26 @@ pub fn spaced<T: ExpandExpression>(inner: T) -> SpacedExpression<T> {
|
||||
SpacedExpression { inner }
|
||||
}
|
||||
|
||||
fn expand_variable(tag: Tag, token_tag: Tag, source: &Text) -> hir::Expression {
|
||||
if tag.slice(source) == "it" {
|
||||
hir::Expression::it_variable(tag, token_tag)
|
||||
fn expand_variable(span: Span, token_span: Span, source: &Text) -> hir::Expression {
|
||||
if span.slice(source) == "it" {
|
||||
hir::Expression::it_variable(span, token_span)
|
||||
} else {
|
||||
hir::Expression::variable(tag, token_tag)
|
||||
hir::Expression::variable(span, token_span)
|
||||
}
|
||||
}
|
||||
|
||||
fn classify_command(
|
||||
command: &Tagged<PipelineElement>,
|
||||
command: &Spanned<PipelineElement>,
|
||||
context: &ExpandContext,
|
||||
source: &Text,
|
||||
) -> Result<ClassifiedCommand, ShellError> {
|
||||
let mut iterator = TokensIterator::new(&command.tokens.item, command.tag, true);
|
||||
let mut iterator = TokensIterator::new(&command.tokens.item, command.span, true);
|
||||
|
||||
let head = CommandHeadShape.expand_syntax(&mut iterator, &context)?;
|
||||
|
||||
match &head {
|
||||
CommandSignature::Expression(_) => Err(ShellError::syntax_error(
|
||||
"Unexpected expression in command position".tagged(command.tag),
|
||||
"Unexpected expression in command position".tagged(command.span),
|
||||
)),
|
||||
|
||||
// If the command starts with `^`, treat it as an external command no matter what
|
||||
@ -1205,7 +1209,7 @@ fn classify_command(
|
||||
|
||||
CommandSignature::Internal(command) => {
|
||||
let tail =
|
||||
parse_command_tail(&command.signature(), &context, &mut iterator, command.tag)?;
|
||||
parse_command_tail(&command.signature(), &context, &mut iterator, command.span)?;
|
||||
|
||||
let (positional, named) = match tail {
|
||||
None => (None, None),
|
||||
@ -1220,7 +1224,10 @@ fn classify_command(
|
||||
|
||||
Ok(ClassifiedCommand::Internal(InternalCommand::new(
|
||||
command.name().to_string(),
|
||||
command.tag,
|
||||
Tag {
|
||||
span: command.span,
|
||||
anchor: None,
|
||||
},
|
||||
call,
|
||||
)))
|
||||
}
|
||||
@ -1239,7 +1246,7 @@ impl ColorSyntax for CommandShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) {
|
||||
let kind = color_fallible_syntax(&CommandHeadShape, token_nodes, context, shapes);
|
||||
|
||||
|
@ -11,7 +11,7 @@ use crate::parser::{
|
||||
parse::token_tree::Delimiter,
|
||||
RawToken, TokenNode,
|
||||
};
|
||||
use crate::{Tag, Tagged, TaggedItem};
|
||||
use crate::{Span, Spanned, SpannedItem};
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct AnyBlockShape;
|
||||
@ -25,7 +25,7 @@ impl FallibleColorSyntax for AnyBlockShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let block = token_nodes.peek_non_ws().not_eof("block");
|
||||
|
||||
@ -39,11 +39,11 @@ impl FallibleColorSyntax for AnyBlockShape {
|
||||
|
||||
match block {
|
||||
// If so, color it as a block
|
||||
Some((children, tags)) => {
|
||||
let mut token_nodes = TokensIterator::new(children.item, context.tag, false);
|
||||
Some((children, spans)) => {
|
||||
let mut token_nodes = TokensIterator::new(children.item, context.span, false);
|
||||
color_syntax_with(
|
||||
&DelimitedShape,
|
||||
&(Delimiter::Brace, tags.0, tags.1),
|
||||
&(Delimiter::Brace, spans.0, spans.1),
|
||||
&mut token_nodes,
|
||||
context,
|
||||
shapes,
|
||||
@ -72,11 +72,11 @@ impl ExpandExpression for AnyBlockShape {
|
||||
|
||||
match block {
|
||||
Some((block, _tags)) => {
|
||||
let mut iterator = TokensIterator::new(&block.item, context.tag, false);
|
||||
let mut iterator = TokensIterator::new(&block.item, context.span, false);
|
||||
|
||||
let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?;
|
||||
|
||||
return Ok(hir::RawExpression::Block(exprs).tagged(block.tag));
|
||||
return Ok(hir::RawExpression::Block(exprs).spanned(block.span));
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
@ -97,7 +97,7 @@ impl FallibleColorSyntax for ShorthandBlock {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
// Try to find a shorthand head. If none found, fail
|
||||
color_fallible_syntax(&ShorthandPath, token_nodes, context, shapes)?;
|
||||
@ -126,10 +126,10 @@ impl ExpandExpression for ShorthandBlock {
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
let path = expand_expr(&ShorthandPath, token_nodes, context)?;
|
||||
let start = path.tag;
|
||||
let start = path.span;
|
||||
let expr = continue_expression(path, token_nodes, context)?;
|
||||
let end = expr.tag;
|
||||
let block = hir::RawExpression::Block(vec![expr]).tagged(start.until(end));
|
||||
let end = expr.span;
|
||||
let block = hir::RawExpression::Block(vec![expr]).spanned(start.until(end));
|
||||
|
||||
Ok(block)
|
||||
}
|
||||
@ -148,7 +148,7 @@ impl FallibleColorSyntax for ShorthandPath {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
token_nodes.atomic(|token_nodes| {
|
||||
let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context, shapes);
|
||||
@ -232,29 +232,29 @@ impl FallibleColorSyntax for ShorthandHeadShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
_context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
// A shorthand path must not be at EOF
|
||||
let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?;
|
||||
|
||||
match peeked.node {
|
||||
// If the head of a shorthand path is a bare token, it expands to `$it.bare`
|
||||
TokenNode::Token(Tagged {
|
||||
TokenNode::Token(Spanned {
|
||||
item: RawToken::Bare,
|
||||
tag,
|
||||
span,
|
||||
}) => {
|
||||
peeked.commit();
|
||||
shapes.push(FlatShape::BareMember.tagged(tag));
|
||||
shapes.push(FlatShape::BareMember.spanned(*span));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// If the head of a shorthand path is a string, it expands to `$it."some string"`
|
||||
TokenNode::Token(Tagged {
|
||||
TokenNode::Token(Spanned {
|
||||
item: RawToken::String(_),
|
||||
tag: outer,
|
||||
span: outer,
|
||||
}) => {
|
||||
peeked.commit();
|
||||
shapes.push(FlatShape::StringMember.tagged(outer));
|
||||
shapes.push(FlatShape::StringMember.spanned(*outer));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@ -277,40 +277,40 @@ impl ExpandExpression for ShorthandHeadShape {
|
||||
|
||||
match peeked.node {
|
||||
// If the head of a shorthand path is a bare token, it expands to `$it.bare`
|
||||
TokenNode::Token(Tagged {
|
||||
TokenNode::Token(Spanned {
|
||||
item: RawToken::Bare,
|
||||
tag,
|
||||
span,
|
||||
}) => {
|
||||
// Commit the peeked token
|
||||
peeked.commit();
|
||||
|
||||
// Synthesize an `$it` expression
|
||||
let it = synthetic_it(token_nodes.anchor());
|
||||
let it = synthetic_it();
|
||||
|
||||
// Make a path out of `$it` and the bare token as a member
|
||||
Ok(hir::Expression::path(
|
||||
it,
|
||||
vec![tag.tagged_string(context.source)],
|
||||
tag,
|
||||
vec![span.spanned_string(context.source)],
|
||||
*span,
|
||||
))
|
||||
}
|
||||
|
||||
// If the head of a shorthand path is a string, it expands to `$it."some string"`
|
||||
TokenNode::Token(Tagged {
|
||||
TokenNode::Token(Spanned {
|
||||
item: RawToken::String(inner),
|
||||
tag: outer,
|
||||
span: outer,
|
||||
}) => {
|
||||
// Commit the peeked token
|
||||
peeked.commit();
|
||||
|
||||
// Synthesize an `$it` expression
|
||||
let it = synthetic_it(token_nodes.anchor());
|
||||
let it = synthetic_it();
|
||||
|
||||
// Make a path out of `$it` and the bare token as a member
|
||||
Ok(hir::Expression::path(
|
||||
it,
|
||||
vec![inner.string(context.source).tagged(outer)],
|
||||
outer,
|
||||
vec![inner.string(context.source).spanned(*outer)],
|
||||
*outer,
|
||||
))
|
||||
}
|
||||
|
||||
@ -325,6 +325,6 @@ impl ExpandExpression for ShorthandHeadShape {
}
}

fn synthetic_it(origin: uuid::Uuid) -> hir::Expression {
hir::Expression::it_variable(Tag::unknown_span(origin), Tag::unknown_span(origin))
fn synthetic_it() -> hir::Expression {
hir::Expression::it_variable(Span::unknown(), Span::unknown())
}

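// --- Illustrative sketch (not part of this commit's diff) ---
// synthetic_it now leans on Span::unknown() instead of a per-origin unknown
// tag. A zero-length sentinel is assumed here; the real constant may differ.
impl Span {
    pub fn unknown() -> Span {
        Span::new(0, 0)
    }
}
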
@ -46,7 +46,7 @@ impl FallibleColorSyntax for AnyExpressionShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
// Look for an expression at the cursor
|
||||
color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context, shapes)?;
|
||||
@ -94,7 +94,7 @@ pub(crate) fn continue_expression(
|
||||
pub(crate) fn continue_coloring_expression(
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
// if there's not even one expression continuation, fail
|
||||
color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes)?;
|
||||
@ -131,20 +131,23 @@ impl ExpandExpression for AnyExpressionStartShape {
return Ok(hir::Expression::size(
number.to_number(context.source),
unit.item,
atom.tag,
Tag {
span: atom.span,
anchor: None,
},
))
}

AtomicToken::SquareDelimited { nodes, .. } => {
expand_delimited_square(&nodes, atom.tag, context)
expand_delimited_square(&nodes, atom.span.into(), context)
}

AtomicToken::Word { .. } | AtomicToken::Dot { .. } => {
let end = expand_syntax(&BareTailShape, token_nodes, context)?;
Ok(hir::Expression::bare(atom.tag.until_option(end)))
Ok(hir::Expression::bare(atom.span.until_option(end)))
}

other => return other.tagged(atom.tag).into_hir(context, "expression"),
other => return other.spanned(atom.span).into_hir(context, "expression"),
}
}
}

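// --- Illustrative sketch (not part of this commit's diff) ---
// The bare-word arm above combines spans with until_option; the helpers are
// assumed to behave roughly like this (signatures inferred from the call
// sites, not taken from the crate):
impl Span {
    pub fn until(&self, other: Span) -> Span {
        Span::new(self.start(), other.end())
    }

    pub fn until_option(&self, other: Option<Span>) -> Span {
        match other {
            Some(other) => self.until(other),
            None => *self,
        }
    }
}
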
@ -158,7 +161,7 @@ impl FallibleColorSyntax for AnyExpressionStartShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let atom = token_nodes.spanned(|token_nodes| {
|
||||
expand_atom(
|
||||
@ -170,15 +173,15 @@ impl FallibleColorSyntax for AnyExpressionStartShape {
|
||||
});
|
||||
|
||||
let atom = match atom {
|
||||
Tagged {
|
||||
Spanned {
|
||||
item: Err(_err),
|
||||
tag,
|
||||
span,
|
||||
} => {
|
||||
shapes.push(FlatShape::Error.tagged(tag));
|
||||
shapes.push(FlatShape::Error.spanned(span));
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
Tagged {
|
||||
Spanned {
|
||||
item: Ok(value), ..
|
||||
} => value,
|
||||
};
|
||||
@ -186,18 +189,18 @@ impl FallibleColorSyntax for AnyExpressionStartShape {
|
||||
match atom.item {
|
||||
AtomicToken::Size { number, unit } => shapes.push(
|
||||
FlatShape::Size {
|
||||
number: number.tag,
|
||||
unit: unit.tag,
|
||||
number: number.span.into(),
|
||||
unit: unit.span.into(),
|
||||
}
|
||||
.tagged(atom.tag),
|
||||
.spanned(atom.span),
|
||||
),
|
||||
|
||||
AtomicToken::SquareDelimited { nodes, tags } => {
|
||||
color_delimited_square(tags, &nodes, atom.tag, context, shapes)
|
||||
AtomicToken::SquareDelimited { nodes, spans } => {
|
||||
color_delimited_square(spans, &nodes, atom.span.into(), context, shapes)
|
||||
}
|
||||
|
||||
AtomicToken::Word { .. } | AtomicToken::Dot { .. } => {
|
||||
shapes.push(FlatShape::Word.tagged(atom.tag));
|
||||
shapes.push(FlatShape::Word.spanned(atom.span));
|
||||
}
|
||||
|
||||
_ => atom.color_tokens(shapes),
|
||||
@ -219,7 +222,7 @@ impl FallibleColorSyntax for BareTailShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let len = shapes.len();
|
||||
|
||||
@ -267,19 +270,19 @@ impl FallibleColorSyntax for BareTailShape {
|
||||
}
|
||||
|
||||
impl ExpandSyntax for BareTailShape {
|
||||
type Output = Option<Tag>;
|
||||
type Output = Option<Span>;
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Option<Tag>, ShellError> {
|
||||
let mut end: Option<Tag> = None;
|
||||
) -> Result<Option<Span>, ShellError> {
|
||||
let mut end: Option<Span> = None;
|
||||
|
||||
loop {
|
||||
match expand_syntax(&BareShape, token_nodes, context) {
|
||||
Ok(bare) => {
|
||||
end = Some(bare.tag);
|
||||
end = Some(bare.span);
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -9,82 +9,83 @@ use crate::parser::{
|
||||
DelimitedNode, Delimiter, FlatShape, RawToken, TokenNode, Unit,
|
||||
};
|
||||
use crate::prelude::*;
|
||||
use crate::{Span, Spanned};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum AtomicToken<'tokens> {
|
||||
Eof {
|
||||
tag: Tag,
|
||||
span: Span,
|
||||
},
|
||||
Error {
|
||||
error: Tagged<ShellError>,
|
||||
error: Spanned<ShellError>,
|
||||
},
|
||||
Number {
|
||||
number: RawNumber,
|
||||
},
|
||||
Size {
|
||||
number: Tagged<RawNumber>,
|
||||
unit: Tagged<Unit>,
|
||||
number: Spanned<RawNumber>,
|
||||
unit: Spanned<Unit>,
|
||||
},
|
||||
String {
|
||||
body: Tag,
|
||||
body: Span,
|
||||
},
|
||||
ItVariable {
|
||||
name: Tag,
|
||||
name: Span,
|
||||
},
|
||||
Variable {
|
||||
name: Tag,
|
||||
name: Span,
|
||||
},
|
||||
ExternalCommand {
|
||||
command: Tag,
|
||||
command: Span,
|
||||
},
|
||||
ExternalWord {
|
||||
text: Tag,
|
||||
text: Span,
|
||||
},
|
||||
GlobPattern {
|
||||
pattern: Tag,
|
||||
pattern: Span,
|
||||
},
|
||||
FilePath {
|
||||
path: Tag,
|
||||
path: Span,
|
||||
},
|
||||
Word {
|
||||
text: Tag,
|
||||
text: Span,
|
||||
},
|
||||
SquareDelimited {
|
||||
tags: (Tag, Tag),
|
||||
spans: (Span, Span),
|
||||
nodes: &'tokens Vec<TokenNode>,
|
||||
},
|
||||
ParenDelimited {
|
||||
tags: (Tag, Tag),
|
||||
span: (Span, Span),
|
||||
nodes: &'tokens Vec<TokenNode>,
|
||||
},
|
||||
BraceDelimited {
|
||||
tags: (Tag, Tag),
|
||||
spans: (Span, Span),
|
||||
nodes: &'tokens Vec<TokenNode>,
|
||||
},
|
||||
Pipeline {
|
||||
pipe: Option<Tag>,
|
||||
elements: Tagged<&'tokens Vec<TokenNode>>,
|
||||
pipe: Option<Span>,
|
||||
elements: Spanned<&'tokens Vec<TokenNode>>,
|
||||
},
|
||||
ShorthandFlag {
|
||||
name: Tag,
|
||||
name: Span,
|
||||
},
|
||||
LonghandFlag {
|
||||
name: Tag,
|
||||
name: Span,
|
||||
},
|
||||
Dot {
|
||||
text: Tag,
|
||||
text: Span,
|
||||
},
|
||||
Operator {
|
||||
text: Tag,
|
||||
text: Span,
|
||||
},
|
||||
Whitespace {
|
||||
text: Tag,
|
||||
text: Span,
|
||||
},
|
||||
}
|
||||
|
||||
pub type TaggedAtomicToken<'tokens> = Tagged<AtomicToken<'tokens>>;
|
||||
pub type SpannedAtomicToken<'tokens> = Spanned<AtomicToken<'tokens>>;
|
||||
|
||||
impl<'tokens> TaggedAtomicToken<'tokens> {
|
||||
impl<'tokens> SpannedAtomicToken<'tokens> {
|
||||
pub fn into_hir(
|
||||
&self,
|
||||
context: &ExpandContext,
|
||||
@ -94,55 +95,55 @@ impl<'tokens> TaggedAtomicToken<'tokens> {
|
||||
AtomicToken::Eof { .. } => {
|
||||
return Err(ShellError::type_error(
|
||||
expected,
|
||||
"eof atomic token".tagged(self.tag),
|
||||
"eof atomic token".tagged(self.span),
|
||||
))
|
||||
}
|
||||
AtomicToken::Error { .. } => {
|
||||
return Err(ShellError::type_error(
|
||||
expected,
|
||||
"eof atomic token".tagged(self.tag),
|
||||
"eof atomic token".tagged(self.span),
|
||||
))
|
||||
}
|
||||
AtomicToken::Operator { .. } => {
|
||||
return Err(ShellError::type_error(
|
||||
expected,
|
||||
"operator".tagged(self.tag),
|
||||
"operator".tagged(self.span),
|
||||
))
|
||||
}
|
||||
AtomicToken::ShorthandFlag { .. } => {
|
||||
return Err(ShellError::type_error(
|
||||
expected,
|
||||
"shorthand flag".tagged(self.tag),
|
||||
"shorthand flag".tagged(self.span),
|
||||
))
|
||||
}
|
||||
AtomicToken::LonghandFlag { .. } => {
|
||||
return Err(ShellError::type_error(expected, "flag".tagged(self.tag)))
|
||||
return Err(ShellError::type_error(expected, "flag".tagged(self.span)))
|
||||
}
|
||||
AtomicToken::Whitespace { .. } => {
|
||||
return Err(ShellError::unimplemented("whitespace in AtomicToken"))
|
||||
}
|
||||
AtomicToken::Dot { .. } => {
|
||||
return Err(ShellError::type_error(expected, "dot".tagged(self.tag)))
|
||||
return Err(ShellError::type_error(expected, "dot".tagged(self.span)))
|
||||
}
|
||||
AtomicToken::Number { number } => {
|
||||
Expression::number(number.to_number(context.source), self.tag)
|
||||
Expression::number(number.to_number(context.source), self.span)
|
||||
}
|
||||
AtomicToken::FilePath { path } => Expression::file_path(
|
||||
expand_file_path(path.slice(context.source), context),
|
||||
self.tag,
|
||||
self.span,
|
||||
),
|
||||
AtomicToken::Size { number, unit } => {
|
||||
Expression::size(number.to_number(context.source), **unit, self.tag)
|
||||
Expression::size(number.to_number(context.source), **unit, self.span)
|
||||
}
|
||||
AtomicToken::String { body } => Expression::string(body, self.tag),
|
||||
AtomicToken::ItVariable { name } => Expression::it_variable(name, self.tag),
|
||||
AtomicToken::Variable { name } => Expression::variable(name, self.tag),
|
||||
AtomicToken::String { body } => Expression::string(*body, self.span),
|
||||
AtomicToken::ItVariable { name } => Expression::it_variable(*name, self.span),
|
||||
AtomicToken::Variable { name } => Expression::variable(*name, self.span),
|
||||
AtomicToken::ExternalCommand { command } => {
|
||||
Expression::external_command(command, self.tag)
|
||||
Expression::external_command(*command, self.span)
|
||||
}
|
||||
AtomicToken::ExternalWord { text } => Expression::string(text, self.tag),
|
||||
AtomicToken::GlobPattern { pattern } => Expression::pattern(pattern),
|
||||
AtomicToken::Word { text } => Expression::string(text, text),
|
||||
AtomicToken::ExternalWord { text } => Expression::string(*text, self.span),
|
||||
AtomicToken::GlobPattern { pattern } => Expression::pattern(*pattern),
|
||||
AtomicToken::Word { text } => Expression::string(*text, *text),
|
||||
AtomicToken::SquareDelimited { .. } => unimplemented!("into_hir"),
|
||||
AtomicToken::ParenDelimited { .. } => unimplemented!("into_hir"),
|
||||
AtomicToken::BraceDelimited { .. } => unimplemented!("into_hir"),
|
||||
@ -150,6 +151,33 @@ impl<'tokens> TaggedAtomicToken<'tokens> {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn spanned_type_name(&self) -> Spanned<&'static str> {
|
||||
match &self.item {
|
||||
AtomicToken::Eof { .. } => "eof",
|
||||
AtomicToken::Error { .. } => "error",
|
||||
AtomicToken::Operator { .. } => "operator",
|
||||
AtomicToken::ShorthandFlag { .. } => "shorthand flag",
|
||||
AtomicToken::LonghandFlag { .. } => "flag",
|
||||
AtomicToken::Whitespace { .. } => "whitespace",
|
||||
AtomicToken::Dot { .. } => "dot",
|
||||
AtomicToken::Number { .. } => "number",
|
||||
AtomicToken::FilePath { .. } => "file path",
|
||||
AtomicToken::Size { .. } => "size",
|
||||
AtomicToken::String { .. } => "string",
|
||||
AtomicToken::ItVariable { .. } => "$it",
|
||||
AtomicToken::Variable { .. } => "variable",
|
||||
AtomicToken::ExternalCommand { .. } => "external command",
|
||||
AtomicToken::ExternalWord { .. } => "external word",
|
||||
AtomicToken::GlobPattern { .. } => "file pattern",
|
||||
AtomicToken::Word { .. } => "word",
|
||||
AtomicToken::SquareDelimited { .. } => "array literal",
|
||||
AtomicToken::ParenDelimited { .. } => "parenthesized expression",
|
||||
AtomicToken::BraceDelimited { .. } => "block",
|
||||
AtomicToken::Pipeline { .. } => "pipeline",
|
||||
}
|
||||
.spanned(self.span)
|
||||
}
|
||||
|
||||
pub fn tagged_type_name(&self) -> Tagged<&'static str> {
|
||||
match &self.item {
|
||||
AtomicToken::Eof { .. } => "eof",
|
||||
@ -174,64 +202,64 @@ impl<'tokens> TaggedAtomicToken<'tokens> {
|
||||
AtomicToken::BraceDelimited { .. } => "block",
|
||||
AtomicToken::Pipeline { .. } => "pipeline",
|
||||
}
|
||||
.tagged(self.tag)
|
||||
.tagged(self.span)
|
||||
}
|
||||
|
||||
pub(crate) fn color_tokens(&self, shapes: &mut Vec<Tagged<FlatShape>>) {
|
||||
pub(crate) fn color_tokens(&self, shapes: &mut Vec<Spanned<FlatShape>>) {
|
||||
match &self.item {
|
||||
AtomicToken::Eof { .. } => {}
|
||||
AtomicToken::Error { .. } => return shapes.push(FlatShape::Error.tagged(self.tag)),
|
||||
AtomicToken::Error { .. } => return shapes.push(FlatShape::Error.spanned(self.span)),
|
||||
AtomicToken::Operator { .. } => {
|
||||
return shapes.push(FlatShape::Operator.tagged(self.tag));
|
||||
return shapes.push(FlatShape::Operator.spanned(self.span));
|
||||
}
|
||||
AtomicToken::ShorthandFlag { .. } => {
|
||||
return shapes.push(FlatShape::ShorthandFlag.tagged(self.tag));
|
||||
return shapes.push(FlatShape::ShorthandFlag.spanned(self.span));
|
||||
}
|
||||
AtomicToken::LonghandFlag { .. } => {
|
||||
return shapes.push(FlatShape::Flag.tagged(self.tag));
|
||||
return shapes.push(FlatShape::Flag.spanned(self.span));
|
||||
}
|
||||
AtomicToken::Whitespace { .. } => {
|
||||
return shapes.push(FlatShape::Whitespace.tagged(self.tag));
|
||||
return shapes.push(FlatShape::Whitespace.spanned(self.span));
|
||||
}
|
||||
AtomicToken::FilePath { .. } => return shapes.push(FlatShape::Path.tagged(self.tag)),
|
||||
AtomicToken::Dot { .. } => return shapes.push(FlatShape::Dot.tagged(self.tag)),
|
||||
AtomicToken::FilePath { .. } => return shapes.push(FlatShape::Path.spanned(self.span)),
|
||||
AtomicToken::Dot { .. } => return shapes.push(FlatShape::Dot.spanned(self.span)),
|
||||
AtomicToken::Number {
|
||||
number: RawNumber::Decimal(_),
|
||||
} => {
|
||||
return shapes.push(FlatShape::Decimal.tagged(self.tag));
|
||||
return shapes.push(FlatShape::Decimal.spanned(self.span));
|
||||
}
|
||||
AtomicToken::Number {
|
||||
number: RawNumber::Int(_),
|
||||
} => {
|
||||
return shapes.push(FlatShape::Int.tagged(self.tag));
|
||||
return shapes.push(FlatShape::Int.spanned(self.span));
|
||||
}
|
||||
AtomicToken::Size { number, unit } => {
|
||||
return shapes.push(
|
||||
FlatShape::Size {
|
||||
number: number.tag,
|
||||
unit: unit.tag,
|
||||
number: number.span,
|
||||
unit: unit.span,
|
||||
}
|
||||
.tagged(self.tag),
|
||||
.spanned(self.span),
|
||||
);
|
||||
}
|
||||
AtomicToken::String { .. } => return shapes.push(FlatShape::String.tagged(self.tag)),
|
||||
AtomicToken::String { .. } => return shapes.push(FlatShape::String.spanned(self.span)),
|
||||
AtomicToken::ItVariable { .. } => {
|
||||
return shapes.push(FlatShape::ItVariable.tagged(self.tag))
|
||||
return shapes.push(FlatShape::ItVariable.spanned(self.span))
|
||||
}
|
||||
AtomicToken::Variable { .. } => {
|
||||
return shapes.push(FlatShape::Variable.tagged(self.tag))
|
||||
return shapes.push(FlatShape::Variable.spanned(self.span))
|
||||
}
|
||||
AtomicToken::ExternalCommand { .. } => {
|
||||
return shapes.push(FlatShape::ExternalCommand.tagged(self.tag));
|
||||
return shapes.push(FlatShape::ExternalCommand.spanned(self.span));
|
||||
}
|
||||
AtomicToken::ExternalWord { .. } => {
|
||||
return shapes.push(FlatShape::ExternalWord.tagged(self.tag))
|
||||
return shapes.push(FlatShape::ExternalWord.spanned(self.span))
|
||||
}
|
||||
AtomicToken::GlobPattern { .. } => {
|
||||
return shapes.push(FlatShape::GlobPattern.tagged(self.tag))
|
||||
return shapes.push(FlatShape::GlobPattern.spanned(self.span))
|
||||
}
|
||||
AtomicToken::Word { .. } => return shapes.push(FlatShape::Word.tagged(self.tag)),
|
||||
_ => return shapes.push(FlatShape::Error.tagged(self.tag)),
|
||||
AtomicToken::Word { .. } => return shapes.push(FlatShape::Word.spanned(self.span)),
|
||||
_ => return shapes.push(FlatShape::Error.spanned(self.span)),
|
||||
}
|
||||
}
|
||||
}
|
||||
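// --- Illustrative sketch (not part of this commit's diff) ---
// A hypothetical helper showing how the Span-carrying variants above can be
// consumed: pull the interior span for a few token kinds and fall back to the
// whole atom's span otherwise, the same way into_hir and color_tokens lean on
// self.span.
fn atom_body_span(atom: &SpannedAtomicToken<'_>) -> Span {
    match &atom.item {
        AtomicToken::String { body } => *body,
        AtomicToken::Word { text } => *text,
        AtomicToken::Variable { name } | AtomicToken::ItVariable { name } => *name,
        _ => atom.span,
    }
}
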
@ -350,14 +378,14 @@ pub fn expand_atom<'me, 'content>(
|
||||
expected: &'static str,
|
||||
context: &ExpandContext,
|
||||
rule: ExpansionRule,
|
||||
) -> Result<TaggedAtomicToken<'content>, ShellError> {
|
||||
) -> Result<SpannedAtomicToken<'content>, ShellError> {
|
||||
if token_nodes.at_end() {
|
||||
match rule.allow_eof {
|
||||
true => {
|
||||
return Ok(AtomicToken::Eof {
|
||||
tag: Tag::unknown(),
|
||||
span: Span::unknown(),
|
||||
}
|
||||
.tagged_unknown())
|
||||
.spanned(Span::unknown()))
|
||||
}
|
||||
false => return Err(ShellError::unexpected_eof("anything", Tag::unknown())),
|
||||
}
|
||||
@ -376,10 +404,10 @@ pub fn expand_atom<'me, 'content>(
|
||||
Err(_) => {}
|
||||
|
||||
// But if it was a valid unit, we're done here
|
||||
Ok(Tagged {
|
||||
Ok(Spanned {
|
||||
item: (number, unit),
|
||||
tag,
|
||||
}) => return Ok(AtomicToken::Size { number, unit }.tagged(tag)),
|
||||
span,
|
||||
}) => return Ok(AtomicToken::Size { number, unit }.spanned(span)),
|
||||
},
|
||||
}
|
||||
|
||||
@ -388,7 +416,7 @@ pub fn expand_atom<'me, 'content>(
|
||||
match expand_syntax(&BarePathShape, token_nodes, context) {
|
||||
// If we didn't find a bare path
|
||||
Err(_) => {}
|
||||
Ok(tag) => {
|
||||
Ok(span) => {
|
||||
let next = token_nodes.peek_any();
|
||||
|
||||
match next.node {
|
||||
@ -397,7 +425,7 @@ pub fn expand_atom<'me, 'content>(
|
||||
// word, and we should try to parse it as a glob next
|
||||
}
|
||||
|
||||
_ => return Ok(AtomicToken::Word { text: tag }.tagged(tag)),
|
||||
_ => return Ok(AtomicToken::Word { text: span }.spanned(span)),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -407,7 +435,7 @@ pub fn expand_atom<'me, 'content>(
|
||||
match expand_syntax(&BarePatternShape, token_nodes, context) {
|
||||
// If we didn't find a bare path
|
||||
Err(_) => {}
|
||||
Ok(tag) => return Ok(AtomicToken::GlobPattern { pattern: tag }.tagged(tag)),
|
||||
Ok(span) => return Ok(AtomicToken::GlobPattern { pattern: span }.spanned(span)),
|
||||
}
|
||||
|
||||
// The next token corresponds to at most one atomic token
|
||||
@ -427,80 +455,84 @@ pub fn expand_atom<'me, 'content>(
|
||||
return Ok(AtomicToken::Error {
|
||||
error: error.clone(),
|
||||
}
|
||||
.tagged(error.tag));
|
||||
.spanned(error.span));
|
||||
}
|
||||
|
||||
// [ ... ]
|
||||
TokenNode::Delimited(Tagged {
|
||||
TokenNode::Delimited(Spanned {
|
||||
item:
|
||||
DelimitedNode {
|
||||
delimiter: Delimiter::Square,
|
||||
tags,
|
||||
spans,
|
||||
children,
|
||||
},
|
||||
tag,
|
||||
span,
|
||||
}) => {
|
||||
peeked.commit();
|
||||
let span = *span;
|
||||
return Ok(AtomicToken::SquareDelimited {
|
||||
nodes: children,
|
||||
tags: *tags,
|
||||
spans: *spans,
|
||||
}
|
||||
.tagged(tag));
|
||||
.spanned(span));
|
||||
}
|
||||
|
||||
TokenNode::Flag(Tagged {
|
||||
TokenNode::Flag(Spanned {
|
||||
item:
|
||||
Flag {
|
||||
kind: FlagKind::Shorthand,
|
||||
name,
|
||||
},
|
||||
tag,
|
||||
span,
|
||||
}) => {
|
||||
peeked.commit();
|
||||
return Ok(AtomicToken::ShorthandFlag { name: *name }.tagged(tag));
|
||||
return Ok(AtomicToken::ShorthandFlag { name: *name }.spanned(*span));
|
||||
}
|
||||
|
||||
TokenNode::Flag(Tagged {
|
||||
TokenNode::Flag(Spanned {
|
||||
item:
|
||||
Flag {
|
||||
kind: FlagKind::Longhand,
|
||||
name,
|
||||
},
|
||||
tag,
|
||||
span,
|
||||
}) => {
|
||||
peeked.commit();
|
||||
return Ok(AtomicToken::ShorthandFlag { name: *name }.tagged(tag));
|
||||
return Ok(AtomicToken::ShorthandFlag { name: *name }.spanned(*span));
|
||||
}
|
||||
|
||||
// If we see whitespace, process the whitespace according to the whitespace
|
||||
// handling rules
|
||||
TokenNode::Whitespace(tag) => match rule.whitespace {
|
||||
TokenNode::Whitespace(span) => match rule.whitespace {
|
||||
// if whitespace is allowed, return a whitespace token
|
||||
WhitespaceHandling::AllowWhitespace => {
|
||||
peeked.commit();
|
||||
return Ok(AtomicToken::Whitespace { text: *tag }.tagged(tag));
|
||||
return Ok(AtomicToken::Whitespace { text: *span }.spanned(*span));
|
||||
}
|
||||
|
||||
// if whitespace is disallowed, return an error
|
||||
WhitespaceHandling::RejectWhitespace => {
|
||||
return Err(ShellError::syntax_error(
|
||||
"Unexpected whitespace".tagged(tag),
|
||||
))
|
||||
return Err(ShellError::syntax_error("Unexpected whitespace".tagged(
|
||||
Tag {
|
||||
span: *span,
|
||||
anchor: None,
|
||||
},
|
||||
)))
|
||||
}
|
||||
},
|
||||
|
||||
other => {
|
||||
let tag = peeked.node.tag();
|
||||
let span = peeked.node.span();
|
||||
|
||||
peeked.commit();
|
||||
return Ok(AtomicToken::Error {
|
||||
error: ShellError::type_error("token", other.tagged_type_name()).tagged(tag),
|
||||
error: ShellError::type_error("token", other.tagged_type_name()).spanned(span),
|
||||
}
|
||||
.tagged(tag));
|
||||
.spanned(span));
|
||||
}
|
||||
}
|
||||
|
||||
parse_single_node(token_nodes, expected, |token, token_tag, err| {
|
||||
parse_single_node(token_nodes, expected, |token, token_span, err| {
|
||||
Ok(match token {
|
||||
// First, the error cases. Each error case corresponds to a expansion rule
|
||||
// flag that can be used to allow the case
|
||||
@ -511,31 +543,38 @@ pub fn expand_atom<'me, 'content>(
|
||||
RawToken::ExternalCommand(_) if !rule.allow_external_command => {
|
||||
return Err(ShellError::type_error(
|
||||
expected,
|
||||
token.type_name().tagged(token_tag),
|
||||
token.type_name().tagged(Tag {
|
||||
span: token_span,
|
||||
anchor: None,
|
||||
}),
|
||||
))
|
||||
}
|
||||
// rule.allow_external_word
|
||||
RawToken::ExternalWord if !rule.allow_external_word => {
|
||||
return Err(ShellError::invalid_external_word(token_tag))
|
||||
return Err(ShellError::invalid_external_word(Tag {
|
||||
span: token_span,
|
||||
anchor: None,
|
||||
}))
|
||||
}
|
||||
|
||||
RawToken::Number(number) => AtomicToken::Number { number }.tagged(token_tag),
|
||||
RawToken::Operator(_) => AtomicToken::Operator { text: token_tag }.tagged(token_tag),
|
||||
RawToken::String(body) => AtomicToken::String { body }.tagged(token_tag),
|
||||
RawToken::Number(number) => AtomicToken::Number { number }.spanned(token_span),
|
||||
RawToken::Operator(_) => AtomicToken::Operator { text: token_span }.spanned(token_span),
|
||||
RawToken::String(body) => AtomicToken::String { body }.spanned(token_span),
|
||||
RawToken::Variable(name) if name.slice(context.source) == "it" => {
|
||||
AtomicToken::ItVariable { name }.tagged(token_tag)
|
||||
AtomicToken::ItVariable { name }.spanned(token_span)
|
||||
}
|
||||
RawToken::Variable(name) => AtomicToken::Variable { name }.tagged(token_tag),
|
||||
RawToken::Variable(name) => AtomicToken::Variable { name }.spanned(token_span),
|
||||
RawToken::ExternalCommand(command) => {
|
||||
AtomicToken::ExternalCommand { command }.tagged(token_tag)
|
||||
AtomicToken::ExternalCommand { command }.spanned(token_span)
|
||||
}
|
||||
RawToken::ExternalWord => {
|
||||
AtomicToken::ExternalWord { text: token_tag }.tagged(token_tag)
|
||||
AtomicToken::ExternalWord { text: token_span }.spanned(token_span)
|
||||
}
|
||||
RawToken::GlobPattern => {
|
||||
AtomicToken::GlobPattern { pattern: token_tag }.tagged(token_tag)
|
||||
RawToken::GlobPattern => AtomicToken::GlobPattern {
|
||||
pattern: token_span,
|
||||
}
|
||||
RawToken::Bare => AtomicToken::Word { text: token_tag }.tagged(token_tag),
|
||||
.spanned(token_span),
|
||||
RawToken::Bare => AtomicToken::Word { text: token_span }.spanned(token_span),
|
||||
})
|
||||
})
|
||||
}
|
||||
|
@ -6,27 +6,27 @@ use crate::prelude::*;
pub fn expand_delimited_square(
children: &Vec<TokenNode>,
tag: Tag,
span: Span,
context: &ExpandContext,
) -> Result<hir::Expression, ShellError> {
let mut tokens = TokensIterator::new(&children, tag, false);
let mut tokens = TokensIterator::new(&children, span, false);
let list = expand_syntax(&ExpressionListShape, &mut tokens, context);
Ok(hir::Expression::list(list?, tag))
Ok(hir::Expression::list(list?, Tag { span, anchor: None }))
}
pub fn color_delimited_square(
(open, close): (Tag, Tag),
(open, close): (Span, Span),
children: &Vec<TokenNode>,
tag: Tag,
span: Span,
context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>,
shapes: &mut Vec<Spanned<FlatShape>>,
) {
shapes.push(FlatShape::OpenDelimiter(Delimiter::Square).tagged(open));
let mut tokens = TokensIterator::new(&children, tag, false);
shapes.push(FlatShape::OpenDelimiter(Delimiter::Square).spanned(open));
let mut tokens = TokensIterator::new(&children, span, false);
let _list = color_syntax(&ExpressionListShape, &mut tokens, context, shapes);
shapes.push(FlatShape::CloseDelimiter(Delimiter::Square).tagged(close));
shapes.push(FlatShape::CloseDelimiter(Delimiter::Square).spanned(close));
}
#[derive(Debug, Copy, Clone)]
@ -34,16 +34,16 @@ pub struct DelimitedShape;
impl ColorSyntax for DelimitedShape {
type Info = ();
type Input = (Delimiter, Tag, Tag);
type Input = (Delimiter, Span, Span);
fn color_syntax<'a, 'b>(
&self,
(delimiter, open, close): &(Delimiter, Tag, Tag),
(delimiter, open, close): &(Delimiter, Span, Span),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Self::Info {
shapes.push(FlatShape::OpenDelimiter(*delimiter).tagged(open));
shapes.push(FlatShape::OpenDelimiter(*delimiter).spanned(*open));
color_syntax(&ExpressionListShape, token_nodes, context, shapes);
shapes.push(FlatShape::CloseDelimiter(*delimiter).tagged(close));
shapes.push(FlatShape::CloseDelimiter(*delimiter).spanned(*close));
}
}
@ -17,7 +17,7 @@ impl FallibleColorSyntax for FilePathShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let atom = expand_atom(
|
||||
token_nodes,
|
||||
@ -36,7 +36,7 @@ impl FallibleColorSyntax for FilePathShape {
|
||||
| AtomicToken::String { .. }
|
||||
| AtomicToken::Number { .. }
|
||||
| AtomicToken::Size { .. } => {
|
||||
shapes.push(FlatShape::Path.tagged(atom.tag));
|
||||
shapes.push(FlatShape::Path.spanned(atom.span));
|
||||
}
|
||||
|
||||
_ => atom.color_tokens(shapes),
|
||||
@ -57,12 +57,12 @@ impl ExpandExpression for FilePathShape {
|
||||
match atom.item {
|
||||
AtomicToken::Word { text: body } | AtomicToken::String { body } => {
|
||||
let path = expand_file_path(body.slice(context.source), context);
|
||||
return Ok(hir::Expression::file_path(path, atom.tag));
|
||||
return Ok(hir::Expression::file_path(path, atom.span));
|
||||
}
|
||||
|
||||
AtomicToken::Number { .. } | AtomicToken::Size { .. } => {
|
||||
let path = atom.tag.slice(context.source);
|
||||
return Ok(hir::Expression::file_path(path, atom.tag));
|
||||
let path = atom.span.slice(context.source);
|
||||
return Ok(hir::Expression::file_path(path, atom.span));
|
||||
}
|
||||
|
||||
_ => return atom.into_hir(context, "file path"),
|
||||
|
@ -9,7 +9,7 @@ use crate::parser::{
|
||||
hir::TokensIterator,
|
||||
FlatShape,
|
||||
};
|
||||
use crate::Tagged;
|
||||
use crate::Spanned;
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub struct ExpressionListShape;
|
||||
@ -60,7 +60,7 @@ impl ColorSyntax for ExpressionListShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) {
|
||||
// We encountered a parsing error and will continue with simpler coloring ("backoff
|
||||
// coloring mode")
|
||||
@ -126,7 +126,7 @@ impl ColorSyntax for BackoffColoringMode {
|
||||
_input: &Self::Input,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Self::Info {
|
||||
loop {
|
||||
if token_nodes.at_end() {
|
||||
@ -159,7 +159,7 @@ impl ColorSyntax for SimplestExpression {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) {
|
||||
let atom = expand_atom(
|
||||
token_nodes,
|
||||
|
@ -18,20 +18,27 @@ impl ExpandExpression for NumberShape {
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
parse_single_node(token_nodes, "Number", |token, token_tag, err| {
|
||||
parse_single_node(token_nodes, "Number", |token, token_span, err| {
|
||||
Ok(match token {
|
||||
RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()),
|
||||
RawToken::Variable(tag) if tag.slice(context.source) == "it" => {
|
||||
hir::Expression::it_variable(tag, token_tag)
|
||||
hir::Expression::it_variable(tag, token_span)
|
||||
}
|
||||
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag),
|
||||
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)),
|
||||
RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag),
|
||||
RawToken::ExternalCommand(tag) => {
|
||||
hir::Expression::external_command(tag, token_span)
|
||||
}
|
||||
RawToken::ExternalWord => {
|
||||
return Err(ShellError::invalid_external_word(Tag {
|
||||
span: token_span,
|
||||
anchor: None,
|
||||
}))
|
||||
}
|
||||
RawToken::Variable(tag) => hir::Expression::variable(tag, token_span),
|
||||
RawToken::Number(number) => {
|
||||
hir::Expression::number(number.to_number(context.source), token_tag)
|
||||
hir::Expression::number(number.to_number(context.source), token_span)
|
||||
}
|
||||
RawToken::Bare => hir::Expression::bare(token_tag),
|
||||
RawToken::String(tag) => hir::Expression::string(tag, token_tag),
|
||||
RawToken::Bare => hir::Expression::bare(token_span),
|
||||
RawToken::String(tag) => hir::Expression::string(tag, token_span),
|
||||
})
|
||||
})
|
||||
}
|
||||
@ -46,18 +53,18 @@ impl FallibleColorSyntax for NumberShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let atom = token_nodes.spanned(|token_nodes| {
|
||||
expand_atom(token_nodes, "number", context, ExpansionRule::permissive())
|
||||
});
|
||||
|
||||
let atom = match atom {
|
||||
Tagged { item: Err(_), tag } => {
|
||||
shapes.push(FlatShape::Error.tagged(tag));
|
||||
Spanned { item: Err(_), span } => {
|
||||
shapes.push(FlatShape::Error.spanned(span));
|
||||
return Ok(());
|
||||
}
|
||||
Tagged { item: Ok(atom), .. } => atom,
|
||||
Spanned { item: Ok(atom), .. } => atom,
|
||||
};
|
||||
|
||||
atom.color_tokens(shapes);
|
||||
@ -75,21 +82,25 @@ impl ExpandExpression for IntShape {
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
parse_single_node(token_nodes, "Integer", |token, token_tag, err| {
|
||||
parse_single_node(token_nodes, "Integer", |token, token_span, err| {
|
||||
Ok(match token {
|
||||
RawToken::GlobPattern | RawToken::Operator(..) => return Err(err.error()),
|
||||
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)),
|
||||
RawToken::Variable(tag) if tag.slice(context.source) == "it" => {
|
||||
hir::Expression::it_variable(tag, token_tag)
|
||||
RawToken::ExternalWord => {
|
||||
return Err(ShellError::invalid_external_word(token_span))
|
||||
}
|
||||
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag),
|
||||
RawToken::Variable(tag) => hir::Expression::variable(tag, token_tag),
|
||||
RawToken::Variable(span) if span.slice(context.source) == "it" => {
|
||||
hir::Expression::it_variable(span, token_span)
|
||||
}
|
||||
RawToken::ExternalCommand(span) => {
|
||||
hir::Expression::external_command(span, token_span)
|
||||
}
|
||||
RawToken::Variable(span) => hir::Expression::variable(span, token_span),
|
||||
RawToken::Number(number @ RawNumber::Int(_)) => {
|
||||
hir::Expression::number(number.to_number(context.source), token_tag)
|
||||
hir::Expression::number(number.to_number(context.source), token_span)
|
||||
}
|
||||
RawToken::Number(_) => return Err(err.error()),
|
||||
RawToken::Bare => hir::Expression::bare(token_tag),
|
||||
RawToken::String(tag) => hir::Expression::string(tag, token_tag),
|
||||
RawToken::Bare => hir::Expression::bare(token_span),
|
||||
RawToken::String(span) => hir::Expression::string(span, token_span),
|
||||
})
|
||||
})
|
||||
}
|
||||
@ -104,18 +115,18 @@ impl FallibleColorSyntax for IntShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let atom = token_nodes.spanned(|token_nodes| {
|
||||
expand_atom(token_nodes, "integer", context, ExpansionRule::permissive())
|
||||
});
|
||||
|
||||
let atom = match atom {
|
||||
Tagged { item: Err(_), tag } => {
|
||||
shapes.push(FlatShape::Error.tagged(tag));
|
||||
Spanned { item: Err(_), span } => {
|
||||
shapes.push(FlatShape::Error.spanned(span));
|
||||
return Ok(());
|
||||
}
|
||||
Tagged { item: Ok(atom), .. } => atom,
|
||||
Spanned { item: Ok(atom), .. } => atom,
|
||||
};
|
||||
|
||||
atom.color_tokens(shapes);
|
||||
|
@ -18,14 +18,14 @@ impl FallibleColorSyntax for PatternShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
token_nodes.atomic(|token_nodes| {
|
||||
let atom = expand_atom(token_nodes, "pattern", context, ExpansionRule::permissive())?;
|
||||
|
||||
match &atom.item {
|
||||
AtomicToken::GlobPattern { .. } | AtomicToken::Word { .. } => {
|
||||
shapes.push(FlatShape::GlobPattern.tagged(atom.tag));
|
||||
shapes.push(FlatShape::GlobPattern.spanned(atom.span));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@ -85,23 +85,23 @@ impl ExpandExpression for PatternShape {
|
||||
pub struct BarePatternShape;
|
||||
|
||||
impl ExpandSyntax for BarePatternShape {
|
||||
type Output = Tag;
|
||||
type Output = Span;
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Tag, ShellError> {
|
||||
) -> Result<Span, ShellError> {
|
||||
expand_bare(token_nodes, context, |token| match token {
|
||||
TokenNode::Token(Tagged {
|
||||
TokenNode::Token(Spanned {
|
||||
item: RawToken::Bare,
|
||||
..
|
||||
})
|
||||
| TokenNode::Token(Tagged {
|
||||
| TokenNode::Token(Spanned {
|
||||
item: RawToken::Operator(Operator::Dot),
|
||||
..
|
||||
})
|
||||
| TokenNode::Token(Tagged {
|
||||
| TokenNode::Token(Spanned {
|
||||
item: RawToken::GlobPattern,
|
||||
..
|
||||
}) => true,
|
||||
|
@ -18,7 +18,7 @@ impl FallibleColorSyntax for StringShape {
|
||||
input: &FlatShape,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let atom = expand_atom(token_nodes, "string", context, ExpansionRule::permissive());
|
||||
|
||||
@ -28,10 +28,10 @@ impl FallibleColorSyntax for StringShape {
|
||||
};
|
||||
|
||||
match atom {
|
||||
Tagged {
|
||||
Spanned {
|
||||
item: AtomicToken::String { .. },
|
||||
tag,
|
||||
} => shapes.push((*input).tagged(tag)),
|
||||
span,
|
||||
} => shapes.push((*input).spanned(span)),
|
||||
other => other.color_tokens(shapes),
|
||||
}
|
||||
|
||||
@ -45,26 +45,30 @@ impl ExpandExpression for StringShape {
|
||||
token_nodes: &mut TokensIterator<'_>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<hir::Expression, ShellError> {
|
||||
parse_single_node(token_nodes, "String", |token, token_tag, _| {
|
||||
parse_single_node(token_nodes, "String", |token, token_span, _| {
|
||||
Ok(match token {
|
||||
RawToken::GlobPattern => {
|
||||
return Err(ShellError::type_error(
|
||||
"String",
|
||||
"glob pattern".tagged(token_tag),
|
||||
"glob pattern".tagged(token_span),
|
||||
))
|
||||
}
|
||||
RawToken::Operator(..) => {
|
||||
return Err(ShellError::type_error(
|
||||
"String",
|
||||
"operator".tagged(token_tag),
|
||||
"operator".tagged(token_span),
|
||||
))
|
||||
}
|
||||
RawToken::Variable(tag) => expand_variable(tag, token_tag, &context.source),
|
||||
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token_tag),
|
||||
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token_tag)),
|
||||
RawToken::Number(_) => hir::Expression::bare(token_tag),
|
||||
RawToken::Bare => hir::Expression::bare(token_tag),
|
||||
RawToken::String(tag) => hir::Expression::string(tag, token_tag),
|
||||
RawToken::Variable(span) => expand_variable(span, token_span, &context.source),
|
||||
RawToken::ExternalCommand(span) => {
|
||||
hir::Expression::external_command(span, token_span)
|
||||
}
|
||||
RawToken::ExternalWord => {
|
||||
return Err(ShellError::invalid_external_word(token_span))
|
||||
}
|
||||
RawToken::Number(_) => hir::Expression::bare(token_span),
|
||||
RawToken::Bare => hir::Expression::bare(token_span),
|
||||
RawToken::String(span) => hir::Expression::string(span, token_span),
|
||||
})
|
||||
})
|
||||
}
|
||||
|
@ -14,24 +14,24 @@ use nom::IResult;
|
||||
pub struct UnitShape;
|
||||
|
||||
impl ExpandSyntax for UnitShape {
|
||||
type Output = Tagged<(Tagged<RawNumber>, Tagged<Unit>)>;
|
||||
type Output = Spanned<(Spanned<RawNumber>, Spanned<Unit>)>;
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Tagged<(Tagged<RawNumber>, Tagged<Unit>)>, ShellError> {
|
||||
) -> Result<Spanned<(Spanned<RawNumber>, Spanned<Unit>)>, ShellError> {
|
||||
let peeked = token_nodes.peek_any().not_eof("unit")?;
|
||||
|
||||
let tag = match peeked.node {
|
||||
TokenNode::Token(Tagged {
|
||||
let span = match peeked.node {
|
||||
TokenNode::Token(Spanned {
|
||||
item: RawToken::Bare,
|
||||
tag,
|
||||
}) => tag,
|
||||
span,
|
||||
}) => span,
|
||||
_ => return Err(peeked.type_error("unit")),
|
||||
};
|
||||
|
||||
let unit = unit_size(tag.slice(context.source), *tag);
|
||||
let unit = unit_size(span.slice(context.source), *span);
|
||||
|
||||
let (_, (number, unit)) = match unit {
|
||||
Err(_) => {
|
||||
@ -44,11 +44,11 @@ impl ExpandSyntax for UnitShape {
|
||||
};
|
||||
|
||||
peeked.commit();
|
||||
Ok((number, unit).tagged(tag))
|
||||
Ok((number, unit).spanned(*span))
|
||||
}
|
||||
}
|
||||
|
||||
fn unit_size(input: &str, bare_tag: Tag) -> IResult<&str, (Tagged<RawNumber>, Tagged<Unit>)> {
|
||||
fn unit_size(input: &str, bare_span: Span) -> IResult<&str, (Spanned<RawNumber>, Spanned<Unit>)> {
|
||||
let (input, digits) = digit1(input)?;
|
||||
|
||||
let (input, dot) = opt(tag("."))(input)?;
|
||||
@ -58,20 +58,18 @@ fn unit_size(input: &str, bare_tag: Tag) -> IResult<&str, (Tagged<RawNumber>, Ta
|
||||
let (input, rest) = digit1(input)?;
|
||||
(
|
||||
input,
|
||||
RawNumber::decimal((
|
||||
bare_tag.span.start(),
|
||||
bare_tag.span.start() + digits.len() + dot.len() + rest.len(),
|
||||
bare_tag.anchor,
|
||||
RawNumber::decimal(Span::new(
|
||||
bare_span.start(),
|
||||
bare_span.start() + digits.len() + dot.len() + rest.len(),
|
||||
)),
|
||||
)
|
||||
}
|
||||
|
||||
None => (
|
||||
input,
|
||||
RawNumber::int((
|
||||
bare_tag.span.start(),
|
||||
bare_tag.span.start() + digits.len(),
|
||||
bare_tag.anchor,
|
||||
RawNumber::int(Span::new(
|
||||
bare_span.start(),
|
||||
bare_span.start() + digits.len(),
|
||||
)),
|
||||
),
|
||||
};
|
||||
@ -85,12 +83,10 @@ fn unit_size(input: &str, bare_tag: Tag) -> IResult<&str, (Tagged<RawNumber>, Ta
|
||||
value(Unit::MB, alt((tag("PB"), tag("pb"), tag("Pb")))),
|
||||
)))(input)?;
|
||||
|
||||
let start_span = number.tag.span.end();
|
||||
let start_span = number.span.end();
|
||||
|
||||
let unit_tag = Tag::new(
|
||||
bare_tag.anchor,
|
||||
Span::from((start_span, bare_tag.span.end())),
|
||||
);
|
||||
|
||||
Ok((input, (number, unit.tagged(unit_tag))))
|
||||
Ok((
|
||||
input,
|
||||
(number, unit.spanned(Span::new(start_span, bare_span.end()))),
|
||||
))
|
||||
}
|
||||
|
@ -23,9 +23,9 @@ impl ExpandExpression for VariablePathShape {
|
||||
// 2. consume the next token as a member and push it onto tail
|
||||
|
||||
let head = expand_expr(&VariableShape, token_nodes, context)?;
|
||||
let start = head.tag();
|
||||
let start = head.span;
|
||||
let mut end = start;
|
||||
let mut tail: Vec<Tagged<String>> = vec![];
|
||||
let mut tail: Vec<Spanned<String>> = vec![];
|
||||
|
||||
loop {
|
||||
match DotShape.skip(token_nodes, context) {
|
||||
@ -34,9 +34,9 @@ impl ExpandExpression for VariablePathShape {
|
||||
}
|
||||
|
||||
let syntax = expand_syntax(&MemberShape, token_nodes, context)?;
|
||||
let member = syntax.to_tagged_string(context.source);
|
||||
let member = syntax.to_spanned_string(context.source);
|
||||
|
||||
end = member.tag();
|
||||
end = member.span;
|
||||
tail.push(member);
|
||||
}
|
||||
|
||||
@ -53,7 +53,7 @@ impl FallibleColorSyntax for VariablePathShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
token_nodes.atomic(|token_nodes| {
|
||||
// If the head of the token stream is not a variable, fail
|
||||
@ -97,7 +97,7 @@ impl FallibleColorSyntax for PathTailShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
token_nodes.atomic(|token_nodes| loop {
|
||||
let result = color_fallible_syntax_with(
|
||||
@ -120,13 +120,13 @@ impl FallibleColorSyntax for PathTailShape {
|
||||
}
|
||||
|
||||
impl ExpandSyntax for PathTailShape {
|
||||
type Output = (Vec<Tagged<String>>, Tag);
|
||||
type Output = (Vec<Spanned<String>>, Span);
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
) -> Result<Self::Output, ShellError> {
|
||||
let mut end: Option<Tag> = None;
|
||||
let mut end: Option<Span> = None;
|
||||
let mut tail = vec![];
|
||||
|
||||
loop {
|
||||
@ -136,17 +136,21 @@ impl ExpandSyntax for PathTailShape {
|
||||
}
|
||||
|
||||
let syntax = expand_syntax(&MemberShape, token_nodes, context)?;
|
||||
let member = syntax.to_tagged_string(context.source);
|
||||
end = Some(member.tag());
|
||||
let member = syntax.to_spanned_string(context.source);
|
||||
end = Some(member.span);
|
||||
tail.push(member);
|
||||
}
|
||||
|
||||
match end {
|
||||
None => {
|
||||
return Err(ShellError::type_error(
|
||||
"path tail",
|
||||
token_nodes.typed_tag_at_cursor(),
|
||||
))
|
||||
return Err(ShellError::type_error("path tail", {
|
||||
let typed_span = token_nodes.typed_span_at_cursor();
|
||||
|
||||
Tagged {
|
||||
tag: typed_span.span.into(),
|
||||
item: typed_span.item,
|
||||
}
|
||||
}))
|
||||
}
|
||||
|
||||
Some(end) => Ok((tail, end)),
|
||||
@ -156,8 +160,8 @@ impl ExpandSyntax for PathTailShape {
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum ExpressionContinuation {
|
||||
DotSuffix(Tag, Tagged<String>),
|
||||
InfixSuffix(Tagged<Operator>, Expression),
|
||||
DotSuffix(Span, Spanned<String>),
|
||||
InfixSuffix(Spanned<Operator>, Expression),
|
||||
}
|
||||
|
||||
/// An expression continuation
|
||||
@ -179,7 +183,7 @@ impl ExpandSyntax for ExpressionContinuationShape {
|
||||
// If a `.` was matched, it's a `Path`, and we expect a `Member` next
|
||||
Ok(dot) => {
|
||||
let syntax = expand_syntax(&MemberShape, token_nodes, context)?;
|
||||
let member = syntax.to_tagged_string(context.source);
|
||||
let member = syntax.to_spanned_string(context.source);
|
||||
|
||||
Ok(ExpressionContinuation::DotSuffix(dot, member))
|
||||
}
|
||||
@ -209,7 +213,7 @@ impl FallibleColorSyntax for ExpressionContinuationShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<ContinuationInfo, ShellError> {
|
||||
token_nodes.atomic(|token_nodes| {
|
||||
// Try to expand a `.`
|
||||
@ -290,7 +294,7 @@ impl FallibleColorSyntax for VariableShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let atom = expand_atom(
|
||||
token_nodes,
|
||||
@ -306,11 +310,11 @@ impl FallibleColorSyntax for VariableShape {
|
||||
|
||||
match &atom.item {
|
||||
AtomicToken::Variable { .. } => {
|
||||
shapes.push(FlatShape::Variable.tagged(atom.tag));
|
||||
shapes.push(FlatShape::Variable.spanned(atom.span));
|
||||
Ok(())
|
||||
}
|
||||
AtomicToken::ItVariable { .. } => {
|
||||
shapes.push(FlatShape::ItVariable.tagged(atom.tag));
|
||||
shapes.push(FlatShape::ItVariable.spanned(atom.span));
|
||||
Ok(())
|
||||
}
|
||||
_ => Err(ShellError::type_error("variable", atom.tagged_type_name())),
|
||||
@ -320,50 +324,53 @@ impl FallibleColorSyntax for VariableShape {
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub enum Member {
|
||||
String(/* outer */ Tag, /* inner */ Tag),
|
||||
Bare(Tag),
|
||||
String(/* outer */ Span, /* inner */ Span),
|
||||
Bare(Span),
|
||||
}
|
||||
|
||||
impl Member {
|
||||
pub(crate) fn to_expr(&self) -> hir::Expression {
|
||||
match self {
|
||||
Member::String(outer, inner) => hir::Expression::string(inner, outer),
|
||||
Member::Bare(tag) => hir::Expression::string(tag, tag),
|
||||
Member::String(outer, inner) => hir::Expression::string(*inner, *outer),
|
||||
Member::Bare(span) => hir::Expression::string(*span, *span),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn tag(&self) -> Tag {
|
||||
pub(crate) fn span(&self) -> Span {
|
||||
match self {
|
||||
Member::String(outer, _inner) => *outer,
|
||||
Member::Bare(tag) => *tag,
|
||||
Member::Bare(span) => *span,
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn to_tagged_string(&self, source: &str) -> Tagged<String> {
|
||||
pub(crate) fn to_spanned_string(&self, source: &str) -> Spanned<String> {
|
||||
match self {
|
||||
Member::String(outer, inner) => inner.string(source).tagged(outer),
|
||||
Member::Bare(tag) => tag.tagged_string(source),
|
||||
Member::String(outer, inner) => inner.string(source).spanned(*outer),
|
||||
Member::Bare(span) => span.spanned_string(source),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn tagged_type_name(&self) -> Tagged<&'static str> {
|
||||
match self {
|
||||
Member::String(outer, _inner) => "string".tagged(outer),
|
||||
Member::Bare(tag) => "word".tagged(tag),
|
||||
Member::Bare(span) => "word".tagged(Tag {
|
||||
span: *span,
|
||||
anchor: None,
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
enum ColumnPathState {
|
||||
Initial,
|
||||
LeadingDot(Tag),
|
||||
Dot(Tag, Vec<Member>, Tag),
|
||||
Member(Tag, Vec<Member>),
|
||||
LeadingDot(Span),
|
||||
Dot(Span, Vec<Member>, Span),
|
||||
Member(Span, Vec<Member>),
|
||||
Error(ShellError),
|
||||
}
|
||||
|
||||
impl ColumnPathState {
|
||||
pub fn dot(self, dot: Tag) -> ColumnPathState {
|
||||
pub fn dot(self, dot: Span) -> ColumnPathState {
|
||||
match self {
|
||||
ColumnPathState::Initial => ColumnPathState::LeadingDot(dot),
|
||||
ColumnPathState::LeadingDot(_) => {
|
||||
@ -379,13 +386,13 @@ impl ColumnPathState {
|
||||
|
||||
pub fn member(self, member: Member) -> ColumnPathState {
|
||||
match self {
|
||||
ColumnPathState::Initial => ColumnPathState::Member(member.tag(), vec![member]),
|
||||
ColumnPathState::Initial => ColumnPathState::Member(member.span(), vec![member]),
|
||||
ColumnPathState::LeadingDot(tag) => {
|
||||
ColumnPathState::Member(tag.until(member.tag()), vec![member])
|
||||
ColumnPathState::Member(tag.until(member.span()), vec![member])
|
||||
}
|
||||
|
||||
ColumnPathState::Dot(tag, mut tags, _) => {
|
||||
ColumnPathState::Member(tag.until(member.tag()), {
|
||||
ColumnPathState::Member(tag.until(member.span()), {
|
||||
tags.push(member);
|
||||
tags
|
||||
})
|
||||
@ -449,7 +456,7 @@ impl FallibleColorSyntax for ColumnPathShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
// If there's not even one member shape, fail
|
||||
color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
|
||||
@ -513,7 +520,7 @@ impl FallibleColorSyntax for MemberShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let bare = color_fallible_syntax_with(
|
||||
&BareShape,
|
||||
@ -552,7 +559,7 @@ impl ExpandSyntax for MemberShape {
|
||||
let bare = BareShape.test(token_nodes, context);
|
||||
if let Some(peeked) = bare {
|
||||
let node = peeked.not_eof("column")?.commit();
|
||||
return Ok(Member::Bare(node.tag()));
|
||||
return Ok(Member::Bare(node.span()));
|
||||
}
|
||||
|
||||
let string = StringShape.test(token_nodes, context);
|
||||
@ -583,14 +590,14 @@ impl FallibleColorSyntax for ColorableDotShape {
|
||||
input: &FlatShape,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
_context: &ExpandContext,
|
||||
shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let peeked = token_nodes.peek_any().not_eof("dot")?;
|
||||
|
||||
match peeked.node {
|
||||
node if node.is_dot() => {
|
||||
peeked.commit();
|
||||
shapes.push((*input).tagged(node.tag()));
|
||||
shapes.push((*input).spanned(node.span()));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@ -612,20 +619,20 @@ impl SkipSyntax for DotShape {
|
||||
}
|
||||
|
||||
impl ExpandSyntax for DotShape {
|
||||
type Output = Tag;
|
||||
type Output = Span;
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
_context: &ExpandContext,
|
||||
) -> Result<Self::Output, ShellError> {
|
||||
parse_single_node(token_nodes, "dot", |token, token_tag, _| {
|
||||
parse_single_node(token_nodes, "dot", |token, token_span, _| {
|
||||
Ok(match token {
|
||||
RawToken::Operator(Operator::Dot) => token_tag,
|
||||
RawToken::Operator(Operator::Dot) => token_span,
|
||||
_ => {
|
||||
return Err(ShellError::type_error(
|
||||
"dot",
|
||||
token.type_name().tagged(token_tag),
|
||||
token.type_name().tagged(token_span),
|
||||
))
|
||||
}
|
||||
})
|
||||
@ -645,7 +652,7 @@ impl FallibleColorSyntax for InfixShape {
|
||||
_input: &(),
|
||||
token_nodes: &'b mut TokensIterator<'a>,
|
||||
context: &ExpandContext,
|
||||
outer_shapes: &mut Vec<Tagged<FlatShape>>,
|
||||
outer_shapes: &mut Vec<Spanned<FlatShape>>,
|
||||
) -> Result<(), ShellError> {
|
||||
let checkpoint = token_nodes.checkpoint();
|
||||
let mut shapes = vec![];
|
||||
@ -657,18 +664,18 @@ impl FallibleColorSyntax for InfixShape {
|
||||
parse_single_node(
|
||||
checkpoint.iterator,
|
||||
"infix operator",
|
||||
|token, token_tag, _| {
|
||||
|token, token_span, _| {
|
||||
match token {
|
||||
// If it's an operator (and not `.`), it's a match
|
||||
RawToken::Operator(operator) if operator != Operator::Dot => {
|
||||
shapes.push(FlatShape::Operator.tagged(token_tag));
|
||||
shapes.push(FlatShape::Operator.spanned(token_span));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Otherwise, it's not a match
|
||||
_ => Err(ShellError::type_error(
|
||||
"infix operator",
|
||||
token.type_name().tagged(token_tag),
|
||||
token.type_name().tagged(token_span),
|
||||
)),
|
||||
}
|
||||
},
|
||||
@ -684,7 +691,7 @@ impl FallibleColorSyntax for InfixShape {
|
||||
}
|
||||
|
||||
impl ExpandSyntax for InfixShape {
|
||||
type Output = (Tag, Tagged<Operator>, Tag);
|
||||
type Output = (Span, Spanned<Operator>, Span);
|
||||
|
||||
fn expand_syntax<'a, 'b>(
|
||||
&self,
|
||||
@ -700,18 +707,18 @@ impl ExpandSyntax for InfixShape {
|
||||
let operator = parse_single_node(
|
||||
checkpoint.iterator,
|
||||
"infix operator",
|
||||
|token, token_tag, _| {
|
||||
|token, token_span, _| {
|
||||
Ok(match token {
|
||||
// If it's an operator (and not `.`), it's a match
|
||||
RawToken::Operator(operator) if operator != Operator::Dot => {
|
||||
operator.tagged(token_tag)
|
||||
operator.spanned(token_span)
|
||||
}
|
||||
|
||||
// Otherwise, it's not a match
|
||||
_ => {
|
||||
return Err(ShellError::type_error(
|
||||
"infix operator",
|
||||
token.type_name().tagged(token_tag),
|
||||
token.type_name().tagged(token_span),
|
||||
))
|
||||
}
|
||||
})
|
||||
|
@ -1,5 +1,5 @@
|
||||
use crate::parser::{Delimiter, Flag, FlagKind, Operator, RawNumber, RawToken, TokenNode};
|
||||
use crate::{Tag, Tagged, TaggedItem, Text};
|
||||
use crate::{Span, Spanned, SpannedItem, Text};
|
||||
|
||||
#[derive(Debug, Copy, Clone)]
|
||||
pub enum FlatShape {
|
||||
@ -25,32 +25,34 @@ pub enum FlatShape {
|
||||
Decimal,
|
||||
Whitespace,
|
||||
Error,
|
||||
Size { number: Tag, unit: Tag },
|
||||
Size { number: Span, unit: Span },
|
||||
}
|
||||
|
||||
impl FlatShape {
|
||||
pub fn from(token: &TokenNode, source: &Text, shapes: &mut Vec<Tagged<FlatShape>>) -> () {
|
||||
pub fn from(token: &TokenNode, source: &Text, shapes: &mut Vec<Spanned<FlatShape>>) -> () {
|
||||
match token {
|
||||
TokenNode::Token(token) => match token.item {
|
||||
RawToken::Number(RawNumber::Int(_)) => {
|
||||
shapes.push(FlatShape::Int.tagged(token.tag))
|
||||
shapes.push(FlatShape::Int.spanned(token.span))
|
||||
}
|
||||
RawToken::Number(RawNumber::Decimal(_)) => {
|
||||
shapes.push(FlatShape::Decimal.tagged(token.tag))
|
||||
shapes.push(FlatShape::Decimal.spanned(token.span))
|
||||
}
|
||||
RawToken::Operator(Operator::Dot) => shapes.push(FlatShape::Dot.tagged(token.tag)),
|
||||
RawToken::Operator(_) => shapes.push(FlatShape::Operator.tagged(token.tag)),
|
||||
RawToken::String(_) => shapes.push(FlatShape::String.tagged(token.tag)),
|
||||
RawToken::Operator(Operator::Dot) => {
|
||||
shapes.push(FlatShape::Dot.spanned(token.span))
|
||||
}
|
||||
RawToken::Operator(_) => shapes.push(FlatShape::Operator.spanned(token.span)),
|
||||
RawToken::String(_) => shapes.push(FlatShape::String.spanned(token.span)),
|
||||
RawToken::Variable(v) if v.slice(source) == "it" => {
|
||||
shapes.push(FlatShape::ItVariable.tagged(token.tag))
|
||||
shapes.push(FlatShape::ItVariable.spanned(token.span))
|
||||
}
|
||||
RawToken::Variable(_) => shapes.push(FlatShape::Variable.tagged(token.tag)),
|
||||
RawToken::Variable(_) => shapes.push(FlatShape::Variable.spanned(token.span)),
|
||||
RawToken::ExternalCommand(_) => {
|
||||
shapes.push(FlatShape::ExternalCommand.tagged(token.tag))
|
||||
shapes.push(FlatShape::ExternalCommand.spanned(token.span))
|
||||
}
|
||||
RawToken::ExternalWord => shapes.push(FlatShape::ExternalWord.tagged(token.tag)),
|
||||
RawToken::GlobPattern => shapes.push(FlatShape::GlobPattern.tagged(token.tag)),
|
||||
RawToken::Bare => shapes.push(FlatShape::Word.tagged(token.tag)),
|
||||
RawToken::ExternalWord => shapes.push(FlatShape::ExternalWord.spanned(token.span)),
|
||||
RawToken::GlobPattern => shapes.push(FlatShape::GlobPattern.spanned(token.span)),
|
||||
RawToken::Bare => shapes.push(FlatShape::Word.spanned(token.span)),
|
||||
},
|
||||
TokenNode::Call(_) => unimplemented!(),
|
||||
TokenNode::Nodes(nodes) => {
|
||||
@ -59,37 +61,37 @@ impl FlatShape {
|
||||
}
|
||||
}
|
||||
TokenNode::Delimited(v) => {
|
||||
shapes.push(FlatShape::OpenDelimiter(v.item.delimiter).tagged(v.item.tags.0));
|
||||
shapes.push(FlatShape::OpenDelimiter(v.item.delimiter).spanned(v.item.spans.0));
|
||||
for token in &v.item.children {
|
||||
FlatShape::from(token, source, shapes);
|
||||
}
|
||||
shapes.push(FlatShape::CloseDelimiter(v.item.delimiter).tagged(v.item.tags.1));
|
||||
shapes.push(FlatShape::CloseDelimiter(v.item.delimiter).spanned(v.item.spans.1));
|
||||
}
|
||||
TokenNode::Pipeline(pipeline) => {
|
||||
for part in &pipeline.parts {
|
||||
if let Some(_) = part.pipe {
|
||||
shapes.push(FlatShape::Pipe.tagged(part.tag));
|
||||
shapes.push(FlatShape::Pipe.spanned(part.span));
|
||||
}
|
||||
}
|
||||
}
|
||||
TokenNode::Flag(Tagged {
|
||||
TokenNode::Flag(Spanned {
|
||||
item:
|
||||
Flag {
|
||||
kind: FlagKind::Longhand,
|
||||
..
|
||||
},
|
||||
tag,
|
||||
}) => shapes.push(FlatShape::Flag.tagged(tag)),
|
||||
TokenNode::Flag(Tagged {
|
||||
span,
|
||||
}) => shapes.push(FlatShape::Flag.spanned(*span)),
|
||||
TokenNode::Flag(Spanned {
|
||||
item:
|
||||
Flag {
|
||||
kind: FlagKind::Shorthand,
|
||||
..
|
||||
},
|
||||
tag,
|
||||
}) => shapes.push(FlatShape::ShorthandFlag.tagged(tag)),
|
||||
TokenNode::Whitespace(_) => shapes.push(FlatShape::Whitespace.tagged(token.tag())),
|
||||
TokenNode::Error(v) => shapes.push(FlatShape::Error.tagged(v.tag)),
|
||||
span,
|
||||
}) => shapes.push(FlatShape::ShorthandFlag.spanned(*span)),
|
||||
TokenNode::Whitespace(_) => shapes.push(FlatShape::Whitespace.spanned(token.span())),
|
||||
TokenNode::Error(v) => shapes.push(FlatShape::Error.spanned(v.span)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -2,12 +2,12 @@ pub(crate) mod debug;
|
||||
|
||||
use crate::errors::ShellError;
|
||||
use crate::parser::TokenNode;
|
||||
use crate::{Tag, Tagged, TaggedItem};
|
||||
use crate::{Span, Spanned, SpannedItem};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct TokensIterator<'content> {
|
||||
tokens: &'content [TokenNode],
|
||||
tag: Tag,
|
||||
span: Span,
|
||||
skip_ws: bool,
|
||||
index: usize,
|
||||
seen: indexmap::IndexSet<usize>,
|
||||
@ -65,7 +65,7 @@ impl<'content, 'me> Peeked<'content, 'me> {
|
||||
match self.node {
|
||||
None => Err(ShellError::unexpected_eof(
|
||||
expected,
|
||||
self.iterator.eof_tag(),
|
||||
self.iterator.eof_span(),
|
||||
)),
|
||||
Some(node) => Ok(PeekedNode {
|
||||
node,
|
||||
@ -77,7 +77,7 @@ impl<'content, 'me> Peeked<'content, 'me> {
|
||||
}
|
||||
|
||||
pub fn type_error(&self, expected: impl Into<String>) -> ShellError {
|
||||
peek_error(&self.node, self.iterator.eof_tag(), expected)
|
||||
peek_error(&self.node, self.iterator.eof_span(), expected)
|
||||
}
|
||||
}
|
||||
|
||||
@ -105,38 +105,38 @@ impl<'content, 'me> PeekedNode<'content, 'me> {
|
||||
pub fn rollback(self) {}
|
||||
|
||||
pub fn type_error(&self, expected: impl Into<String>) -> ShellError {
|
||||
peek_error(&Some(self.node), self.iterator.eof_tag(), expected)
|
||||
peek_error(&Some(self.node), self.iterator.eof_span(), expected)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn peek_error(
|
||||
node: &Option<&TokenNode>,
|
||||
eof_tag: Tag,
|
||||
eof_span: Span,
|
||||
expected: impl Into<String>,
|
||||
) -> ShellError {
|
||||
match node {
|
||||
None => ShellError::unexpected_eof(expected, eof_tag),
|
||||
None => ShellError::unexpected_eof(expected, eof_span),
|
||||
Some(node) => ShellError::type_error(expected, node.tagged_type_name()),
|
||||
}
|
||||
}
|
||||
|
||||
impl<'content> TokensIterator<'content> {
|
||||
pub fn new(items: &'content [TokenNode], tag: Tag, skip_ws: bool) -> TokensIterator<'content> {
|
||||
pub fn new(
|
||||
items: &'content [TokenNode],
|
||||
span: Span,
|
||||
skip_ws: bool,
|
||||
) -> TokensIterator<'content> {
|
||||
TokensIterator {
|
||||
tokens: items,
|
||||
tag,
|
||||
span,
|
||||
skip_ws,
|
||||
index: 0,
|
||||
seen: indexmap::IndexSet::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn anchor(&self) -> uuid::Uuid {
|
||||
self.tag.anchor
|
||||
}
|
||||
|
||||
pub fn all(tokens: &'content [TokenNode], tag: Tag) -> TokensIterator<'content> {
|
||||
TokensIterator::new(tokens, tag, false)
|
||||
pub fn all(tokens: &'content [TokenNode], span: Span) -> TokensIterator<'content> {
|
||||
TokensIterator::new(tokens, span, false)
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
@ -146,14 +146,14 @@ impl<'content> TokensIterator<'content> {
|
||||
pub fn spanned<T>(
|
||||
&mut self,
|
||||
block: impl FnOnce(&mut TokensIterator<'content>) -> T,
|
||||
) -> Tagged<T> {
|
||||
let start = self.tag_at_cursor();
|
||||
) -> Spanned<T> {
|
||||
let start = self.span_at_cursor();
|
||||
|
||||
let result = block(self);
|
||||
|
||||
let end = self.tag_at_cursor();
|
||||
let end = self.span_at_cursor();
|
||||
|
||||
result.tagged(start.until(end))
|
||||
result.spanned(start.until(end))
|
||||
}
|
||||
|
||||
/// Use a checkpoint when you need to peek more than one token ahead, but can't be sure
|
||||
@ -192,25 +192,25 @@ impl<'content> TokensIterator<'content> {
|
||||
return Ok(value);
|
||||
}
|
||||
|
||||
fn eof_tag(&self) -> Tag {
|
||||
Tag::from((self.tag.span.end(), self.tag.span.end(), self.tag.anchor))
|
||||
fn eof_span(&self) -> Span {
|
||||
Span::new(self.span.end(), self.span.end())
|
||||
}
|
||||
|
||||
pub fn typed_tag_at_cursor(&mut self) -> Tagged<&'static str> {
|
||||
pub fn typed_span_at_cursor(&mut self) -> Spanned<&'static str> {
|
||||
let next = self.peek_any();
|
||||
|
||||
match next.node {
|
||||
None => "end".tagged(self.eof_tag()),
|
||||
Some(node) => node.tagged_type_name(),
|
||||
None => "end".spanned(self.eof_span()),
|
||||
Some(node) => node.spanned_type_name(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn tag_at_cursor(&mut self) -> Tag {
|
||||
pub fn span_at_cursor(&mut self) -> Span {
|
||||
let next = self.peek_any();
|
||||
|
||||
match next.node {
|
||||
None => self.eof_tag(),
|
||||
Some(node) => node.tag(),
|
||||
None => self.eof_span(),
|
||||
Some(node) => node.span(),
|
||||
}
|
||||
}
|
||||
|
||||
@ -262,7 +262,7 @@ impl<'content> TokensIterator<'content> {
|
||||
pub fn clone(&self) -> TokensIterator<'content> {
|
||||
TokensIterator {
|
||||
tokens: self.tokens,
|
||||
tag: self.tag,
|
||||
span: self.span,
|
||||
index: self.index,
|
||||
seen: self.seen.clone(),
|
||||
skip_ws: self.skip_ws,
|
||||
|
@ -1,8 +1,7 @@
|
||||
use crate::Tag;
|
||||
use crate::Span;
|
||||
use derive_new::new;
|
||||
use language_reporting::{FileName, Location};
|
||||
use log::trace;
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(new, Debug, Clone)]
|
||||
pub struct Files {
|
||||
@ -10,20 +9,20 @@ pub struct Files {
|
||||
}
|
||||
|
||||
impl language_reporting::ReportingFiles for Files {
|
||||
type Span = Tag;
|
||||
type FileId = Uuid;
|
||||
type Span = Span;
|
||||
type FileId = usize;
|
||||
|
||||
fn byte_span(
|
||||
&self,
|
||||
file: Self::FileId,
|
||||
_file: Self::FileId,
|
||||
from_index: usize,
|
||||
to_index: usize,
|
||||
) -> Option<Self::Span> {
|
||||
Some(Tag::new(file, (from_index, to_index).into()))
|
||||
Some(Span::new(from_index, to_index))
|
||||
}
|
||||
|
||||
fn file_id(&self, tag: Self::Span) -> Self::FileId {
|
||||
tag.anchor
|
||||
fn file_id(&self, _tag: Self::Span) -> Self::FileId {
|
||||
0
|
||||
}
|
||||
|
||||
fn file_name(&self, _file: Self::FileId) -> FileName {
|
||||
@ -68,14 +67,14 @@ impl language_reporting::ReportingFiles for Files {
|
||||
}
|
||||
}
|
||||
|
||||
fn line_span(&self, file: Self::FileId, lineno: usize) -> Option<Self::Span> {
|
||||
fn line_span(&self, _file: Self::FileId, lineno: usize) -> Option<Self::Span> {
|
||||
let source = &self.snippet;
|
||||
let mut seen_lines = 0;
|
||||
let mut seen_bytes = 0;
|
||||
|
||||
for (pos, _) in source.match_indices('\n') {
|
||||
if seen_lines == lineno {
|
||||
return Some(Tag::new(file, (seen_bytes, pos + 1).into()));
|
||||
return Some(Span::new(seen_bytes, pos + 1));
|
||||
} else {
|
||||
seen_lines += 1;
|
||||
seen_bytes = pos + 1;
|
||||
@ -83,20 +82,20 @@ impl language_reporting::ReportingFiles for Files {
|
||||
}
|
||||
|
||||
if seen_lines == 0 {
|
||||
Some(Tag::new(file, (0, self.snippet.len() - 1).into()))
|
||||
Some(Span::new(0, self.snippet.len() - 1))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn source(&self, tag: Self::Span) -> Option<String> {
|
||||
trace!("source(tag={:?}) snippet={:?}", tag, self.snippet);
|
||||
fn source(&self, span: Self::Span) -> Option<String> {
|
||||
trace!("source(tag={:?}) snippet={:?}", span, self.snippet);
|
||||
|
||||
if tag.span.start() > tag.span.end() {
|
||||
if span.start() > span.end() {
|
||||
return None;
|
||||
} else if tag.span.end() > self.snippet.len() {
|
||||
} else if span.end() > self.snippet.len() {
|
||||
return None;
|
||||
}
|
||||
Some(tag.slice(&self.snippet).to_string())
|
||||
Some(span.slice(&self.snippet).to_string())
|
||||
}
|
||||
}
|
||||
|
@ -1,5 +1,5 @@
|
||||
use crate::parser::hir::syntax_shape::flat_shape::FlatShape;
|
||||
use crate::{Tag, Tagged, TaggedItem};
|
||||
use crate::{Span, Spanned, SpannedItem};
|
||||
use derive_new::new;
|
||||
use getset::Getters;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@ -14,14 +14,14 @@ pub enum FlagKind {
|
||||
#[get = "pub(crate)"]
|
||||
pub struct Flag {
|
||||
pub(crate) kind: FlagKind,
|
||||
pub(crate) name: Tag,
|
||||
pub(crate) name: Span,
|
||||
}
|
||||
|
||||
impl Tagged<Flag> {
|
||||
pub fn color(&self) -> Tagged<FlatShape> {
|
||||
impl Spanned<Flag> {
|
||||
pub fn color(&self) -> Spanned<FlatShape> {
|
||||
match self.item.kind {
|
||||
FlagKind::Longhand => FlatShape::Flag.tagged(self.tag),
|
||||
FlagKind::Shorthand => FlatShape::ShorthandFlag.tagged(self.tag),
|
||||
FlagKind::Longhand => FlatShape::Flag.spanned(self.span),
|
||||
FlagKind::Shorthand => FlatShape::ShorthandFlag.spanned(self.span),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -24,13 +24,11 @@ use nom_tracable::{tracable_parser, HasTracableInfo, TracableInfo};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fmt::Debug;
|
||||
use std::str::FromStr;
|
||||
use uuid::Uuid;
|
||||
|
||||
pub type NomSpan<'a> = LocatedSpanEx<&'a str, TracableContext>;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, new)]
|
||||
pub struct TracableContext {
|
||||
pub(crate) origin: Uuid,
|
||||
pub(crate) info: TracableInfo,
|
||||
}
|
||||
|
||||
@ -40,10 +38,7 @@ impl HasTracableInfo for TracableContext {
|
||||
}
|
||||
|
||||
fn set_tracable_info(mut self, info: TracableInfo) -> Self {
|
||||
TracableContext {
|
||||
origin: self.origin,
|
||||
info,
|
||||
}
|
||||
TracableContext { info }
|
||||
}
|
||||
}
|
||||
|
||||
@ -55,8 +50,8 @@ impl std::ops::Deref for TracableContext {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn nom_input(s: &str, anchor: Uuid) -> NomSpan<'_> {
|
||||
LocatedSpanEx::new_extra(s, TracableContext::new(anchor, TracableInfo::new()))
|
||||
pub fn nom_input(s: &str) -> NomSpan<'_> {
|
||||
LocatedSpanEx::new_extra(s, TracableContext::new(TracableInfo::new()))
|
||||
}
|
||||
|
||||
macro_rules! operator {
|
||||
@ -69,7 +64,7 @@ macro_rules! operator {
|
||||
|
||||
Ok((
|
||||
input,
|
||||
TokenTreeBuilder::tagged_op(tag.fragment, (start, end, input.extra)),
|
||||
TokenTreeBuilder::spanned_op(tag.fragment, Span::new(start, end)),
|
||||
))
|
||||
}
|
||||
};
|
||||
@ -175,22 +170,22 @@ pub fn number(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
||||
|
||||
Ok((
|
||||
input,
|
||||
TokenTreeBuilder::tagged_number(number.item, number.tag),
|
||||
TokenTreeBuilder::spanned_number(number.item, number.span),
|
||||
))
|
||||
}
|
||||
|
||||
#[tracable_parser]
|
||||
pub fn raw_number(input: NomSpan) -> IResult<NomSpan, Tagged<RawNumber>> {
|
||||
pub fn raw_number(input: NomSpan) -> IResult<NomSpan, Spanned<RawNumber>> {
|
||||
let anchoral = input;
|
||||
let start = input.offset;
|
||||
let (input, neg) = opt(tag("-"))(input)?;
|
||||
let (input, head) = digit1(input)?;
|
||||
|
||||
match input.fragment.chars().next() {
|
||||
None => return Ok((input, RawNumber::int((start, input.offset, input.extra)))),
|
||||
None => return Ok((input, RawNumber::int(Span::new(start, input.offset)))),
|
||||
Some('.') => (),
|
||||
other if is_boundary(other) => {
|
||||
return Ok((input, RawNumber::int((start, input.offset, input.extra))))
|
||||
return Ok((input, RawNumber::int(Span::new(start, input.offset))))
|
||||
}
|
||||
_ => {
|
||||
return Err(nom::Err::Error(nom::error::make_error(
|
||||
@ -206,7 +201,7 @@ pub fn raw_number(input: NomSpan) -> IResult<NomSpan, Tagged<RawNumber>> {
|
||||
Ok((input, dot)) => input,
|
||||
|
||||
// it's just an integer
|
||||
Err(_) => return Ok((input, RawNumber::int((start, input.offset, input.extra)))),
|
||||
Err(_) => return Ok((input, RawNumber::int(Span::new(start, input.offset)))),
|
||||
};
|
||||
|
||||
let (input, tail) = digit1(input)?;
|
||||
@ -216,7 +211,7 @@ pub fn raw_number(input: NomSpan) -> IResult<NomSpan, Tagged<RawNumber>> {
|
||||
let next = input.fragment.chars().next();
|
||||
|
||||
if is_boundary(next) {
|
||||
Ok((input, RawNumber::decimal((start, end, input.extra))))
|
||||
Ok((input, RawNumber::decimal(Span::new(start, end))))
|
||||
} else {
|
||||
Err(nom::Err::Error(nom::error::make_error(
|
||||
input,
|
||||
@ -243,7 +238,7 @@ pub fn dq_string(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
||||
let end = input.offset;
|
||||
Ok((
|
||||
input,
|
||||
TokenTreeBuilder::tagged_string((start1, end1, input.extra), (start, end, input.extra)),
|
||||
TokenTreeBuilder::spanned_string(Span::new(start1, end1), Span::new(start, end)),
|
||||
))
|
||||
}
|
||||
|
||||
@ -259,7 +254,7 @@ pub fn sq_string(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
||||
|
||||
Ok((
|
||||
input,
|
||||
TokenTreeBuilder::tagged_string((start1, end1, input.extra), (start, end, input.extra)),
|
||||
TokenTreeBuilder::spanned_string(Span::new(start1, end1), Span::new(start, end)),
|
||||
))
|
||||
}
|
||||
|
||||
@ -277,7 +272,7 @@ pub fn external(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
||||
|
||||
Ok((
|
||||
input,
|
||||
TokenTreeBuilder::tagged_external_command(bare, (start, end, input.extra)),
|
||||
TokenTreeBuilder::spanned_external_command(bare, Span::new(start, end)),
|
||||
))
|
||||
}
|
||||
|
||||
@ -302,7 +297,7 @@ pub fn pattern(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
||||
|
||||
Ok((
|
||||
input,
|
||||
TokenTreeBuilder::tagged_pattern((start, end, input.extra)),
|
||||
TokenTreeBuilder::spanned_pattern(Span::new(start, end)),
|
||||
))
|
||||
}
|
||||
|
||||
@ -335,10 +330,7 @@ pub fn bare(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
||||
|
||||
let end = input.offset;
|
||||
|
||||
Ok((
|
||||
input,
|
||||
TokenTreeBuilder::tagged_bare((start, end, input.extra)),
|
||||
))
|
||||
Ok((input, TokenTreeBuilder::spanned_bare(Span::new(start, end))))
|
||||
}
|
||||
|
||||
#[tracable_parser]
|
||||
@ -349,7 +341,7 @@ pub fn external_word(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
||||
|
||||
Ok((
|
||||
input,
|
||||
TokenTreeBuilder::tagged_external_word((start, end, input.extra)),
|
||||
TokenTreeBuilder::spanned_external_word(Span::new(start, end)),
|
||||
))
|
||||
}
|
||||
|
||||
@ -362,7 +354,7 @@ pub fn var(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
||||
|
||||
Ok((
|
||||
input,
|
||||
TokenTreeBuilder::tagged_var(bare, (start, end, input.extra)),
|
||||
TokenTreeBuilder::spanned_var(bare, Span::new(start, end)),
|
||||
))
|
||||
}
|
||||
|
||||
@ -373,7 +365,7 @@ pub fn ident(input: NomSpan) -> IResult<NomSpan, Tag> {
|
||||
let (input, _) = take_while(is_bare_char)(input)?;
|
||||
let end = input.offset;
|
||||
|
||||
Ok((input, Tag::from((start, end, input.extra.origin))))
|
||||
Ok((input, Tag::from((start, end, None))))
|
||||
}
|
||||
|
||||
#[tracable_parser]
|
||||
@ -385,7 +377,7 @@ pub fn flag(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
||||
|
||||
Ok((
|
||||
input,
|
||||
TokenTreeBuilder::tagged_flag(bare.tag(), (start, end, input.extra)),
|
||||
TokenTreeBuilder::spanned_flag(bare.span(), Span::new(start, end)),
|
||||
))
|
||||
}
|
||||
|
||||
@ -398,7 +390,7 @@ pub fn shorthand(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
||||
|
||||
Ok((
|
||||
input,
|
||||
TokenTreeBuilder::tagged_shorthand(bare.tag(), (start, end, input.extra)),
|
||||
TokenTreeBuilder::spanned_shorthand(bare.span(), Span::new(start, end)),
|
||||
))
|
||||
}
|
||||
|
||||
@ -420,12 +412,12 @@ pub fn token_list(input: NomSpan) -> IResult<NomSpan, Tagged<Vec<TokenNode>>> {
|
||||
|
||||
Ok((
|
||||
input,
|
||||
make_token_list(first, list, None).tagged((start, end, input.extra.origin)),
|
||||
make_token_list(first, list, None).tagged((start, end, None)),
|
||||
))
|
||||
}
|
||||
|
||||
#[tracable_parser]
|
||||
pub fn spaced_token_list(input: NomSpan) -> IResult<NomSpan, Tagged<Vec<TokenNode>>> {
|
||||
pub fn spaced_token_list(input: NomSpan) -> IResult<NomSpan, Spanned<Vec<TokenNode>>> {
|
||||
let start = input.offset;
|
||||
let (input, pre_ws) = opt(whitespace)(input)?;
|
||||
let (input, items) = token_list(input)?;
|
||||
@ -438,7 +430,7 @@ pub fn spaced_token_list(input: NomSpan) -> IResult<NomSpan, Tagged<Vec<TokenNod
|
||||
out.extend(items.item);
|
||||
out.extend(post_ws);
|
||||
|
||||
Ok((input, out.tagged((start, end, input.extra.origin))))
|
||||
Ok((input, out.spanned(Span::new(start, end))))
|
||||
}
|
||||
|
||||
fn make_token_list(
|
||||
@ -468,20 +460,17 @@ pub fn whitespace(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
||||
let (input, ws1) = space1(input)?;
|
||||
let right = input.offset;
|
||||
|
||||
Ok((
|
||||
input,
|
||||
TokenTreeBuilder::tagged_ws((left, right, input.extra)),
|
||||
))
|
||||
Ok((input, TokenTreeBuilder::spanned_ws(Span::new(left, right))))
|
||||
}
|
||||
|
||||
pub fn delimited(
|
||||
input: NomSpan,
|
||||
delimiter: Delimiter,
|
||||
) -> IResult<NomSpan, (Tag, Tag, Tagged<Vec<TokenNode>>)> {
|
||||
) -> IResult<NomSpan, (Span, Span, Spanned<Vec<TokenNode>>)> {
|
||||
let left = input.offset;
|
||||
let (input, open_tag) = tag(delimiter.open())(input)?;
|
||||
let (input, open_span) = tag(delimiter.open())(input)?;
|
||||
let (input, inner_items) = opt(spaced_token_list)(input)?;
|
||||
let (input, close_tag) = tag(delimiter.close())(input)?;
|
||||
let (input, close_span) = tag(delimiter.close())(input)?;
|
||||
let right = input.offset;
|
||||
|
||||
let mut items = vec![];
|
||||
@ -493,9 +482,9 @@ pub fn delimited(
|
||||
Ok((
|
||||
input,
|
||||
(
|
||||
Tag::from(open_tag),
|
||||
Tag::from(close_tag),
|
||||
items.tagged((left, right, input.extra.origin)),
|
||||
Span::from(open_span),
|
||||
Span::from(close_span),
|
||||
items.spanned(Span::new(left, right)),
|
||||
),
|
||||
))
|
||||
}
|
||||
@ -506,7 +495,7 @@ pub fn delimited_paren(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
||||
|
||||
Ok((
|
||||
input,
|
||||
TokenTreeBuilder::tagged_parens(tokens.item, (left, right), tokens.tag),
|
||||
TokenTreeBuilder::spanned_parens(tokens.item, (left, right), tokens.span),
|
||||
))
|
||||
}
|
||||
|
||||
@ -516,7 +505,7 @@ pub fn delimited_square(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
||||
|
||||
Ok((
|
||||
input,
|
||||
TokenTreeBuilder::tagged_square(tokens.item, (left, right), tokens.tag),
|
||||
TokenTreeBuilder::spanned_square(tokens.item, (left, right), tokens.span),
|
||||
))
|
||||
}
|
||||
|
||||
@ -526,7 +515,7 @@ pub fn delimited_brace(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
||||
|
||||
Ok((
|
||||
input,
|
||||
TokenTreeBuilder::tagged_square(tokens.item, (left, right), tokens.tag),
|
||||
TokenTreeBuilder::spanned_square(tokens.item, (left, right), tokens.span),
|
||||
))
|
||||
}
|
||||
|
||||
@ -637,18 +626,19 @@ pub fn pipeline(input: NomSpan) -> IResult<NomSpan, TokenNode> {
|
||||
|
||||
let end = input.offset;
|
||||
|
||||
let head_tag = head.tag();
|
||||
let mut all_items: Vec<Tagged<PipelineElement>> =
|
||||
vec![PipelineElement::new(None, head).tagged(head_tag)];
|
||||
let head_span = head.span;
|
||||
let mut all_items: Vec<Spanned<PipelineElement>> =
|
||||
vec![PipelineElement::new(None, head).spanned(head_span)];
|
||||
|
||||
all_items.extend(items.into_iter().map(|(pipe, items)| {
|
||||
let items_tag = items.tag();
|
||||
PipelineElement::new(Some(Tag::from(pipe)), items).tagged(Tag::from(pipe).until(items_tag))
|
||||
let items_span = items.span;
|
||||
PipelineElement::new(Some(Span::from(pipe)), items)
|
||||
.spanned(Span::from(pipe).until(items_span))
|
||||
}));
|
||||
|
||||
Ok((
|
||||
input,
|
||||
TokenTreeBuilder::tagged_pipeline(all_items, (start, end, input.extra)),
|
||||
TokenTreeBuilder::spanned_pipeline(all_items, Span::new(start, end)),
|
||||
))
|
||||
}
|
||||
|
||||
@ -757,7 +747,7 @@ mod tests {
|
||||
macro_rules! equal_tokens {
|
||||
($source:tt -> $tokens:expr) => {
|
||||
let result = apply(pipeline, "pipeline", $source);
|
||||
let (expected_tree, expected_source) = TokenTreeBuilder::build(uuid::Uuid::nil(), $tokens);
|
||||
let (expected_tree, expected_source) = TokenTreeBuilder::build($tokens);
|
||||
|
||||
if result != expected_tree {
|
||||
let debug_result = format!("{}", result.debug($source));
|
||||
@ -778,7 +768,7 @@ mod tests {
|
||||
|
||||
(<$parser:tt> $source:tt -> $tokens:expr) => {
|
||||
let result = apply($parser, stringify!($parser), $source);
|
||||
let (expected_tree, expected_source) = TokenTreeBuilder::build(uuid::Uuid::nil(), $tokens);
|
||||
let (expected_tree, expected_source) = TokenTreeBuilder::build($tokens);
|
||||
|
||||
if result != expected_tree {
|
||||
let debug_result = format!("{}", result.debug($source));
|
||||
@ -1241,41 +1231,37 @@ mod tests {
|
||||
desc: &str,
|
||||
string: &str,
|
||||
) -> TokenNode {
|
||||
f(nom_input(string, uuid::Uuid::nil())).unwrap().1
|
||||
f(nom_input(string)).unwrap().1
|
||||
}
|
||||
|
||||
fn tag(left: usize, right: usize) -> Tag {
|
||||
Tag::from((left, right, uuid::Uuid::nil()))
|
||||
fn span((left, right): (usize, usize)) -> Span {
|
||||
Span::new(left, right)
|
||||
}
|
||||
|
||||
fn delimited(
|
||||
delimiter: Tagged<Delimiter>,
|
||||
delimiter: Spanned<Delimiter>,
|
||||
children: Vec<TokenNode>,
|
||||
left: usize,
|
||||
right: usize,
|
||||
) -> TokenNode {
|
||||
let start = Tag::for_char(left, delimiter.tag.anchor);
|
||||
let end = Tag::for_char(right, delimiter.tag.anchor);
|
||||
let start = Span::for_char(left);
|
||||
let end = Span::for_char(right);
|
||||
|
||||
let node = DelimitedNode::new(delimiter.item, (start, end), children);
|
||||
let spanned = node.tagged((left, right, delimiter.tag.anchor));
|
||||
let spanned = node.spanned(Span::new(left, right));
|
||||
TokenNode::Delimited(spanned)
|
||||
}
|
||||
|
||||
fn token(token: RawToken, left: usize, right: usize) -> TokenNode {
|
||||
TokenNode::Token(token.tagged((left, right, uuid::Uuid::nil())))
|
||||
TokenNode::Token(token.spanned(Span::new(left, right)))
|
||||
}
|
||||
|
||||
fn build<T>(block: CurriedNode<T>) -> T {
|
||||
let mut builder = TokenTreeBuilder::new(uuid::Uuid::nil());
|
||||
let mut builder = TokenTreeBuilder::new();
|
||||
block(&mut builder)
|
||||
}
|
||||
|
||||
fn build_token(block: CurriedToken) -> TokenNode {
|
||||
TokenTreeBuilder::build(uuid::Uuid::nil(), block).0
|
||||
}
|
||||
|
||||
fn test_uuid() -> uuid::Uuid {
|
||||
uuid::Uuid::nil()
|
||||
TokenTreeBuilder::build(block).0
|
||||
}
|
||||
}
|
||||
|
@@ -1,13 +1,13 @@
use crate::parser::TokenNode;
use crate::traits::ToDebug;
use crate::{Tag, Tagged};
use crate::{Span, Spanned};
use derive_new::new;
use getset::Getters;
use std::fmt;

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, new)]
pub struct Pipeline {
pub(crate) parts: Vec<Tagged<PipelineElement>>,
pub(crate) parts: Vec<Spanned<PipelineElement>>,
// pub(crate) post_ws: Option<Tag>,
}

@@ -23,8 +23,8 @@ impl ToDebug for Pipeline {

#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Getters, new)]
pub struct PipelineElement {
pub pipe: Option<Tag>,
pub tokens: Tagged<Vec<TokenNode>>,
pub pipe: Option<Span>,
pub tokens: Spanned<Vec<TokenNode>>,
}

impl ToDebug for PipelineElement {
@@ -2,7 +2,7 @@ use crate::errors::ShellError;
use crate::parser::parse::{call_node::*, flag::*, operator::*, pipeline::*, tokens::*};
use crate::prelude::*;
use crate::traits::ToDebug;
use crate::{Tag, Tagged, Text};
use crate::{Tagged, Text};
use derive_new::new;
use enum_utils::FromStr;
use getset::Getters;
@@ -12,14 +12,14 @@ use std::fmt;
pub enum TokenNode {
Token(Token),

Call(Tagged<CallNode>),
Nodes(Tagged<Vec<TokenNode>>),
Delimited(Tagged<DelimitedNode>),
Pipeline(Tagged<Pipeline>),
Flag(Tagged<Flag>),
Whitespace(Tag),
Call(Spanned<CallNode>),
Nodes(Spanned<Vec<TokenNode>>),
Delimited(Spanned<DelimitedNode>),
Pipeline(Spanned<Pipeline>),
Flag(Spanned<Flag>),
Whitespace(Span),

Error(Tagged<ShellError>),
Error(Spanned<ShellError>),
}

impl ToDebug for TokenNode {
@@ -78,28 +78,28 @@ impl fmt::Debug for DebugTokenNode<'_> {
}
TokenNode::Pipeline(pipeline) => write!(f, "{}", pipeline.debug(self.source)),
TokenNode::Error(_) => write!(f, "<error>"),
rest => write!(f, "{}", rest.tag().slice(self.source)),
rest => write!(f, "{}", rest.span().slice(self.source)),
}
}
}

impl From<&TokenNode> for Tag {
fn from(token: &TokenNode) -> Tag {
token.tag()
impl From<&TokenNode> for Span {
fn from(token: &TokenNode) -> Span {
token.span()
}
}

impl TokenNode {
pub fn tag(&self) -> Tag {
pub fn span(&self) -> Span {
match self {
TokenNode::Token(t) => t.tag(),
TokenNode::Nodes(t) => t.tag(),
TokenNode::Call(s) => s.tag(),
TokenNode::Delimited(s) => s.tag(),
TokenNode::Pipeline(s) => s.tag(),
TokenNode::Flag(s) => s.tag(),
TokenNode::Token(t) => t.span,
TokenNode::Nodes(t) => t.span,
TokenNode::Call(s) => s.span,
TokenNode::Delimited(s) => s.span,
TokenNode::Pipeline(s) => s.span,
TokenNode::Flag(s) => s.span,
TokenNode::Whitespace(s) => *s,
TokenNode::Error(s) => return s.tag,
TokenNode::Error(s) => s.span,
}
}
@@ -116,8 +116,12 @@ impl TokenNode {
}
}

pub fn spanned_type_name(&self) -> Spanned<&'static str> {
self.type_name().spanned(self.span())
}

pub fn tagged_type_name(&self) -> Tagged<&'static str> {
self.type_name().tagged(self.tag())
self.type_name().tagged(self.span())
}

pub fn old_debug<'a>(&'a self, source: &'a Text) -> DebugTokenNode<'a> {
@@ -125,26 +129,26 @@ impl TokenNode {
}

pub fn as_external_arg(&self, source: &Text) -> String {
self.tag().slice(source).to_string()
self.span().slice(source).to_string()
}

pub fn source<'a>(&self, source: &'a Text) -> &'a str {
self.tag().slice(source)
self.span().slice(source)
}

pub fn get_variable(&self) -> Result<(Tag, Tag), ShellError> {
pub fn get_variable(&self) -> Result<(Span, Span), ShellError> {
match self {
TokenNode::Token(Tagged {
item: RawToken::Variable(inner_tag),
tag: outer_tag,
}) => Ok((*outer_tag, *inner_tag)),
TokenNode::Token(Spanned {
item: RawToken::Variable(inner_span),
span: outer_span,
}) => Ok((*outer_span, *inner_span)),
_ => Err(ShellError::type_error("variable", self.tagged_type_name())),
}
}

pub fn is_bare(&self) -> bool {
match self {
TokenNode::Token(Tagged {
TokenNode::Token(Spanned {
item: RawToken::Bare,
..
}) => true,
@@ -154,7 +158,7 @@ impl TokenNode {

pub fn is_pattern(&self) -> bool {
match self {
TokenNode::Token(Tagged {
TokenNode::Token(Spanned {
item: RawToken::GlobPattern,
..
}) => true,
@@ -164,7 +168,7 @@ impl TokenNode {

pub fn is_dot(&self) -> bool {
match self {
TokenNode::Token(Tagged {
TokenNode::Token(Spanned {
item: RawToken::Operator(Operator::Dot),
..
}) => true,
@@ -172,24 +176,24 @@ impl TokenNode {
}
}
pub fn as_block(&self) -> Option<(Tagged<&[TokenNode]>, (Tag, Tag))> {
pub fn as_block(&self) -> Option<(Spanned<&[TokenNode]>, (Span, Span))> {
match self {
TokenNode::Delimited(Tagged {
TokenNode::Delimited(Spanned {
item:
DelimitedNode {
delimiter,
children,
tags,
spans,
},
tag,
}) if *delimiter == Delimiter::Brace => Some(((&children[..]).tagged(tag), *tags)),
span,
}) if *delimiter == Delimiter::Brace => Some(((&children[..]).spanned(*span), *spans)),
_ => None,
}
}

pub fn is_external(&self) -> bool {
match self {
TokenNode::Token(Tagged {
TokenNode::Token(Spanned {
item: RawToken::ExternalCommand(..),
..
}) => true,
@@ -197,20 +201,20 @@ impl TokenNode {
}
}

pub fn expect_external(&self) -> Tag {
pub fn expect_external(&self) -> Span {
match self {
TokenNode::Token(Tagged {
item: RawToken::ExternalCommand(tag),
TokenNode::Token(Spanned {
item: RawToken::ExternalCommand(span),
..
}) => *tag,
}) => *span,
_ => panic!("Only call expect_external if you checked is_external first"),
}
}

pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option<Tagged<Flag>> {
pub(crate) fn as_flag(&self, value: &str, source: &Text) -> Option<Spanned<Flag>> {
match self {
TokenNode::Flag(
flag @ Tagged {
flag @ Spanned {
item: Flag { .. }, ..
},
) if value == flag.name().slice(source) => Some(*flag),
@@ -220,7 +224,7 @@ impl TokenNode {

pub fn as_pipeline(&self) -> Result<Pipeline, ShellError> {
match self {
TokenNode::Pipeline(Tagged { item, .. }) => Ok(item.clone()),
TokenNode::Pipeline(Spanned { item, .. }) => Ok(item.clone()),
_ => Err(ShellError::unimplemented("unimplemented")),
}
}
@@ -232,12 +236,12 @@ impl TokenNode {
}
}

pub fn expect_string(&self) -> (Tag, Tag) {
pub fn expect_string(&self) -> (Span, Span) {
match self {
TokenNode::Token(Tagged {
item: RawToken::String(inner_tag),
tag: outer_tag,
}) => (*outer_tag, *inner_tag),
TokenNode::Token(Spanned {
item: RawToken::String(inner_span),
span: outer_span,
}) => (*outer_span, *inner_span),
other => panic!("Expected string, found {:?}", other),
}
}
@@ -247,27 +251,30 @@ impl TokenNode {
impl TokenNode {
pub fn expect_list(&self) -> Tagged<&[TokenNode]> {
match self {
TokenNode::Nodes(Tagged { item, tag }) => (&item[..]).tagged(tag),
TokenNode::Nodes(Spanned { item, span }) => (&item[..]).tagged(Tag {
span: *span,
anchor: None,
}),
other => panic!("Expected list, found {:?}", other),
}
}

pub fn expect_var(&self) -> (Tag, Tag) {
pub fn expect_var(&self) -> (Span, Span) {
match self {
TokenNode::Token(Tagged {
item: RawToken::Variable(inner_tag),
tag: outer_tag,
}) => (*outer_tag, *inner_tag),
TokenNode::Token(Spanned {
item: RawToken::Variable(inner_span),
span: outer_span,
}) => (*outer_span, *inner_span),
other => panic!("Expected var, found {:?}", other),
}
}

pub fn expect_bare(&self) -> Tag {
pub fn expect_bare(&self) -> Span {
match self {
TokenNode::Token(Tagged {
TokenNode::Token(Spanned {
item: RawToken::Bare,
tag,
}) => *tag,
span,
}) => *span,
other => panic!("Expected var, found {:?}", other),
}
}
@@ -277,7 +284,7 @@ impl TokenNode {
#[get = "pub(crate)"]
pub struct DelimitedNode {
pub(crate) delimiter: Delimiter,
pub(crate) tags: (Tag, Tag),
pub(crate) spans: (Span, Span),
pub(crate) children: Vec<TokenNode>,
}
@@ -7,7 +7,6 @@ use crate::parser::parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
use crate::parser::parse::tokens::{RawNumber, RawToken};
use crate::parser::CallNode;
use derive_new::new;
use uuid::Uuid;

#[derive(new)]
pub struct TokenTreeBuilder {
@@ -16,33 +15,34 @@ pub struct TokenTreeBuilder {

#[new(default)]
output: String,

anchor: Uuid,
}

pub type CurriedToken = Box<dyn FnOnce(&mut TokenTreeBuilder) -> TokenNode + 'static>;
pub type CurriedCall = Box<dyn FnOnce(&mut TokenTreeBuilder) -> Tagged<CallNode> + 'static>;

impl TokenTreeBuilder {
pub fn build(anchor: Uuid, block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) {
let mut builder = TokenTreeBuilder::new(anchor);
pub fn build(block: impl FnOnce(&mut Self) -> TokenNode) -> (TokenNode, String) {
let mut builder = TokenTreeBuilder::new();
let node = block(&mut builder);
(node, builder.output)
}

fn build_tagged<T>(&mut self, callback: impl FnOnce(&mut TokenTreeBuilder) -> T) -> Tagged<T> {
fn build_spanned<T>(
&mut self,
callback: impl FnOnce(&mut TokenTreeBuilder) -> T,
) -> Spanned<T> {
let start = self.pos;
let ret = callback(self);
let end = self.pos;

ret.tagged((start, end, self.anchor))
ret.spanned(Span::new(start, end))
}
pub fn pipeline(input: Vec<Vec<CurriedToken>>) -> CurriedToken {
Box::new(move |b| {
let start = b.pos;

let mut out: Vec<Tagged<PipelineElement>> = vec![];
let mut out: Vec<Spanned<PipelineElement>> = vec![];

let mut input = input.into_iter().peekable();
let head = input
@@ -50,34 +50,37 @@ impl TokenTreeBuilder {
.expect("A pipeline must contain at least one element");

let pipe = None;
let head = b.build_tagged(|b| head.into_iter().map(|node| node(b)).collect());
let head = b.build_spanned(|b| head.into_iter().map(|node| node(b)).collect());

let head_tag: Tag = head.tag;
out.push(PipelineElement::new(pipe, head).tagged(head_tag));
let head_span: Span = head.span;
out.push(PipelineElement::new(pipe, head).spanned(head_span));

loop {
match input.next() {
None => break,
Some(node) => {
let start = b.pos;
let pipe = Some(b.consume_tag("|"));
let pipe = Some(b.consume_span("|"));
let node =
b.build_tagged(|b| node.into_iter().map(|node| node(b)).collect());
b.build_spanned(|b| node.into_iter().map(|node| node(b)).collect());
let end = b.pos;

out.push(PipelineElement::new(pipe, node).tagged((start, end, b.anchor)));
out.push(PipelineElement::new(pipe, node).spanned(Span::new(start, end)));
}
}
}

let end = b.pos;

TokenTreeBuilder::tagged_pipeline(out, (start, end, b.anchor))
TokenTreeBuilder::spanned_pipeline(out, Span::new(start, end))
})
}

pub fn tagged_pipeline(input: Vec<Tagged<PipelineElement>>, tag: impl Into<Tag>) -> TokenNode {
TokenNode::Pipeline(Pipeline::new(input).tagged(tag.into()))
pub fn spanned_pipeline(
input: Vec<Spanned<PipelineElement>>,
span: impl Into<Span>,
) -> TokenNode {
TokenNode::Pipeline(Pipeline::new(input).spanned(span))
}
pub fn token_list(input: Vec<CurriedToken>) -> CurriedToken {
@@ -86,12 +89,12 @@ impl TokenTreeBuilder {
let tokens = input.into_iter().map(|i| i(b)).collect();
let end = b.pos;

TokenTreeBuilder::tagged_token_list(tokens, (start, end, b.anchor))
TokenTreeBuilder::tagged_token_list(tokens, (start, end, None))
})
}

pub fn tagged_token_list(input: Vec<TokenNode>, tag: impl Into<Tag>) -> TokenNode {
TokenNode::Nodes(input.tagged(tag))
TokenNode::Nodes(input.spanned(tag.into().span))
}

pub fn op(input: impl Into<Operator>) -> CurriedToken {
@@ -102,12 +105,12 @@ impl TokenTreeBuilder {

b.pos = end;

TokenTreeBuilder::tagged_op(input, (start, end, b.anchor))
TokenTreeBuilder::spanned_op(input, Span::new(start, end))
})
}

pub fn tagged_op(input: impl Into<Operator>, tag: impl Into<Tag>) -> TokenNode {
TokenNode::Token(RawToken::Operator(input.into()).tagged(tag.into()))
pub fn spanned_op(input: impl Into<Operator>, span: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::Operator(input.into()).spanned(span.into()))
}

pub fn string(input: impl Into<String>) -> CurriedToken {
@@ -119,15 +122,15 @@ impl TokenTreeBuilder {
let (_, end) = b.consume("\"");
b.pos = end;

TokenTreeBuilder::tagged_string(
(inner_start, inner_end, b.anchor),
(start, end, b.anchor),
TokenTreeBuilder::spanned_string(
Span::new(inner_start, inner_end),
Span::new(start, end),
)
})
}

pub fn tagged_string(input: impl Into<Tag>, tag: impl Into<Tag>) -> TokenNode {
TokenNode::Token(RawToken::String(input.into()).tagged(tag.into()))
pub fn spanned_string(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::String(input.into()).spanned(span.into()))
}

pub fn bare(input: impl Into<String>) -> CurriedToken {
@@ -137,12 +140,12 @@ impl TokenTreeBuilder {
let (start, end) = b.consume(&input);
b.pos = end;

TokenTreeBuilder::tagged_bare((start, end, b.anchor))
TokenTreeBuilder::spanned_bare(Span::new(start, end))
})
}

pub fn tagged_bare(tag: impl Into<Tag>) -> TokenNode {
TokenNode::Token(RawToken::Bare.tagged(tag.into()))
pub fn spanned_bare(span: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::Bare.spanned(span))
}

pub fn pattern(input: impl Into<String>) -> CurriedToken {
@@ -152,12 +155,12 @@ impl TokenTreeBuilder {
let (start, end) = b.consume(&input);
b.pos = end;

TokenTreeBuilder::tagged_pattern((start, end, b.anchor))
TokenTreeBuilder::spanned_pattern(Span::new(start, end))
})
}

pub fn tagged_pattern(input: impl Into<Tag>) -> TokenNode {
TokenNode::Token(RawToken::GlobPattern.tagged(input.into()))
pub fn spanned_pattern(input: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::GlobPattern.spanned(input.into()))
}

pub fn external_word(input: impl Into<String>) -> CurriedToken {
@@ -167,12 +170,12 @@ impl TokenTreeBuilder {
let (start, end) = b.consume(&input);
b.pos = end;

TokenTreeBuilder::tagged_external_word((start, end, b.anchor))
TokenTreeBuilder::spanned_external_word(Span::new(start, end))
})
}

pub fn tagged_external_word(input: impl Into<Tag>) -> TokenNode {
TokenNode::Token(RawToken::ExternalWord.tagged(input.into()))
pub fn spanned_external_word(input: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::ExternalWord.spanned(input.into()))
}
pub fn external_command(input: impl Into<String>) -> CurriedToken {
@@ -183,15 +186,15 @@ impl TokenTreeBuilder {
let (inner_start, end) = b.consume(&input);
b.pos = end;

TokenTreeBuilder::tagged_external_command(
(inner_start, end, b.anchor),
(outer_start, end, b.anchor),
TokenTreeBuilder::spanned_external_command(
Span::new(inner_start, end),
Span::new(outer_start, end),
)
})
}

pub fn tagged_external_command(inner: impl Into<Tag>, outer: impl Into<Tag>) -> TokenNode {
TokenNode::Token(RawToken::ExternalCommand(inner.into()).tagged(outer.into()))
pub fn spanned_external_command(inner: impl Into<Span>, outer: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::ExternalCommand(inner.into()).spanned(outer.into()))
}

pub fn int(input: impl Into<BigInt>) -> CurriedToken {
@@ -201,9 +204,9 @@ impl TokenTreeBuilder {
let (start, end) = b.consume(&int.to_string());
b.pos = end;

TokenTreeBuilder::tagged_number(
RawNumber::Int((start, end, b.anchor).into()),
(start, end, b.anchor),
TokenTreeBuilder::spanned_number(
RawNumber::Int(Span::new(start, end)),
Span::new(start, end),
)
})
}
@@ -215,15 +218,15 @@ impl TokenTreeBuilder {
let (start, end) = b.consume(&decimal.to_string());
b.pos = end;

TokenTreeBuilder::tagged_number(
RawNumber::Decimal((start, end, b.anchor).into()),
(start, end, b.anchor),
TokenTreeBuilder::spanned_number(
RawNumber::Decimal(Span::new(start, end)),
Span::new(start, end),
)
})
}

pub fn tagged_number(input: impl Into<RawNumber>, tag: impl Into<Tag>) -> TokenNode {
TokenNode::Token(RawToken::Number(input.into()).tagged(tag.into()))
pub fn spanned_number(input: impl Into<RawNumber>, span: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::Number(input.into()).spanned(span.into()))
}

pub fn var(input: impl Into<String>) -> CurriedToken {
@@ -233,12 +236,12 @@ impl TokenTreeBuilder {
let (start, _) = b.consume("$");
let (inner_start, end) = b.consume(&input);

TokenTreeBuilder::tagged_var((inner_start, end, b.anchor), (start, end, b.anchor))
TokenTreeBuilder::spanned_var(Span::new(inner_start, end), Span::new(start, end))
})
}

pub fn tagged_var(input: impl Into<Tag>, tag: impl Into<Tag>) -> TokenNode {
TokenNode::Token(RawToken::Variable(input.into()).tagged(tag.into()))
pub fn spanned_var(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
TokenNode::Token(RawToken::Variable(input.into()).spanned(span.into()))
}

pub fn flag(input: impl Into<String>) -> CurriedToken {
@@ -248,12 +251,12 @@ impl TokenTreeBuilder {
let (start, _) = b.consume("--");
let (inner_start, end) = b.consume(&input);

TokenTreeBuilder::tagged_flag((inner_start, end, b.anchor), (start, end, b.anchor))
TokenTreeBuilder::spanned_flag(Span::new(inner_start, end), Span::new(start, end))
})
}

pub fn tagged_flag(input: impl Into<Tag>, tag: impl Into<Tag>) -> TokenNode {
TokenNode::Flag(Flag::new(FlagKind::Longhand, input.into()).tagged(tag.into()))
pub fn spanned_flag(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
TokenNode::Flag(Flag::new(FlagKind::Longhand, input.into()).spanned(span.into()))
}

pub fn shorthand(input: impl Into<String>) -> CurriedToken {
@@ -263,12 +266,12 @@ impl TokenTreeBuilder {
let (start, _) = b.consume("-");
let (inner_start, end) = b.consume(&input);

TokenTreeBuilder::tagged_shorthand((inner_start, end, b.anchor), (start, end, b.anchor))
TokenTreeBuilder::spanned_shorthand((inner_start, end), (start, end))
})
}

pub fn tagged_shorthand(input: impl Into<Tag>, tag: impl Into<Tag>) -> TokenNode {
TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into()).tagged(tag.into()))
pub fn spanned_shorthand(input: impl Into<Span>, span: impl Into<Span>) -> TokenNode {
TokenNode::Flag(Flag::new(FlagKind::Shorthand, input.into()).spanned(span.into()))
}

pub fn call(head: CurriedToken, input: Vec<CurriedToken>) -> CurriedCall {
@@ -284,7 +287,7 @@ impl TokenTreeBuilder {

let end = b.pos;

TokenTreeBuilder::tagged_call(nodes, (start, end, b.anchor))
TokenTreeBuilder::tagged_call(nodes, (start, end, None))
})
}
@@ -306,7 +309,7 @@ impl TokenTreeBuilder {
input: Vec<CurriedToken>,
_open: &str,
_close: &str,
) -> (Tag, Tag, Tag, Vec<TokenNode>) {
) -> (Span, Span, Span, Vec<TokenNode>) {
let (start_open_paren, end_open_paren) = self.consume("(");
let mut output = vec![];
for item in input {
@@ -315,9 +318,9 @@ impl TokenTreeBuilder {

let (start_close_paren, end_close_paren) = self.consume(")");

let open = Tag::from((start_open_paren, end_open_paren, self.anchor));
let close = Tag::from((start_close_paren, end_close_paren, self.anchor));
let whole = Tag::from((start_open_paren, end_close_paren, self.anchor));
let open = Span::new(start_open_paren, end_open_paren);
let close = Span::new(start_close_paren, end_close_paren);
let whole = Span::new(start_open_paren, end_close_paren);

(open, close, whole, output)
}
@@ -326,17 +329,17 @@ impl TokenTreeBuilder {
Box::new(move |b| {
let (open, close, whole, output) = b.consume_delimiter(input, "(", ")");

TokenTreeBuilder::tagged_parens(output, (open, close), whole)
TokenTreeBuilder::spanned_parens(output, (open, close), whole)
})
}

pub fn tagged_parens(
pub fn spanned_parens(
input: impl Into<Vec<TokenNode>>,
tags: (Tag, Tag),
tag: impl Into<Tag>,
spans: (Span, Span),
span: impl Into<Span>,
) -> TokenNode {
TokenNode::Delimited(
DelimitedNode::new(Delimiter::Paren, tags, input.into()).tagged(tag.into()),
DelimitedNode::new(Delimiter::Paren, spans, input.into()).spanned(span.into()),
)
}

@@ -344,17 +347,17 @@ impl TokenTreeBuilder {
Box::new(move |b| {
let (open, close, whole, tokens) = b.consume_delimiter(input, "[", "]");

TokenTreeBuilder::tagged_square(tokens, (open, close), whole)
TokenTreeBuilder::spanned_square(tokens, (open, close), whole)
})
}

pub fn tagged_square(
pub fn spanned_square(
input: impl Into<Vec<TokenNode>>,
tags: (Tag, Tag),
tag: impl Into<Tag>,
spans: (Span, Span),
span: impl Into<Span>,
) -> TokenNode {
TokenNode::Delimited(
DelimitedNode::new(Delimiter::Square, tags, input.into()).tagged(tag.into()),
DelimitedNode::new(Delimiter::Square, spans, input.into()).spanned(span.into()),
)
}

@@ -362,24 +365,24 @@ impl TokenTreeBuilder {
Box::new(move |b| {
let (open, close, whole, tokens) = b.consume_delimiter(input, "{", "}");

TokenTreeBuilder::tagged_brace(tokens, (open, close), whole)
TokenTreeBuilder::spanned_brace(tokens, (open, close), whole)
})
}

pub fn tagged_brace(
pub fn spanned_brace(
input: impl Into<Vec<TokenNode>>,
tags: (Tag, Tag),
tag: impl Into<Tag>,
spans: (Span, Span),
span: impl Into<Span>,
) -> TokenNode {
TokenNode::Delimited(
DelimitedNode::new(Delimiter::Brace, tags, input.into()).tagged(tag.into()),
DelimitedNode::new(Delimiter::Brace, spans, input.into()).spanned(span.into()),
)
}

pub fn sp() -> CurriedToken {
Box::new(|b| {
let (start, end) = b.consume(" ");
TokenNode::Whitespace(Tag::from((start, end, b.anchor)))
TokenNode::Whitespace(Span::new(start, end))
})
}

@@ -388,12 +391,12 @@ impl TokenTreeBuilder {

Box::new(move |b| {
let (start, end) = b.consume(&input);
TokenTreeBuilder::tagged_ws((start, end, b.anchor))
TokenTreeBuilder::spanned_ws(Span::new(start, end))
})
}

pub fn tagged_ws(tag: impl Into<Tag>) -> TokenNode {
TokenNode::Whitespace(tag.into())
pub fn spanned_ws(span: impl Into<Span>) -> TokenNode {
TokenNode::Whitespace(span.into())
}

fn consume(&mut self, input: &str) -> (usize, usize) {
@@ -403,10 +406,10 @@ impl TokenTreeBuilder {
(start, self.pos)
}

fn consume_tag(&mut self, input: &str) -> Tag {
fn consume_span(&mut self, input: &str) -> Span {
let start = self.pos;
self.pos += input.len();
self.output.push_str(input);
(start, self.pos, self.anchor).into()
Span::new(start, self.pos)
}
}
@@ -1,6 +1,6 @@
use crate::parser::Operator;
use crate::prelude::*;
use crate::{Tagged, Text};
use crate::Text;
use std::fmt;
use std::str::FromStr;

@@ -8,9 +8,9 @@ use std::str::FromStr;
pub enum RawToken {
Number(RawNumber),
Operator(Operator),
String(Tag),
Variable(Tag),
ExternalCommand(Tag),
String(Span),
Variable(Span),
ExternalCommand(Span),
ExternalWord,
GlobPattern,
Bare,
@@ -33,21 +33,21 @@ impl RawToken {

#[derive(Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub enum RawNumber {
Int(Tag),
Decimal(Tag),
Int(Span),
Decimal(Span),
}

impl RawNumber {
pub fn int(tag: impl Into<Tag>) -> Tagged<RawNumber> {
let tag = tag.into();
pub fn int(span: impl Into<Span>) -> Spanned<RawNumber> {
let span = span.into();

RawNumber::Int(tag).tagged(tag)
RawNumber::Int(span).spanned(span)
}

pub fn decimal(tag: impl Into<Tag>) -> Tagged<RawNumber> {
let tag = tag.into();
pub fn decimal(span: impl Into<Span>) -> Spanned<RawNumber> {
let span = span.into();

RawNumber::Decimal(tag).tagged(tag)
RawNumber::Decimal(span).spanned(span)
}

pub(crate) fn to_number(self, source: &Text) -> Number {
@@ -60,7 +60,7 @@ impl RawNumber {
}
}

pub type Token = Tagged<RawToken>;
pub type Token = Spanned<RawToken>;

impl Token {
pub fn debug<'a>(&self, source: &'a Text) -> DebugToken<'a> {
@@ -70,72 +70,72 @@ impl Token {
}
}

pub fn extract_number(&self) -> Option<Tagged<RawNumber>> {
pub fn extract_number(&self) -> Option<Spanned<RawNumber>> {
match self.item {
RawToken::Number(number) => Some((number).tagged(self.tag)),
RawToken::Number(number) => Some((number).spanned(self.span)),
_ => None,
}
}

pub fn extract_int(&self) -> Option<(Tag, Tag)> {
pub fn extract_int(&self) -> Option<(Span, Span)> {
match self.item {
RawToken::Number(RawNumber::Int(int)) => Some((int, self.tag)),
RawToken::Number(RawNumber::Int(int)) => Some((int, self.span)),
_ => None,
}
}

pub fn extract_decimal(&self) -> Option<(Tag, Tag)> {
pub fn extract_decimal(&self) -> Option<(Span, Span)> {
match self.item {
RawToken::Number(RawNumber::Decimal(decimal)) => Some((decimal, self.tag)),
RawToken::Number(RawNumber::Decimal(decimal)) => Some((decimal, self.span)),
_ => None,
}
}

pub fn extract_operator(&self) -> Option<Tagged<Operator>> {
pub fn extract_operator(&self) -> Option<Spanned<Operator>> {
match self.item {
RawToken::Operator(operator) => Some(operator.tagged(self.tag)),
RawToken::Operator(operator) => Some(operator.spanned(self.span)),
_ => None,
}
}

pub fn extract_string(&self) -> Option<(Tag, Tag)> {
pub fn extract_string(&self) -> Option<(Span, Span)> {
match self.item {
RawToken::String(tag) => Some((tag, self.tag)),
RawToken::String(span) => Some((span, self.span)),
_ => None,
}
}

pub fn extract_variable(&self) -> Option<(Tag, Tag)> {
pub fn extract_variable(&self) -> Option<(Span, Span)> {
match self.item {
RawToken::Variable(tag) => Some((tag, self.tag)),
RawToken::Variable(span) => Some((span, self.span)),
_ => None,
}
}

pub fn extract_external_command(&self) -> Option<(Tag, Tag)> {
pub fn extract_external_command(&self) -> Option<(Span, Span)> {
match self.item {
RawToken::ExternalCommand(tag) => Some((tag, self.tag)),
RawToken::ExternalCommand(span) => Some((span, self.span)),
_ => None,
}
}

pub fn extract_external_word(&self) -> Option<Tag> {
pub fn extract_external_word(&self) -> Option<Span> {
match self.item {
RawToken::ExternalWord => Some(self.tag),
RawToken::ExternalWord => Some(self.span),
_ => None,
}
}

pub fn extract_glob_pattern(&self) -> Option<Tag> {
pub fn extract_glob_pattern(&self) -> Option<Span> {
match self.item {
RawToken::GlobPattern => Some(self.tag),
RawToken::GlobPattern => Some(self.span),
_ => None,
}
}

pub fn extract_bare(&self) -> Option<Tag> {
pub fn extract_bare(&self) -> Option<Span> {
match self.item {
RawToken::Bare => Some(self.tag),
RawToken::Bare => Some(self.span),
_ => None,
}
}
@@ -148,6 +148,6 @@ pub struct DebugToken<'a> {

impl fmt::Debug for DebugToken<'_> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.node.tag().slice(self.source))
write!(f, "{}", self.node.span.slice(self.source))
}
}
@@ -10,14 +10,14 @@ use crate::parser::{
Flag,
};
use crate::traits::ToDebug;
use crate::{Tag, Tagged, Text};
use crate::{Span, Spanned, Tag, Text};
use log::trace;

pub fn parse_command_tail(
config: &Signature,
context: &ExpandContext,
tail: &mut TokensIterator,
command_tag: Tag,
command_span: Span,
) -> Result<Option<(Option<Vec<hir::Expression>>, Option<NamedArguments>)>, ShellError> {
let mut named = NamedArguments::new();
trace_remaining("nodes", tail.clone(), context.source());
@@ -32,7 +32,7 @@ pub fn parse_command_tail(
named.insert_switch(name, flag);
}
NamedType::Mandatory(syntax_type) => {
match extract_mandatory(config, name, tail, context.source(), command_tag) {
match extract_mandatory(config, name, tail, context.source(), command_span) {
Err(err) => return Err(err), // produce a correct diagnostic
Ok((pos, flag)) => {
tail.move_to(pos);
@@ -41,7 +41,7 @@ pub fn parse_command_tail(
return Err(ShellError::argument_error(
config.name.clone(),
ArgumentError::MissingValueForName(name.to_string()),
flag.tag(),
flag.span,
));
}

@@ -62,7 +62,7 @@ pub fn parse_command_tail(
return Err(ShellError::argument_error(
config.name.clone(),
ArgumentError::MissingValueForName(name.to_string()),
flag.tag(),
flag.span,
));
}

@@ -98,7 +98,10 @@ pub fn parse_command_tail(
return Err(ShellError::argument_error(
config.name.clone(),
ArgumentError::MissingMandatoryPositional(arg.name().to_string()),
command_tag,
Tag {
span: command_span,
anchor: None,
},
));
}
}
@@ -158,7 +161,7 @@ pub fn parse_command_tail(

#[derive(Debug)]
struct ColoringArgs {
vec: Vec<Option<Vec<Tagged<FlatShape>>>>,
vec: Vec<Option<Vec<Spanned<FlatShape>>>>,
}

impl ColoringArgs {
@@ -167,11 +170,11 @@ impl ColoringArgs {
ColoringArgs { vec }
}

fn insert(&mut self, pos: usize, shapes: Vec<Tagged<FlatShape>>) {
fn insert(&mut self, pos: usize, shapes: Vec<Spanned<FlatShape>>) {
self.vec[pos] = Some(shapes);
}

fn spread_shapes(self, shapes: &mut Vec<Tagged<FlatShape>>) {
fn spread_shapes(self, shapes: &mut Vec<Spanned<FlatShape>>) {
for item in self.vec {
match item {
None => {}
@@ -195,7 +198,7 @@ impl ColorSyntax for CommandTailShape {
signature: &Signature,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Tagged<FlatShape>>,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Self::Info {
let mut args = ColoringArgs::new(token_nodes.len());
trace_remaining("nodes", token_nodes.clone(), context.source());
@@ -216,7 +219,7 @@ impl ColorSyntax for CommandTailShape {
name,
token_nodes,
context.source(),
Tag::unknown(),
Span::unknown(),
) {
Err(_) => {
// The mandatory flag didn't exist at all, so there's nothing to color
@@ -378,7 +381,7 @@ impl ColorSyntax for CommandTailShape {
// Consume any remaining tokens with backoff coloring mode
color_syntax(&BackoffColoringMode, token_nodes, context, shapes);

shapes.sort_by(|a, b| a.tag.span.start().cmp(&b.tag.span.start()));
shapes.sort_by(|a, b| a.span.start().cmp(&b.span.start()));
}
}

@@ -393,15 +396,15 @@ fn extract_mandatory(
name: &str,
tokens: &mut hir::TokensIterator<'_>,
source: &Text,
tag: Tag,
) -> Result<(usize, Tagged<Flag>), ShellError> {
span: Span,
) -> Result<(usize, Spanned<Flag>), ShellError> {
let flag = tokens.extract(|t| t.as_flag(name, source));

match flag {
None => Err(ShellError::argument_error(
config.name.clone(),
ArgumentError::MissingMandatoryFlag(name.to_string()),
tag,
span,
)),

Some((pos, flag)) => {
@@ -415,7 +418,7 @@ fn extract_optional(
name: &str,
tokens: &mut hir::TokensIterator<'_>,
source: &Text,
) -> Result<(Option<(usize, Tagged<Flag>)>), ShellError> {
) -> Result<(Option<(usize, Spanned<Flag>)>), ShellError> {
let flag = tokens.extract(|t| t.as_flag(name, source));

match flag {
@@ -298,7 +298,7 @@ pub(crate) fn evaluate_args(
for (name, value) in n.named.iter() {
match value {
hir::named::NamedValue::PresentSwitch(tag) => {
results.insert(name.clone(), Value::boolean(true).tagged(*tag));
results.insert(name.clone(), Value::boolean(true).tagged(tag));
}
hir::named::NamedValue::Value(expr) => {
results.insert(

@@ -22,7 +22,7 @@ impl Add {
let value_tag = value.tag();
match (value.item, self.value.clone()) {
(obj @ Value::Row(_), Some(v)) => match &self.field {
Some(f) => match obj.insert_data_at_column_path(value_tag, &f, v) {
Some(f) => match obj.insert_data_at_column_path(value_tag.clone(), &f, v) {
Some(v) => return Ok(v),
None => {
return Err(ShellError::labeled_error(
@@ -32,7 +32,7 @@ impl Add {
f.iter().map(|i| &i.item).join(".")
),
"column name",
value_tag,
&value_tag,
))
}
},

@@ -24,8 +24,7 @@ impl Plugin for BinaryView {
let value_anchor = v.anchor();
match v.item {
Value::Primitive(Primitive::Binary(b)) => {
let source = call_info.source_map.get(&value_anchor);
let _ = view_binary(&b, source, call_info.args.has("lores"));
let _ = view_binary(&b, value_anchor.as_ref(), call_info.args.has("lores"));
}
_ => {}
}

@@ -27,7 +27,7 @@ impl Edit {
return Err(ShellError::labeled_error(
"edit could not find place to insert column",
"column name",
f.tag,
&f.tag,
))
}
},

@@ -28,7 +28,7 @@ impl Embed {
None => Err(ShellError::labeled_error(
"embed needs a field when embedding a value",
"original value",
value.tag,
&tag,
)),
},
}
@@ -82,9 +82,7 @@ impl Inc {
Value::Primitive(Primitive::Bytes(b)) => {
Ok(Value::bytes(b + 1 as u64).tagged(value.tag()))
}
Value::Primitive(Primitive::String(ref s)) => {
Ok(Tagged::from_item(self.apply(&s)?, value.tag()))
}
Value::Primitive(Primitive::String(ref s)) => Ok(self.apply(&s)?.tagged(value.tag())),
Value::Row(_) => match self.field {
Some(ref f) => {
let replacement = match value.item.get_data_by_column_path(value.tag(), f) {
@@ -93,7 +91,7 @@ impl Inc {
return Err(ShellError::labeled_error(
"inc could not find field to replace",
"column name",
f.tag,
&f.tag,
))
}
};
@@ -107,7 +105,7 @@ impl Inc {
return Err(ShellError::labeled_error(
"inc could not find field to replace",
"column name",
f.tag,
&f.tag,
))
}
}
@@ -191,20 +189,18 @@ mod tests {
use super::{Inc, SemVerAction};
use indexmap::IndexMap;
use nu::{
CallInfo, EvaluatedArgs, Plugin, ReturnSuccess, SourceMap, Tag, Tagged, TaggedDictBuilder,
TaggedItem, Value,
CallInfo, EvaluatedArgs, Plugin, ReturnSuccess, Tag, Tagged, TaggedDictBuilder, TaggedItem,
Value,
};

struct CallStub {
anchor: uuid::Uuid,
positionals: Vec<Tagged<Value>>,
flags: IndexMap<String, Tagged<Value>>,
}

impl CallStub {
fn new(anchor: uuid::Uuid) -> CallStub {
fn new() -> CallStub {
CallStub {
anchor,
positionals: vec![],
flags: indexmap::IndexMap::new(),
}
@@ -221,19 +217,18 @@ mod tests {
fn with_parameter(&mut self, name: &str) -> &mut Self {
let fields: Vec<Tagged<Value>> = name
.split(".")
.map(|s| Value::string(s.to_string()).tagged(Tag::unknown_span(self.anchor)))
.map(|s| Value::string(s.to_string()).tagged(Tag::unknown()))
.collect();

self.positionals
.push(Value::Table(fields).tagged(Tag::unknown_span(self.anchor)));
.push(Value::Table(fields).tagged(Tag::unknown()));
self
}

fn create(&self) -> CallInfo {
CallInfo {
args: EvaluatedArgs::new(Some(self.positionals.clone()), Some(self.flags.clone())),
source_map: SourceMap::new(),
name_tag: Tag::unknown_span(self.anchor),
name_tag: Tag::unknown(),
}
}
}
@@ -260,7 +255,7 @@ mod tests {
let mut plugin = Inc::new();

assert!(plugin
.begin_filter(CallStub::new(test_uuid()).with_long_flag("major").create())
.begin_filter(CallStub::new().with_long_flag("major").create())
.is_ok());
assert!(plugin.action.is_some());
}
@@ -270,7 +265,7 @@ mod tests {
let mut plugin = Inc::new();

assert!(plugin
.begin_filter(CallStub::new(test_uuid()).with_long_flag("minor").create())
.begin_filter(CallStub::new().with_long_flag("minor").create())
.is_ok());
assert!(plugin.action.is_some());
}
@@ -280,7 +275,7 @@ mod tests {
let mut plugin = Inc::new();

assert!(plugin
.begin_filter(CallStub::new(test_uuid()).with_long_flag("patch").create())
.begin_filter(CallStub::new().with_long_flag("patch").create())
.is_ok());
assert!(plugin.action.is_some());
}
@@ -291,7 +286,7 @@ mod tests {

assert!(plugin
.begin_filter(
CallStub::new(test_uuid())
CallStub::new()
.with_long_flag("major")
.with_long_flag("minor")
.create(),
@@ -305,11 +300,7 @@ mod tests {
let mut plugin = Inc::new();

assert!(plugin
.begin_filter(
CallStub::new(test_uuid())
.with_parameter("package.version")
.create()
)
.begin_filter(CallStub::new().with_parameter("package.version").create())
.is_ok());

assert_eq!(
@@ -347,7 +338,7 @@ mod tests {

assert!(plugin
.begin_filter(
CallStub::new(test_uuid())
CallStub::new()
.with_long_flag("major")
.with_parameter("version")
.create()
@@ -375,7 +366,7 @@ mod tests {

assert!(plugin
.begin_filter(
CallStub::new(test_uuid())
CallStub::new()
.with_long_flag("minor")
.with_parameter("version")
.create()
@@ -404,7 +395,7 @@ mod tests {

assert!(plugin
.begin_filter(
CallStub::new(test_uuid())
CallStub::new()
.with_long_flag("patch")
.with_parameter(&field)
.create()
@@ -425,8 +416,4 @@ mod tests {
_ => {}
}
}

fn test_uuid() -> uuid::Uuid {
uuid::Uuid::nil()
}
}
@@ -40,7 +40,7 @@ async fn ps(tag: Tag) -> Vec<Tagged<Value>> {
let mut output = vec![];
while let Some(res) = processes.next().await {
if let Ok((process, usage)) = res {
let mut dict = TaggedDictBuilder::new(tag);
let mut dict = TaggedDictBuilder::new(&tag);
dict.insert("pid", Value::int(process.pid()));
if let Ok(name) = process.name().await {
dict.insert("name", Value::string(name));

@@ -89,14 +89,12 @@ impl Str {
impl Str {
fn strutils(&self, value: Tagged<Value>) -> Result<Tagged<Value>, ShellError> {
match value.item {
Value::Primitive(Primitive::String(ref s)) => {
Ok(Tagged::from_item(self.apply(&s)?, value.tag()))
}
Value::Primitive(Primitive::String(ref s)) => Ok(self.apply(&s)?.tagged(value.tag())),
Value::Row(_) => match self.field {
Some(ref f) => {
let replacement = match value.item.get_data_by_column_path(value.tag(), f) {
Some(result) => self.strutils(result.map(|x| x.clone()))?,
None => return Ok(Tagged::from_item(Value::nothing(), value.tag)),
None => return Ok(Value::nothing().tagged(value.tag)),
};
match value.item.replace_data_at_column_path(
value.tag(),
@@ -174,7 +172,7 @@ impl Plugin for Str {
return Err(ShellError::labeled_error(
"Unrecognized type in params",
possible_field.type_name(),
possible_field.tag,
&possible_field.tag,
))
}
}
@@ -216,13 +214,12 @@ mod tests {
use super::{Action, Str};
use indexmap::IndexMap;
use nu::{
CallInfo, EvaluatedArgs, Plugin, Primitive, ReturnSuccess, SourceMap, Tag, Tagged,
TaggedDictBuilder, TaggedItem, Value,
CallInfo, EvaluatedArgs, Plugin, Primitive, ReturnSuccess, Tag, Tagged, TaggedDictBuilder,
TaggedItem, Value,
};
use num_bigint::BigInt;

struct CallStub {
anchor: uuid::Uuid,
positionals: Vec<Tagged<Value>>,
flags: IndexMap<String, Tagged<Value>>,
}
@@ -230,7 +227,6 @@ mod tests {
impl CallStub {
fn new() -> CallStub {
CallStub {
anchor: uuid::Uuid::nil(),
positionals: vec![],
flags: indexmap::IndexMap::new(),
}
@@ -247,19 +243,18 @@ mod tests {
fn with_parameter(&mut self, name: &str) -> &mut Self {
let fields: Vec<Tagged<Value>> = name
.split(".")
.map(|s| Value::string(s.to_string()).tagged(Tag::unknown_span(self.anchor)))
.map(|s| Value::string(s.to_string()).tagged(Tag::unknown()))
.collect();

self.positionals
.push(Value::Table(fields).tagged(Tag::unknown_span(self.anchor)));
.push(Value::Table(fields).tagged(Tag::unknown()));
self
}

fn create(&self) -> CallInfo {
CallInfo {
args: EvaluatedArgs::new(Some(self.positionals.clone()), Some(self.flags.clone())),
source_map: SourceMap::new(),
name_tag: Tag::unknown_span(self.anchor),
name_tag: Tag::unknown(),
}
}
}
@@ -271,7 +266,7 @@ mod tests {
}

fn unstructured_sample_record(value: &str) -> Tagged<Value> {
Tagged::from_item(Value::string(value), Tag::unknown())
Value::string(value).tagged(Tag::unknown())
}

#[test]

@@ -21,7 +21,7 @@ impl Sum {
tag,
}) => {
//TODO: handle overflow
self.total = Some(Value::int(i + j).tagged(*tag));
self.total = Some(Value::int(i + j).tagged(tag));
Ok(())
}
None => {
@@ -36,7 +36,7 @@ impl Sum {
}
}
Value::Primitive(Primitive::Bytes(b)) => {
match self.total {
match &self.total {
Some(Tagged {
item: Value::Primitive(Primitive::Bytes(j)),
tag,
@@ -80,7 +80,7 @@ async fn mem(tag: Tag) -> Tagged<Value> {
}

async fn host(tag: Tag) -> Tagged<Value> {
let mut dict = TaggedDictBuilder::with_capacity(tag, 6);
let mut dict = TaggedDictBuilder::with_capacity(&tag, 6);

let (platform_result, uptime_result) =
futures::future::join(host::platform(), host::uptime()).await;
@@ -95,7 +95,7 @@ async fn host(tag: Tag) -> Tagged<Value> {

// Uptime
if let Ok(uptime) = uptime_result {
let mut uptime_dict = TaggedDictBuilder::with_capacity(tag, 4);
let mut uptime_dict = TaggedDictBuilder::with_capacity(&tag, 4);

let uptime = uptime.get::<time::second>().round() as i64;
let days = uptime / (60 * 60 * 24);
@@ -116,7 +116,10 @@ async fn host(tag: Tag) -> Tagged<Value> {
let mut user_vec = vec![];
while let Some(user) = users.next().await {
if let Ok(user) = user {
user_vec.push(Tagged::from_item(Value::string(user.username()), tag));
user_vec.push(Tagged {
item: Value::string(user.username()),
tag: tag.clone(),
});
}
}
let user_list = Value::Table(user_vec);
@@ -130,7 +133,7 @@ async fn disks(tag: Tag) -> Option<Value> {
let mut partitions = disk::partitions_physical();
while let Some(part) = partitions.next().await {
if let Ok(part) = part {
let mut dict = TaggedDictBuilder::with_capacity(tag, 6);
let mut dict = TaggedDictBuilder::with_capacity(&tag, 6);
dict.insert(
"device",
Value::string(
@@ -176,7 +179,7 @@ async fn battery(tag: Tag) -> Option<Value> {
if let Ok(batteries) = manager.batteries() {
for battery in batteries {
if let Ok(battery) = battery {
let mut dict = TaggedDictBuilder::new(tag);
let mut dict = TaggedDictBuilder::new(&tag);
if let Some(vendor) = battery.vendor() {
dict.insert("vendor", Value::string(vendor));
}
@@ -217,7 +220,7 @@ async fn temp(tag: Tag) -> Option<Value> {
let mut sensors = sensors::temperatures();
while let Some(sensor) = sensors.next().await {
if let Ok(sensor) = sensor {
let mut dict = TaggedDictBuilder::new(tag);
let mut dict = TaggedDictBuilder::new(&tag);
dict.insert("unit", Value::string(sensor.unit()));
if let Some(label) = sensor.label() {
dict.insert("label", Value::string(label));
@@ -259,7 +262,7 @@ async fn net(tag: Tag) -> Option<Value> {
let mut io_counters = net::io_counters();
while let Some(nic) = io_counters.next().await {
if let Ok(nic) = nic {
let mut network_idx = TaggedDictBuilder::with_capacity(tag, 3);
let mut network_idx = TaggedDictBuilder::with_capacity(&tag, 3);
network_idx.insert("name", Value::string(nic.interface()));
network_idx.insert(
"sent",
@@ -280,11 +283,17 @@ async fn net(tag: Tag) -> Option<Value> {
}

async fn sysinfo(tag: Tag) -> Vec<Tagged<Value>> {
let mut sysinfo = TaggedDictBuilder::with_capacity(tag, 7);
let mut sysinfo = TaggedDictBuilder::with_capacity(&tag, 7);

let (host, cpu, disks, memory, temp) =
futures::future::join5(host(tag), cpu(tag), disks(tag), mem(tag), temp(tag)).await;
let (net, battery) = futures::future::join(net(tag), battery(tag)).await;
let (host, cpu, disks, memory, temp) = futures::future::join5(
host(tag.clone()),
cpu(tag.clone()),
disks(tag.clone()),
mem(tag.clone()),
temp(tag.clone()),
)
.await;
let (net, battery) = futures::future::join(net(tag.clone()), battery(tag.clone())).await;

sysinfo.insert_tagged("host", host);
if let Some(cpu) = cpu {
@@ -1,8 +1,7 @@
use crossterm::{cursor, terminal, RawScreen};
use crossterm::{InputEvent, KeyEvent};
use nu::{
serve_plugin, AnchorLocation, CallInfo, Plugin, Primitive, ShellError, Signature, SourceMap,
Tagged, Value,
serve_plugin, AnchorLocation, CallInfo, Plugin, Primitive, ShellError, Signature, Tagged, Value,
};

use syntect::easy::HighlightLines;
@@ -29,8 +28,8 @@ impl Plugin for TextView {
Ok(Signature::build("textview").desc("Autoview of text data."))
}

fn sink(&mut self, call_info: CallInfo, input: Vec<Tagged<Value>>) {
view_text_value(&input[0], &call_info.source_map);
fn sink(&mut self, _call_info: CallInfo, input: Vec<Tagged<Value>>) {
view_text_value(&input[0]);
}
}

@@ -215,20 +214,18 @@ fn scroll_view(s: &str) {
scroll_view_lines_if_needed(v, false);
}

fn view_text_value(value: &Tagged<Value>, source_map: &SourceMap) {
fn view_text_value(value: &Tagged<Value>) {
let value_anchor = value.anchor();
match value.item {
Value::Primitive(Primitive::String(ref s)) => {
let source = source_map.get(&value_anchor);

if let Some(source) = source {
if let Some(source) = value_anchor {
let extension: Option<String> = match source {
AnchorLocation::File(file) => {
let path = Path::new(file);
let path = Path::new(&file);
path.extension().map(|x| x.to_string_lossy().to_string())
}
AnchorLocation::Url(url) => {
let url = url::Url::parse(url);
let url = url::Url::parse(&url);
if let Ok(url) = url {
let url = url.clone();
if let Some(mut segments) = url.path_segments() {