Merge master

This commit is contained in:
Jonathan Turner
2019-11-17 06:17:05 +13:00
195 changed files with 19791 additions and 5402 deletions


@ -1,4 +1,3 @@
use crate::commands::autoview;
use crate::commands::classified::{
ClassifiedCommand, ClassifiedInputStream, ClassifiedPipeline, ExternalCommand, InternalCommand,
StreamNext,
@ -14,18 +13,22 @@ use crate::fuzzysearch::{interactive_fuzzy_search, SelectionResult};
#[cfg(not(feature = "starship-prompt"))]
use crate::git::current_branch;
use crate::parser::registry::Signature;
use crate::parser::{hir, CallNode, Pipeline, PipelineElement, TokenNode};
use crate::parser::{
hir,
hir::syntax_shape::{expand_syntax, ExpandContext, PipelineShape},
hir::{expand_external_tokens::ExternalTokensShape, tokens_iterator::TokensIterator},
TokenNode,
};
use crate::prelude::*;
use log::{debug, trace};
use log::{debug, log_enabled, trace};
use rustyline::error::ReadlineError;
use rustyline::{self, config::Configurer, config::EditMode, ColorMode, Config, Editor};
use std::env;
use std::error::Error;
use std::io::{BufRead, BufReader, Write};
use std::iter::Iterator;
use std::path::PathBuf;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::atomic::Ordering;
#[derive(Debug)]
pub enum MaybeOwned<'a, T> {
@ -76,7 +79,7 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel
let name = params.name.clone();
let fname = fname.to_string();
if context.has_command(&name) {
if let Some(_) = context.get_command(&name) {
trace!("plugin {:?} already loaded.", &name);
} else {
if params.is_filter {
@ -95,11 +98,17 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel
},
Err(e) => {
trace!("incompatible plugin {:?}", input);
Err(ShellError::string(format!("Error: {:?}", e)))
Err(ShellError::untagged_runtime_error(format!(
"Error: {:?}",
e
)))
}
}
}
Err(e) => Err(ShellError::string(format!("Error: {:?}", e))),
Err(e) => Err(ShellError::untagged_runtime_error(format!(
"Error: {:?}",
e
))),
};
let _ = child.wait();
@ -110,13 +119,6 @@ fn load_plugin(path: &std::path::Path, context: &mut Context) -> Result<(), Shel
fn search_paths() -> Vec<std::path::PathBuf> {
let mut search_paths = Vec::new();
match env::var_os("PATH") {
Some(paths) => {
search_paths = env::split_paths(&paths).collect::<Vec<_>>();
}
None => println!("PATH is not defined in the environment."),
}
#[cfg(debug_assertions)]
{
// Use our debug plugins in debug mode
@ -131,6 +133,15 @@ fn search_paths() -> Vec<std::path::PathBuf> {
#[cfg(not(debug_assertions))]
{
use std::env;
match env::var_os("PATH") {
Some(paths) => {
search_paths = env::split_paths(&paths).collect::<Vec<_>>();
}
None => println!("PATH is not defined in the environment."),
}
// Use our release plugins in release mode
let mut path = std::path::PathBuf::from(".");
path.push("target");
@ -154,6 +165,8 @@ fn load_plugins(context: &mut Context) -> Result<(), ShellError> {
require_literal_leading_dot: false,
};
set_env_from_config();
for path in search_paths() {
let mut pattern = path.to_path_buf();
@ -249,13 +262,14 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
whole_stream_command(Next),
whole_stream_command(Previous),
whole_stream_command(Debug),
whole_stream_command(Lines),
whole_stream_command(Shells),
whole_stream_command(SplitColumn),
whole_stream_command(SplitRow),
whole_stream_command(Lines),
whole_stream_command(Reject),
whole_stream_command(Reverse),
whole_stream_command(Append),
whole_stream_command(Prepend),
whole_stream_command(Trim),
whole_stream_command(ToBSON),
whole_stream_command(ToCSV),
@ -267,12 +281,15 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
whole_stream_command(ToURL),
whole_stream_command(ToYAML),
whole_stream_command(SortBy),
whole_stream_command(GroupBy),
whole_stream_command(Tags),
whole_stream_command(Count),
whole_stream_command(First),
whole_stream_command(Last),
whole_stream_command(Env),
whole_stream_command(FromCSV),
whole_stream_command(FromTSV),
whole_stream_command(FromSSV),
whole_stream_command(FromINI),
whole_stream_command(FromBSON),
whole_stream_command(FromJSON),
@ -285,6 +302,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
whole_stream_command(FromYML),
whole_stream_command(Pick),
whole_stream_command(Get),
whole_stream_command(Histogram),
per_item_command(Remove),
per_item_command(Fetch),
per_item_command(Open),
@ -295,6 +313,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
whole_stream_command(SkipWhile),
per_item_command(Enter),
per_item_command(Help),
per_item_command(History),
whole_stream_command(Exit),
whole_stream_command(Autoview),
whole_stream_command(Pivot),
@ -303,11 +322,23 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
per_item_command(Mkdir),
per_item_command(Move),
whole_stream_command(Save),
whole_stream_command(SplitBy),
whole_stream_command(Table),
whole_stream_command(Version),
whole_stream_command(Which),
]);
cfg_if::cfg_if! {
if #[cfg(data_processing_primitives)] {
context.add_commands(vec![
whole_stream_command(ReduceBy),
whole_stream_command(EvaluateBy),
whole_stream_command(TSortBy),
whole_stream_command(MapMaxBy),
]);
}
}
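A note on the block above: the ReduceBy/EvaluateBy/TSortBy/MapMaxBy commands are only registered when the custom `data_processing_primitives` cfg is enabled at build time (e.g. `RUSTFLAGS="--cfg data_processing_primitives" cargo build`). A minimal sketch of the same gating at item level, assuming only the `cfg-if` crate; `optional_commands` is an illustrative helper, not Nu's registration API:

```rust
// Sketch of cfg_if! gating behind a custom build-time cfg. The helper and
// the command-name strings are illustrative stand-ins, not Nu's API.
cfg_if::cfg_if! {
    if #[cfg(data_processing_primitives)] {
        fn optional_commands() -> Vec<&'static str> {
            vec!["reduce_by", "evaluate_by", "t_sort_by", "map_max_by"]
        }
    } else {
        fn optional_commands() -> Vec<&'static str> {
            Vec::new()
        }
    }
}

fn main() {
    let mut commands = vec!["group-by", "histogram", "count"];
    commands.extend(optional_commands());
    println!("{} commands registered", commands.len());
}
```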
#[cfg(feature = "clipboard")]
{
context.add_commands(vec![whole_stream_command(
@ -315,6 +346,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
)]);
}
}
let _ = load_plugins(&mut context);
let config = Config::builder().color_mode(ColorMode::Forced).build();
@ -328,24 +360,21 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
// we are ok if history does not exist
let _ = rl.load_history(&History::path());
let ctrl_c = Arc::new(AtomicBool::new(false));
let cc = ctrl_c.clone();
let cc = context.ctrl_c.clone();
ctrlc::set_handler(move || {
cc.store(true, Ordering::SeqCst);
})
.expect("Error setting Ctrl-C handler");
let mut ctrlcbreak = false;
loop {
if ctrl_c.load(Ordering::SeqCst) {
ctrl_c.store(false, Ordering::SeqCst);
if context.ctrl_c.load(Ordering::SeqCst) {
context.ctrl_c.store(false, Ordering::SeqCst);
continue;
}
let cwd = context.shell_manager.path();
rl.set_helper(Some(crate::shell::Helper::new(
context.shell_manager.clone(),
)));
rl.set_helper(Some(crate::shell::Helper::new(context.clone())));
let edit_mode = config::config(Tag::unknown())?
.get("edit_mode")
@ -416,6 +445,7 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
match process_line(readline, &mut context).await {
LineResult::Success(line) => {
rl.add_history_entry(line.clone());
let _ = rl.save_history(&History::path());
}
LineResult::CtrlC => {
@ -441,21 +471,12 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
}
}
LineResult::Error(mut line, err) => {
LineResult::Error(line, err) => {
rl.add_history_entry(line.clone());
let diag = err.to_diagnostic();
let _ = rl.save_history(&History::path());
context.with_host(|host| {
let writer = host.err_termcolor();
line.push_str(" ");
let files = crate::parser::Files::new(line);
let _ = std::panic::catch_unwind(move || {
let _ = language_reporting::emit(
&mut writer.lock(),
&files,
&diag,
&language_reporting::DefaultConfig,
);
});
print_err(err, host, &Text::from(line));
})
}
@ -472,6 +493,78 @@ pub async fn cli() -> Result<(), Box<dyn Error>> {
Ok(())
}
fn chomp_newline(s: &str) -> &str {
if s.ends_with('\n') {
&s[..s.len() - 1]
} else {
s
}
}
fn set_env_from_config() {
let config = crate::data::config::read(Tag::unknown(), &None).unwrap();
if config.contains_key("env") {
// Clear the existing vars, we're about to replace them
for (key, _value) in std::env::vars() {
std::env::remove_var(key);
}
let value = config.get("env");
match value {
Some(Tagged {
item: Value::Row(r),
..
}) => {
for (k, v) in &r.entries {
match v.as_string() {
Ok(value_string) => {
std::env::set_var(k, value_string);
}
_ => {}
}
}
}
_ => {}
}
}
if config.contains_key("path") {
// Override the path with what they give us from config
let value = config.get("path");
match value {
Some(value) => match value {
Tagged {
item: Value::Table(table),
..
} => {
let mut paths = vec![];
for val in table {
let path_str = val.as_string();
match path_str {
Err(_) => {}
Ok(path_str) => {
paths.push(PathBuf::from(path_str));
}
}
}
let path_os_string = std::env::join_paths(&paths);
match path_os_string {
Ok(path_os_string) => {
std::env::set_var("PATH", path_os_string);
}
Err(_) => {}
}
}
_ => {}
},
None => {}
}
}
}
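The new `set_env_from_config` above re-reads the Nu config, replaces the process environment from its `env` row, and rebuilds `PATH` from its `path` table. The `PATH` part reduces to `std::env::join_paths` plus `set_var`; a standalone sketch using only the standard library (the directories are placeholders, not anyone's real config):

```rust
// Sketch of the PATH override performed by set_env_from_config, std only.
// The configured directories below are placeholders.
use std::env;
use std::path::PathBuf;

fn main() {
    let configured: Vec<PathBuf> = vec![
        PathBuf::from("/usr/local/bin"),
        PathBuf::from("/usr/bin"),
    ];

    // join_paths errs if an entry contains the platform separator, which is
    // why the real code matches on the Result instead of unwrapping.
    match env::join_paths(&configured) {
        Ok(joined) => env::set_var("PATH", joined),
        Err(_) => {}
    }

    println!("PATH = {:?}", env::var_os("PATH"));
}
```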
enum LineResult {
Success(String),
Error(String, ShellError),
@ -484,9 +577,11 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
Ok(line) if line.trim() == "" => LineResult::Success(line.clone()),
Ok(line) => {
let result = match crate::parser::parse(&line, uuid::Uuid::nil()) {
let line = chomp_newline(line);
let result = match crate::parser::parse(&line) {
Err(err) => {
return LineResult::Error(line.clone(), err);
return LineResult::Error(line.to_string(), err);
}
Ok(val) => val,
@ -497,28 +592,32 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
let mut pipeline = match classify_pipeline(&result, ctx, &Text::from(line)) {
Ok(pipeline) => pipeline,
Err(err) => return LineResult::Error(line.clone(), err),
Err(err) => return LineResult::Error(line.to_string(), err),
};
match pipeline.commands.last() {
Some(ClassifiedCommand::External(_)) => {}
_ => pipeline
.commands
.item
.push(ClassifiedCommand::Internal(InternalCommand {
command: whole_stream_command(autoview::Autoview),
name: "autoview".to_string(),
name_tag: Tag::unknown(),
args: hir::Call::new(
Box::new(hir::Expression::synthetic_string("autoview")),
None,
None,
),
)
.spanned_unknown(),
})),
}
let mut input = ClassifiedInputStream::new();
let mut iter = pipeline.commands.item.into_iter().peekable();
let mut iter = pipeline.commands.into_iter().peekable();
let mut is_first_command = true;
// Check the config to see if we need to update the path
// TODO: make sure config is cached so we don't path this load every call
set_env_from_config();
loop {
let item: Option<ClassifiedCommand> = iter.next();
@ -527,16 +626,24 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
input = match (item, next) {
(None, _) => break,
(Some(ClassifiedCommand::Dynamic(_)), _)
| (_, Some(ClassifiedCommand::Dynamic(_))) => {
return LineResult::Error(
line.to_string(),
ShellError::unimplemented("Dynamic commands"),
)
}
(Some(ClassifiedCommand::Expr(_)), _) => {
return LineResult::Error(
line.clone(),
line.to_string(),
ShellError::unimplemented("Expression-only commands"),
)
}
(_, Some(ClassifiedCommand::Expr(_))) => {
return LineResult::Error(
line.clone(),
line.to_string(),
ShellError::unimplemented("Expression-only commands"),
)
}
@ -544,31 +651,46 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
(
Some(ClassifiedCommand::Internal(left)),
Some(ClassifiedCommand::External(_)),
) => match left
.run(ctx, input, Text::from(line), is_first_command)
.await
{
) => match left.run(ctx, input, Text::from(line)) {
Ok(val) => ClassifiedInputStream::from_input_stream(val),
Err(err) => return LineResult::Error(line.clone(), err),
Err(err) => return LineResult::Error(line.to_string(), err),
},
(Some(ClassifiedCommand::Internal(left)), Some(_)) => {
match left
.run(ctx, input, Text::from(line), is_first_command)
.await
{
match left.run(ctx, input, Text::from(line)) {
Ok(val) => ClassifiedInputStream::from_input_stream(val),
Err(err) => return LineResult::Error(line.clone(), err),
Err(err) => return LineResult::Error(line.to_string(), err),
}
}
(Some(ClassifiedCommand::Internal(left)), None) => {
match left
.run(ctx, input, Text::from(line), is_first_command)
.await
{
Ok(val) => ClassifiedInputStream::from_input_stream(val),
Err(err) => return LineResult::Error(line.clone(), err),
match left.run(ctx, input, Text::from(line)) {
Ok(val) => {
use futures::stream::TryStreamExt;
let mut output_stream: OutputStream = val.into();
loop {
match output_stream.try_next().await {
Ok(Some(ReturnSuccess::Value(Tagged {
item: Value::Error(e),
..
}))) => {
return LineResult::Error(line.to_string(), e);
}
Ok(Some(_item)) => {
if ctx.ctrl_c.load(Ordering::SeqCst) {
break;
}
}
_ => {
break;
}
}
}
return LineResult::Success(line.to_string());
}
Err(err) => return LineResult::Error(line.to_string(), err),
}
}
@ -577,28 +699,26 @@ async fn process_line(readline: Result<String, ReadlineError>, ctx: &mut Context
Some(ClassifiedCommand::External(_)),
) => match left.run(ctx, input, StreamNext::External).await {
Ok(val) => val,
Err(err) => return LineResult::Error(line.clone(), err),
Err(err) => return LineResult::Error(line.to_string(), err),
},
(Some(ClassifiedCommand::External(left)), Some(_)) => {
match left.run(ctx, input, StreamNext::Internal).await {
Ok(val) => val,
Err(err) => return LineResult::Error(line.clone(), err),
Err(err) => return LineResult::Error(line.to_string(), err),
}
}
(Some(ClassifiedCommand::External(left)), None) => {
match left.run(ctx, input, StreamNext::Last).await {
Ok(val) => val,
Err(err) => return LineResult::Error(line.clone(), err),
Err(err) => return LineResult::Error(line.to_string(), err),
}
}
};
is_first_command = false;
}
LineResult::Success(line.clone())
LineResult::Success(line.to_string())
}
Err(ReadlineError::Interrupted) => LineResult::CtrlC,
Err(ReadlineError::Eof) => LineResult::Break,
@ -614,95 +734,62 @@ fn classify_pipeline(
context: &Context,
source: &Text,
) -> Result<ClassifiedPipeline, ShellError> {
let pipeline = pipeline.as_pipeline()?;
let mut pipeline_list = vec![pipeline.clone()];
let mut iterator = TokensIterator::all(&mut pipeline_list, pipeline.span());
let Pipeline { parts, .. } = pipeline;
let result = expand_syntax(
&PipelineShape,
&mut iterator,
&context.expand_context(source),
)
.map_err(|err| err.into());
let commands: Result<Vec<_>, ShellError> = parts
.iter()
.map(|item| classify_command(&item, context, &source))
.collect();
Ok(ClassifiedPipeline {
commands: commands?,
})
}
fn classify_command(
command: &PipelineElement,
context: &Context,
source: &Text,
) -> Result<ClassifiedCommand, ShellError> {
let call = command.call();
match call {
// If the command starts with `^`, treat it as an external command no matter what
call if call.head().is_external() => {
let name_tag = call.head().expect_external();
let name = name_tag.slice(source);
Ok(external_command(call, source, name.tagged(name_tag)))
}
// Otherwise, if the command is a bare word, we'll need to triage it
call if call.head().is_bare() => {
let head = call.head();
let name = head.source(source);
match context.has_command(name) {
// if the command is in the registry, it's an internal command
true => {
let command = context.get_command(name);
let config = command.signature();
trace!(target: "nu::build_pipeline", "classifying {:?}", config);
let args: hir::Call = config.parse_args(call, &context, source)?;
trace!(target: "nu::build_pipeline", "args :: {}", args.debug(source));
Ok(ClassifiedCommand::Internal(InternalCommand {
command,
name_tag: head.tag(),
args,
}))
}
// otherwise, it's an external command
false => Ok(external_command(call, source, name.tagged(head.tag()))),
}
}
// If the command is something else (like a number or a variable), that is currently unsupported.
// We might support `$somevar` as a curried command in the future.
call => Err(ShellError::invalid_command(call.head().tag())),
if log_enabled!(target: "nu::expand_syntax", log::Level::Debug) {
println!("");
ptree::print_tree(&iterator.expand_tracer().print(source.clone())).unwrap();
println!("");
}
result
}
// Classify this command as an external command, which doesn't give special meaning
// to nu syntactic constructs, and passes all arguments to the external command as
// strings.
fn external_command(
call: &Tagged<CallNode>,
source: &Text,
pub(crate) fn external_command(
tokens: &mut TokensIterator,
context: &ExpandContext,
name: Tagged<&str>,
) -> ClassifiedCommand {
let arg_list_strings: Vec<Tagged<String>> = match call.children() {
Some(args) => args
.iter()
.filter_map(|i| match i {
TokenNode::Whitespace(_) => None,
other => Some(other.as_external_arg(source).tagged(other.tag())),
})
.collect(),
None => vec![],
};
) -> Result<ClassifiedCommand, ParseError> {
let Spanned { item, span } = expand_syntax(&ExternalTokensShape, tokens, context)?;
let (name, tag) = name.into_parts();
ClassifiedCommand::External(ExternalCommand {
Ok(ClassifiedCommand::External(ExternalCommand {
name: name.to_string(),
name_tag: tag,
args: arg_list_strings,
})
name_tag: name.tag(),
args: item
.iter()
.map(|x| Tagged {
tag: x.span.into(),
item: x.item.clone(),
})
.collect::<Vec<_>>()
.spanned(span),
}))
}
pub fn print_err(err: ShellError, host: &dyn Host, source: &Text) {
let diag = err.to_diagnostic();
let writer = host.err_termcolor();
let mut source = source.to_string();
source.push_str(" ");
let files = crate::parser::Files::new(source);
let _ = std::panic::catch_unwind(move || {
let _ = language_reporting::emit(
&mut writer.lock(),
&files,
&diag,
&language_reporting::DefaultConfig,
);
});
}
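A pattern threaded through this whole file: the REPL no longer keeps its own Ctrl-C flag; it reads `context.ctrl_c`, and the same `Arc<AtomicBool>` is cloned into command arguments so streaming loops (see `process_line` and the new paged `autoview`) can stop early. A self-contained sketch of that mechanism, assuming the `ctrlc` crate already used above; the bounded loop stands in for a running pipeline:

```rust
// Minimal sketch of the shared interrupt flag now carried by Context.
// Assumes the `ctrlc` crate; the loop below stands in for pipeline work.
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;

fn main() {
    let ctrl_c = Arc::new(AtomicBool::new(false));

    let handler_flag = ctrl_c.clone();
    ctrlc::set_handler(move || {
        handler_flag.store(true, Ordering::SeqCst);
    })
    .expect("Error setting Ctrl-C handler");

    // Anything holding a clone of the flag can notice the interrupt and bail.
    for _work_item in 0..10_000_000u64 {
        if ctrl_c.load(Ordering::SeqCst) {
            ctrl_c.store(false, Ordering::SeqCst); // reset for the next prompt
            break;
        }
    }
    println!("done");
}
```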


@ -1,6 +1,9 @@
#[macro_use]
pub(crate) mod macros;
mod from_structured_data;
pub(crate) mod append;
pub(crate) mod args;
pub(crate) mod autoview;
pub(crate) mod cd;
@ -8,12 +11,15 @@ pub(crate) mod classified;
pub(crate) mod clip;
pub(crate) mod command;
pub(crate) mod config;
pub(crate) mod count;
pub(crate) mod cp;
pub(crate) mod date;
pub(crate) mod debug;
pub(crate) mod echo;
pub(crate) mod enter;
pub(crate) mod env;
#[allow(unused)]
pub(crate) mod evaluate_by;
pub(crate) mod exit;
pub(crate) mod fetch;
pub(crate) mod first;
@ -22,16 +28,22 @@ pub(crate) mod from_csv;
pub(crate) mod from_ini;
pub(crate) mod from_json;
pub(crate) mod from_sqlite;
pub(crate) mod from_ssv;
pub(crate) mod from_toml;
pub(crate) mod from_tsv;
pub(crate) mod from_url;
pub(crate) mod from_xml;
pub(crate) mod from_yaml;
pub(crate) mod get;
pub(crate) mod group_by;
pub(crate) mod help;
pub(crate) mod histogram;
pub(crate) mod history;
pub(crate) mod last;
pub(crate) mod lines;
pub(crate) mod ls;
#[allow(unused)]
pub(crate) mod map_max_by;
pub(crate) mod mkdir;
pub(crate) mod mv;
pub(crate) mod next;
@ -41,8 +53,11 @@ pub(crate) mod pick;
pub(crate) mod pivot;
pub(crate) mod plugin;
pub(crate) mod post;
pub(crate) mod prepend;
pub(crate) mod prev;
pub(crate) mod pwd;
#[allow(unused)]
pub(crate) mod reduce_by;
pub(crate) mod reject;
pub(crate) mod reverse;
pub(crate) mod rm;
@ -51,8 +66,11 @@ pub(crate) mod shells;
pub(crate) mod size;
pub(crate) mod skip_while;
pub(crate) mod sort_by;
pub(crate) mod split_by;
pub(crate) mod split_column;
pub(crate) mod split_row;
#[allow(unused)]
pub(crate) mod t_sort_by;
pub(crate) mod table;
pub(crate) mod tags;
pub(crate) mod to_bson;
@ -75,13 +93,18 @@ pub(crate) use command::{
UnevaluatedCallInfo, WholeStreamCommand,
};
pub(crate) use append::Append;
pub(crate) use classified::ClassifiedCommand;
pub(crate) use config::Config;
pub(crate) use count::Count;
pub(crate) use cp::Cpy;
pub(crate) use date::Date;
pub(crate) use debug::Debug;
pub(crate) use echo::Echo;
pub(crate) use enter::Enter;
pub(crate) use env::Env;
#[allow(unused)]
pub(crate) use evaluate_by::EvaluateBy;
pub(crate) use exit::Exit;
pub(crate) use fetch::Fetch;
pub(crate) use first::First;
@ -91,6 +114,7 @@ pub(crate) use from_ini::FromINI;
pub(crate) use from_json::FromJSON;
pub(crate) use from_sqlite::FromDB;
pub(crate) use from_sqlite::FromSQLite;
pub(crate) use from_ssv::FromSSV;
pub(crate) use from_toml::FromTOML;
pub(crate) use from_tsv::FromTSV;
pub(crate) use from_url::FromURL;
@ -98,10 +122,15 @@ pub(crate) use from_xml::FromXML;
pub(crate) use from_yaml::FromYAML;
pub(crate) use from_yaml::FromYML;
pub(crate) use get::Get;
pub(crate) use group_by::GroupBy;
pub(crate) use help::Help;
pub(crate) use histogram::Histogram;
pub(crate) use history::History;
pub(crate) use last::Last;
pub(crate) use lines::Lines;
pub(crate) use ls::LS;
#[allow(unused)]
pub(crate) use map_max_by::MapMaxBy;
pub(crate) use mkdir::Mkdir;
pub(crate) use mv::Move;
pub(crate) use next::Next;
@ -110,8 +139,11 @@ pub(crate) use open::Open;
pub(crate) use pick::Pick;
pub(crate) use pivot::Pivot;
pub(crate) use post::Post;
pub(crate) use prepend::Prepend;
pub(crate) use prev::Previous;
pub(crate) use pwd::PWD;
#[allow(unused)]
pub(crate) use reduce_by::ReduceBy;
pub(crate) use reject::Reject;
pub(crate) use reverse::Reverse;
pub(crate) use rm::Remove;
@ -120,8 +152,11 @@ pub(crate) use shells::Shells;
pub(crate) use size::Size;
pub(crate) use skip_while::SkipWhile;
pub(crate) use sort_by::SortBy;
pub(crate) use split_by::SplitBy;
pub(crate) use split_column::SplitColumn;
pub(crate) use split_row::SplitRow;
#[allow(unused)]
pub(crate) use t_sort_by::TSortBy;
pub(crate) use table::Table;
pub(crate) use tags::Tags;
pub(crate) use to_bson::ToBSON;

src/commands/append.rs (new file, 47 lines)

@ -0,0 +1,47 @@
use crate::commands::WholeStreamCommand;
use crate::errors::ShellError;
use crate::parser::CommandRegistry;
use crate::prelude::*;
#[derive(Deserialize)]
struct AppendArgs {
row: Tagged<Value>,
}
pub struct Append;
impl WholeStreamCommand for Append {
fn name(&self) -> &str {
"append"
}
fn signature(&self) -> Signature {
Signature::build("append").required(
"row value",
SyntaxShape::Any,
"the value of the row to append to the table",
)
}
fn usage(&self) -> &str {
"Append the given row to the table"
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, append)?.run()
}
}
fn append(
AppendArgs { row }: AppendArgs,
RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let mut after: VecDeque<Tagged<Value>> = VecDeque::new();
after.push_back(row);
Ok(OutputStream::from_input(input.values.chain(after)))
}
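The whole command is the three-line `append` function above: push one row into a `VecDeque` and chain it after the input stream. Reduced to plain streams, that is (assuming the `futures` 0.3 crate, with integers standing in for `Tagged<Value>` rows):

```rust
// What `append` boils down to: chain a one-element stream onto the input.
// Integers stand in for Tagged<Value> rows.
use futures::executor::block_on;
use futures::stream::{self, StreamExt};

fn main() {
    let input = stream::iter(vec![1, 2, 3]);
    let appended_row = stream::iter(vec![4]);

    let output: Vec<i32> = block_on(input.chain(appended_row).collect());
    assert_eq!(output, vec![1, 2, 3, 4]);
}
```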


@ -1,9 +1,14 @@
use crate::commands::{RawCommandArgs, WholeStreamCommand};
use crate::errors::ShellError;
use crate::parser::hir::{Expression, NamedArguments};
use crate::prelude::*;
use futures::stream::TryStreamExt;
use std::sync::atomic::Ordering;
pub struct Autoview;
const STREAM_PAGE_SIZE: u64 = 50;
#[derive(Deserialize)]
pub struct AutoviewArgs {}
@ -31,61 +36,132 @@ impl WholeStreamCommand for Autoview {
pub fn autoview(
AutoviewArgs {}: AutoviewArgs,
mut context: RunnableContext,
context: RunnableContext,
raw: RawCommandArgs,
) -> Result<OutputStream, ShellError> {
Ok(OutputStream::new(async_stream! {
let input = context.input.drain_vec().await;
let binary = context.get_command("binaryview");
let text = context.get_command("textview");
let table = context.get_command("table");
if input.len() > 0 {
if let Tagged {
item: Value::Primitive(Primitive::Binary(_)),
..
} = input[0usize]
{
let binary = context.get_command("binaryview");
if let Some(binary) = binary {
let result = binary.run(raw.with_input(input), &context.commands, false);
result.collect::<Vec<_>>().await;
} else {
for i in input {
match i.item {
Value::Primitive(Primitive::Binary(b)) => {
use pretty_hex::*;
println!("{:?}", b.hex_dump());
Ok(OutputStream::new(async_stream! {
let mut output_stream: OutputStream = context.input.into();
match output_stream.try_next().await {
Ok(Some(x)) => {
match output_stream.try_next().await {
Ok(Some(y)) => {
let ctrl_c = context.ctrl_c.clone();
let stream = async_stream! {
yield Ok(x);
yield Ok(y);
loop {
match output_stream.try_next().await {
Ok(Some(z)) => {
if ctrl_c.load(Ordering::SeqCst) {
break;
}
yield Ok(z);
}
_ => break,
}
}
};
if let Some(table) = table {
let mut new_output_stream: OutputStream = stream.to_output_stream();
let mut finished = false;
let mut current_idx = 0;
loop {
let mut new_input = VecDeque::new();
for _ in 0..STREAM_PAGE_SIZE {
match new_output_stream.try_next().await {
Ok(Some(a)) => {
if let ReturnSuccess::Value(v) = a {
new_input.push_back(v);
}
}
_ => {
finished = true;
break;
}
}
}
let raw = raw.clone();
let mut command_args = raw.with_input(new_input.into());
let mut named_args = NamedArguments::new();
named_args.insert_optional("start_number", Some(Expression::number(current_idx, Tag::unknown())));
command_args.call_info.args.named = Some(named_args);
let result = table.run(command_args, &context.commands);
result.collect::<Vec<_>>().await;
if finished {
break;
} else {
current_idx += STREAM_PAGE_SIZE;
}
}
_ => {}
}
}
};
} else if is_single_anchored_text_value(&input) {
let text = context.get_command("textview");
if let Some(text) = text {
let result = text.run(raw.with_input(input), &context.commands, false);
result.collect::<Vec<_>>().await;
} else {
for i in input {
match i.item {
Value::Primitive(Primitive::String(s)) => {
println!("{}", s);
_ => {
if let ReturnSuccess::Value(x) = x {
match x {
Tagged {
item: Value::Primitive(Primitive::String(ref s)),
tag: Tag { anchor, span },
} if anchor.is_some() => {
if let Some(text) = text {
let mut stream = VecDeque::new();
stream.push_back(Value::string(s).tagged(Tag { anchor, span }));
let result = text.run(raw.with_input(stream.into()), &context.commands);
result.collect::<Vec<_>>().await;
} else {
println!("{}", s);
}
}
Tagged {
item: Value::Primitive(Primitive::String(s)),
..
} => {
println!("{}", s);
}
Tagged { item: Value::Primitive(Primitive::Binary(ref b)), .. } => {
if let Some(binary) = binary {
let mut stream = VecDeque::new();
stream.push_back(x.clone());
let result = binary.run(raw.with_input(stream.into()), &context.commands);
result.collect::<Vec<_>>().await;
} else {
use pretty_hex::*;
println!("{:?}", b.hex_dump());
}
}
Tagged { item: Value::Error(e), .. } => {
yield Err(e);
}
Tagged { item: ref item, .. } => {
if let Some(table) = table {
let mut stream = VecDeque::new();
stream.push_back(x.clone());
let result = table.run(raw.with_input(stream.into()), &context.commands);
result.collect::<Vec<_>>().await;
} else {
println!("{:?}", item);
}
}
}
_ => {}
}
}
}
} else if is_single_text_value(&input) {
for i in input {
match i.item {
Value::Primitive(Primitive::String(s)) => {
println!("{}", s);
}
_ => {}
}
}
} else {
let table = context.expect_command("table");
let result = table.run(raw.with_input(input), &context.commands, false);
result.collect::<Vec<_>>().await;
}
_ => {
//println!("<no results>");
}
}
@ -95,34 +171,3 @@ pub fn autoview(
}
}))
}
fn is_single_text_value(input: &Vec<Tagged<Value>>) -> bool {
if input.len() != 1 {
return false;
}
if let Tagged {
item: Value::Primitive(Primitive::String(_)),
..
} = input[0]
{
true
} else {
false
}
}
fn is_single_anchored_text_value(input: &Vec<Tagged<Value>>) -> bool {
if input.len() != 1 {
return false;
}
if let Tagged {
item: Value::Primitive(Primitive::String(_)),
tag: Tag { anchor, .. },
} = input[0]
{
anchor != uuid::Uuid::nil()
} else {
false
}
}
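The main behavioral change in this file: instead of draining the whole input up front, `autoview` now pulls from the stream lazily and, for multi-row input, runs `table` on pages of `STREAM_PAGE_SIZE` rows, passing a `start_number` so row numbering stays continuous and checking the shared Ctrl-C flag between rows. The paging loop, reduced to a standalone sketch (assumes `futures` 0.3; the fake rows and page size are placeholders):

```rust
// Standalone sketch of autoview's paging loop: pull up to PAGE_SIZE items
// from a fallible stream, render them as one page, repeat until exhausted.
use futures::executor::block_on;
use futures::stream::{self, TryStreamExt};

const PAGE_SIZE: usize = 50;

fn main() {
    block_on(async {
        // 120 fake rows; the Ok wrapper makes this a TryStream, like OutputStream.
        let mut rows = stream::iter((0..120u32).map(Ok::<u32, ()>));

        let mut start_index = 0;
        loop {
            let mut page = Vec::new();
            let mut finished = false;
            for _ in 0..PAGE_SIZE {
                match rows.try_next().await {
                    Ok(Some(row)) => page.push(row),
                    _ => {
                        finished = true;
                        break;
                    }
                }
            }
            // In the real code, this is where `table` runs with a
            // start_number argument equal to start_index.
            println!("page starting at {}: {} rows", start_index, page.len());
            if finished {
                break;
            }
            start_index += PAGE_SIZE;
        }
    });
}
```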


@ -10,7 +10,11 @@ impl WholeStreamCommand for CD {
}
fn signature(&self) -> Signature {
Signature::build("cd").optional("directory", SyntaxShape::Path)
Signature::build("cd").optional(
"directory",
SyntaxShape::Path,
"the directory to change to",
)
}
fn usage(&self) -> &str {


@ -1,12 +1,13 @@
use crate::commands::Command;
use crate::parser::{hir, TokenNode};
use crate::prelude::*;
use bytes::{BufMut, BytesMut};
use derive_new::new;
use futures::stream::StreamExt;
use futures_codec::{Decoder, Encoder, Framed};
use itertools::Itertools;
use log::{log_enabled, trace};
use std::fmt;
use std::io::{Error, ErrorKind};
use std::sync::Arc;
use subprocess::Exec;
/// A simple `Codec` implementation that splits up data into lines.
@ -53,7 +54,7 @@ pub(crate) struct ClassifiedInputStream {
impl ClassifiedInputStream {
pub(crate) fn new() -> ClassifiedInputStream {
ClassifiedInputStream {
objects: VecDeque::new().into(),
objects: vec![Value::nothing().tagged(Tag::unknown())].into(),
stdin: None,
}
}
@ -73,125 +74,212 @@ impl ClassifiedInputStream {
}
}
#[derive(Debug, Clone)]
pub(crate) struct ClassifiedPipeline {
pub(crate) commands: Vec<ClassifiedCommand>,
pub(crate) commands: Spanned<Vec<ClassifiedCommand>>,
}
impl FormatDebug for ClassifiedPipeline {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
f.say_str(
"classified pipeline",
self.commands.iter().map(|c| c.debug(source)).join(" | "),
)
}
}
impl HasSpan for ClassifiedPipeline {
fn span(&self) -> Span {
self.commands.span
}
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub(crate) enum ClassifiedCommand {
#[allow(unused)]
Expr(TokenNode),
Internal(InternalCommand),
#[allow(unused)]
Dynamic(Spanned<hir::Call>),
External(ExternalCommand),
}
impl FormatDebug for ClassifiedCommand {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
match self {
ClassifiedCommand::Expr(expr) => expr.fmt_debug(f, source),
ClassifiedCommand::Internal(internal) => internal.fmt_debug(f, source),
ClassifiedCommand::Dynamic(dynamic) => dynamic.fmt_debug(f, source),
ClassifiedCommand::External(external) => external.fmt_debug(f, source),
}
}
}
impl HasSpan for ClassifiedCommand {
fn span(&self) -> Span {
match self {
ClassifiedCommand::Expr(node) => node.span(),
ClassifiedCommand::Internal(command) => command.span(),
ClassifiedCommand::Dynamic(call) => call.span,
ClassifiedCommand::External(command) => command.span(),
}
}
}
#[derive(new, Debug, Clone, Eq, PartialEq)]
pub(crate) struct InternalCommand {
pub(crate) command: Arc<Command>,
pub(crate) name: String,
pub(crate) name_tag: Tag,
pub(crate) args: Spanned<hir::Call>,
}
impl HasSpan for InternalCommand {
fn span(&self) -> Span {
let start = self.name_tag.span;
start.until(self.args.span)
}
}
impl FormatDebug for InternalCommand {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
f.say("internal", self.args.debug(source))
}
}
#[derive(new, Debug, Eq, PartialEq)]
pub(crate) struct DynamicCommand {
pub(crate) args: hir::Call,
}
impl InternalCommand {
pub(crate) async fn run(
pub(crate) fn run(
self,
context: &mut Context,
input: ClassifiedInputStream,
source: Text,
is_first_command: bool,
) -> Result<InputStream, ShellError> {
if log_enabled!(log::Level::Trace) {
trace!(target: "nu::run::internal", "->");
trace!(target: "nu::run::internal", "{}", self.command.name());
trace!(target: "nu::run::internal", "{}", self.name);
trace!(target: "nu::run::internal", "{}", self.args.debug(&source));
}
let objects: InputStream =
trace_stream!(target: "nu::trace_stream::internal", "input" = input.objects);
let result = context.run_command(
self.command,
self.name_tag.clone(),
context.source_map.clone(),
self.args,
&source,
objects,
is_first_command,
);
let command = context.expect_command(&self.name);
let result = {
context.run_command(
command,
self.name_tag.clone(),
self.args.item,
&source,
objects,
)
};
let result = trace_out_stream!(target: "nu::trace_stream::internal", source: &source, "output" = result);
let mut result = result.values;
let mut context = context.clone();
let mut stream = VecDeque::new();
while let Some(item) = result.next().await {
match item? {
ReturnSuccess::Action(action) => match action {
CommandAction::ChangePath(path) => {
context.shell_manager.set_path(path);
}
CommandAction::AddAnchorLocation(uuid, anchor_location) => {
context.add_anchor_location(uuid, anchor_location);
}
CommandAction::Exit => std::process::exit(0), // TODO: save history.txt
CommandAction::EnterHelpShell(value) => {
match value {
Tagged {
item: Value::Primitive(Primitive::String(cmd)),
tag,
} => {
context.shell_manager.insert_at_current(Box::new(
HelpShell::for_command(
Value::string(cmd).tagged(tag),
&context.registry(),
)?,
));
}
_ => {
context.shell_manager.insert_at_current(Box::new(
HelpShell::index(&context.registry())?,
));
let stream = async_stream! {
while let Some(item) = result.next().await {
match item {
Ok(ReturnSuccess::Action(action)) => match action {
CommandAction::ChangePath(path) => {
context.shell_manager.set_path(path);
}
CommandAction::Exit => std::process::exit(0), // TODO: save history.txt
CommandAction::EnterHelpShell(value) => {
match value {
Tagged {
item: Value::Primitive(Primitive::String(cmd)),
tag,
} => {
context.shell_manager.insert_at_current(Box::new(
HelpShell::for_command(
Value::string(cmd).tagged(tag),
&context.registry(),
).unwrap(),
));
}
_ => {
context.shell_manager.insert_at_current(Box::new(
HelpShell::index(&context.registry()).unwrap(),
));
}
}
}
}
CommandAction::EnterValueShell(value) => {
context
.shell_manager
.insert_at_current(Box::new(ValueShell::new(value)));
}
CommandAction::EnterShell(location) => {
context.shell_manager.insert_at_current(Box::new(
FilesystemShell::with_location(location, context.registry().clone())?,
));
}
CommandAction::PreviousShell => {
context.shell_manager.prev();
}
CommandAction::NextShell => {
context.shell_manager.next();
}
CommandAction::LeaveShell => {
context.shell_manager.remove_at_current();
if context.shell_manager.is_empty() {
std::process::exit(0); // TODO: save history.txt
CommandAction::EnterValueShell(value) => {
context
.shell_manager
.insert_at_current(Box::new(ValueShell::new(value)));
}
}
},
CommandAction::EnterShell(location) => {
context.shell_manager.insert_at_current(Box::new(
FilesystemShell::with_location(location, context.registry().clone()).unwrap(),
));
}
CommandAction::PreviousShell => {
context.shell_manager.prev();
}
CommandAction::NextShell => {
context.shell_manager.next();
}
CommandAction::LeaveShell => {
context.shell_manager.remove_at_current();
if context.shell_manager.is_empty() {
std::process::exit(0); // TODO: save history.txt
}
}
},
ReturnSuccess::Value(v) => {
stream.push_back(v);
Ok(ReturnSuccess::Value(v)) => {
yield Ok(v);
}
Err(x) => {
yield Ok(Value::Error(x).tagged_unknown());
break;
}
}
}
}
};
Ok(stream.into())
Ok(stream.to_input_stream())
}
}
#[derive(Debug, Clone, Eq, PartialEq)]
pub(crate) struct ExternalCommand {
pub(crate) name: String,
pub(crate) name_tag: Tag,
pub(crate) args: Vec<Tagged<String>>,
pub(crate) args: Spanned<Vec<Tagged<String>>>,
}
impl FormatDebug for ExternalCommand {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
write!(f, "{}", self.name)?;
if self.args.item.len() > 0 {
write!(f, " ")?;
write!(f, "{}", self.args.iter().map(|i| i.debug(source)).join(" "))?;
}
Ok(())
}
}
impl HasSpan for ExternalCommand {
fn span(&self) -> Span {
self.name_tag.span.until(self.args.span)
}
}
#[derive(Debug)]
pub(crate) enum StreamNext {
Last,
External,
@ -207,58 +295,57 @@ impl ExternalCommand {
) -> Result<ClassifiedInputStream, ShellError> {
let stdin = input.stdin;
let inputs: Vec<Tagged<Value>> = input.objects.into_vec().await;
let name_tag = self.name_tag.clone();
trace!(target: "nu::run::external", "-> {}", self.name);
trace!(target: "nu::run::external", "inputs = {:?}", inputs);
let mut arg_string = format!("{}", self.name);
for arg in &self.args {
for arg in &self.args.item {
arg_string.push_str(&arg);
}
trace!(target: "nu::run::external", "command = {:?}", self.name);
let mut process;
process = Exec::cmd(&self.name);
if arg_string.contains("$it") {
let mut first = true;
for i in &inputs {
if i.as_string().is_err() {
let mut tag = None;
for arg in &self.args {
if arg.item.contains("$it") {
tag = Some(arg.tag());
let input_strings = inputs
.iter()
.map(|i| {
i.as_string().map_err(|_| {
let arg = self.args.iter().find(|arg| arg.item.contains("$it"));
if let Some(arg) = arg {
ShellError::labeled_error(
"External $it needs string data",
"given row instead of string data",
arg.tag(),
)
} else {
ShellError::labeled_error(
"$it needs string data",
"given something else",
self.name_tag.clone(),
)
}
}
if let Some(tag) = tag {
return Err(ShellError::labeled_error(
"External $it needs string data",
"given row instead of string data",
tag,
));
} else {
return Err(ShellError::string("Error: $it needs string data"));
}
}
if !first {
process = process.arg("&&");
process = process.arg(&self.name);
} else {
first = false;
}
})
})
.collect::<Result<Vec<String>, ShellError>>()?;
for arg in &self.args {
let commands = input_strings.iter().map(|i| {
let args = self.args.iter().filter_map(|arg| {
if arg.chars().all(|c| c.is_whitespace()) {
continue;
None
} else {
Some(arg.replace("$it", &i))
}
});
process = process.arg(&arg.replace("$it", &i.as_string()?));
}
}
format!("{} {}", self.name, itertools::join(args, " "))
});
process = Exec::shell(itertools::join(commands, " && "))
} else {
for arg in &self.args {
process = Exec::cmd(&self.name);
for arg in &self.args.item {
let arg_chars: Vec<_> = arg.chars().collect();
if arg_chars.len() > 1
&& arg_chars[0] == '"'
@ -275,6 +362,8 @@ impl ExternalCommand {
process = process.cwd(context.shell_manager.path());
trace!(target: "nu::run::external", "cwd = {:?}", context.shell_manager.path());
let mut process = match stream_next {
StreamNext::Last => process,
StreamNext::External | StreamNext::Internal => {
@ -282,43 +371,60 @@ impl ExternalCommand {
}
};
trace!(target: "nu::run::external", "set up stdout pipe");
if let Some(stdin) = stdin {
process = process.stdin(stdin);
}
let mut popen = process.popen()?;
trace!(target: "nu::run::external", "set up stdin pipe");
trace!(target: "nu::run::external", "built process {:?}", process);
match stream_next {
StreamNext::Last => {
let _ = popen.detach();
loop {
match popen.poll() {
None => {
let _ = std::thread::sleep(std::time::Duration::new(0, 100000000));
}
_ => {
let _ = popen.terminate();
break;
let popen = process.popen();
trace!(target: "nu::run::external", "next = {:?}", stream_next);
let name_tag = self.name_tag.clone();
if let Ok(mut popen) = popen {
match stream_next {
StreamNext::Last => {
let _ = popen.detach();
loop {
match popen.poll() {
None => {
let _ = std::thread::sleep(std::time::Duration::new(0, 100000000));
}
_ => {
let _ = popen.terminate();
break;
}
}
}
Ok(ClassifiedInputStream::new())
}
StreamNext::External => {
let _ = popen.detach();
let stdout = popen.stdout.take().unwrap();
Ok(ClassifiedInputStream::from_stdout(stdout))
}
StreamNext::Internal => {
let _ = popen.detach();
let stdout = popen.stdout.take().unwrap();
let file = futures::io::AllowStdIo::new(stdout);
let stream = Framed::new(file, LinesCodec {});
let stream =
stream.map(move |line| Value::string(line.unwrap()).tagged(&name_tag));
Ok(ClassifiedInputStream::from_input_stream(
stream.boxed() as BoxStream<'static, Tagged<Value>>
))
}
Ok(ClassifiedInputStream::new())
}
StreamNext::External => {
let _ = popen.detach();
let stdout = popen.stdout.take().unwrap();
Ok(ClassifiedInputStream::from_stdout(stdout))
}
StreamNext::Internal => {
let _ = popen.detach();
let stdout = popen.stdout.take().unwrap();
let file = futures::io::AllowStdIo::new(stdout);
let stream = Framed::new(file, LinesCodec {});
let stream = stream.map(move |line| Value::string(line.unwrap()).tagged(name_tag));
Ok(ClassifiedInputStream::from_input_stream(
stream.boxed() as BoxStream<'static, Tagged<Value>>
))
}
} else {
return Err(ShellError::labeled_error(
"Command not found",
"command not found",
name_tag,
));
}
}
}
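`ExternalCommand::run` now handles `$it` by building one complete command per input row and joining them with `&&` into a single line for `Exec::shell`, rather than appending `&&` segments to an `Exec::cmd` one argument at a time. The string-building half of that change as a standalone sketch (assumes the `itertools` crate; the command name and file names are placeholders):

```rust
// Sketch of the new $it expansion: one command per input row, joined with
// "&&"; the real code then hands the line to subprocess::Exec::shell.
fn build_shell_line(name: &str, args: &[&str], rows: &[&str]) -> String {
    let commands = rows.iter().map(|&row| {
        let expanded = args
            .iter()
            .filter(|arg| !arg.chars().all(|c| c.is_whitespace()))
            .map(|arg| arg.replace("$it", row));
        format!("{} {}", name, itertools::join(expanded, " "))
    });
    itertools::join(commands, " && ")
}

fn main() {
    let line = build_shell_line("echo", &["$it"], &["a.txt", "b.txt"]);
    assert_eq!(line, "echo a.txt && echo b.txt");
}
```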


@ -1,4 +1,3 @@
use crate::context::{AnchorLocation, SourceMap};
use crate::data::Value;
use crate::errors::ShellError;
use crate::evaluate::Scope;
@ -11,18 +10,17 @@ use serde::{Deserialize, Serialize};
use std::fmt;
use std::ops::Deref;
use std::path::PathBuf;
use uuid::Uuid;
use std::sync::atomic::AtomicBool;
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct UnevaluatedCallInfo {
pub args: hir::Call,
pub source: Text,
pub source_map: SourceMap,
pub name_tag: Tag,
}
impl ToDebug for UnevaluatedCallInfo {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
impl FormatDebug for UnevaluatedCallInfo {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
self.args.fmt_debug(f, source)
}
}
@ -37,7 +35,6 @@ impl UnevaluatedCallInfo {
Ok(CallInfo {
args,
source_map: self.source_map,
name_tag: self.name_tag,
})
}
@ -46,7 +43,6 @@ impl UnevaluatedCallInfo {
#[derive(Deserialize, Serialize, Debug, Clone)]
pub struct CallInfo {
pub args: registry::EvaluatedArgs,
pub source_map: SourceMap,
pub name_tag: Tag,
}
@ -62,7 +58,7 @@ impl CallInfo {
args: T::deserialize(&mut deserializer)?,
context: RunnablePerItemContext {
shell_manager: shell_manager.clone(),
name: self.name_tag,
name: self.name_tag.clone(),
},
callback,
})
@ -73,6 +69,7 @@ impl CallInfo {
#[get = "pub(crate)"]
pub struct CommandArgs {
pub host: Arc<Mutex<dyn Host>>,
pub ctrl_c: Arc<AtomicBool>,
pub shell_manager: ShellManager,
pub call_info: UnevaluatedCallInfo,
pub input: InputStream,
@ -82,6 +79,7 @@ pub struct CommandArgs {
#[get = "pub(crate)"]
pub struct RawCommandArgs {
pub host: Arc<Mutex<dyn Host>>,
pub ctrl_c: Arc<AtomicBool>,
pub shell_manager: ShellManager,
pub call_info: UnevaluatedCallInfo,
}
@ -90,6 +88,7 @@ impl RawCommandArgs {
pub fn with_input(self, input: Vec<Tagged<Value>>) -> CommandArgs {
CommandArgs {
host: self.host,
ctrl_c: self.ctrl_c,
shell_manager: self.shell_manager,
call_info: self.call_info,
input: input.into(),
@ -97,8 +96,14 @@ impl RawCommandArgs {
}
}
impl ToDebug for CommandArgs {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
impl std::fmt::Debug for CommandArgs {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.call_info.fmt(f)
}
}
impl FormatDebug for CommandArgs {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
self.call_info.fmt_debug(f, source)
}
}
@ -109,12 +114,14 @@ impl CommandArgs {
registry: &registry::CommandRegistry,
) -> Result<EvaluatedWholeStreamCommandArgs, ShellError> {
let host = self.host.clone();
let ctrl_c = self.ctrl_c.clone();
let shell_manager = self.shell_manager.clone();
let input = self.input;
let call_info = self.call_info.evaluate(registry, &Scope::empty())?;
Ok(EvaluatedWholeStreamCommandArgs::new(
host,
ctrl_c,
shell_manager,
call_info,
input,
@ -127,12 +134,13 @@ impl CommandArgs {
callback: fn(T, RunnableContext) -> Result<OutputStream, ShellError>,
) -> Result<RunnableArgs<T>, ShellError> {
let shell_manager = self.shell_manager.clone();
let source_map = self.call_info.source_map.clone();
let host = self.host.clone();
let ctrl_c = self.ctrl_c.clone();
let args = self.evaluate_once(registry)?;
let call_info = args.call_info.clone();
let (input, args) = args.split();
let name_tag = args.call_info.name_tag;
let mut deserializer = ConfigDeserializer::from_call_info(args.call_info);
let mut deserializer = ConfigDeserializer::from_call_info(call_info);
Ok(RunnableArgs {
args: T::deserialize(&mut deserializer)?,
@ -141,8 +149,8 @@ impl CommandArgs {
commands: registry.clone(),
shell_manager,
name: name_tag,
source_map,
host,
ctrl_c,
},
callback,
})
@ -155,17 +163,20 @@ impl CommandArgs {
) -> Result<RunnableRawArgs<T>, ShellError> {
let raw_args = RawCommandArgs {
host: self.host.clone(),
ctrl_c: self.ctrl_c.clone(),
shell_manager: self.shell_manager.clone(),
call_info: self.call_info.clone(),
};
let shell_manager = self.shell_manager.clone();
let source_map = self.call_info.source_map.clone();
let host = self.host.clone();
let ctrl_c = self.ctrl_c.clone();
let args = self.evaluate_once(registry)?;
let call_info = args.call_info.clone();
let (input, args) = args.split();
let name_tag = args.call_info.name_tag;
let mut deserializer = ConfigDeserializer::from_call_info(args.call_info);
let mut deserializer = ConfigDeserializer::from_call_info(call_info.clone());
Ok(RunnableRawArgs {
args: T::deserialize(&mut deserializer)?,
@ -174,8 +185,8 @@ impl CommandArgs {
commands: registry.clone(),
shell_manager,
name: name_tag,
source_map,
host,
ctrl_c,
},
raw_args,
callback,
@ -198,18 +209,12 @@ pub struct RunnableContext {
pub input: InputStream,
pub shell_manager: ShellManager,
pub host: Arc<Mutex<dyn Host>>,
pub ctrl_c: Arc<AtomicBool>,
pub commands: CommandRegistry,
pub source_map: SourceMap,
pub name: Tag,
}
impl RunnableContext {
pub fn expect_command(&self, name: &str) -> Arc<Command> {
self.commands
.get_command(name)
.expect(&format!("Expected command {}", name))
}
pub fn get_command(&self, name: &str) -> Option<Arc<Command>> {
self.commands.get_command(name)
}
@ -270,6 +275,7 @@ impl Deref for EvaluatedWholeStreamCommandArgs {
impl EvaluatedWholeStreamCommandArgs {
pub fn new(
host: Arc<Mutex<dyn Host>>,
ctrl_c: Arc<AtomicBool>,
shell_manager: ShellManager,
call_info: CallInfo,
input: impl Into<InputStream>,
@ -277,6 +283,7 @@ impl EvaluatedWholeStreamCommandArgs {
EvaluatedWholeStreamCommandArgs {
args: EvaluatedCommandArgs {
host,
ctrl_c,
shell_manager,
call_info,
},
@ -285,7 +292,7 @@ impl EvaluatedWholeStreamCommandArgs {
}
pub fn name_tag(&self) -> Tag {
self.args.call_info.name_tag
self.args.call_info.name_tag.clone()
}
pub fn parts(self) -> (InputStream, registry::EvaluatedArgs) {
@ -317,12 +324,14 @@ impl Deref for EvaluatedFilterCommandArgs {
impl EvaluatedFilterCommandArgs {
pub fn new(
host: Arc<Mutex<dyn Host>>,
ctrl_c: Arc<AtomicBool>,
shell_manager: ShellManager,
call_info: CallInfo,
) -> EvaluatedFilterCommandArgs {
EvaluatedFilterCommandArgs {
args: EvaluatedCommandArgs {
host,
ctrl_c,
shell_manager,
call_info,
},
@ -334,6 +343,7 @@ impl EvaluatedFilterCommandArgs {
#[get = "pub(crate)"]
pub struct EvaluatedCommandArgs {
pub host: Arc<Mutex<dyn Host>>,
pub ctrl_c: Arc<AtomicBool>,
pub shell_manager: ShellManager,
pub call_info: CallInfo,
}
@ -373,10 +383,9 @@ impl EvaluatedCommandArgs {
}
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum CommandAction {
ChangePath(String),
AddAnchorLocation(Uuid, AnchorLocation),
Exit,
EnterShell(String),
EnterValueShell(Tagged<Value>),
@ -386,13 +395,10 @@ pub enum CommandAction {
LeaveShell,
}
impl ToDebug for CommandAction {
fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result {
impl FormatDebug for CommandAction {
fn fmt_debug(&self, f: &mut DebugFormatter, _source: &str) -> fmt::Result {
match self {
CommandAction::ChangePath(s) => write!(f, "action:change-path={}", s),
CommandAction::AddAnchorLocation(u, source) => {
write!(f, "action:add-span-source={}@{:?}", u, source)
}
CommandAction::Exit => write!(f, "action:exit"),
CommandAction::EnterShell(s) => write!(f, "action:enter-shell={}", s),
CommandAction::EnterValueShell(t) => {
@ -408,7 +414,7 @@ impl ToDebug for CommandAction {
}
}
#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ReturnSuccess {
Value(Tagged<Value>),
Action(CommandAction),
@ -416,8 +422,8 @@ pub enum ReturnSuccess {
pub type ReturnValue = Result<ReturnSuccess, ShellError>;
impl ToDebug for ReturnValue {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
impl FormatDebug for ReturnValue {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
match self {
Err(err) => write!(f, "{}", err.debug(source)),
Ok(ReturnSuccess::Value(v)) => write!(f, "{:?}", v.debug()),
@ -507,6 +513,15 @@ pub enum Command {
PerItem(Arc<dyn PerItemCommand>),
}
impl std::fmt::Debug for Command {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Command::WholeStream(command) => write!(f, "WholeStream({})", command.name()),
Command::PerItem(command) => write!(f, "PerItem({})", command.name()),
}
}
}
impl Command {
pub fn name(&self) -> &str {
match self {
@ -529,20 +544,13 @@ impl Command {
}
}
pub fn run(
&self,
args: CommandArgs,
registry: &registry::CommandRegistry,
is_first_command: bool,
) -> OutputStream {
pub fn run(&self, args: CommandArgs, registry: &registry::CommandRegistry) -> OutputStream {
match self {
Command::WholeStream(command) => match command.run(args, registry) {
Ok(stream) => stream,
Err(err) => OutputStream::one(Err(err)),
},
Command::PerItem(command) => {
self.run_helper(command.clone(), args, registry.clone(), is_first_command)
}
Command::PerItem(command) => self.run_helper(command.clone(), args, registry.clone()),
}
}
@ -551,48 +559,31 @@ impl Command {
command: Arc<dyn PerItemCommand>,
args: CommandArgs,
registry: CommandRegistry,
is_first_command: bool,
) -> OutputStream {
let raw_args = RawCommandArgs {
host: args.host,
ctrl_c: args.ctrl_c,
shell_manager: args.shell_manager,
call_info: args.call_info,
};
if !is_first_command {
let out = args
.input
.values
.map(move |x| {
let call_info = raw_args
.clone()
.call_info
.evaluate(&registry, &Scope::it_value(x.clone()))
.unwrap();
match command.run(&call_info, &registry, &raw_args, x) {
Ok(o) => o,
Err(e) => VecDeque::from(vec![ReturnValue::Err(e)]).to_output_stream(),
}
})
.flatten();
let out = args
.input
.values
.map(move |x| {
let call_info = raw_args
.clone()
.call_info
.evaluate(&registry, &Scope::it_value(x.clone()))
.unwrap();
match command.run(&call_info, &registry, &raw_args, x) {
Ok(o) => o,
Err(e) => VecDeque::from(vec![ReturnValue::Err(e)]).to_output_stream(),
}
})
.flatten();
out.to_output_stream()
} else {
let nothing = Value::nothing().tagged(Tag::unknown());
let call_info = raw_args
.clone()
.call_info
.evaluate(&registry, &Scope::it_value(nothing.clone()))
.unwrap();
match command
.run(&call_info, &registry, &raw_args, nothing)
.into()
{
Ok(o) => o,
Err(e) => OutputStream::one(Err(e)),
}
}
out.to_output_stream()
}
pub fn is_binary(&self) -> bool {
@ -624,6 +615,7 @@ impl WholeStreamCommand for FnFilterCommand {
) -> Result<OutputStream, ShellError> {
let CommandArgs {
host,
ctrl_c,
shell_manager,
call_info,
input,
@ -641,8 +633,12 @@ impl WholeStreamCommand for FnFilterCommand {
Ok(args) => args,
};
let args =
EvaluatedFilterCommandArgs::new(host.clone(), shell_manager.clone(), call_info);
let args = EvaluatedFilterCommandArgs::new(
host.clone(),
ctrl_c.clone(),
shell_manager.clone(),
call_info,
);
match func(args) {
Err(err) => return OutputStream::from(vec![Err(err)]).values,


@ -4,7 +4,6 @@ use crate::errors::ShellError;
use crate::parser::hir::SyntaxShape;
use crate::parser::registry::{self};
use crate::prelude::*;
use std::iter::FromIterator;
use std::path::PathBuf;
pub struct Config;
@ -13,6 +12,7 @@ pub struct Config;
pub struct ConfigArgs {
load: Option<Tagged<PathBuf>>,
set: Option<(Tagged<String>, Tagged<Value>)>,
set_into: Option<Tagged<String>>,
get: Option<Tagged<String>>,
clear: Tagged<bool>,
remove: Option<Tagged<String>>,
@ -26,12 +26,25 @@ impl WholeStreamCommand for Config {
fn signature(&self) -> Signature {
Signature::build("config")
.named("load", SyntaxShape::Path)
.named("set", SyntaxShape::Any)
.named("get", SyntaxShape::Any)
.named("remove", SyntaxShape::Any)
.switch("clear")
.switch("path")
.named(
"load",
SyntaxShape::Path,
"load the config from the path give",
)
.named(
"set",
SyntaxShape::Any,
"set a value in the config, eg) --set [key value]",
)
.named(
"set_into",
SyntaxShape::Member,
"sets a variable from values in the pipeline",
)
.named("get", SyntaxShape::Any, "get a value from the config")
.named("remove", SyntaxShape::Any, "remove a value from the config")
.switch("clear", "clear the config")
.switch("path", "return the path to the config file")
}
fn usage(&self) -> &str {
@ -51,84 +64,110 @@ pub fn config(
ConfigArgs {
load,
set,
set_into,
get,
clear,
remove,
path,
}: ConfigArgs,
RunnableContext { name, .. }: RunnableContext,
RunnableContext { name, input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let name_span = name;
let name_span = name.clone();
let configuration = if let Some(supplied) = load {
Some(supplied.item().clone())
} else {
None
let stream = async_stream! {
let configuration = if let Some(supplied) = load {
Some(supplied.item().clone())
} else {
None
};
let mut result = crate::data::config::read(name_span, &configuration)?;
if let Some(v) = get {
let key = v.to_string();
let value = result
.get(&key)
.ok_or_else(|| ShellError::labeled_error("Missing key in config", "key", v.tag()))?;
match value {
Tagged {
item: Value::Table(list),
..
} => {
for l in list {
yield ReturnSuccess::value(l.clone());
}
}
x => yield ReturnSuccess::value(x.clone()),
}
}
else if let Some((key, value)) = set {
result.insert(key.to_string(), value.clone());
config::write(&result, &configuration)?;
yield ReturnSuccess::value(Value::Row(result.into()).tagged(value.tag()));
}
else if let Some(v) = set_into {
let rows: Vec<Tagged<Value>> = input.values.collect().await;
let key = v.to_string();
if rows.len() == 0 {
yield Err(ShellError::labeled_error("No values given for set_into", "needs value(s) from pipeline", v.tag()));
} else if rows.len() == 1 {
// A single value
let value = &rows[0];
result.insert(key.to_string(), value.clone());
config::write(&result, &configuration)?;
yield ReturnSuccess::value(Value::Row(result.into()).tagged(name));
} else {
// Take in the pipeline as a table
let value = Value::Table(rows).tagged(name.clone());
result.insert(key.to_string(), value.clone());
config::write(&result, &configuration)?;
yield ReturnSuccess::value(Value::Row(result.into()).tagged(name));
}
}
else if let Tagged { item: true, tag } = clear {
result.clear();
config::write(&result, &configuration)?;
yield ReturnSuccess::value(Value::Row(result.into()).tagged(tag));
return;
}
else if let Tagged { item: true, tag } = path {
let path = config::default_path_for(&configuration)?;
yield ReturnSuccess::value(Value::Primitive(Primitive::Path(path)).tagged(tag));
}
else if let Some(v) = remove {
let key = v.to_string();
if result.contains_key(&key) {
result.swap_remove(&key);
config::write(&result, &configuration).unwrap();
} else {
yield Err(ShellError::labeled_error(
"Key does not exist in config",
"key",
v.tag(),
));
}
yield ReturnSuccess::value(Value::Row(result.into()).tagged(v.tag()));
}
else {
yield ReturnSuccess::value(Value::Row(result.into()).tagged(name));
}
};
let mut result = crate::data::config::read(name_span, &configuration)?;
if let Some(v) = get {
let key = v.to_string();
let value = result
.get(&key)
.ok_or_else(|| ShellError::string(&format!("Missing key {} in config", key)))?;
let mut results = VecDeque::new();
match value {
Tagged {
item: Value::Table(list),
..
} => {
for l in list {
results.push_back(ReturnSuccess::value(l.clone()));
}
}
x => results.push_back(ReturnSuccess::value(x.clone())),
}
return Ok(results.to_output_stream());
}
if let Some((key, value)) = set {
result.insert(key.to_string(), value.clone());
config::write(&result, &configuration)?;
return Ok(stream![Value::Row(result.into()).tagged(value.tag())].from_input_stream());
}
if let Tagged { item: true, tag } = clear {
result.clear();
config::write(&result, &configuration)?;
return Ok(stream![Value::Row(result.into()).tagged(tag)].from_input_stream());
}
if let Tagged { item: true, tag } = path {
let path = config::default_path_for(&configuration)?;
return Ok(stream![Value::Primitive(Primitive::Path(path)).tagged(tag)].from_input_stream());
}
if let Some(v) = remove {
let key = v.to_string();
if result.contains_key(&key) {
result.swap_remove(&key);
config::write(&result, &configuration)?;
} else {
return Err(ShellError::string(&format!(
"{} does not exist in config",
key
)));
}
let obj = VecDeque::from_iter(vec![Value::Row(result.into()).tagged(v.tag())]);
return Ok(obj.from_input_stream());
}
return Ok(vec![Value::Row(result.into()).tagged(name)].into());
Ok(stream.to_output_stream())
}

src/commands/count.rs (new file, 46 lines)

@ -0,0 +1,46 @@
use crate::commands::WholeStreamCommand;
use crate::data::Value;
use crate::errors::ShellError;
use crate::parser::CommandRegistry;
use crate::prelude::*;
use futures::stream::StreamExt;
pub struct Count;
#[derive(Deserialize)]
pub struct CountArgs {}
impl WholeStreamCommand for Count {
fn name(&self) -> &str {
"count"
}
fn signature(&self) -> Signature {
Signature::build("count")
}
fn usage(&self) -> &str {
"Show the total number of rows."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, count)?.run()
}
}
pub fn count(
CountArgs {}: CountArgs,
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let rows: Vec<Tagged<Value>> = input.values.collect().await;
yield ReturnSuccess::value(Value::int(rows.len()).tagged(name))
};
Ok(stream.to_output_stream())
}
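`count` is the mirror image of `append`: drain the stream, emit one integer. As a standalone sketch (again assuming `futures` 0.3, with strings in place of `Tagged<Value>`); note that, like the real command, it buffers every row before answering:

```rust
// What `count` does: collect the whole input stream, then report its length.
use futures::executor::block_on;
use futures::stream::{self, StreamExt};

fn main() {
    let input = stream::iter(vec!["ls", "ps", "date"]);
    let rows: Vec<&str> = block_on(input.collect());
    println!("count = {}", rows.len()); // prints: count = 3
}
```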


@ -21,10 +21,9 @@ impl PerItemCommand for Cpy {
fn signature(&self) -> Signature {
Signature::build("cp")
.required("src", SyntaxShape::Pattern)
.required("dst", SyntaxShape::Path)
.named("file", SyntaxShape::Any)
.switch("recursive")
.required("src", SyntaxShape::Pattern, "the place to copy from")
.required("dst", SyntaxShape::Path, "the place to copy to")
.switch("recursive", "copy recursively through subdirectories")
}
fn usage(&self) -> &str {


@ -17,7 +17,9 @@ impl WholeStreamCommand for Date {
}
fn signature(&self) -> Signature {
Signature::build("date").switch("utc").switch("local")
Signature::build("date")
.switch("utc", "use universal time (UTC)")
.switch("local", "use the local time")
}
fn usage(&self) -> &str {
@ -39,27 +41,27 @@ where
{
let mut indexmap = IndexMap::new();
indexmap.insert("year".to_string(), Value::int(dt.year()).tagged(tag));
indexmap.insert("month".to_string(), Value::int(dt.month()).tagged(tag));
indexmap.insert("day".to_string(), Value::int(dt.day()).tagged(tag));
indexmap.insert("hour".to_string(), Value::int(dt.hour()).tagged(tag));
indexmap.insert("minute".to_string(), Value::int(dt.minute()).tagged(tag));
indexmap.insert("second".to_string(), Value::int(dt.second()).tagged(tag));
indexmap.insert("year".to_string(), Value::int(dt.year()).tagged(&tag));
indexmap.insert("month".to_string(), Value::int(dt.month()).tagged(&tag));
indexmap.insert("day".to_string(), Value::int(dt.day()).tagged(&tag));
indexmap.insert("hour".to_string(), Value::int(dt.hour()).tagged(&tag));
indexmap.insert("minute".to_string(), Value::int(dt.minute()).tagged(&tag));
indexmap.insert("second".to_string(), Value::int(dt.second()).tagged(&tag));
let tz = dt.offset();
indexmap.insert(
"timezone".to_string(),
Value::string(format!("{}", tz)).tagged(tag),
Value::string(format!("{}", tz)).tagged(&tag),
);
Value::Row(Dictionary::from(indexmap)).tagged(tag)
Value::Row(Dictionary::from(indexmap)).tagged(&tag)
}
pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;
let mut date_out = VecDeque::new();
let tag = args.call_info.name_tag;
let tag = args.call_info.name_tag.clone();
let value = if args.has("utc") {
let utc: DateTime<Utc> = Utc::now();
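The switch from .tagged(tag) to .tagged(&tag) and the extra .clone() calls here recur throughout the rest of this diff; they follow from Tag no longer being a cheap Copy value once its anchor carries a location. A rough sketch of the struct shape implied by the Tag literals in the fetch.rs hunks further down (the derives and field visibility are assumptions):
#[derive(Debug, Clone, PartialEq)]
pub struct Tag {
    pub span: Span,
    pub anchor: Option<AnchorLocation>, // previously a bare uuid::Uuid
}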

View File

@ -12,7 +12,7 @@ impl PerItemCommand for Echo {
}
fn signature(&self) -> Signature {
Signature::build("echo").rest(SyntaxShape::Any)
Signature::build("echo").rest(SyntaxShape::Any, "the values to echo")
}
fn usage(&self) -> &str {
@ -35,38 +35,34 @@ fn run(
_registry: &CommandRegistry,
_raw_args: &RawCommandArgs,
) -> Result<OutputStream, ShellError> {
let name = call_info.name_tag;
let mut output = String::new();
let mut first = true;
let mut output = vec![];
if let Some(ref positional) = call_info.args.positional {
for i in positional {
match i.as_string() {
Ok(s) => {
if !first {
output.push_str(" ");
} else {
first = false;
output.push(Ok(ReturnSuccess::Value(
Value::string(s).tagged(i.tag.clone()),
)));
}
_ => match i {
Tagged {
item: Value::Table(table),
..
} => {
for item in table {
output.push(Ok(ReturnSuccess::Value(item.clone())));
}
}
output.push_str(&s);
}
_ => {
return Err(ShellError::labeled_error(
"Expect a string from pipeline",
"not a string-compatible value",
i.tag(),
));
}
_ => {
output.push(Ok(ReturnSuccess::Value(i.clone())));
}
},
}
}
}
let stream = VecDeque::from(vec![Ok(ReturnSuccess::Value(
Value::string(output).tagged(name),
))]);
let stream = VecDeque::from(output);
Ok(stream.to_output_stream())
}

View File

@ -1,7 +1,6 @@
use crate::commands::command::CommandAction;
use crate::commands::PerItemCommand;
use crate::commands::UnevaluatedCallInfo;
use crate::data::meta::Span;
use crate::errors::ShellError;
use crate::parser::registry;
use crate::prelude::*;
@ -15,7 +14,11 @@ impl PerItemCommand for Enter {
}
fn signature(&self) -> registry::Signature {
Signature::build("enter").required("location", SyntaxShape::Block)
Signature::build("enter").required(
"location",
SyntaxShape::Path,
"the location to create a new shell from",
)
}
fn usage(&self) -> &str {
@ -33,14 +36,16 @@ impl PerItemCommand for Enter {
let raw_args = raw_args.clone();
match call_info.args.expect_nth(0)? {
Tagged {
item: Value::Primitive(Primitive::String(location)),
item: Value::Primitive(Primitive::Path(location)),
tag,
..
} => {
let location = location.to_string();
let location_clone = location.to_string();
let location_string = location.display().to_string();
let location_clone = location_string.clone();
let tag_clone = tag.clone();
if location.starts_with("help") {
let spec = location.split(":").collect::<Vec<&str>>();
let spec = location_string.split(":").collect::<Vec<&str>>();
let (_, command) = (spec[0], spec[1]);
@ -67,26 +72,16 @@ impl PerItemCommand for Enter {
let full_path = std::path::PathBuf::from(cwd);
let (file_extension, contents, contents_tag, anchor_location) =
let (file_extension, contents, contents_tag) =
crate::commands::open::fetch(
&full_path,
&location_clone,
Span::unknown(),
)
.await.unwrap();
if contents_tag.anchor != uuid::Uuid::nil() {
// If we have loaded something, track its source
yield ReturnSuccess::action(CommandAction::AddAnchorLocation(
contents_tag.anchor,
anchor_location,
));
}
tag_clone.span,
).await?;
match contents {
Value::Primitive(Primitive::String(_)) => {
let tagged_contents = contents.tagged(contents_tag);
let tagged_contents = contents.tagged(&contents_tag);
if let Some(extension) = file_extension {
let command_name = format!("from-{}", extension);
@ -95,6 +90,7 @@ impl PerItemCommand for Enter {
{
let new_args = RawCommandArgs {
host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call {
@ -103,14 +99,12 @@ impl PerItemCommand for Enter {
named: None,
},
source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag,
},
};
let mut result = converter.run(
new_args.with_input(vec![tagged_contents]),
&registry,
false
);
let result_vec: Vec<Result<ReturnSuccess, ShellError>> =
result.drain_vec().await;
@ -123,7 +117,7 @@ impl PerItemCommand for Enter {
yield Ok(ReturnSuccess::Action(CommandAction::EnterValueShell(
Tagged {
item,
tag: contents_tag,
tag: contents_tag.clone(),
})));
}
x => yield x,

View File

@ -37,22 +37,22 @@ pub fn get_environment(tag: Tag) -> Result<Tagged<Value>, Box<dyn std::error::Er
let mut indexmap = IndexMap::new();
let path = std::env::current_dir()?;
indexmap.insert("cwd".to_string(), Value::path(path).tagged(tag));
indexmap.insert("cwd".to_string(), Value::path(path).tagged(&tag));
if let Some(home) = dirs::home_dir() {
indexmap.insert("home".to_string(), Value::path(home).tagged(tag));
indexmap.insert("home".to_string(), Value::path(home).tagged(&tag));
}
let config = config::default_path()?;
indexmap.insert("config".to_string(), Value::path(config).tagged(tag));
indexmap.insert("config".to_string(), Value::path(config).tagged(&tag));
let history = History::path();
indexmap.insert("history".to_string(), Value::path(history).tagged(tag));
indexmap.insert("history".to_string(), Value::path(history).tagged(&tag));
let temp = std::env::temp_dir();
indexmap.insert("temp".to_string(), Value::path(temp).tagged(tag));
indexmap.insert("temp".to_string(), Value::path(temp).tagged(&tag));
let mut dict = TaggedDictBuilder::new(tag);
let mut dict = TaggedDictBuilder::new(&tag);
for v in std::env::vars() {
dict.insert(v.0, Value::string(v.1));
}
@ -60,14 +60,14 @@ pub fn get_environment(tag: Tag) -> Result<Tagged<Value>, Box<dyn std::error::Er
indexmap.insert("vars".to_string(), dict.into_tagged_value());
}
Ok(Value::Row(Dictionary::from(indexmap)).tagged(tag))
Ok(Value::Row(Dictionary::from(indexmap)).tagged(&tag))
}
pub fn env(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;
let mut env_out = VecDeque::new();
let tag = args.call_info.name_tag;
let tag = args.call_info.name_tag.clone();
let value = get_environment(tag)?;
env_out.push_back(value);
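get_environment builds one row keyed by cwd, home, config, history, temp and vars. A minimal consumption sketch (the surrounding function returning Box<dyn std::error::Error> is an assumption; get_data_by_key is the accessor used elsewhere in this diff):
let row = get_environment(Tag::unknown())?;
assert!(row.get_data_by_key("cwd").is_some());
assert!(row.get_data_by_key("temp").is_some());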

260
src/commands/evaluate_by.rs Normal file
View File

@ -0,0 +1,260 @@
use crate::commands::WholeStreamCommand;
use crate::parser::hir::SyntaxShape;
use crate::prelude::*;
pub struct EvaluateBy;
#[derive(Deserialize)]
pub struct EvaluateByArgs {
evaluate_with: Option<Tagged<String>>,
}
impl WholeStreamCommand for EvaluateBy {
fn name(&self) -> &str {
"evaluate-by"
}
fn signature(&self) -> Signature {
Signature::build("evaluate-by").named(
"evaluate_with",
SyntaxShape::String,
"the name of the column to evaluate by",
)
}
fn usage(&self) -> &str {
"Creates a new table with the data from the table's rows evaluated by the column given."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, evaluate_by)?.run()
}
}
pub fn evaluate_by(
EvaluateByArgs { evaluate_with }: EvaluateByArgs,
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let values: Vec<Tagged<Value>> = input.values.collect().await;
if values.is_empty() {
yield Err(ShellError::labeled_error(
"Expected table from pipeline",
"requires a table input",
name
))
} else {
let evaluate_with = if let Some(evaluator) = evaluate_with {
Some(evaluator.item().clone())
} else {
None
};
match evaluate(&values[0], evaluate_with, name) {
Ok(evaluated) => yield ReturnSuccess::value(evaluated),
Err(err) => yield Err(err)
}
}
};
Ok(stream.to_output_stream())
}
fn fetch(
key: Option<String>,
) -> Box<dyn Fn(Tagged<Value>, Tag) -> Option<Tagged<Value>> + 'static> {
Box::new(move |value: Tagged<Value>, tag| match key {
Some(ref key_given) => {
if let Some(Tagged { item, .. }) = value.get_data_by_key(&key_given) {
Some(item.clone().tagged(tag))
} else {
None
}
}
None => Some(Value::int(1).tagged(tag)),
})
}
pub fn evaluate(
values: &Tagged<Value>,
evaluator: Option<String>,
tag: impl Into<Tag>,
) -> Result<Tagged<Value>, ShellError> {
let tag = tag.into();
let evaluate_with = match evaluator {
Some(keyfn) => fetch(Some(keyfn)),
None => fetch(None),
};
let results: Tagged<Value> = match values {
Tagged {
item: Value::Table(datasets),
..
} => {
let datasets: Vec<_> = datasets
.into_iter()
.map(|subsets| match subsets {
Tagged {
item: Value::Table(subsets),
..
} => {
let subsets: Vec<_> = subsets
.clone()
.into_iter()
.map(|data| match data {
Tagged {
item: Value::Table(data),
..
} => {
let data: Vec<_> = data
.into_iter()
.map(|x| evaluate_with(x.clone(), tag.clone()).unwrap())
.collect();
Value::Table(data).tagged(&tag)
}
_ => Value::Table(vec![]).tagged(&tag),
})
.collect();
Value::Table(subsets).tagged(&tag)
}
_ => Value::Table(vec![]).tagged(&tag),
})
.collect();
Value::Table(datasets.clone()).tagged(&tag)
}
_ => Value::Table(vec![]).tagged(&tag),
};
Ok(results)
}
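The triple match above walks a fixed three-level nesting. Written out as a shape comment (names taken from the bindings in evaluate; the arrangement matches the evaluates_the_tables test below):
// Value::Table(datasets)            -- the outer table
//   Value::Table(subsets)           -- each dataset is itself a table
//     Value::Table(data)            -- each subset is a table of rows
//       row -> evaluate_with(row)   -- every row is mapped by the evaluator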
#[cfg(test)]
mod tests {
use crate::commands::evaluate_by::{evaluate, fetch};
use crate::commands::group_by::group;
use crate::commands::t_sort_by::t_sort;
use crate::data::meta::*;
use crate::prelude::*;
use crate::Value;
use indexmap::IndexMap;
fn int(s: impl Into<BigInt>) -> Tagged<Value> {
Value::int(s).tagged_unknown()
}
fn string(input: impl Into<String>) -> Tagged<Value> {
Value::string(input.into()).tagged_unknown()
}
fn row(entries: IndexMap<String, Tagged<Value>>) -> Tagged<Value> {
Value::row(entries).tagged_unknown()
}
fn table(list: &Vec<Tagged<Value>>) -> Tagged<Value> {
Value::table(list).tagged_unknown()
}
fn nu_releases_sorted_by_date() -> Tagged<Value> {
let key = String::from("date");
t_sort(
Some(key),
None,
&nu_releases_grouped_by_date(),
Tag::unknown(),
)
.unwrap()
}
fn nu_releases_grouped_by_date() -> Tagged<Value> {
let key = String::from("date").tagged_unknown();
group(&key, nu_releases_commiters(), Tag::unknown()).unwrap()
}
fn nu_releases_commiters() -> Vec<Tagged<Value>> {
vec![
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")},
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")},
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")},
),
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("September 24-2019")},
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")},
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("September 24-2019")},
),
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")},
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("September 24-2019")},
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")},
),
]
}
#[test]
fn evaluator_fetches_by_column_if_supplied_a_column_name() {
let subject = row(indexmap! { "name".into() => string("andres") });
let evaluator = fetch(Some(String::from("name")));
assert_eq!(evaluator(subject, Tag::unknown()), Some(string("andres")));
}
#[test]
fn evaluator_returns_1_if_no_column_name_given() {
let subject = row(indexmap! { "name".into() => string("andres") });
let evaluator = fetch(None);
assert_eq!(
evaluator(subject, Tag::unknown()),
Some(Value::int(1).tagged_unknown())
);
}
#[test]
fn evaluates_the_tables() {
assert_eq!(
evaluate(&nu_releases_sorted_by_date(), None, Tag::unknown()).unwrap(),
table(&vec![table(&vec![
table(&vec![int(1), int(1), int(1)]),
table(&vec![int(1), int(1), int(1)]),
table(&vec![int(1), int(1), int(1)]),
]),])
);
}
#[test]
fn evaluates_the_tables_with_custom_evaluator() {
let eval = String::from("name");
assert_eq!(
evaluate(&nu_releases_sorted_by_date(), Some(eval), Tag::unknown()).unwrap(),
table(&vec![table(&vec![
table(&vec![string("AR"), string("JT"), string("YK")]),
table(&vec![string("AR"), string("YK"), string("JT")]),
table(&vec![string("YK"), string("JT"), string("AR")]),
]),])
);
}
}

View File

@ -11,7 +11,7 @@ impl WholeStreamCommand for Exit {
}
fn signature(&self) -> Signature {
Signature::build("exit").switch("now")
Signature::build("exit").switch("now", "exit out of the shell immediately")
}
fn usage(&self) -> &str {

View File

@ -10,7 +10,6 @@ use mime::Mime;
use std::path::PathBuf;
use std::str::FromStr;
use surf::mime;
use uuid::Uuid;
pub struct Fetch;
impl PerItemCommand for Fetch {
@ -20,8 +19,12 @@ impl PerItemCommand for Fetch {
fn signature(&self) -> Signature {
Signature::build(self.name())
.required("path", SyntaxShape::Path)
.switch("raw")
.required(
"path",
SyntaxShape::Path,
"the URL to fetch the contents from",
)
.switch("raw", "fetch contents as text rather than a table")
}
fn usage(&self) -> &str {
@ -44,16 +47,18 @@ fn run(
registry: &CommandRegistry,
raw_args: &RawCommandArgs,
) -> Result<OutputStream, ShellError> {
let path = match call_info
.args
.nth(0)
.ok_or_else(|| ShellError::string(&format!("No file or directory specified")))?
{
let path = match call_info.args.nth(0).ok_or_else(|| {
ShellError::labeled_error(
"No file or directory specified",
"for command",
&call_info.name_tag,
)
})? {
file => file,
};
let path_buf = path.as_path()?;
let path_str = path_buf.display().to_string();
let path_span = path.span();
let path_span = path.tag.span;
let has_raw = call_info.args.has("raw");
let registry = registry.clone();
let raw_args = raw_args.clone();
@ -66,7 +71,7 @@ fn run(
yield Err(e);
return;
}
let (file_extension, contents, contents_tag, anchor_location) = result.unwrap();
let (file_extension, contents, contents_tag) = result.unwrap();
let file_extension = if has_raw {
None
@ -76,21 +81,14 @@ fn run(
file_extension.or(path_str.split('.').last().map(String::from))
};
if contents_tag.anchor != uuid::Uuid::nil() {
// If we have loaded something, track its source
yield ReturnSuccess::action(CommandAction::AddAnchorLocation(
contents_tag.anchor,
anchor_location,
));
}
let tagged_contents = contents.tagged(contents_tag);
let tagged_contents = contents.tagged(&contents_tag);
if let Some(extension) = file_extension {
let command_name = format!("from-{}", extension);
if let Some(converter) = registry.get_command(&command_name) {
let new_args = RawCommandArgs {
host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call {
@ -99,11 +97,10 @@ fn run(
named: None
},
source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag,
}
};
let mut result = converter.run(new_args.with_input(vec![tagged_contents]), &registry, false);
let mut result = converter.run(new_args.with_input(vec![tagged_contents]), &registry);
let result_vec: Vec<Result<ReturnSuccess, ShellError>> = result.drain_vec().await;
for res in result_vec {
match res {
@ -113,7 +110,7 @@ fn run(
}
}
Ok(ReturnSuccess::Value(Tagged { item, .. })) => {
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag }));
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() }));
}
x => yield x,
}
@ -129,10 +126,7 @@ fn run(
Ok(stream.to_output_stream())
}
pub async fn fetch(
location: &str,
span: Span,
) -> Result<(Option<String>, Value, Tag, AnchorLocation), ShellError> {
pub async fn fetch(location: &str, span: Span) -> Result<(Option<String>, Value, Tag), ShellError> {
if let Err(_) = url::Url::parse(location) {
return Err(ShellError::labeled_error(
"Incomplete or incorrect url",
@ -158,9 +152,8 @@ pub async fn fetch(
})?),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
AnchorLocation::Url(location.to_string()),
)),
(mime::APPLICATION, mime::JSON) => Ok((
Some("json".to_string()),
@ -173,9 +166,8 @@ pub async fn fetch(
})?),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
AnchorLocation::Url(location.to_string()),
)),
(mime::APPLICATION, mime::OCTET_STREAM) => {
let buf: Vec<u8> = r.body_bytes().await.map_err(|_| {
@ -190,9 +182,8 @@ pub async fn fetch(
Value::binary(buf),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
AnchorLocation::Url(location.to_string()),
))
}
(mime::IMAGE, mime::SVG) => Ok((
@ -206,9 +197,8 @@ pub async fn fetch(
})?),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
AnchorLocation::Url(location.to_string()),
)),
(mime::IMAGE, image_ty) => {
let buf: Vec<u8> = r.body_bytes().await.map_err(|_| {
@ -223,9 +213,8 @@ pub async fn fetch(
Value::binary(buf),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
AnchorLocation::Url(location.to_string()),
))
}
(mime::TEXT, mime::HTML) => Ok((
@ -239,9 +228,8 @@ pub async fn fetch(
})?),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
AnchorLocation::Url(location.to_string()),
)),
(mime::TEXT, mime::PLAIN) => {
let path_extension = url::Url::parse(location)
@ -266,9 +254,8 @@ pub async fn fetch(
})?),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
AnchorLocation::Url(location.to_string()),
))
}
(ty, sub_ty) => Ok((
@ -276,9 +263,8 @@ pub async fn fetch(
Value::string(format!("Not yet supported MIME type: {} {}", ty, sub_ty)),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
AnchorLocation::Url(location.to_string()),
)),
}
}
@ -287,9 +273,8 @@ pub async fn fetch(
Value::string(format!("No content type found")),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::Url(location.to_string())),
},
AnchorLocation::Url(location.to_string()),
)),
},
Err(_) => {

View File

@ -7,7 +7,7 @@ pub struct First;
#[derive(Deserialize)]
pub struct FirstArgs {
amount: Tagged<u64>,
rows: Option<Tagged<u64>>,
}
impl WholeStreamCommand for First {
@ -16,7 +16,11 @@ impl WholeStreamCommand for First {
}
fn signature(&self) -> Signature {
Signature::build("first").required("amount", SyntaxShape::Literal)
Signature::build("first").optional(
"rows",
SyntaxShape::Int,
"starting from the front, the number of rows to return",
)
}
fn usage(&self) -> &str {
@ -33,8 +37,16 @@ impl WholeStreamCommand for First {
}
fn first(
FirstArgs { amount }: FirstArgs,
FirstArgs { rows }: FirstArgs,
context: RunnableContext,
) -> Result<OutputStream, ShellError> {
Ok(OutputStream::from_input(context.input.values.take(*amount)))
let rows_desired = if let Some(quantity) = rows {
*quantity
} else {
1
};
Ok(OutputStream::from_input(
context.input.values.take(rows_desired),
))
}
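A small design note on the defaulting above: the same logic can be written with Option combinators. A one-line equivalent sketch (behaviour unchanged, assuming Tagged<u64> derefs to u64 as the *quantity above implies):
let rows_desired = rows.map(|quantity| *quantity).unwrap_or(1);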

View File

@ -33,7 +33,7 @@ fn bson_array(input: &Vec<Bson>, tag: Tag) -> Result<Vec<Tagged<Value>>, ShellEr
let mut out = vec![];
for value in input {
out.push(convert_bson_value_to_nu_value(value, tag)?);
out.push(convert_bson_value_to_nu_value(value, &tag)?);
}
Ok(out)
@ -46,100 +46,100 @@ fn convert_bson_value_to_nu_value(
let tag = tag.into();
Ok(match v {
Bson::FloatingPoint(n) => Value::Primitive(Primitive::from(*n)).tagged(tag),
Bson::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag),
Bson::Array(a) => Value::Table(bson_array(a, tag)?).tagged(tag),
Bson::FloatingPoint(n) => Value::Primitive(Primitive::from(*n)).tagged(&tag),
Bson::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(&tag),
Bson::Array(a) => Value::Table(bson_array(a, tag.clone())?).tagged(&tag),
Bson::Document(doc) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(tag.clone());
for (k, v) in doc.iter() {
collected.insert_tagged(k.clone(), convert_bson_value_to_nu_value(v, tag)?);
collected.insert_tagged(k.clone(), convert_bson_value_to_nu_value(v, &tag)?);
}
collected.into_tagged_value()
}
Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(tag),
Bson::Null => Value::Primitive(Primitive::Nothing).tagged(tag),
Bson::Boolean(b) => Value::Primitive(Primitive::Boolean(*b)).tagged(&tag),
Bson::Null => Value::Primitive(Primitive::Nothing).tagged(&tag),
Bson::RegExp(r, opts) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
"$regex".to_string(),
Value::Primitive(Primitive::String(String::from(r))).tagged(tag),
Value::Primitive(Primitive::String(String::from(r))).tagged(&tag),
);
collected.insert_tagged(
"$options".to_string(),
Value::Primitive(Primitive::String(String::from(opts))).tagged(tag),
Value::Primitive(Primitive::String(String::from(opts))).tagged(&tag),
);
collected.into_tagged_value()
}
Bson::I32(n) => Value::number(n).tagged(tag),
Bson::I64(n) => Value::number(n).tagged(tag),
Bson::I32(n) => Value::number(n).tagged(&tag),
Bson::I64(n) => Value::number(n).tagged(&tag),
Bson::Decimal128(n) => {
// TODO: this really isn't great, and we should update this to do a higher
// fidelity translation
let decimal = BigDecimal::from_str(&format!("{}", n)).map_err(|_| {
ShellError::range_error(
ExpectedRange::BigDecimal,
&n.tagged(tag),
&n.tagged(&tag),
format!("converting BSON Decimal128 to BigDecimal"),
)
})?;
Value::Primitive(Primitive::Decimal(decimal)).tagged(tag)
Value::Primitive(Primitive::Decimal(decimal)).tagged(&tag)
}
Bson::JavaScriptCode(js) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
"$javascript".to_string(),
Value::Primitive(Primitive::String(String::from(js))).tagged(tag),
Value::Primitive(Primitive::String(String::from(js))).tagged(&tag),
);
collected.into_tagged_value()
}
Bson::JavaScriptCodeWithScope(js, doc) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
"$javascript".to_string(),
Value::Primitive(Primitive::String(String::from(js))).tagged(tag),
Value::Primitive(Primitive::String(String::from(js))).tagged(&tag),
);
collected.insert_tagged(
"$scope".to_string(),
convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag)?,
convert_bson_value_to_nu_value(&Bson::Document(doc.to_owned()), tag.clone())?,
);
collected.into_tagged_value()
}
Bson::TimeStamp(ts) => {
let mut collected = TaggedDictBuilder::new(tag);
collected.insert_tagged("$timestamp".to_string(), Value::number(ts).tagged(tag));
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged("$timestamp".to_string(), Value::number(ts).tagged(&tag));
collected.into_tagged_value()
}
Bson::Binary(bst, bytes) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
"$binary_subtype".to_string(),
match bst {
BinarySubtype::UserDefined(u) => Value::number(u),
_ => Value::Primitive(Primitive::String(binary_subtype_to_string(*bst))),
}
.tagged(tag),
.tagged(&tag),
);
collected.insert_tagged(
"$binary".to_string(),
Value::Primitive(Primitive::Binary(bytes.to_owned())).tagged(tag),
Value::Primitive(Primitive::Binary(bytes.to_owned())).tagged(&tag),
);
collected.into_tagged_value()
}
Bson::ObjectId(obj_id) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
"$object_id".to_string(),
Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(tag),
Value::Primitive(Primitive::String(obj_id.to_hex())).tagged(&tag),
);
collected.into_tagged_value()
}
Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(tag),
Bson::UtcDatetime(dt) => Value::Primitive(Primitive::Date(*dt)).tagged(&tag),
Bson::Symbol(s) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(tag.clone());
collected.insert_tagged(
"$symbol".to_string(),
Value::Primitive(Primitive::String(String::from(s))).tagged(tag),
Value::Primitive(Primitive::String(String::from(s))).tagged(&tag),
);
collected.into_tagged_value()
}
@ -208,13 +208,13 @@ fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
let value_tag = value.tag();
match value.item {
Value::Primitive(Primitive::Binary(vb)) =>
match from_bson_bytes_to_value(vb, tag) {
match from_bson_bytes_to_value(vb, tag.clone()) {
Ok(x) => yield ReturnSuccess::value(x),
Err(_) => {
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as BSON",
"input cannot be parsed as BSON",
tag,
tag.clone(),
"value originates from here",
value_tag,
))
@ -223,7 +223,7 @@ fn from_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
tag.clone(),
"value originates from here",
value_tag,
)),

View File

@ -1,13 +1,14 @@
use crate::commands::from_structured_data::from_structured_data;
use crate::commands::WholeStreamCommand;
use crate::data::{Primitive, TaggedDictBuilder, Value};
use crate::data::{Primitive, Value};
use crate::prelude::*;
use csv::ReaderBuilder;
pub struct FromCSV;
#[derive(Deserialize)]
pub struct FromCSVArgs {
headerless: bool,
separator: Option<Tagged<Value>>,
}
impl WholeStreamCommand for FromCSV {
@ -16,11 +17,17 @@ impl WholeStreamCommand for FromCSV {
}
fn signature(&self) -> Signature {
Signature::build("from-csv").switch("headerless")
Signature::build("from-csv")
.named(
"separator",
SyntaxShape::String,
"a character to separate columns, defaults to ','",
)
.switch("headerless", "don't treat the first row as column names")
}
fn usage(&self) -> &str {
"Parse text as .csv and create table"
"Parse text as .csv and create table."
}
fn run(
@ -32,107 +39,31 @@ impl WholeStreamCommand for FromCSV {
}
}
pub fn from_csv_string_to_value(
s: String,
headerless: bool,
tag: impl Into<Tag>,
) -> Result<Tagged<Value>, csv::Error> {
let mut reader = ReaderBuilder::new()
.has_headers(false)
.from_reader(s.as_bytes());
let tag = tag.into();
let mut fields: VecDeque<String> = VecDeque::new();
let mut iter = reader.records();
let mut rows = vec![];
if let Some(result) = iter.next() {
let line = result?;
for (idx, item) in line.iter().enumerate() {
if headerless {
fields.push_back(format!("Column{}", idx + 1));
} else {
fields.push_back(item.to_string());
}
}
}
loop {
if let Some(row_values) = iter.next() {
let row_values = row_values?;
let mut row = TaggedDictBuilder::new(tag);
for (idx, entry) in row_values.iter().enumerate() {
row.insert_tagged(
fields.get(idx).unwrap(),
Value::Primitive(Primitive::String(String::from(entry))).tagged(tag),
);
}
rows.push(row.into_tagged_value());
} else {
break;
}
}
Ok(Tagged::from_item(Value::Table(rows), tag))
}
fn from_csv(
FromCSVArgs {
headerless: skip_headers,
headerless,
separator,
}: FromCSVArgs,
RunnableContext { input, name, .. }: RunnableContext,
runnable_context: RunnableContext,
) -> Result<OutputStream, ShellError> {
let name_tag = name;
let stream = async_stream! {
let values: Vec<Tagged<Value>> = input.values.collect().await;
let mut concat_string = String::new();
let mut latest_tag: Option<Tag> = None;
for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
concat_string.push_str("\n");
}
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
name_tag,
"value originates from here",
value_tag,
)),
}
}
match from_csv_string_to_value(concat_string, skip_headers, name_tag) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
yield ReturnSuccess::value(l);
}
}
x => yield ReturnSuccess::value(x),
},
Err(_) => if let Some(last_tag) = latest_tag {
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as CSV",
"input cannot be parsed as CSV",
name_tag,
"value originates from here",
last_tag,
))
} ,
let sep = match separator {
Some(Tagged {
item: Value::Primitive(Primitive::String(s)),
tag,
..
}) => {
let vec_s: Vec<char> = s.chars().collect();
if vec_s.len() != 1 {
return Err(ShellError::labeled_error(
"Expected a single separator char from --separator",
"requires a single character string input",
tag,
));
};
vec_s[0]
}
_ => ',',
};
Ok(stream.to_output_stream())
from_structured_data(headerless, sep, "CSV", runnable_context)
}
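The separator handling above accepts only a single-character string and otherwise falls back to ','. The same check restated as a hedged helper (hypothetical function, not in this commit):
fn parse_separator(separator: Option<&str>) -> Result<char, String> {
    match separator {
        None => Ok(','),
        Some(s) => {
            let mut chars = s.chars();
            match (chars.next(), chars.next()) {
                (Some(c), None) => Ok(c),
                _ => Err("--separator expects a single character".to_string()),
            }
        }
    }
}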

View File

@ -45,10 +45,13 @@ fn convert_ini_top_to_nu_value(
tag: impl Into<Tag>,
) -> Tagged<Value> {
let tag = tag.into();
let mut top_level = TaggedDictBuilder::new(tag);
let mut top_level = TaggedDictBuilder::new(tag.clone());
for (key, value) in v.iter() {
top_level.insert_tagged(key.clone(), convert_ini_second_to_nu_value(value, tag));
top_level.insert_tagged(
key.clone(),
convert_ini_second_to_nu_value(value, tag.clone()),
);
}
top_level.into_tagged_value()
@ -75,7 +78,7 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@ -84,15 +87,15 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
value_tag,
&value_tag,
)),
}
}
match from_ini_string_to_value(concat_string, tag) {
match from_ini_string_to_value(concat_string, tag.clone()) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
@ -105,7 +108,7 @@ fn from_ini(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as INI",
"input cannot be parsed as INI",
tag,
&tag,
"value originates from here",
last_tag,
))

View File

@ -15,7 +15,7 @@ impl WholeStreamCommand for FromJSON {
}
fn signature(&self) -> Signature {
Signature::build("from-json").switch("objects")
Signature::build("from-json").switch("objects", "treat each line as a separate value")
}
fn usage(&self) -> &str {
@ -35,24 +35,24 @@ fn convert_json_value_to_nu_value(v: &serde_hjson::Value, tag: impl Into<Tag>) -
let tag = tag.into();
match v {
serde_hjson::Value::Null => Value::Primitive(Primitive::Nothing).tagged(tag),
serde_hjson::Value::Bool(b) => Value::boolean(*b).tagged(tag),
serde_hjson::Value::F64(n) => Value::number(n).tagged(tag),
serde_hjson::Value::U64(n) => Value::number(n).tagged(tag),
serde_hjson::Value::I64(n) => Value::number(n).tagged(tag),
serde_hjson::Value::Null => Value::Primitive(Primitive::Nothing).tagged(&tag),
serde_hjson::Value::Bool(b) => Value::boolean(*b).tagged(&tag),
serde_hjson::Value::F64(n) => Value::number(n).tagged(&tag),
serde_hjson::Value::U64(n) => Value::number(n).tagged(&tag),
serde_hjson::Value::I64(n) => Value::number(n).tagged(&tag),
serde_hjson::Value::String(s) => {
Value::Primitive(Primitive::String(String::from(s))).tagged(tag)
Value::Primitive(Primitive::String(String::from(s))).tagged(&tag)
}
serde_hjson::Value::Array(a) => Value::Table(
a.iter()
.map(|x| convert_json_value_to_nu_value(x, tag))
.map(|x| convert_json_value_to_nu_value(x, &tag))
.collect(),
)
.tagged(tag),
serde_hjson::Value::Object(o) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(&tag);
for (k, v) in o.iter() {
collected.insert_tagged(k.clone(), convert_json_value_to_nu_value(v, tag));
collected.insert_tagged(k.clone(), convert_json_value_to_nu_value(v, &tag));
}
collected.into_tagged_value()
@ -82,7 +82,7 @@ fn from_json(
for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@ -91,9 +91,9 @@ fn from_json(
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
name_tag,
&name_tag,
"value originates from here",
value_tag,
&value_tag,
)),
}
@ -106,15 +106,15 @@ fn from_json(
continue;
}
match from_json_string_to_value(json_str.to_string(), name_tag) {
match from_json_string_to_value(json_str.to_string(), &name_tag) {
Ok(x) =>
yield ReturnSuccess::value(x),
Err(_) => {
if let Some(last_tag) = latest_tag {
if let Some(ref last_tag) = latest_tag {
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as JSON",
"input cannot be parsed as JSON",
name_tag,
&name_tag,
"value originates from here",
last_tag))
}
@ -122,7 +122,7 @@ fn from_json(
}
}
} else {
match from_json_string_to_value(concat_string, name_tag) {
match from_json_string_to_value(concat_string, name_tag.clone()) {
Ok(x) =>
match x {
Tagged { item: Value::Table(list), .. } => {

View File

@ -138,7 +138,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
let value_tag = value.tag();
match value.item {
Value::Primitive(Primitive::Binary(vb)) =>
match from_sqlite_bytes_to_value(vb, tag) {
match from_sqlite_bytes_to_value(vb, tag.clone()) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
@ -151,7 +151,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as SQLite",
"input cannot be parsed as SQLite",
tag,
&tag,
"value originates from here",
value_tag,
))
@ -160,7 +160,7 @@ fn from_sqlite(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
value_tag,
)),

504
src/commands/from_ssv.rs Normal file
View File

@ -0,0 +1,504 @@
use crate::commands::WholeStreamCommand;
use crate::data::{Primitive, TaggedDictBuilder, Value};
use crate::prelude::*;
pub struct FromSSV;
#[derive(Deserialize)]
pub struct FromSSVArgs {
headerless: bool,
#[serde(rename(deserialize = "aligned-columns"))]
aligned_columns: bool,
#[serde(rename(deserialize = "minimum-spaces"))]
minimum_spaces: Option<Tagged<usize>>,
}
const STRING_REPRESENTATION: &str = "from-ssv";
const DEFAULT_MINIMUM_SPACES: usize = 2;
impl WholeStreamCommand for FromSSV {
fn name(&self) -> &str {
STRING_REPRESENTATION
}
fn signature(&self) -> Signature {
Signature::build(STRING_REPRESENTATION)
.switch("headerless", "don't treat the first row as column names")
.switch("aligned-columns", "assume columns are aligned")
.named(
"minimum-spaces",
SyntaxShape::Int,
"the minimum spaces to separate columns",
)
}
fn usage(&self) -> &str {
"Parse text as space-separated values and create a table. The default minimum number of spaces counted as a separator is 2."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, from_ssv)?.run()
}
}
enum HeaderOptions<'a> {
WithHeaders(&'a str),
WithoutHeaders,
}
fn parse_aligned_columns<'a>(
lines: impl Iterator<Item = &'a str>,
headers: HeaderOptions,
separator: &str,
) -> Vec<Vec<(String, String)>> {
fn construct<'a>(
lines: impl Iterator<Item = &'a str>,
headers: Vec<(String, usize)>,
) -> Vec<Vec<(String, String)>> {
lines
.map(|l| {
headers
.iter()
.enumerate()
.map(|(i, (header_name, start_position))| {
let val = match headers.get(i + 1) {
Some((_, end)) => {
if *end < l.len() {
l.get(*start_position..*end)
} else {
l.get(*start_position..)
}
}
None => l.get(*start_position..),
}
.unwrap_or("")
.trim()
.into();
(header_name.clone(), val)
})
.collect()
})
.collect()
}
let find_indices = |line: &str| {
let values = line
.split(&separator)
.map(str::trim)
.filter(|s| !s.is_empty());
values
.fold(
(0, vec![]),
|(current_pos, mut indices), value| match line[current_pos..].find(value) {
None => (current_pos, indices),
Some(index) => {
let absolute_index = current_pos + index;
indices.push(absolute_index);
(absolute_index + value.len(), indices)
}
},
)
.1
};
let parse_with_headers = |lines, headers_raw: &str| {
let indices = find_indices(headers_raw);
let headers = headers_raw
.split(&separator)
.map(str::trim)
.filter(|s| !s.is_empty())
.map(String::from)
.zip(indices);
let columns = headers.collect::<Vec<(String, usize)>>();
construct(lines, columns)
};
let parse_without_headers = |ls: Vec<&str>| {
let mut indices = ls
.iter()
.flat_map(|s| find_indices(*s))
.collect::<Vec<usize>>();
indices.sort();
indices.dedup();
let headers: Vec<(String, usize)> = indices
.iter()
.enumerate()
.map(|(i, position)| (format!("Column{}", i + 1), *position))
.collect();
construct(ls.iter().map(|s| s.to_owned()), headers)
};
match headers {
HeaderOptions::WithHeaders(headers_raw) => parse_with_headers(lines, headers_raw),
HeaderOptions::WithoutHeaders => parse_without_headers(lines.collect()),
}
}
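The index-finding fold above is the heart of aligned-column parsing: it records where each header starts so every data row can later be sliced at those offsets. A standalone worked sketch of the same fold (extracted here for illustration, not part of the source):
fn find_indices(line: &str, separator: &str) -> Vec<usize> {
    line.split(separator)
        .map(str::trim)
        .filter(|s| !s.is_empty())
        .fold((0usize, Vec::new()), |(pos, mut indices), value| {
            match line[pos..].find(value) {
                None => (pos, indices),
                Some(i) => {
                    indices.push(pos + i);
                    (pos + i + value.len(), indices)
                }
            }
        })
        .1
}

// find_indices("colA  col B   col C", "  ") yields [0, 6, 14]: the start
// positions later used to cut each data row into colA / col B / col C cells.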
fn parse_separated_columns<'a>(
lines: impl Iterator<Item = &'a str>,
headers: HeaderOptions,
separator: &str,
) -> Vec<Vec<(String, String)>> {
fn collect<'a>(
headers: Vec<String>,
rows: impl Iterator<Item = &'a str>,
separator: &str,
) -> Vec<Vec<(String, String)>> {
rows.map(|r| {
headers
.iter()
.zip(r.split(separator).map(str::trim).filter(|s| !s.is_empty()))
.map(|(a, b)| (a.to_owned(), b.to_owned()))
.collect()
})
.collect()
}
let parse_with_headers = |lines, headers_raw: &str| {
let headers = headers_raw
.split(&separator)
.map(str::trim)
.map(|s| s.to_owned())
.filter(|s| !s.is_empty())
.collect();
collect(headers, lines, separator)
};
let parse_without_headers = |ls: Vec<&str>| {
let num_columns = ls.iter().map(|r| r.len()).max().unwrap_or(0);
let headers = (1..=num_columns)
.map(|i| format!("Column{}", i))
.collect::<Vec<String>>();
collect(headers, ls.iter().map(|s| s.as_ref()), separator)
};
match headers {
HeaderOptions::WithHeaders(headers_raw) => parse_with_headers(lines, headers_raw),
HeaderOptions::WithoutHeaders => parse_without_headers(lines.collect()),
}
}
fn string_to_table(
s: &str,
headerless: bool,
aligned_columns: bool,
split_at: usize,
) -> Option<Vec<Vec<(String, String)>>> {
let mut lines = s.lines().filter(|l| !l.trim().is_empty());
let separator = " ".repeat(std::cmp::max(split_at, 1));
let (ls, header_options) = if headerless {
(lines, HeaderOptions::WithoutHeaders)
} else {
let headers = lines.next()?;
(lines, HeaderOptions::WithHeaders(headers))
};
let f = if aligned_columns {
parse_aligned_columns
} else {
parse_separated_columns
};
let parsed = f(ls, header_options, &separator);
match parsed.len() {
0 => None,
_ => Some(parsed),
}
}
fn from_ssv_string_to_value(
s: &str,
headerless: bool,
aligned_columns: bool,
split_at: usize,
tag: impl Into<Tag>,
) -> Option<Tagged<Value>> {
let tag = tag.into();
let rows = string_to_table(s, headerless, aligned_columns, split_at)?
.iter()
.map(|row| {
let mut tagged_dict = TaggedDictBuilder::new(&tag);
for (col, entry) in row {
tagged_dict.insert_tagged(
col,
Value::Primitive(Primitive::String(String::from(entry))).tagged(&tag),
)
}
tagged_dict.into_tagged_value()
})
.collect();
Some(Value::Table(rows).tagged(&tag))
}
fn from_ssv(
FromSSVArgs {
headerless,
aligned_columns,
minimum_spaces,
}: FromSSVArgs,
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let values: Vec<Tagged<Value>> = input.values.collect().await;
let mut concat_string = String::new();
let mut latest_tag: Option<Tag> = None;
let split_at = match minimum_spaces {
Some(number) => number.item,
None => DEFAULT_MINIMUM_SPACES
};
for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
}
_ => yield Err(ShellError::labeled_error_with_secondary (
"Expected a string from pipeline",
"requires string input",
&name,
"value originates from here",
&value_tag
)),
}
}
match from_ssv_string_to_value(&concat_string, headerless, aligned_columns, split_at, name.clone()) {
Some(x) => match x {
Tagged { item: Value::Table(list), ..} => {
for l in list { yield ReturnSuccess::value(l) }
}
x => yield ReturnSuccess::value(x)
},
None => if let Some(tag) = latest_tag {
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as SSV",
"input cannot be parsed as SSV",
&name,
"value originates from here",
&tag,
))
},
}
};
Ok(stream.to_output_stream())
}
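For reference, how the three flags above feed string_to_table (a comment-style summary of the call chain; the 2 comes from DEFAULT_MINIMUM_SPACES):
// from-ssv                          -> string_to_table(s, false, false, 2)
// from-ssv --headerless             -> string_to_table(s, true,  false, 2)
// from-ssv --aligned-columns --minimum-spaces 3
//                                   -> string_to_table(s, false, true,  3)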
#[cfg(test)]
mod tests {
use super::*;
fn owned(x: &str, y: &str) -> (String, String) {
(String::from(x), String::from(y))
}
#[test]
fn it_trims_empty_and_whitespace_only_lines() {
let input = r#"
a b
1 2
3 4
"#;
let result = string_to_table(input, false, true, 1);
assert_eq!(
result,
Some(vec![
vec![owned("a", "1"), owned("b", "2")],
vec![owned("a", "3"), owned("b", "4")]
])
);
}
#[test]
fn it_deals_with_single_column_input() {
let input = r#"
a
1
2
"#;
let result = string_to_table(input, false, true, 1);
assert_eq!(
result,
Some(vec![vec![owned("a", "1")], vec![owned("a", "2")]])
);
}
#[test]
fn it_uses_first_row_as_data_when_headerless() {
let input = r#"
a b
1 2
3 4
"#;
let result = string_to_table(input, true, true, 1);
assert_eq!(
result,
Some(vec![
vec![owned("Column1", "a"), owned("Column2", "b")],
vec![owned("Column1", "1"), owned("Column2", "2")],
vec![owned("Column1", "3"), owned("Column2", "4")]
])
);
}
#[test]
fn it_returns_none_given_an_empty_string() {
let input = "";
let result = string_to_table(input, true, true, 1);
assert!(result.is_none());
}
#[test]
fn it_allows_a_predefined_number_of_spaces() {
let input = r#"
column a column b
entry 1 entry number 2
3 four
"#;
let result = string_to_table(input, false, true, 3);
assert_eq!(
result,
Some(vec![
vec![
owned("column a", "entry 1"),
owned("column b", "entry number 2")
],
vec![owned("column a", "3"), owned("column b", "four")]
])
);
}
#[test]
fn it_trims_remaining_separator_space() {
let input = r#"
colA colB colC
val1 val2 val3
"#;
let trimmed = |s: &str| s.trim() == s;
let result = string_to_table(input, false, true, 2).unwrap();
assert!(result
.iter()
.all(|row| row.iter().all(|(a, b)| trimmed(a) && trimmed(b))))
}
#[test]
fn it_keeps_empty_columns() {
let input = r#"
colA col B col C
val2 val3
val4 val 5 val 6
val7 val8
"#;
let result = string_to_table(input, false, true, 2).unwrap();
assert_eq!(
result,
vec![
vec![
owned("colA", ""),
owned("col B", "val2"),
owned("col C", "val3")
],
vec![
owned("colA", "val4"),
owned("col B", "val 5"),
owned("col C", "val 6")
],
vec![
owned("colA", "val7"),
owned("col B", ""),
owned("col C", "val8")
],
]
)
}
#[test]
fn it_uses_the_full_final_column() {
let input = r#"
colA col B
val1 val2 trailing value that should be included
"#;
let result = string_to_table(input, false, true, 2).unwrap();
assert_eq!(
result,
vec![vec![
owned("colA", "val1"),
owned("col B", "val2 trailing value that should be included"),
],]
)
}
#[test]
fn it_handles_empty_values_when_headerless_and_aligned_columns() {
let input = r#"
a multi-word value b d
1 3-3 4
last
"#;
let result = string_to_table(input, true, true, 2).unwrap();
assert_eq!(
result,
vec![
vec![
owned("Column1", "a multi-word value"),
owned("Column2", "b"),
owned("Column3", ""),
owned("Column4", "d"),
owned("Column5", "")
],
vec![
owned("Column1", "1"),
owned("Column2", ""),
owned("Column3", "3-3"),
owned("Column4", "4"),
owned("Column5", "")
],
vec![
owned("Column1", ""),
owned("Column2", ""),
owned("Column3", ""),
owned("Column4", ""),
owned("Column5", "last")
],
]
)
}
#[test]
fn input_is_parsed_correctly_if_either_option_works() {
let input = r#"
docker-registry docker-registry=default docker-registry=default 172.30.78.158 5000/TCP
kubernetes component=apiserver,provider=kubernetes <none> 172.30.0.2 443/TCP
kubernetes-ro component=apiserver,provider=kubernetes <none> 172.30.0.1 80/TCP
"#;
let aligned_columns_headerless = string_to_table(input, true, true, 2).unwrap();
let separator_headerless = string_to_table(input, true, false, 2).unwrap();
let aligned_columns_with_headers = string_to_table(input, false, true, 2).unwrap();
let separator_with_headers = string_to_table(input, false, false, 2).unwrap();
assert_eq!(aligned_columns_headerless, separator_headerless);
assert_eq!(aligned_columns_with_headers, separator_with_headers);
}
}

View File

@ -0,0 +1,97 @@
use crate::data::{Primitive, TaggedDictBuilder, Value};
use crate::prelude::*;
use csv::ReaderBuilder;
fn from_stuctured_string_to_value(
s: String,
headerless: bool,
separator: char,
tag: impl Into<Tag>,
) -> Result<Tagged<Value>, csv::Error> {
let mut reader = ReaderBuilder::new()
.has_headers(!headerless)
.delimiter(separator as u8)
.from_reader(s.as_bytes());
let tag = tag.into();
let headers = if headerless {
(1..=reader.headers()?.len())
.map(|i| format!("Column{}", i))
.collect::<Vec<String>>()
} else {
reader.headers()?.iter().map(String::from).collect()
};
let mut rows = vec![];
for row in reader.records() {
let mut tagged_row = TaggedDictBuilder::new(&tag);
for (value, header) in row?.iter().zip(headers.iter()) {
tagged_row.insert_tagged(
header,
Value::Primitive(Primitive::String(String::from(value))).tagged(&tag),
)
}
rows.push(tagged_row.into_tagged_value());
}
Ok(Value::Table(rows).tagged(&tag))
}
pub fn from_structured_data(
headerless: bool,
sep: char,
format_name: &'static str,
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let name_tag = name;
let stream = async_stream! {
let values: Vec<Tagged<Value>> = input.values.collect().await;
let mut concat_string = String::new();
let mut latest_tag: Option<Tag> = None;
for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
concat_string.push_str("\n");
}
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
name_tag.clone(),
"value originates from here",
value_tag.clone(),
)),
}
}
match from_stuctured_string_to_value(concat_string, headerless, sep, name_tag.clone()) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
yield ReturnSuccess::value(l);
}
}
x => yield ReturnSuccess::value(x),
},
Err(_) => if let Some(last_tag) = latest_tag {
let line_one = format!("Could not parse as {}", format_name);
let line_two = format!("input cannot be parsed as {}", format_name);
yield Err(ShellError::labeled_error_with_secondary(
line_one,
line_two,
name_tag.clone(),
"value originates from here",
last_tag.clone(),
))
} ,
}
};
Ok(stream.to_output_stream())
}
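A hedged usage sketch of the shared helper above, callable from inside this module (the helper's name is used as spelled in the source; the input literal and the unwrap are illustrative only):
let parsed = from_stuctured_string_to_value(
    "name,count\nnu,1\n".to_string(),
    false,          // headerless: keep the first row as column names
    ',',            // separator
    Tag::unknown(),
)
.unwrap();
// parsed is a Value::Table with a single row: { name: "nu", count: "1" }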

View File

@ -36,7 +36,7 @@ pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into<Tag>) -> T
toml::Value::String(s) => Value::Primitive(Primitive::String(String::from(s))).tagged(tag),
toml::Value::Array(a) => Value::Table(
a.iter()
.map(|x| convert_toml_value_to_nu_value(x, tag))
.map(|x| convert_toml_value_to_nu_value(x, &tag))
.collect(),
)
.tagged(tag),
@ -44,10 +44,10 @@ pub fn convert_toml_value_to_nu_value(v: &toml::Value, tag: impl Into<Tag>) -> T
Value::Primitive(Primitive::String(dt.to_string())).tagged(tag)
}
toml::Value::Table(t) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(&tag);
for (k, v) in t.iter() {
collected.insert_tagged(k.clone(), convert_toml_value_to_nu_value(v, tag));
collected.insert_tagged(k.clone(), convert_toml_value_to_nu_value(v, &tag));
}
collected.into_tagged_value()
@ -79,7 +79,7 @@ pub fn from_toml(
for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@ -88,15 +88,15 @@ pub fn from_toml(
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
value_tag,
&value_tag,
)),
}
}
match from_toml_string_to_value(concat_string, tag) {
match from_toml_string_to_value(concat_string, tag.clone()) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
@ -109,7 +109,7 @@ pub fn from_toml(
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as TOML",
"input cannot be parsed as TOML",
tag,
&tag,
"value originates from here",
last_tag,
))

View File

@ -1,7 +1,6 @@
use crate::commands::from_structured_data::from_structured_data;
use crate::commands::WholeStreamCommand;
use crate::data::{Primitive, TaggedDictBuilder, Value};
use crate::prelude::*;
use csv::ReaderBuilder;
pub struct FromTSV;
@ -16,7 +15,8 @@ impl WholeStreamCommand for FromTSV {
}
fn signature(&self) -> Signature {
Signature::build("from-tsv").switch("headerless")
Signature::build("from-tsv")
.switch("headerless", "don't treat the first row as column names")
}
fn usage(&self) -> &str {
@ -32,108 +32,9 @@ impl WholeStreamCommand for FromTSV {
}
}
pub fn from_tsv_string_to_value(
s: String,
headerless: bool,
tag: impl Into<Tag>,
) -> Result<Tagged<Value>, csv::Error> {
let mut reader = ReaderBuilder::new()
.has_headers(false)
.delimiter(b'\t')
.from_reader(s.as_bytes());
let tag = tag.into();
let mut fields: VecDeque<String> = VecDeque::new();
let mut iter = reader.records();
let mut rows = vec![];
if let Some(result) = iter.next() {
let line = result?;
for (idx, item) in line.iter().enumerate() {
if headerless {
fields.push_back(format!("Column{}", idx + 1));
} else {
fields.push_back(item.to_string());
}
}
}
loop {
if let Some(row_values) = iter.next() {
let row_values = row_values?;
let mut row = TaggedDictBuilder::new(tag);
for (idx, entry) in row_values.iter().enumerate() {
row.insert_tagged(
fields.get(idx).unwrap(),
Value::Primitive(Primitive::String(String::from(entry))).tagged(tag),
);
}
rows.push(row.into_tagged_value());
} else {
break;
}
}
Ok(Tagged::from_item(Value::Table(rows), tag))
}
fn from_tsv(
FromTSVArgs {
headerless: skip_headers,
}: FromTSVArgs,
RunnableContext { input, name, .. }: RunnableContext,
FromTSVArgs { headerless }: FromTSVArgs,
runnable_context: RunnableContext,
) -> Result<OutputStream, ShellError> {
let name_tag = name;
let stream = async_stream! {
let values: Vec<Tagged<Value>> = input.values.collect().await;
let mut concat_string = String::new();
let mut latest_tag: Option<Tag> = None;
for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
concat_string.push_str("\n");
}
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
name_tag,
"value originates from here",
value_tag,
)),
}
}
match from_tsv_string_to_value(concat_string, skip_headers, name_tag) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
yield ReturnSuccess::value(l);
}
}
x => yield ReturnSuccess::value(x),
},
Err(_) => if let Some(last_tag) = latest_tag {
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as TSV",
"input cannot be parsed as TSV",
name_tag,
"value originates from here",
last_tag,
))
} ,
}
};
Ok(stream.to_output_stream())
from_structured_data(headerless, '\t', "TSV", runnable_context)
}

View File

@ -39,7 +39,7 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@ -47,9 +47,9 @@ fn from_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
value_tag,
&value_tag,
)),
}

View File

@ -34,7 +34,7 @@ fn from_node_to_value<'a, 'd>(n: &roxmltree::Node<'a, 'd>, tag: impl Into<Tag>)
let mut children_values = vec![];
for c in n.children() {
children_values.push(from_node_to_value(&c, tag));
children_values.push(from_node_to_value(&c, &tag));
}
let children_values: Vec<Tagged<Value>> = children_values
@ -94,7 +94,7 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@ -103,15 +103,15 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
value_tag,
&value_tag,
)),
}
}
match from_xml_string_to_value(concat_string, tag) {
match from_xml_string_to_value(concat_string, tag.clone()) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
@ -124,9 +124,9 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as XML",
"input cannot be parsed as XML",
tag,
&tag,
"value originates from here",
last_tag,
&last_tag,
))
} ,
}
@ -134,3 +134,73 @@ fn from_xml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStrea
Ok(stream.to_output_stream())
}
#[cfg(test)]
mod tests {
use crate::commands::from_xml;
use crate::data::meta::*;
use crate::Value;
use indexmap::IndexMap;
fn string(input: impl Into<String>) -> Tagged<Value> {
Value::string(input.into()).tagged_unknown()
}
fn row(entries: IndexMap<String, Tagged<Value>>) -> Tagged<Value> {
Value::row(entries).tagged_unknown()
}
fn table(list: &Vec<Tagged<Value>>) -> Tagged<Value> {
Value::table(list).tagged_unknown()
}
fn parse(xml: &str) -> Tagged<Value> {
from_xml::from_xml_string_to_value(xml.to_string(), Tag::unknown()).unwrap()
}
#[test]
fn parses_empty_element() {
let source = "<nu></nu>";
assert_eq!(
parse(source),
row(indexmap! {
"nu".into() => table(&vec![])
})
);
}
#[test]
fn parses_element_with_text() {
let source = "<nu>La era de los tres caballeros</nu>";
assert_eq!(
parse(source),
row(indexmap! {
"nu".into() => table(&vec![string("La era de los tres caballeros")])
})
);
}
#[test]
fn parses_element_with_elements() {
let source = "\
<nu>
<dev>Andrés</dev>
<dev>Jonathan</dev>
<dev>Yehuda</dev>
</nu>";
assert_eq!(
parse(source),
row(indexmap! {
"nu".into() => table(&vec![
row(indexmap! {"dev".into() => table(&vec![string("Andrés")])}),
row(indexmap! {"dev".into() => table(&vec![string("Jonathan")])}),
row(indexmap! {"dev".into() => table(&vec![string("Yehuda")])})
])
})
);
}
}

View File

@ -64,17 +64,17 @@ fn convert_yaml_value_to_nu_value(v: &serde_yaml::Value, tag: impl Into<Tag>) ->
serde_yaml::Value::String(s) => Value::string(s).tagged(tag),
serde_yaml::Value::Sequence(a) => Value::Table(
a.iter()
.map(|x| convert_yaml_value_to_nu_value(x, tag))
.map(|x| convert_yaml_value_to_nu_value(x, &tag))
.collect(),
)
.tagged(tag),
serde_yaml::Value::Mapping(t) => {
let mut collected = TaggedDictBuilder::new(tag);
let mut collected = TaggedDictBuilder::new(&tag);
for (k, v) in t.iter() {
match k {
serde_yaml::Value::String(k) => {
collected.insert_tagged(k.clone(), convert_yaml_value_to_nu_value(v, tag));
collected.insert_tagged(k.clone(), convert_yaml_value_to_nu_value(v, &tag));
}
_ => unimplemented!("Unknown key type"),
}
@ -108,7 +108,7 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
for value in values {
let value_tag = value.tag();
latest_tag = Some(value_tag);
latest_tag = Some(value_tag.clone());
match value.item {
Value::Primitive(Primitive::String(s)) => {
concat_string.push_str(&s);
@ -117,15 +117,15 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
value_tag,
&value_tag,
)),
}
}
match from_yaml_string_to_value(concat_string, tag) {
match from_yaml_string_to_value(concat_string, tag.clone()) {
Ok(x) => match x {
Tagged { item: Value::Table(list), .. } => {
for l in list {
@ -138,9 +138,9 @@ fn from_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
yield Err(ShellError::labeled_error_with_secondary(
"Could not parse as YAML",
"input cannot be parsed as YAML",
tag,
&tag,
"value originates from here",
last_tag,
&last_tag,
))
} ,
}

View File

@ -2,13 +2,15 @@ use crate::commands::WholeStreamCommand;
use crate::data::Value;
use crate::errors::ShellError;
use crate::prelude::*;
use crate::utils::did_you_mean;
use log::trace;
pub struct Get;
#[derive(Deserialize)]
pub struct GetArgs {
member: Tagged<String>,
rest: Vec<Tagged<String>>,
member: ColumnPath,
rest: Vec<ColumnPath>,
}
impl WholeStreamCommand for Get {
@ -18,8 +20,15 @@ impl WholeStreamCommand for Get {
fn signature(&self) -> Signature {
Signature::build("get")
.required("member", SyntaxShape::Member)
.rest(SyntaxShape::Member)
.required(
"member",
SyntaxShape::ColumnPath,
"the path to the data to get",
)
.rest(
SyntaxShape::ColumnPath,
"optionally return additional data by path",
)
}
fn usage(&self) -> &str {
@ -35,59 +44,98 @@ impl WholeStreamCommand for Get {
}
}
fn get_member(path: &Tagged<String>, obj: &Tagged<Value>) -> Result<Tagged<Value>, ShellError> {
let mut current = Some(obj);
for p in path.split(".") {
if let Some(obj) = current {
current = match obj.get_data_by_key(p) {
Some(v) => Some(v),
None =>
// Before we give up, see if they gave us a path that matches a field name by itself
{
match obj.get_data_by_key(&path.item) {
Some(v) => return Ok(v.clone()),
None => {
let possibilities = obj.data_descriptors();
pub type ColumnPath = Vec<Tagged<Value>>;
let mut possible_matches: Vec<_> = possibilities
.iter()
.map(|x| {
(natural::distance::levenshtein_distance(x, &path.item), x)
})
.collect();
pub fn get_column_path(
path: &ColumnPath,
obj: &Tagged<Value>,
) -> Result<Tagged<Value>, ShellError> {
let fields = path.clone();
possible_matches.sort();
let value = obj.get_data_by_column_path(
obj.tag(),
path,
Box::new(move |(obj_source, column_path_tried)| {
match obj_source {
Value::Table(rows) => {
let total = rows.len();
let end_tag = match fields.iter().nth_back(if fields.len() > 2 { 1 } else { 0 })
{
Some(last_field) => last_field.tag(),
None => column_path_tried.tag(),
};
if possible_matches.len() > 0 {
return Err(ShellError::labeled_error(
"Unknown column",
format!("did you mean '{}'?", possible_matches[0].1),
path.tag(),
));
return ShellError::labeled_error_with_secondary(
"Row not found",
format!(
"There isn't a row indexed at '{}'",
match &*column_path_tried {
Value::Primitive(primitive) => primitive.format(None),
_ => String::from(""),
}
None
}
}
),
column_path_tried.tag(),
format!("The table only has {} rows (0..{})", total, total - 1),
end_tag,
);
}
_ => {}
}
}
}
match current {
Some(v) => Ok(v.clone()),
None => match obj {
// If its None check for certain values.
Tagged {
item: Value::Primitive(Primitive::String(_)),
..
} => Ok(obj.clone()),
Tagged {
item: Value::Primitive(Primitive::Path(_)),
..
} => Ok(obj.clone()),
_ => Ok(Value::nothing().tagged(obj.tag)),
match &column_path_tried {
Tagged {
item: Value::Primitive(Primitive::Int(index)),
..
} => {
return ShellError::labeled_error(
"No rows available",
format!(
"Not a table. Perhaps you meant to get the column '{}' instead?",
index
),
column_path_tried.tag(),
)
}
_ => match did_you_mean(&obj_source, &column_path_tried) {
Some(suggestions) => {
return ShellError::labeled_error(
"Unknown column",
format!("did you mean '{}'?", suggestions[0].1),
tag_for_tagged_list(fields.iter().map(|p| p.tag())),
)
}
None => {
return ShellError::labeled_error(
"Unknown column",
"row does not contain this column",
tag_for_tagged_list(fields.iter().map(|p| p.tag())),
)
}
},
}
}),
);
let res = match value {
Ok(fetched) => match fetched {
Some(Tagged { item: v, .. }) => Ok((v.clone()).tagged(&obj.tag)),
None => match obj {
// If its None check for certain values.
Tagged {
item: Value::Primitive(Primitive::String(_)),
..
} => Ok(obj.clone()),
Tagged {
item: Value::Primitive(Primitive::Path(_)),
..
} => Ok(obj.clone()),
_ => Ok(Value::nothing().tagged(&obj.tag)),
},
},
}
Err(reason) => Err(reason),
};
res
}
pub fn get(
@ -97,6 +145,8 @@ pub fn get(
}: GetArgs,
RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
trace!("get {:?} {:?}", member, fields);
let stream = input
.values
.map(move |item| {
@ -104,26 +154,36 @@ pub fn get(
let member = vec![member.clone()];
let fields = vec![&member, &fields]
let column_paths = vec![&member, &fields]
.into_iter()
.flatten()
.collect::<Vec<&Tagged<String>>>();
.collect::<Vec<&ColumnPath>>();
for field in &fields {
match get_member(field, &item) {
Ok(Tagged {
item: Value::Table(l),
..
}) => {
for item in l {
result.push_back(ReturnSuccess::value(item.clone()));
for path in column_paths {
let res = get_column_path(&path, &item);
match res {
Ok(got) => match got {
Tagged {
item: Value::Table(rows),
..
} => {
for row in rows {
result.push_back(ReturnSuccess::value(
Tagged {
item: row.item,
tag: Tag::from(&item.tag),
}
.map_anchored(&item.tag.anchor),
))
}
}
}
Ok(x) => result.push_back(ReturnSuccess::value(x.clone())),
Err(x) => result.push_back(Err(x)),
other => result
.push_back(ReturnSuccess::value((*other).clone().tagged(&item.tag))),
},
Err(reason) => result.push_back(Err(reason)),
}
}
result
})
.flatten();
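
The error paths above lean on `did_you_mean` to suggest the closest existing column when a path member is not found. A minimal standalone sketch of that idea, assuming only the `natural` crate (already used nearby for Levenshtein distance) and a plain map standing in for a nu row:

use std::collections::BTreeMap;

// Hypothetical stand-in for a nu row: column name -> cell text, not nu's Value type.
type Row = BTreeMap<String, String>;

// Rank the row's column names by edit distance to the missing name and return the closest.
fn suggest_column(row: &Row, missing: &str) -> Option<String> {
    let mut candidates: Vec<(usize, String)> = row
        .keys()
        .map(|name| (natural::distance::levenshtein_distance(name, missing), name.clone()))
        .collect();
    candidates.sort();
    candidates.into_iter().next().map(|(_, name)| name)
}

fn main() {
    let mut row = Row::new();
    row.insert("name".into(), "nu".into());
    row.insert("country".into(), "EC".into());
    // Prints: Unknown column, did you mean 'name'?
    if let Some(suggestion) = suggest_column(&row, "nam") {
        println!("Unknown column, did you mean '{}'?", suggestion);
    }
}
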

217
src/commands/group_by.rs Normal file
View File

@ -0,0 +1,217 @@
use crate::commands::WholeStreamCommand;
use crate::data::TaggedDictBuilder;
use crate::errors::ShellError;
use crate::prelude::*;
pub struct GroupBy;
#[derive(Deserialize)]
pub struct GroupByArgs {
column_name: Tagged<String>,
}
impl WholeStreamCommand for GroupBy {
fn name(&self) -> &str {
"group-by"
}
fn signature(&self) -> Signature {
Signature::build("group-by").required(
"column_name",
SyntaxShape::String,
"the name of the column to group by",
)
}
fn usage(&self) -> &str {
"Creates a new table with the data from the table rows grouped by the column given."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, group_by)?.run()
}
}
pub fn group_by(
GroupByArgs { column_name }: GroupByArgs,
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let values: Vec<Tagged<Value>> = input.values.collect().await;
if values.is_empty() {
yield Err(ShellError::labeled_error(
"Expected table from pipeline",
"requires a table input",
column_name.span()
))
} else {
match group(&column_name, values, name) {
Ok(grouped) => yield ReturnSuccess::value(grouped),
Err(err) => yield Err(err)
}
}
};
Ok(stream.to_output_stream())
}
pub fn group(
column_name: &Tagged<String>,
values: Vec<Tagged<Value>>,
tag: impl Into<Tag>,
) -> Result<Tagged<Value>, ShellError> {
let tag = tag.into();
let mut groups = indexmap::IndexMap::new();
for value in values {
let group_key = value.get_data_by_key(column_name);
if group_key.is_none() {
let possibilities = value.data_descriptors();
let mut possible_matches: Vec<_> = possibilities
.iter()
.map(|x| (natural::distance::levenshtein_distance(x, column_name), x))
.collect();
possible_matches.sort();
if possible_matches.len() > 0 {
return Err(ShellError::labeled_error(
"Unknown column",
format!("did you mean '{}'?", possible_matches[0].1),
column_name.tag(),
));
} else {
return Err(ShellError::labeled_error(
"Unknown column",
"row does not contain this column",
column_name.tag(),
));
}
}
let group_key = group_key.unwrap().as_string()?;
let group = groups.entry(group_key).or_insert(vec![]);
group.push(value);
}
let mut out = TaggedDictBuilder::new(&tag);
for (k, v) in groups.iter() {
out.insert(k, Value::table(v));
}
Ok(out.into_tagged_value())
}
#[cfg(test)]
mod tests {
use crate::commands::group_by::group;
use crate::data::meta::*;
use crate::Value;
use indexmap::IndexMap;
fn string(input: impl Into<String>) -> Tagged<Value> {
Value::string(input.into()).tagged_unknown()
}
fn row(entries: IndexMap<String, Tagged<Value>>) -> Tagged<Value> {
Value::row(entries).tagged_unknown()
}
fn table(list: &Vec<Tagged<Value>>) -> Tagged<Value> {
Value::table(list).tagged_unknown()
}
fn nu_releases_commiters() -> Vec<Tagged<Value>> {
vec![
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")},
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")},
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")},
),
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")},
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")},
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")},
),
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")},
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")},
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")},
),
]
}
#[test]
fn groups_table_by_date_column() {
let for_key = String::from("date").tagged_unknown();
assert_eq!(
group(&for_key, nu_releases_commiters(), Tag::unknown()).unwrap(),
row(indexmap! {
"August 23-2019".into() => table(&vec![
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}),
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}),
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")})
]),
"October 10-2019".into() => table(&vec![
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}),
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}),
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")})
]),
"Sept 24-2019".into() => table(&vec![
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}),
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}),
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")})
]),
})
);
}
#[test]
fn groups_table_by_country_column() {
let for_key = String::from("country").tagged_unknown();
assert_eq!(
group(&for_key, nu_releases_commiters(), Tag::unknown()).unwrap(),
row(indexmap! {
"EC".into() => table(&vec![
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}),
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")}),
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")})
]),
"NZ".into() => table(&vec![
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}),
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}),
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")})
]),
"US".into() => table(&vec![
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}),
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")}),
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}),
]),
})
);
}
}
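
Stripped of `Tagged<Value>`, the `group` function above is an entry-or-insert accumulation into an `IndexMap`, which is what keeps the groups in first-appearance order. A simplified sketch over plain strings, assuming only the `indexmap` crate already used here:

use indexmap::IndexMap;

// Group (name, date) rows by the date column, preserving the order in which
// each date is first seen -- the same shape as `group` above, minus the tags.
fn group_by_date(rows: &[(&str, &str)]) -> IndexMap<String, Vec<String>> {
    let mut groups: IndexMap<String, Vec<String>> = IndexMap::new();
    for (name, date) in rows {
        groups
            .entry(date.to_string())
            .or_insert_with(Vec::new)
            .push(name.to_string());
    }
    groups
}

fn main() {
    let rows = [
        ("AR", "August 23-2019"),
        ("JT", "August 23-2019"),
        ("YK", "October 10-2019"),
    ];
    for (date, names) in group_by_date(&rows) {
        // Prints: August 23-2019: ["AR", "JT"], then October 10-2019: ["YK"]
        println!("{}: {:?}", date, names);
    }
}
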

View File

@ -12,7 +12,7 @@ impl PerItemCommand for Help {
}
fn signature(&self) -> registry::Signature {
Signature::build("help").rest(SyntaxShape::Any)
Signature::build("help").rest(SyntaxShape::Any, "the name of command(s) to get help on")
}
fn usage(&self) -> &str {
@ -26,7 +26,7 @@ impl PerItemCommand for Help {
_raw_args: &RawCommandArgs,
_input: Tagged<Value>,
) -> Result<OutputStream, ShellError> {
let tag = call_info.name_tag;
let tag = &call_info.name_tag;
match call_info.args.nth(0) {
Some(Tagged {
@ -61,12 +61,9 @@ impl PerItemCommand for Help {
let mut one_liner = String::new();
one_liner.push_str(&signature.name);
one_liner.push_str(" ");
if signature.named.len() > 0 {
one_liner.push_str("{flags} ");
}
for positional in signature.positional {
match positional {
for positional in &signature.positional {
match &positional.0 {
PositionalType::Mandatory(name, _m) => {
one_liner.push_str(&format!("<{}> ", name));
}
@ -77,25 +74,70 @@ impl PerItemCommand for Help {
}
if signature.rest_positional.is_some() {
one_liner.push_str(" ...args");
one_liner.push_str(&format!(" ...args",));
}
if signature.named.len() > 0 {
one_liner.push_str("{flags} ");
}
long_desc.push_str(&format!("\nUsage:\n > {}\n", one_liner));
if signature.positional.len() > 0 || signature.rest_positional.is_some() {
long_desc.push_str("\nparameters:\n");
for positional in signature.positional {
match positional.0 {
PositionalType::Mandatory(name, _m) => {
long_desc
.push_str(&format!(" <{}> {}\n", name, positional.1));
}
PositionalType::Optional(name, _o) => {
long_desc
.push_str(&format!(" ({}) {}\n", name, positional.1));
}
}
}
if signature.rest_positional.is_some() {
long_desc.push_str(&format!(
" ...args{} {}\n",
if signature.rest_positional.is_some() {
":"
} else {
""
},
signature.rest_positional.unwrap().1
));
}
}
if signature.named.len() > 0 {
long_desc.push_str("\nflags:\n");
for (flag, ty) in signature.named {
match ty {
match ty.0 {
NamedType::Switch => {
long_desc.push_str(&format!(" --{}\n", flag));
long_desc.push_str(&format!(
" --{}{} {}\n",
flag,
if ty.1.len() > 0 { ":" } else { "" },
ty.1
));
}
NamedType::Mandatory(m) => {
long_desc.push_str(&format!(
" --{} <{}> (required parameter)\n",
flag, m
" --{} <{}> (required parameter){} {}\n",
flag,
m,
if ty.1.len() > 0 { ":" } else { "" },
ty.1
));
}
NamedType::Optional(o) => {
long_desc.push_str(&format!(" --{} <{}>\n", flag, o));
long_desc.push_str(&format!(
" --{} <{}>{} {}\n",
flag,
o,
if ty.1.len() > 0 { ":" } else { "" },
ty.1
));
}
}
}

163
src/commands/histogram.rs Normal file
View File

@ -0,0 +1,163 @@
use crate::commands::evaluate_by::evaluate;
use crate::commands::group_by::group;
use crate::commands::map_max_by::map_max;
use crate::commands::reduce_by::reduce;
use crate::commands::t_sort_by::columns_sorted;
use crate::commands::t_sort_by::t_sort;
use crate::commands::WholeStreamCommand;
use crate::data::TaggedDictBuilder;
use crate::errors::ShellError;
use crate::prelude::*;
use num_traits::cast::ToPrimitive;
pub struct Histogram;
#[derive(Deserialize)]
pub struct HistogramArgs {
column_name: Tagged<String>,
rest: Vec<Tagged<String>>,
}
impl WholeStreamCommand for Histogram {
fn name(&self) -> &str {
"histogram"
}
fn signature(&self) -> Signature {
Signature::build("histogram")
.required(
"column_name",
SyntaxShape::String,
"the name of the column to graph by",
)
.rest(
SyntaxShape::Member,
"column name to give the histogram's frequency column",
)
}
fn usage(&self) -> &str {
"Creates a new table with a histogram based on the column name passed in."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, histogram)?.run()
}
}
pub fn histogram(
HistogramArgs { column_name, rest }: HistogramArgs,
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let values: Vec<Tagged<Value>> = input.values.collect().await;
let Tagged { item: group_by, .. } = column_name.clone();
let groups = group(&column_name, values, &name)?;
let group_labels = columns_sorted(Some(group_by.clone()), &groups, &name);
let sorted = t_sort(Some(group_by.clone()), None, &groups, &name)?;
let evaled = evaluate(&sorted, None, &name)?;
let reduced = reduce(&evaled, None, &name)?;
let maxima = map_max(&reduced, None, &name)?;
let percents = percentages(&reduced, maxima, &name)?;
match percents {
Tagged {
item: Value::Table(datasets),
..
} => {
let mut idx = 0;
let column_names_supplied: Vec<_> = rest.iter().map(|f| f.item.clone()).collect();
let frequency_column_name = if column_names_supplied.is_empty() {
"frecuency".to_string()
} else {
column_names_supplied[0].clone()
};
let column = (*column_name).clone();
if let Tagged { item: Value::Table(start), .. } = datasets.get(0).unwrap() {
for percentage in start.into_iter() {
let mut fact = TaggedDictBuilder::new(&name);
fact.insert_tagged(&column, group_labels.get(idx).unwrap().clone());
if let Tagged { item: Value::Primitive(Primitive::Int(ref num)), .. } = percentage.clone() {
fact.insert(&frequency_column_name, std::iter::repeat("*").take(num.to_i32().unwrap() as usize).collect::<String>());
}
idx = idx + 1;
yield ReturnSuccess::value(fact.into_tagged_value());
}
}
}
_ => {}
}
};
Ok(stream.to_output_stream())
}
fn percentages(
values: &Tagged<Value>,
max: Tagged<Value>,
tag: impl Into<Tag>,
) -> Result<Tagged<Value>, ShellError> {
let tag = tag.into();
let results: Tagged<Value> = match values {
Tagged {
item: Value::Table(datasets),
..
} => {
let datasets: Vec<_> = datasets
.into_iter()
.map(|subsets| match subsets {
Tagged {
item: Value::Table(data),
..
} => {
let data = data
.into_iter()
.map(|d| match d {
Tagged {
item: Value::Primitive(Primitive::Int(n)),
..
} => {
let max = match max {
Tagged {
item: Value::Primitive(Primitive::Int(ref maxima)),
..
} => maxima.to_i32().unwrap(),
_ => 0,
};
let n = { n.to_i32().unwrap() * 100 / max };
Value::number(n).tagged(&tag)
}
_ => Value::number(0).tagged(&tag),
})
.collect::<Vec<_>>();
Value::Table(data).tagged(&tag)
}
_ => Value::Table(vec![]).tagged(&tag),
})
.collect();
Value::Table(datasets).tagged(&tag)
}
other => other.clone(),
};
Ok(results)
}
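
The bar drawing above comes down to two steps: scale each group's count to a percentage of the largest count, then repeat a `*` marker that many times for the frequency column. A simplified sketch of that arithmetic outside nu's value types:

// Scale counts to percentages of the largest count and render them as '*' bars,
// mirroring `percentages` plus the star-repeat in the histogram stream above.
fn histogram_bars(counts: &[i32]) -> Vec<String> {
    let max = counts.iter().copied().max().unwrap_or(0);
    counts
        .iter()
        .map(|&n| {
            let percent = if max > 0 { n * 100 / max } else { 0 };
            std::iter::repeat('*').take(percent as usize).collect()
        })
        .collect()
}

fn main() {
    // Three groups with 3, 3 and 4 rows give bars of 75, 75 and 100 stars.
    for bar in histogram_bars(&[3, 3, 4]) {
        println!("{}", bar);
    }
}
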

49
src/commands/history.rs Normal file
View File

@ -0,0 +1,49 @@
use crate::cli::History as HistoryFile;
use crate::commands::PerItemCommand;
use crate::errors::ShellError;
use crate::parser::registry::{self};
use crate::prelude::*;
use std::fs::File;
use std::io::{BufRead, BufReader};
pub struct History;
impl PerItemCommand for History {
fn name(&self) -> &str {
"history"
}
fn signature(&self) -> registry::Signature {
Signature::build("history")
}
fn usage(&self) -> &str {
"Display command history."
}
fn run(
&self,
call_info: &CallInfo,
_registry: &CommandRegistry,
_raw_args: &RawCommandArgs,
_input: Tagged<Value>,
) -> Result<OutputStream, ShellError> {
let tag = call_info.name_tag.clone();
let stream = async_stream! {
let history_path = HistoryFile::path();
let file = File::open(history_path);
if let Ok(file) = file {
let reader = BufReader::new(file);
for line in reader.lines() {
if let Ok(line) = line {
yield ReturnSuccess::value(Value::string(line).tagged(tag.clone()));
}
}
} else {
yield Err(ShellError::labeled_error("Could not open history", "history file could not be opened", tag.clone()));
}
};
Ok(stream.to_output_stream())
}
}

View File

@ -7,7 +7,7 @@ pub struct Last;
#[derive(Deserialize)]
pub struct LastArgs {
amount: Tagged<u64>,
rows: Option<Tagged<u64>>,
}
impl WholeStreamCommand for Last {
@ -16,7 +16,11 @@ impl WholeStreamCommand for Last {
}
fn signature(&self) -> Signature {
Signature::build("last").required("amount", SyntaxShape::Number)
Signature::build("last").optional(
"rows",
SyntaxShape::Number,
"starting from the back, the number of rows to return",
)
}
fn usage(&self) -> &str {
@ -32,13 +36,17 @@ impl WholeStreamCommand for Last {
}
}
fn last(
LastArgs { amount }: LastArgs,
context: RunnableContext,
) -> Result<OutputStream, ShellError> {
fn last(LastArgs { rows }: LastArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let v: Vec<_> = context.input.into_vec().await;
let count = (*amount as usize);
let rows_desired = if let Some(quantity) = rows {
*quantity
} else {
1
};
let count = (rows_desired as usize);
if count < v.len() {
let k = v.len() - count;
for x in v[k..].iter() {

View File

@ -58,7 +58,7 @@ fn lines(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
result.push_back(Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
v.tag(),
)));

View File

@ -16,7 +16,11 @@ impl WholeStreamCommand for LS {
}
fn signature(&self) -> Signature {
Signature::build("ls").optional("path", SyntaxShape::Pattern)
Signature::build("ls").optional(
"path",
SyntaxShape::Pattern,
"a path to get the directory contents from",
)
}
fn usage(&self) -> &str {
@ -34,5 +38,5 @@ impl WholeStreamCommand for LS {
}
fn ls(LsArgs { path }: LsArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
context.shell_manager.ls(path, context.name)
context.shell_manager.ls(path, &context)
}

225
src/commands/map_max_by.rs Normal file
View File

@ -0,0 +1,225 @@
use crate::commands::WholeStreamCommand;
use crate::parser::hir::SyntaxShape;
use crate::prelude::*;
use num_traits::cast::ToPrimitive;
pub struct MapMaxBy;
#[derive(Deserialize)]
pub struct MapMaxByArgs {
column_name: Option<Tagged<String>>,
}
impl WholeStreamCommand for MapMaxBy {
fn name(&self) -> &str {
"map-max-by"
}
fn signature(&self) -> Signature {
Signature::build("map-max-by").named(
"column_name",
SyntaxShape::String,
"the name of the column to map-max the table's rows",
)
}
fn usage(&self) -> &str {
"Creates a new table with the data from the tables rows maxed by the column given."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, map_max_by)?.run()
}
}
pub fn map_max_by(
MapMaxByArgs { column_name }: MapMaxByArgs,
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let values: Vec<Tagged<Value>> = input.values.collect().await;
if values.is_empty() {
yield Err(ShellError::labeled_error(
"Expected table from pipeline",
"requires a table input",
name
))
} else {
let map_by_column = if let Some(column_to_map) = column_name {
Some(column_to_map.item().clone())
} else {
None
};
match map_max(&values[0], map_by_column, name) {
Ok(table_maxed) => yield ReturnSuccess::value(table_maxed),
Err(err) => yield Err(err)
}
}
};
Ok(stream.to_output_stream())
}
pub fn map_max(
values: &Tagged<Value>,
_map_by_column_name: Option<String>,
tag: impl Into<Tag>,
) -> Result<Tagged<Value>, ShellError> {
let tag = tag.into();
let results: Tagged<Value> = match values {
Tagged {
item: Value::Table(datasets),
..
} => {
let datasets: Vec<_> = datasets
.into_iter()
.map(|subsets| match subsets {
Tagged {
item: Value::Table(data),
..
} => {
let data = data.into_iter().fold(0, |acc, value| match value {
Tagged {
item: Value::Primitive(Primitive::Int(n)),
..
} => {
if n.to_i32().unwrap() > acc {
n.to_i32().unwrap()
} else {
acc
}
}
_ => acc,
});
Value::number(data).tagged(&tag)
}
_ => Value::number(0).tagged(&tag),
})
.collect();
let datasets = datasets.iter().fold(0, |max, value| match value {
Tagged {
item: Value::Primitive(Primitive::Int(n)),
..
} => {
if n.to_i32().unwrap() > max {
n.to_i32().unwrap()
} else {
max
}
}
_ => max,
});
Value::number(datasets).tagged(&tag)
}
_ => Value::number(-1).tagged(&tag),
};
Ok(results)
}
#[cfg(test)]
mod tests {
use crate::commands::evaluate_by::evaluate;
use crate::commands::group_by::group;
use crate::commands::map_max_by::map_max;
use crate::commands::reduce_by::reduce;
use crate::commands::t_sort_by::t_sort;
use crate::data::meta::*;
use crate::prelude::*;
use crate::Value;
use indexmap::IndexMap;
fn int(s: impl Into<BigInt>) -> Tagged<Value> {
Value::int(s).tagged_unknown()
}
fn string(input: impl Into<String>) -> Tagged<Value> {
Value::string(input.into()).tagged_unknown()
}
fn row(entries: IndexMap<String, Tagged<Value>>) -> Tagged<Value> {
Value::row(entries).tagged_unknown()
}
fn nu_releases_evaluated_by_default_one() -> Tagged<Value> {
evaluate(&nu_releases_sorted_by_date(), None, Tag::unknown()).unwrap()
}
fn nu_releases_reduced_by_sum() -> Tagged<Value> {
reduce(
&nu_releases_evaluated_by_default_one(),
Some(String::from("sum")),
Tag::unknown(),
)
.unwrap()
}
fn nu_releases_sorted_by_date() -> Tagged<Value> {
let key = String::from("date");
t_sort(
Some(key),
None,
&nu_releases_grouped_by_date(),
Tag::unknown(),
)
.unwrap()
}
fn nu_releases_grouped_by_date() -> Tagged<Value> {
let key = String::from("date").tagged_unknown();
group(&key, nu_releases_commiters(), Tag::unknown()).unwrap()
}
fn nu_releases_commiters() -> Vec<Tagged<Value>> {
vec![
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")},
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")},
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")},
),
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("September 24-2019")},
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")},
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("September 24-2019")},
),
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")},
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("September 24-2019")},
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")},
),
row(
indexmap! {"name".into() => string("JK"), "country".into() => string("US"), "date".into() => string("August 23-2019")},
),
]
}
#[test]
fn maps_and_gets_max_value() {
assert_eq!(
map_max(&nu_releases_reduced_by_sum(), None, Tag::unknown()).unwrap(),
int(4)
);
}
}
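
`map_max` above is a nested fold: each inner table collapses to its own maximum, then those maxima collapse to a single number. The same shape over plain integers, as a sketch:

// Collapse a table-of-tables of counts to the single largest value,
// mirroring the two fold passes in `map_max` above.
fn map_max(datasets: &[Vec<i32>]) -> i32 {
    datasets
        .iter()
        .map(|data| data.iter().copied().fold(0, i32::max))
        .fold(0, i32::max)
}

fn main() {
    // [[2, 1], [4, 3]] -> 4
    println!("{}", map_max(&[vec![2, 1], vec![4, 3]]));
}
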

View File

@ -17,7 +17,7 @@ impl PerItemCommand for Mkdir {
}
fn signature(&self) -> Signature {
Signature::build("mkdir").rest(SyntaxShape::Path)
Signature::build("mkdir").rest(SyntaxShape::Path, "the name(s) of the path(s) to create")
}
fn usage(&self) -> &str {

View File

@ -20,9 +20,16 @@ impl PerItemCommand for Move {
fn signature(&self) -> Signature {
Signature::build("mv")
.required("source", SyntaxShape::Pattern)
.required("destination", SyntaxShape::Path)
.named("file", SyntaxShape::Any)
.required(
"source",
SyntaxShape::Pattern,
"the location to move files/directories from",
)
.required(
"destination",
SyntaxShape::Path,
"the location to move files/directories to",
)
}
fn usage(&self) -> &str {

View File

@ -16,7 +16,11 @@ impl WholeStreamCommand for Nth {
}
fn signature(&self) -> Signature {
Signature::build("nth").required("row number", SyntaxShape::Any)
Signature::build("nth").required(
"row number",
SyntaxShape::Any,
"the number of the row to return",
)
}
fn usage(&self) -> &str {

View File

@ -7,7 +7,6 @@ use crate::parser::hir::SyntaxShape;
use crate::parser::registry::Signature;
use crate::prelude::*;
use std::path::{Path, PathBuf};
use uuid::Uuid;
pub struct Open;
impl PerItemCommand for Open {
@ -17,8 +16,12 @@ impl PerItemCommand for Open {
fn signature(&self) -> Signature {
Signature::build(self.name())
.required("path", SyntaxShape::Path)
.switch("raw")
.required(
"path",
SyntaxShape::Path,
"the file path to load values from",
)
.switch("raw", "load content as a string insead of a table")
}
fn usage(&self) -> &str {
@ -45,16 +48,18 @@ fn run(
let cwd = PathBuf::from(shell_manager.path());
let full_path = PathBuf::from(cwd);
let path = match call_info
.args
.nth(0)
.ok_or_else(|| ShellError::string(&format!("No file or directory specified")))?
{
let path = match call_info.args.nth(0).ok_or_else(|| {
ShellError::labeled_error(
"No file or directory specified",
"for command",
&call_info.name_tag,
)
})? {
file => file,
};
let path_buf = path.as_path()?;
let path_str = path_buf.display().to_string();
let path_span = path.span();
let path_span = path.tag.span;
let has_raw = call_info.args.has("raw");
let registry = registry.clone();
let raw_args = raw_args.clone();
@ -67,7 +72,7 @@ fn run(
yield Err(e);
return;
}
let (file_extension, contents, contents_tag, anchor_location) = result.unwrap();
let (file_extension, contents, contents_tag) = result.unwrap();
let file_extension = if has_raw {
None
@ -77,21 +82,14 @@ fn run(
file_extension.or(path_str.split('.').last().map(String::from))
};
if contents_tag.anchor != uuid::Uuid::nil() {
// If we have loaded something, track its source
yield ReturnSuccess::action(CommandAction::AddAnchorLocation(
contents_tag.anchor,
anchor_location,
));
}
let tagged_contents = contents.tagged(contents_tag);
let tagged_contents = contents.tagged(&contents_tag);
if let Some(extension) = file_extension {
let command_name = format!("from-{}", extension);
if let Some(converter) = registry.get_command(&command_name) {
let new_args = RawCommandArgs {
host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call {
@ -100,11 +98,10 @@ fn run(
named: None
},
source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag,
}
};
let mut result = converter.run(new_args.with_input(vec![tagged_contents]), &registry, false);
let mut result = converter.run(new_args.with_input(vec![tagged_contents]), &registry);
let result_vec: Vec<Result<ReturnSuccess, ShellError>> = result.drain_vec().await;
for res in result_vec {
match res {
@ -114,7 +111,7 @@ fn run(
}
}
Ok(ReturnSuccess::Value(Tagged { item, .. })) => {
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag }));
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() }));
}
x => yield x,
}
@ -134,7 +131,7 @@ pub async fn fetch(
cwd: &PathBuf,
location: &str,
span: Span,
) -> Result<(Option<String>, Value, Tag, AnchorLocation), ShellError> {
) -> Result<(Option<String>, Value, Tag), ShellError> {
let mut cwd = cwd.clone();
cwd.push(Path::new(location));
@ -147,9 +144,8 @@ pub async fn fetch(
Value::string(s),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::File(cwd.to_string_lossy().to_string())),
},
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)),
Err(_) => {
//Non utf8 data.
@ -166,18 +162,20 @@ pub async fn fetch(
Value::string(s),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
},
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)),
Err(_) => Ok((
None,
Value::binary(bytes),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
},
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)),
}
} else {
@ -186,9 +184,10 @@ pub async fn fetch(
Value::binary(bytes),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
},
AnchorLocation::File(cwd.to_string_lossy().to_string()),
))
}
}
@ -204,18 +203,20 @@ pub async fn fetch(
Value::string(s),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
},
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)),
Err(_) => Ok((
None,
Value::binary(bytes),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
},
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)),
}
} else {
@ -224,9 +225,10 @@ pub async fn fetch(
Value::binary(bytes),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
},
AnchorLocation::File(cwd.to_string_lossy().to_string()),
))
}
}
@ -235,9 +237,10 @@ pub async fn fetch(
Value::binary(bytes),
Tag {
span,
anchor: Uuid::new_v4(),
anchor: Some(AnchorLocation::File(
cwd.to_string_lossy().to_string(),
)),
},
AnchorLocation::File(cwd.to_string_lossy().to_string()),
)),
}
}

View File

@ -17,7 +17,7 @@ impl WholeStreamCommand for Pick {
}
fn signature(&self) -> Signature {
Signature::build("pick").rest(SyntaxShape::Any)
Signature::build("pick").rest(SyntaxShape::Any, "the columns to select from the table")
}
fn usage(&self) -> &str {

View File

@ -21,9 +21,12 @@ impl WholeStreamCommand for Pivot {
fn signature(&self) -> Signature {
Signature::build("pivot")
.switch("header-row")
.switch("ignore-titles")
.rest(SyntaxShape::String)
.switch("header-row", "treat the first row as column names")
.switch("ignore-titles", "don't pivot the column names into values")
.rest(
SyntaxShape::String,
"the names to give columns once pivoted",
)
}
fn usage(&self) -> &str {
@ -104,7 +107,7 @@ pub fn pivot(args: PivotArgs, context: RunnableContext) -> Result<OutputStream,
for desc in descs {
let mut column_num: usize = 0;
let mut dict = TaggedDictBuilder::new(context.name);
let mut dict = TaggedDictBuilder::new(&context.name);
if !args.ignore_titles && !args.header_row {
dict.insert(headers[column_num].clone(), Value::string(desc.clone()));

View File

@ -128,7 +128,7 @@ pub fn filter_plugin(
},
Err(e) => {
let mut result = VecDeque::new();
result.push_back(Err(ShellError::string(format!(
result.push_back(Err(ShellError::untagged_runtime_error(format!(
"Error while processing begin_filter response: {:?} {}",
e, input
))));
@ -138,7 +138,7 @@ pub fn filter_plugin(
}
Err(e) => {
let mut result = VecDeque::new();
result.push_back(Err(ShellError::string(format!(
result.push_back(Err(ShellError::untagged_runtime_error(format!(
"Error while reading begin_filter response: {:?}",
e
))));
@ -189,7 +189,7 @@ pub fn filter_plugin(
},
Err(e) => {
let mut result = VecDeque::new();
result.push_back(Err(ShellError::string(format!(
result.push_back(Err(ShellError::untagged_runtime_error(format!(
"Error while processing end_filter response: {:?} {}",
e, input
))));
@ -199,7 +199,7 @@ pub fn filter_plugin(
}
Err(e) => {
let mut result = VecDeque::new();
result.push_back(Err(ShellError::string(format!(
result.push_back(Err(ShellError::untagged_runtime_error(format!(
"Error while reading end_filter: {:?}",
e
))));
@ -236,7 +236,7 @@ pub fn filter_plugin(
},
Err(e) => {
let mut result = VecDeque::new();
result.push_back(Err(ShellError::string(format!(
result.push_back(Err(ShellError::untagged_runtime_error(format!(
"Error while processing filter response: {:?} {}",
e, input
))));
@ -246,7 +246,7 @@ pub fn filter_plugin(
}
Err(e) => {
let mut result = VecDeque::new();
result.push_back(Err(ShellError::string(format!(
result.push_back(Err(ShellError::untagged_runtime_error(format!(
"Error while reading filter response: {:?}",
e
))));

View File

@ -25,13 +25,25 @@ impl PerItemCommand for Post {
fn signature(&self) -> Signature {
Signature::build(self.name())
.required("path", SyntaxShape::Any)
.required("body", SyntaxShape::Any)
.named("user", SyntaxShape::Any)
.named("password", SyntaxShape::Any)
.named("content-type", SyntaxShape::Any)
.named("content-length", SyntaxShape::Any)
.switch("raw")
.required("path", SyntaxShape::Any, "the URL to post to")
.required("body", SyntaxShape::Any, "the contents of the post body")
.named("user", SyntaxShape::Any, "the username when authenticating")
.named(
"password",
SyntaxShape::Any,
"the password when authenticating",
)
.named(
"content-type",
SyntaxShape::Any,
"the MIME type of content to post",
)
.named(
"content-length",
SyntaxShape::Any,
"the length of the content being posted",
)
.switch("raw", "return values as a string instead of a table")
}
fn usage(&self) -> &str {
@ -54,21 +66,20 @@ fn run(
registry: &CommandRegistry,
raw_args: &RawCommandArgs,
) -> Result<OutputStream, ShellError> {
let name_tag = call_info.name_tag.clone();
let call_info = call_info.clone();
let path = match call_info
.args
.nth(0)
.ok_or_else(|| ShellError::string(&format!("No url specified")))?
{
file => file.clone(),
};
let body = match call_info
.args
.nth(1)
.ok_or_else(|| ShellError::string(&format!("No body specified")))?
{
file => file.clone(),
};
let path =
match call_info.args.nth(0).ok_or_else(|| {
ShellError::labeled_error("No url specified", "for command", &name_tag)
})? {
file => file.clone(),
};
let body =
match call_info.args.nth(1).ok_or_else(|| {
ShellError::labeled_error("No body specified", "for command", &name_tag)
})? {
file => file.clone(),
};
let path_str = path.as_string()?;
let path_span = path.tag();
let has_raw = call_info.args.has("raw");
@ -83,7 +94,7 @@ fn run(
let headers = get_headers(&call_info)?;
let stream = async_stream! {
let (file_extension, contents, contents_tag, anchor_location) =
let (file_extension, contents, contents_tag) =
post(&path_str, &body, user, password, &headers, path_span, &registry, &raw_args).await.unwrap();
let file_extension = if has_raw {
@ -94,21 +105,14 @@ fn run(
file_extension.or(path_str.split('.').last().map(String::from))
};
if contents_tag.anchor != uuid::Uuid::nil() {
// If we have loaded something, track its source
yield ReturnSuccess::action(CommandAction::AddAnchorLocation(
contents_tag.anchor,
anchor_location,
));
}
let tagged_contents = contents.tagged(contents_tag);
let tagged_contents = contents.tagged(&contents_tag);
if let Some(extension) = file_extension {
let command_name = format!("from-{}", extension);
if let Some(converter) = registry.get_command(&command_name) {
let new_args = RawCommandArgs {
host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call {
@ -117,11 +121,10 @@ fn run(
named: None
},
source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag,
}
};
let mut result = converter.run(new_args.with_input(vec![tagged_contents]), &registry, false);
let mut result = converter.run(new_args.with_input(vec![tagged_contents]), &registry);
let result_vec: Vec<Result<ReturnSuccess, ShellError>> = result.drain_vec().await;
for res in result_vec {
match res {
@ -131,7 +134,7 @@ fn run(
}
}
Ok(ReturnSuccess::Value(Tagged { item, .. })) => {
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag }));
yield Ok(ReturnSuccess::Value(Tagged { item, tag: contents_tag.clone() }));
}
x => yield x,
}
@ -211,7 +214,7 @@ pub async fn post(
tag: Tag,
registry: &CommandRegistry,
raw_args: &RawCommandArgs,
) -> Result<(Option<String>, Value, Tag, AnchorLocation), ShellError> {
) -> Result<(Option<String>, Value, Tag), ShellError> {
let registry = registry.clone();
let raw_args = raw_args.clone();
if location.starts_with("http:") || location.starts_with("https:") {
@ -252,6 +255,7 @@ pub async fn post(
if let Some(converter) = registry.get_command("to-json") {
let new_args = RawCommandArgs {
host: raw_args.host,
ctrl_c: raw_args.ctrl_c,
shell_manager: raw_args.shell_manager,
call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call {
@ -260,14 +264,12 @@ pub async fn post(
named: None,
},
source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag,
},
};
let mut result = converter.run(
new_args.with_input(vec![item.clone().tagged(tag.clone())]),
&registry,
false,
);
let result_vec: Vec<Result<ReturnSuccess, ShellError>> =
result.drain_vec().await;
@ -284,7 +286,7 @@ pub async fn post(
return Err(ShellError::labeled_error(
"Save could not successfully save",
"unexpected data during save",
*tag,
tag,
));
}
}
@ -300,7 +302,7 @@ pub async fn post(
return Err(ShellError::labeled_error(
"Could not automatically convert table",
"needs manual conversion",
*tag,
tag,
));
}
}
@ -316,11 +318,13 @@ pub async fn post(
ShellError::labeled_error(
"Could not load text from remote url",
"could not load",
tag,
&tag,
)
})?),
tag,
AnchorLocation::Url(location.to_string()),
Tag {
anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)),
(mime::APPLICATION, mime::JSON) => Ok((
Some("json".to_string()),
@ -328,25 +332,29 @@ pub async fn post(
ShellError::labeled_error(
"Could not load text from remote url",
"could not load",
tag,
&tag,
)
})?),
tag,
AnchorLocation::Url(location.to_string()),
Tag {
anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)),
(mime::APPLICATION, mime::OCTET_STREAM) => {
let buf: Vec<u8> = r.body_bytes().await.map_err(|_| {
ShellError::labeled_error(
"Could not load binary file",
"could not load",
tag,
&tag,
)
})?;
Ok((
None,
Value::binary(buf),
tag,
AnchorLocation::Url(location.to_string()),
Tag {
anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
))
}
(mime::IMAGE, image_ty) => {
@ -354,14 +362,16 @@ pub async fn post(
ShellError::labeled_error(
"Could not load image file",
"could not load",
tag,
&tag,
)
})?;
Ok((
Some(image_ty.to_string()),
Value::binary(buf),
tag,
AnchorLocation::Url(location.to_string()),
Tag {
anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
))
}
(mime::TEXT, mime::HTML) => Ok((
@ -370,11 +380,13 @@ pub async fn post(
ShellError::labeled_error(
"Could not load text from remote url",
"could not load",
tag,
&tag,
)
})?),
tag,
AnchorLocation::Url(location.to_string()),
Tag {
anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)),
(mime::TEXT, mime::PLAIN) => {
let path_extension = url::Url::parse(location)
@ -394,11 +406,13 @@ pub async fn post(
ShellError::labeled_error(
"Could not load text from remote url",
"could not load",
tag,
&tag,
)
})?),
tag,
AnchorLocation::Url(location.to_string()),
Tag {
anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
))
}
(ty, sub_ty) => Ok((
@ -407,16 +421,20 @@ pub async fn post(
"Not yet supported MIME type: {} {}",
ty, sub_ty
)),
tag,
AnchorLocation::Url(location.to_string()),
Tag {
anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)),
}
}
None => Ok((
None,
Value::string(format!("No content type found")),
tag,
AnchorLocation::Url(location.to_string()),
Tag {
anchor: Some(AnchorLocation::Url(location.to_string())),
span: tag.span,
},
)),
},
Err(_) => {

47
src/commands/prepend.rs Normal file
View File

@ -0,0 +1,47 @@
use crate::commands::WholeStreamCommand;
use crate::errors::ShellError;
use crate::parser::CommandRegistry;
use crate::prelude::*;
#[derive(Deserialize)]
struct PrependArgs {
row: Tagged<Value>,
}
pub struct Prepend;
impl WholeStreamCommand for Prepend {
fn name(&self) -> &str {
"prepend"
}
fn signature(&self) -> Signature {
Signature::build("prepend").required(
"row value",
SyntaxShape::Any,
"the value of the row to prepend to the table",
)
}
fn usage(&self) -> &str {
"Prepend the given row to the front of the table"
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, prepend)?.run()
}
}
fn prepend(
PrependArgs { row }: PrependArgs,
RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let mut prepend: VecDeque<Tagged<Value>> = VecDeque::new();
prepend.push_back(row);
Ok(OutputStream::from_input(prepend.chain(input.values)))
}

257
src/commands/reduce_by.rs Normal file
View File

@ -0,0 +1,257 @@
use crate::commands::WholeStreamCommand;
use crate::parser::hir::SyntaxShape;
use crate::prelude::*;
use num_traits::cast::ToPrimitive;
pub struct ReduceBy;
#[derive(Deserialize)]
pub struct ReduceByArgs {
reduce_with: Option<Tagged<String>>,
}
impl WholeStreamCommand for ReduceBy {
fn name(&self) -> &str {
"reduce-by"
}
fn signature(&self) -> Signature {
Signature::build("reduce-by").named(
"reduce_with",
SyntaxShape::String,
"the command to reduce by with",
)
}
fn usage(&self) -> &str {
"Creates a new table with the data from the tables rows reduced by the command given."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, reduce_by)?.run()
}
}
pub fn reduce_by(
ReduceByArgs { reduce_with }: ReduceByArgs,
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let values: Vec<Tagged<Value>> = input.values.collect().await;
if values.is_empty() {
yield Err(ShellError::labeled_error(
"Expected table from pipeline",
"requires a table input",
name
))
} else {
let reduce_with = if let Some(reducer) = reduce_with {
Some(reducer.item().clone())
} else {
None
};
match reduce(&values[0], reduce_with, name) {
Ok(reduced) => yield ReturnSuccess::value(reduced),
Err(err) => yield Err(err)
}
}
};
Ok(stream.to_output_stream())
}
fn sum(data: Vec<Tagged<Value>>) -> i32 {
data.into_iter().fold(0, |acc, value| match value {
Tagged {
item: Value::Primitive(Primitive::Int(n)),
..
} => acc + n.to_i32().unwrap(),
_ => acc,
})
}
fn formula(
acc_begin: i32,
calculator: Box<dyn Fn(Vec<Tagged<Value>>) -> i32 + 'static>,
) -> Box<dyn Fn(i32, Vec<Tagged<Value>>) -> i32 + 'static> {
Box::new(move |acc, datax| -> i32 {
let result = acc * acc_begin;
result + calculator(datax)
})
}
fn reducer_for(command: Reduce) -> Box<dyn Fn(i32, Vec<Tagged<Value>>) -> i32 + 'static> {
match command {
Reduce::Sum | Reduce::Default => Box::new(formula(0, Box::new(sum))),
}
}
pub enum Reduce {
Sum,
Default,
}
pub fn reduce(
values: &Tagged<Value>,
reducer: Option<String>,
tag: impl Into<Tag>,
) -> Result<Tagged<Value>, ShellError> {
let tag = tag.into();
let reduce_with = match reducer {
Some(cmd) if cmd == "sum" => reducer_for(Reduce::Sum),
Some(_) | None => reducer_for(Reduce::Default),
};
let results: Tagged<Value> = match values {
Tagged {
item: Value::Table(datasets),
..
} => {
let datasets: Vec<_> = datasets
.into_iter()
.map(|subsets| {
let mut acc = 0;
match subsets {
Tagged {
item: Value::Table(data),
..
} => {
let data = data
.into_iter()
.map(|d| {
if let Tagged {
item: Value::Table(x),
..
} = d
{
acc = reduce_with(acc, x.clone());
Value::number(acc).tagged(&tag)
} else {
Value::number(0).tagged(&tag)
}
})
.collect::<Vec<_>>();
Value::Table(data).tagged(&tag)
}
_ => Value::Table(vec![]).tagged(&tag),
}
})
.collect();
Value::Table(datasets).tagged(&tag)
}
_ => Value::Table(vec![]).tagged(&tag),
};
Ok(results)
}
#[cfg(test)]
mod tests {
use crate::commands::evaluate_by::evaluate;
use crate::commands::group_by::group;
use crate::commands::reduce_by::{reduce, reducer_for, Reduce};
use crate::commands::t_sort_by::t_sort;
use crate::data::meta::*;
use crate::prelude::*;
use crate::Value;
use indexmap::IndexMap;
fn int(s: impl Into<BigInt>) -> Tagged<Value> {
Value::int(s).tagged_unknown()
}
fn string(input: impl Into<String>) -> Tagged<Value> {
Value::string(input.into()).tagged_unknown()
}
fn row(entries: IndexMap<String, Tagged<Value>>) -> Tagged<Value> {
Value::row(entries).tagged_unknown()
}
fn table(list: &Vec<Tagged<Value>>) -> Tagged<Value> {
Value::table(list).tagged_unknown()
}
fn nu_releases_sorted_by_date() -> Tagged<Value> {
let key = String::from("date");
t_sort(
Some(key),
None,
&nu_releases_grouped_by_date(),
Tag::unknown(),
)
.unwrap()
}
fn nu_releases_evaluated_by_default_one() -> Tagged<Value> {
evaluate(&nu_releases_sorted_by_date(), None, Tag::unknown()).unwrap()
}
fn nu_releases_grouped_by_date() -> Tagged<Value> {
let key = String::from("date").tagged_unknown();
group(&key, nu_releases_commiters(), Tag::unknown()).unwrap()
}
fn nu_releases_commiters() -> Vec<Tagged<Value>> {
vec![
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")},
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")},
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")},
),
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("September 24-2019")},
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")},
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("September 24-2019")},
),
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")},
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("September 24-2019")},
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")},
),
]
}
#[test]
fn reducer_computes_given_a_sum_command() {
let subject = vec![int(1), int(1), int(1)];
let action = reducer_for(Reduce::Sum);
assert_eq!(action(0, subject), 3);
}
#[test]
fn reducer_computes() {
assert_eq!(
reduce(
&nu_releases_evaluated_by_default_one(),
Some(String::from("sum")),
Tag::unknown()
),
Ok(table(&vec![table(&vec![int(3), int(3), int(3)])]))
);
}
}
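
`reducer_for` above hands back a boxed closure so different reducers can share one fold loop; today both the `sum` and default cases are built from `formula`. A stripped-down sketch of that closure-returning pattern over plain integers (the fold step here simply adds each batch's sum, which is not the exact arithmetic `formula` applies):

#[allow(dead_code)]
enum Reduce {
    Sum,
    Default,
}

// Pick a fold step for the requested reducer and return it as a boxed closure.
fn reducer_for(cmd: Reduce) -> Box<dyn Fn(i32, &[i32]) -> i32> {
    match cmd {
        Reduce::Sum | Reduce::Default => Box::new(|acc, batch| acc + batch.iter().sum::<i32>()),
    }
}

fn main() {
    let reduce = reducer_for(Reduce::Sum);
    // Mirrors the reducer test above: folding [1, 1, 1] from 0 gives 3.
    assert_eq!(reduce(0, &[1, 1, 1]), 3);
}
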

View File

@ -16,7 +16,7 @@ impl WholeStreamCommand for Reject {
}
fn signature(&self) -> Signature {
Signature::build("reject").rest(SyntaxShape::Member)
Signature::build("reject").rest(SyntaxShape::Member, "the names of columns to remove")
}
fn usage(&self) -> &str {

View File

@ -11,6 +11,7 @@ pub struct Remove;
pub struct RemoveArgs {
pub target: Tagged<PathBuf>,
pub recursive: Tagged<bool>,
pub trash: Tagged<bool>,
}
impl PerItemCommand for Remove {
@ -20,12 +21,16 @@ impl PerItemCommand for Remove {
fn signature(&self) -> Signature {
Signature::build("rm")
.required("path", SyntaxShape::Pattern)
.switch("recursive")
.required("path", SyntaxShape::Pattern, "the file path to remove")
.switch(
"trash",
"use the platform's recycle bin instead of permanently deleting",
)
.switch("recursive", "delete subdirectories recursively")
}
fn usage(&self) -> &str {
"Remove a file, (for removing directory append '--recursive')"
"Remove a file"
}
fn run(

View File

@ -93,8 +93,11 @@ impl WholeStreamCommand for Save {
fn signature(&self) -> Signature {
Signature::build("save")
.optional("path", SyntaxShape::Path)
.switch("raw")
.optional("path", SyntaxShape::Path, "the path to save contents to")
.switch(
"raw",
"treat values as-is rather than auto-converting based on file extension",
)
}
fn usage(&self) -> &str {
@ -119,49 +122,48 @@ fn save(
input,
name,
shell_manager,
source_map,
host,
ctrl_c,
commands: registry,
..
}: RunnableContext,
raw_args: RawCommandArgs,
) -> Result<OutputStream, ShellError> {
let mut full_path = PathBuf::from(shell_manager.path());
let name_tag = name;
let name_tag = name.clone();
let source_map = source_map.clone();
let stream = async_stream! {
let input: Vec<Tagged<Value>> = input.values.collect().await;
if path.is_none() {
// If there is no filename, check the metadata for the anchor filename
if input.len() > 0 {
let anchor = input[0].anchor();
match source_map.get(&anchor) {
match anchor {
Some(path) => match path {
AnchorLocation::File(file) => {
full_path.push(Path::new(file));
full_path.push(Path::new(&file));
}
_ => {
yield Err(ShellError::labeled_error(
"Save requires a filepath",
"Save requires a filepath (1)",
"needs path",
name_tag,
name_tag.clone(),
));
}
},
None => {
yield Err(ShellError::labeled_error(
"Save requires a filepath",
"Save requires a filepath (2)",
"needs path",
name_tag,
name_tag.clone(),
));
}
}
} else {
yield Err(ShellError::labeled_error(
"Save requires a filepath",
"Save requires a filepath (3)",
"needs path",
name_tag,
name_tag.clone(),
));
}
} else {
@ -179,6 +181,7 @@ fn save(
if let Some(converter) = registry.get_command(&command_name) {
let new_args = RawCommandArgs {
host,
ctrl_c,
shell_manager,
call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call {
@ -187,11 +190,10 @@ fn save(
named: None
},
source: raw_args.call_info.source,
source_map: raw_args.call_info.source_map,
name_tag: raw_args.call_info.name_tag,
}
};
let mut result = converter.run(new_args.with_input(input), &registry, false);
let mut result = converter.run(new_args.with_input(input), &registry);
let result_vec: Vec<Result<ReturnSuccess, ShellError>> = result.drain_vec().await;
if converter.is_binary() {
process_binary_return_success!('scope, result_vec, name_tag)
@ -212,9 +214,9 @@ fn save(
match content {
Ok(save_data) => match std::fs::write(full_path, save_data) {
Ok(o) => o,
Err(e) => yield Err(ShellError::string(e.to_string())),
Err(e) => yield Err(ShellError::labeled_error(e.to_string(), "for command", name)),
},
Err(e) => yield Err(ShellError::string(e.to_string())),
Err(e) => yield Err(ShellError::labeled_error(e.to_string(), "for command", name)),
}
};

View File

@ -2,6 +2,7 @@ use crate::commands::WholeStreamCommand;
use crate::data::TaggedDictBuilder;
use crate::errors::ShellError;
use crate::prelude::*;
use std::sync::atomic::Ordering;
pub struct Shells;
@ -32,14 +33,14 @@ fn shells(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream
let tag = args.call_info.name_tag;
for (index, shell) in args.shell_manager.shells.lock().unwrap().iter().enumerate() {
let mut dict = TaggedDictBuilder::new(tag);
let mut dict = TaggedDictBuilder::new(&tag);
if index == args.shell_manager.current_shell {
if index == (*args.shell_manager.current_shell).load(Ordering::SeqCst) {
dict.insert(" ", "X".to_string());
} else {
dict.insert(" ", " ".to_string());
}
dict.insert("name", shell.name(&args.call_info.source_map));
dict.insert("name", shell.name());
dict.insert("path", shell.path());
shells_out.push_back(dict.into_tagged_value());

View File

@ -37,7 +37,7 @@ fn size(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream,
_ => Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
tag,
&tag,
"value originates from here",
v.tag(),
)),

View File

@ -1,6 +1,7 @@
use crate::commands::WholeStreamCommand;
use crate::errors::ShellError;
use crate::prelude::*;
use log::trace;
pub struct SkipWhile;
@ -16,7 +17,11 @@ impl WholeStreamCommand for SkipWhile {
fn signature(&self) -> Signature {
Signature::build("skip-while")
.required("condition", SyntaxShape::Block)
.required(
"condition",
SyntaxShape::Block,
"the condition that must be met to continue skipping",
)
.filter()
}
@ -38,7 +43,9 @@ pub fn skip_while(
RunnableContext { input, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let objects = input.values.skip_while(move |item| {
trace!("ITEM = {:?}", item);
let result = condition.invoke(&item);
trace!("RESULT = {:?}", result);
let return_value = match result {
Ok(ref v) if v.is_true() => true,

View File

@ -15,7 +15,7 @@ impl WholeStreamCommand for SortBy {
}
fn signature(&self) -> Signature {
Signature::build("sort-by").rest(SyntaxShape::String)
Signature::build("sort-by").rest(SyntaxShape::String, "the column(s) to sort by")
}
fn usage(&self) -> &str {

270
src/commands/split_by.rs Normal file
View File

@ -0,0 +1,270 @@
use crate::commands::WholeStreamCommand;
use crate::data::TaggedDictBuilder;
use crate::errors::ShellError;
use crate::prelude::*;
pub struct SplitBy;
#[derive(Deserialize)]
pub struct SplitByArgs {
column_name: Tagged<String>,
}
impl WholeStreamCommand for SplitBy {
fn name(&self) -> &str {
"split-by"
}
fn signature(&self) -> Signature {
Signature::build("split-by").required(
"column_name",
SyntaxShape::String,
"the name of the column within the nested table to split by",
)
}
fn usage(&self) -> &str {
"Creates a new table with the data from the inner tables splitted by the column given."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, split_by)?.run()
}
}
pub fn split_by(
SplitByArgs { column_name }: SplitByArgs,
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
let stream = async_stream! {
let values: Vec<Tagged<Value>> = input.values.collect().await;
if values.len() > 1 || values.is_empty() {
yield Err(ShellError::labeled_error(
"Expected table from pipeline",
"requires a table input",
column_name.span()
))
} else {
match split(&column_name, &values[0], name) {
Ok(split) => yield ReturnSuccess::value(split),
Err(err) => yield Err(err),
}
}
};
Ok(stream.to_output_stream())
}
pub fn split(
column_name: &Tagged<String>,
value: &Tagged<Value>,
tag: impl Into<Tag>,
) -> Result<Tagged<Value>, ShellError> {
let origin_tag = tag.into();
let mut splits = indexmap::IndexMap::new();
match value {
Tagged {
item: Value::Row(group_sets),
..
} => {
for (group_key, group_value) in group_sets.entries.iter() {
match *group_value {
Tagged {
item: Value::Table(ref dataset),
..
} => {
let group = crate::commands::group_by::group(
&column_name,
dataset.to_vec(),
&origin_tag,
)?;
match group {
Tagged {
item: Value::Row(o),
..
} => {
for (split_label, subset) in o.entries.into_iter() {
match subset {
Tagged {
item: Value::Table(subset),
tag,
} => {
let s = splits
.entry(split_label.clone())
.or_insert(indexmap::IndexMap::new());
s.insert(
group_key.clone(),
Value::table(&subset).tagged(tag),
);
}
other => {
return Err(ShellError::type_error(
"a table value",
other.tagged_type_name(),
))
}
}
}
}
_ => {
return Err(ShellError::type_error(
"a table value",
group.tagged_type_name(),
))
}
}
}
ref other => {
return Err(ShellError::type_error(
"a table value",
other.tagged_type_name(),
))
}
}
}
}
_ => {
return Err(ShellError::type_error(
"a table value",
value.tagged_type_name(),
))
}
}
let mut out = TaggedDictBuilder::new(&origin_tag);
for (k, v) in splits.into_iter() {
out.insert(k, Value::row(v));
}
Ok(out.into_tagged_value())
}
#[cfg(test)]
mod tests {
use crate::commands::group_by::group;
use crate::commands::split_by::split;
use crate::data::meta::*;
use crate::Value;
use indexmap::IndexMap;
fn string(input: impl Into<String>) -> Tagged<Value> {
Value::string(input.into()).tagged_unknown()
}
fn row(entries: IndexMap<String, Tagged<Value>>) -> Tagged<Value> {
Value::row(entries).tagged_unknown()
}
fn table(list: &Vec<Tagged<Value>>) -> Tagged<Value> {
Value::table(list).tagged_unknown()
}
fn nu_releases_grouped_by_date() -> Tagged<Value> {
let key = String::from("date").tagged_unknown();
group(&key, nu_releases_commiters(), Tag::unknown()).unwrap()
}
fn nu_releases_commiters() -> Vec<Tagged<Value>> {
vec![
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")},
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")},
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")},
),
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")},
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")},
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")},
),
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")},
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")},
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")},
),
]
}
#[test]
fn splits_inner_tables_by_key() {
let for_key = String::from("country").tagged_unknown();
assert_eq!(
split(&for_key, &nu_releases_grouped_by_date(), Tag::unknown()).unwrap(),
Value::row(indexmap! {
"EC".into() => row(indexmap! {
"August 23-2019".into() => table(&vec![
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")})
]),
"Sept 24-2019".into() => table(&vec![
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("Sept 24-2019")})
]),
"October 10-2019".into() => table(&vec![
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")})
])
}),
"NZ".into() => row(indexmap! {
"August 23-2019".into() => table(&vec![
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")})
]),
"Sept 24-2019".into() => table(&vec![
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("Sept 24-2019")})
]),
"October 10-2019".into() => table(&vec![
row(indexmap!{"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")})
])
}),
"US".into() => row(indexmap! {
"August 23-2019".into() => table(&vec![
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")})
]),
"Sept 24-2019".into() => table(&vec![
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("Sept 24-2019")})
]),
"October 10-2019".into() => table(&vec![
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")})
])
})
}).tagged_unknown()
);
}
#[test]
fn errors_if_key_within_some_inner_table_is_missing() {
let for_key = String::from("country").tagged_unknown();
let nu_releases = row(indexmap! {
"August 23-2019".into() => table(&vec![
row(indexmap!{"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")})
]),
"Sept 24-2019".into() => table(&vec![
row(indexmap!{"name".into() => Value::string("JT").tagged(Tag::from(Span::new(5,10))), "date".into() => string("Sept 24-2019")})
]),
"October 10-2019".into() => table(&vec![
row(indexmap!{"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")})
])
});
assert!(split(&for_key, &nu_releases, Tag::from(Span::new(5, 10))).is_err());
}
}
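A minimal sketch (not part of this commit) of how the new `split` helper is meant to be driven, using the same fixtures as the tests above; the names below come straight from this file and from group_by.rs:

    // Hypothetical driver, inside a fn returning Result<_, ShellError>.
    // { date -> [commit rows] }  becomes  { country -> { date -> [commit rows] } }
    let grouped = group(
        &String::from("date").tagged_unknown(),
        nu_releases_commiters(),
        Tag::unknown(),
    )?;
    let by_country = String::from("country").tagged_unknown();
    let split_up = split(&by_country, &grouped, Tag::unknown())?;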

View File

@ -21,9 +21,13 @@ impl WholeStreamCommand for SplitColumn {
fn signature(&self) -> Signature {
Signature::build("split-column")
.required("separator", SyntaxShape::Any)
.switch("collapse-empty")
.rest(SyntaxShape::Member)
.required(
"separator",
SyntaxShape::Any,
"the character that denotes what separates columns",
)
.switch("collapse-empty", "remove empty columns")
.rest(SyntaxShape::Member, "column names to give the new columns")
}
fn usage(&self) -> &str {
@ -94,7 +98,7 @@ fn split_column(
_ => Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
name,
&name,
"value originates from here",
v.tag(),
)),

View File

@ -17,7 +17,11 @@ impl WholeStreamCommand for SplitRow {
}
fn signature(&self) -> Signature {
Signature::build("split-row").required("separator", SyntaxShape::Any)
Signature::build("split-row").required(
"separator",
SyntaxShape::Any,
"the character that denotes what separates rows",
)
}
fn usage(&self) -> &str {
@ -60,7 +64,7 @@ fn split_row(
result.push_back(Err(ShellError::labeled_error_with_secondary(
"Expected a string from pipeline",
"requires string input",
name,
&name,
"value originates from here",
v.tag(),
)));
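Across the signature hunks in this commit (skip-while, sort-by, split-column, split-row, and the commands further down), the change is the same: `required`, `switch`, `named`, and `rest` now take a human-readable description. A minimal sketch of the new builder shape, with a hypothetical command name:

    // Hypothetical command; the builder methods match the signatures shown in these hunks.
    Signature::build("my-cmd")
        .required("separator", SyntaxShape::Any, "the character that separates fields")
        .switch("headerless", "do not output the column names as the first row")
        .rest(SyntaxShape::Member, "column names to give the new columns")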

358 src/commands/t_sort_by.rs Normal file
View File

@ -0,0 +1,358 @@
use crate::commands::WholeStreamCommand;
use crate::data::{TaggedDictBuilder, TaggedListBuilder};
use crate::errors::ShellError;
use crate::prelude::*;
use chrono::{DateTime, NaiveDate, Utc};
pub struct TSortBy;
#[derive(Deserialize)]
pub struct TSortByArgs {
#[serde(rename(deserialize = "show-columns"))]
show_columns: bool,
group_by: Option<Tagged<String>>,
#[allow(unused)]
split_by: Option<String>,
}
impl WholeStreamCommand for TSortBy {
fn name(&self) -> &str {
"t-sort-by"
}
fn signature(&self) -> Signature {
Signature::build("t-sort-by")
.switch("show-columns", "Displays the column names sorted")
.named(
"group_by",
SyntaxShape::String,
"the name of the column to group by",
)
.named(
"split_by",
SyntaxShape::String,
"the name of the column within the grouped by table to split by",
)
}
fn usage(&self) -> &str {
"Sort by the given columns."
}
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, t_sort_by)?.run()
}
}
fn t_sort_by(
TSortByArgs {
show_columns,
group_by,
..
}: TSortByArgs,
RunnableContext { input, name, .. }: RunnableContext,
) -> Result<OutputStream, ShellError> {
Ok(OutputStream::new(async_stream! {
let values: Vec<Tagged<Value>> = input.values.collect().await;
let column_grouped_by_name = if let Some(grouped_by) = group_by {
Some(grouped_by.item().clone())
} else {
None
};
if show_columns {
for label in columns_sorted(column_grouped_by_name, &values[0], &name).iter() {
yield ReturnSuccess::value(label.clone());
}
} else {
match t_sort(column_grouped_by_name, None, &values[0], name) {
Ok(sorted) => yield ReturnSuccess::value(sorted),
Err(err) => yield Err(err)
}
}
}))
}
pub fn columns_sorted(
_group_by_name: Option<String>,
value: &Tagged<Value>,
tag: impl Into<Tag>,
) -> Vec<Tagged<Value>> {
let origin_tag = tag.into();
match value {
Tagged {
item: Value::Row(rows),
..
} => {
let mut keys: Vec<Tagged<Value>> =
rows.entries
.keys()
.map(|s| s.as_ref())
.map(|k: &str| {
let date = NaiveDate::parse_from_str(k, "%B %d-%Y");
let date = match date {
Ok(parsed) => Value::Primitive(Primitive::Date(
DateTime::<Utc>::from_utc(parsed.and_hms(12, 34, 56), Utc),
)),
Err(_) => Value::string(k),
};
date.tagged_unknown()
})
.collect();
keys.sort();
let keys: Vec<Value> = keys
.into_iter()
.map(|k| {
Value::string(match k {
Tagged {
item: Value::Primitive(Primitive::Date(d)),
..
} => format!("{}", d.format("%B %d-%Y")),
_ => k.as_string().unwrap(),
})
})
.collect();
keys.into_iter().map(|k| k.tagged(&origin_tag)).collect()
}
_ => vec![Value::string("default").tagged(&origin_tag)],
}
}
pub fn t_sort(
group_by_name: Option<String>,
split_by_name: Option<String>,
value: &Tagged<Value>,
tag: impl Into<Tag>,
) -> Result<Tagged<Value>, ShellError> {
let origin_tag = tag.into();
match group_by_name {
Some(column_name) => {
let sorted_labels = columns_sorted(Some(column_name), value, &origin_tag);
match split_by_name {
None => {
let mut dataset = TaggedDictBuilder::new(&origin_tag);
dataset.insert_tagged("default", value.clone());
let dataset = dataset.into_tagged_value();
let split_labels = match &dataset {
Tagged {
item: Value::Row(rows),
..
} => {
let mut keys: Vec<Tagged<Value>> = rows
.entries
.keys()
.map(|s| s.as_ref())
.map(|k: &str| {
let date = NaiveDate::parse_from_str(k, "%B %d-%Y");
let date = match date {
Ok(parsed) => Value::Primitive(Primitive::Date(
DateTime::<Utc>::from_utc(
parsed.and_hms(12, 34, 56),
Utc,
),
)),
Err(_) => Value::string(k),
};
date.tagged_unknown()
})
.collect();
keys.sort();
let keys: Vec<Value> = keys
.into_iter()
.map(|k| {
Value::string(match k {
Tagged {
item: Value::Primitive(Primitive::Date(d)),
..
} => format!("{}", d.format("%B %d-%Y")),
_ => k.as_string().unwrap(),
})
})
.collect();
keys.into_iter().map(|k| k.tagged(&origin_tag)).collect()
}
_ => vec![],
};
let results: Vec<Vec<Tagged<Value>>> = split_labels
.into_iter()
.map(|split| {
let groups = dataset.get_data_by_key(&split.as_string().unwrap());
sorted_labels
.clone()
.into_iter()
.map(|label| {
let label = label.as_string().unwrap();
match groups {
Some(Tagged {
item: Value::Row(dict),
..
}) => dict.get_data_by_key(&label).unwrap().clone(),
_ => Value::Table(vec![]).tagged(&origin_tag),
}
})
.collect()
})
.collect();
let mut outer = TaggedListBuilder::new(&origin_tag);
for i in results {
outer.insert_tagged(Value::Table(i).tagged(&origin_tag));
}
return Ok(Value::Table(outer.list).tagged(&origin_tag));
}
Some(_) => return Ok(Value::nothing().tagged(&origin_tag)),
}
}
None => return Ok(Value::nothing().tagged(&origin_tag)),
}
}
#[cfg(test)]
mod tests {
use crate::commands::group_by::group;
use crate::commands::t_sort_by::{columns_sorted, t_sort};
use crate::data::meta::*;
use crate::Value;
use indexmap::IndexMap;
fn string(input: impl Into<String>) -> Tagged<Value> {
Value::string(input.into()).tagged_unknown()
}
fn row(entries: IndexMap<String, Tagged<Value>>) -> Tagged<Value> {
Value::row(entries).tagged_unknown()
}
fn table(list: &Vec<Tagged<Value>>) -> Tagged<Value> {
Value::table(list).tagged_unknown()
}
fn nu_releases_grouped_by_date() -> Tagged<Value> {
let key = String::from("date").tagged_unknown();
group(&key, nu_releases_commiters(), Tag::unknown()).unwrap()
}
fn nu_releases_commiters() -> Vec<Tagged<Value>> {
vec![
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")},
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")},
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")},
),
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("September 24-2019")},
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")},
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("September 24-2019")},
),
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")},
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("September 24-2019")},
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")},
),
]
}
#[test]
fn show_columns_sorted_given_a_column_to_sort_by() {
let by_column = String::from("date");
assert_eq!(
columns_sorted(
Some(by_column),
&nu_releases_grouped_by_date(),
Tag::unknown()
),
vec![
string("August 23-2019"),
string("September 24-2019"),
string("October 10-2019")
]
)
}
#[test]
fn sorts_the_tables() {
let group_by = String::from("date");
assert_eq!(
t_sort(
Some(group_by),
None,
&nu_releases_grouped_by_date(),
Tag::unknown()
)
.unwrap(),
table(&vec![table(&vec![
table(&vec![
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("August 23-2019")}
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("August 23-2019")}
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("August 23-2019")}
)
]),
table(&vec![
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("September 24-2019")}
),
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("September 24-2019")}
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("September 24-2019")}
)
]),
table(&vec![
row(
indexmap! {"name".into() => string("YK"), "country".into() => string("US"), "date".into() => string("October 10-2019")}
),
row(
indexmap! {"name".into() => string("JT"), "country".into() => string("NZ"), "date".into() => string("October 10-2019")}
),
row(
indexmap! {"name".into() => string("AR"), "country".into() => string("EC"), "date".into() => string("October 10-2019")}
)
]),
]),])
);
}
}
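For orientation, a small sketch (hypothetical, mirroring the tests above) of the two helpers this file exposes: `columns_sorted` yields the group labels in chronological order, and `t_sort` reorders the inner tables to match.

    // Hypothetical usage, inside a fn returning Result<_, ShellError>.
    let grouped = nu_releases_grouped_by_date();
    let labels = columns_sorted(Some("date".to_string()), &grouped, Tag::unknown());
    // labels: "August 23-2019", "September 24-2019", "October 10-2019"
    let sorted = t_sort(Some("date".to_string()), None, &grouped, Tag::unknown())?;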

View File

@ -5,16 +5,17 @@ use crate::prelude::*;
pub struct Table;
#[derive(Deserialize)]
pub struct TableArgs {}
impl WholeStreamCommand for Table {
fn name(&self) -> &str {
"table"
}
fn signature(&self) -> Signature {
Signature::build("table")
Signature::build("table").named(
"start_number",
SyntaxShape::Number,
"row number to start viewing from",
)
}
fn usage(&self) -> &str {
@ -26,16 +27,29 @@ impl WholeStreamCommand for Table {
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
args.process(registry, table)?.run()
table(args, registry)
}
}
pub fn table(_args: TableArgs, context: RunnableContext) -> Result<OutputStream, ShellError> {
fn table(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;
let stream = async_stream! {
let input: Vec<Tagged<Value>> = context.input.into_vec().await;
let host = args.host.clone();
let start_number = match args.get("start_number") {
Some(Tagged { item: Value::Primitive(Primitive::Int(i)), .. }) => {
i.to_usize().unwrap()
}
_ => {
0
}
};
let input: Vec<Tagged<Value>> = args.input.into_vec().await;
if input.len() > 0 {
let mut host = context.host.lock().unwrap();
let view = TableView::from_list(&input);
let mut host = host.lock().unwrap();
let view = TableView::from_list(&input, start_number);
if let Some(view) = view {
handle_unexpected(&mut *host, |host| crate::format::print_view(&view, host));
}

View File

@ -28,7 +28,6 @@ impl WholeStreamCommand for Tags {
}
fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let source_map = args.call_info.source_map.clone();
Ok(args
.input
.values
@ -38,11 +37,11 @@ fn tags(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream,
let anchor = v.anchor();
let span = v.tag().span;
let mut dict = TaggedDictBuilder::new(v.tag());
dict.insert("start", Value::int(span.start as i64));
dict.insert("end", Value::int(span.end as i64));
dict.insert("start", Value::int(span.start() as i64));
dict.insert("end", Value::int(span.end() as i64));
tags.insert_tagged("span", dict.into_tagged_value());
match source_map.get(&anchor) {
match anchor {
Some(AnchorLocation::File(source)) => {
tags.insert("anchor", Value::string(source));
}

View File

@ -46,7 +46,7 @@ pub fn value_to_bson_value(v: &Tagged<Value>) -> Result<Bson, ShellError> {
Value::Primitive(Primitive::BeginningOfStream) => Bson::Null,
Value::Primitive(Primitive::Decimal(d)) => Bson::FloatingPoint(d.to_f64().unwrap()),
Value::Primitive(Primitive::Int(i)) => {
Bson::I64(i.tagged(v.tag).coerce_into("converting to BSON")?)
Bson::I64(i.tagged(&v.tag).coerce_into("converting to BSON")?)
}
Value::Primitive(Primitive::Nothing) => Bson::Null,
Value::Primitive(Primitive::String(s)) => Bson::String(s.clone()),
@ -58,6 +58,7 @@ pub fn value_to_bson_value(v: &Tagged<Value>) -> Result<Bson, ShellError> {
.collect::<Result<_, _>>()?,
),
Value::Block(_) => Bson::Null,
Value::Error(e) => return Err(e.clone()),
Value::Primitive(Primitive::Binary(b)) => Bson::Binary(BinarySubtype::Generic, b.clone()),
Value::Row(o) => object_value_to_bson(o)?,
})
@ -170,7 +171,7 @@ fn get_binary_subtype<'a>(tagged_value: &'a Tagged<Value>) -> Result<BinarySubty
_ => unreachable!(),
}),
Value::Primitive(Primitive::Int(i)) => Ok(BinarySubtype::UserDefined(
i.tagged(tagged_value.tag)
i.tagged(&tagged_value.tag)
.coerce_into("converting to BSON binary subtype")?,
)),
_ => Err(ShellError::type_error(
@ -207,12 +208,12 @@ fn bson_value_to_bytes(bson: Bson, tag: Tag) -> Result<Vec<u8>, ShellError> {
Bson::Array(a) => {
for v in a.into_iter() {
match v {
Bson::Document(d) => shell_encode_document(&mut out, d, tag)?,
Bson::Document(d) => shell_encode_document(&mut out, d, tag.clone())?,
_ => {
return Err(ShellError::labeled_error(
format!("All top level values must be Documents, got {:?}", v),
"requires BSON-compatible document",
tag,
&tag,
))
}
}
@ -237,7 +238,7 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
let input: Vec<Tagged<Value>> = args.input.values.collect().await;
let to_process_input = if input.len() > 1 {
let tag = input[0].tag;
let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 {
input
@ -248,14 +249,14 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
for value in to_process_input {
match value_to_bson_value(&value) {
Ok(bson_value) => {
match bson_value_to_bytes(bson_value, name_tag) {
match bson_value_to_bytes(bson_value, name_tag.clone()) {
Ok(x) => yield ReturnSuccess::value(
Value::binary(x).tagged(name_tag),
Value::binary(x).tagged(&name_tag),
),
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with BSON-compatible structure.tag() from pipeline",
"requires BSON-compatible input",
name_tag,
&name_tag,
"originates from here".to_string(),
value.tag(),
)),
@ -264,7 +265,7 @@ fn to_bson(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
_ => yield Err(ShellError::labeled_error(
"Expected a table with BSON-compatible structure from pipeline",
"requires BSON-compatible input",
name_tag))
&name_tag))
}
}
};

View File

@ -16,7 +16,10 @@ impl WholeStreamCommand for ToCSV {
}
fn signature(&self) -> Signature {
Signature::build("to-csv").switch("headerless")
Signature::build("to-csv").switch(
"headerless",
"do not output the columns names as the first row",
)
}
fn usage(&self) -> &str {
@ -32,8 +35,8 @@ impl WholeStreamCommand for ToCSV {
}
}
pub fn value_to_csv_value(v: &Value) -> Value {
match v {
pub fn value_to_csv_value(v: &Tagged<Value>) -> Tagged<Value> {
match &v.item {
Value::Primitive(Primitive::String(s)) => Value::Primitive(Primitive::String(s.clone())),
Value::Primitive(Primitive::Nothing) => Value::Primitive(Primitive::Nothing),
Value::Primitive(Primitive::Boolean(b)) => Value::Primitive(Primitive::Boolean(b.clone())),
@ -47,10 +50,11 @@ pub fn value_to_csv_value(v: &Value) -> Value {
Value::Block(_) => Value::Primitive(Primitive::Nothing),
_ => Value::Primitive(Primitive::Nothing),
}
.tagged(v.tag.clone())
}
fn to_string_helper(v: &Value) -> Result<String, ShellError> {
match v {
fn to_string_helper(v: &Tagged<Value>) -> Result<String, ShellError> {
match &v.item {
Value::Primitive(Primitive::Date(d)) => Ok(d.to_string()),
Value::Primitive(Primitive::Bytes(b)) => Ok(format!("{}", b)),
Value::Primitive(Primitive::Boolean(_)) => Ok(v.as_string()?),
@ -60,7 +64,13 @@ fn to_string_helper(v: &Value) -> Result<String, ShellError> {
Value::Table(_) => return Ok(String::from("[Table]")),
Value::Row(_) => return Ok(String::from("[Row]")),
Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()),
_ => return Err(ShellError::string("Unexpected value")),
_ => {
return Err(ShellError::labeled_error(
"Unexpected value",
"",
v.tag.clone(),
))
}
}
}
@ -76,7 +86,9 @@ fn merge_descriptors(values: &[Tagged<Value>]) -> Vec<String> {
ret
}
pub fn to_string(v: &Value) -> Result<String, ShellError> {
pub fn to_string(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
let v = &tagged_value.item;
match v {
Value::Row(o) => {
let mut wtr = WriterBuilder::new().from_writer(vec![]);
@ -92,11 +104,20 @@ pub fn to_string(v: &Value) -> Result<String, ShellError> {
wtr.write_record(fields).expect("can not write.");
wtr.write_record(values).expect("can not write.");
return Ok(String::from_utf8(
wtr.into_inner()
.map_err(|_| ShellError::string("Could not convert record"))?,
)
.map_err(|_| ShellError::string("Could not convert record"))?);
return Ok(String::from_utf8(wtr.into_inner().map_err(|_| {
ShellError::labeled_error(
"Could not convert record",
"original value",
&tagged_value.tag,
)
})?)
.map_err(|_| {
ShellError::labeled_error(
"Could not convert record",
"original value",
&tagged_value.tag,
)
})?);
}
Value::Table(list) => {
let mut wtr = WriterBuilder::new().from_writer(vec![]);
@ -120,13 +141,22 @@ pub fn to_string(v: &Value) -> Result<String, ShellError> {
wtr.write_record(&row).expect("can not write");
}
return Ok(String::from_utf8(
wtr.into_inner()
.map_err(|_| ShellError::string("Could not convert record"))?,
)
.map_err(|_| ShellError::string("Could not convert record"))?);
return Ok(String::from_utf8(wtr.into_inner().map_err(|_| {
ShellError::labeled_error(
"Could not convert record",
"original value",
&tagged_value.tag,
)
})?)
.map_err(|_| {
ShellError::labeled_error(
"Could not convert record",
"original value",
&tagged_value.tag,
)
})?);
}
_ => return to_string_helper(&v),
_ => return to_string_helper(tagged_value),
}
}
@ -139,7 +169,7 @@ fn to_csv(
let input: Vec<Tagged<Value>> = input.values.collect().await;
let to_process_input = if input.len() > 1 {
let tag = input[0].tag;
let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 {
input
@ -148,20 +178,20 @@ fn to_csv(
};
for value in to_process_input {
match to_string(&value_to_csv_value(&value.item)) {
match to_string(&value_to_csv_value(&value)) {
Ok(x) => {
let converted = if headerless {
x.lines().skip(1).collect()
} else {
x
};
yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(name_tag))
yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(&name_tag))
}
_ => {
yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with CSV-compatible structure.tag() from pipeline",
"requires CSV-compatible input",
name_tag,
&name_tag,
"originates from here".to_string(),
value.tag(),
))
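The thread running through this file (and the to-tsv twin below): the CSV helpers now take `&Tagged<Value>` rather than `&Value`, so conversion failures can be labeled with the offending value's tag instead of falling back to a bare `ShellError::string`. A sketch of the resulting call shape, assuming a tagged `value` as in the loop above:

    // Hypothetical call site; mirrors the `for value in to_process_input` loop.
    let csv_text: String = to_string(&value_to_csv_value(&value))?;
    // On failure the error now points at `value.tag` ("original value")
    // rather than carrying an untagged "Could not convert record" string.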

View File

@ -42,7 +42,7 @@ pub fn value_to_json_value(v: &Tagged<Value>) -> Result<serde_json::Value, Shell
.unwrap(),
),
Value::Primitive(Primitive::Int(i)) => serde_json::Value::Number(serde_json::Number::from(
CoerceInto::<i64>::coerce_into(i.tagged(v.tag), "converting to JSON number")?,
CoerceInto::<i64>::coerce_into(i.tagged(&v.tag), "converting to JSON number")?,
)),
Value::Primitive(Primitive::Nothing) => serde_json::Value::Null,
Value::Primitive(Primitive::Pattern(s)) => serde_json::Value::String(s.clone()),
@ -50,6 +50,7 @@ pub fn value_to_json_value(v: &Tagged<Value>) -> Result<serde_json::Value, Shell
Value::Primitive(Primitive::Path(s)) => serde_json::Value::String(s.display().to_string()),
Value::Table(l) => serde_json::Value::Array(json_list(l)?),
Value::Error(e) => return Err(e.clone()),
Value::Block(_) => serde_json::Value::Null,
Value::Primitive(Primitive::Binary(b)) => serde_json::Value::Array(
b.iter()
@ -85,7 +86,7 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
let input: Vec<Tagged<Value>> = args.input.values.collect().await;
let to_process_input = if input.len() > 1 {
let tag = input[0].tag;
let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 {
input
@ -98,12 +99,12 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
Ok(json_value) => {
match serde_json::to_string(&json_value) {
Ok(x) => yield ReturnSuccess::value(
Value::Primitive(Primitive::String(x)).tagged(name_tag),
Value::Primitive(Primitive::String(x)).tagged(&name_tag),
),
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with JSON-compatible structure.tag() from pipeline",
"requires JSON-compatible input",
name_tag,
&name_tag,
"originates from here".to_string(),
value.tag(),
)),
@ -112,7 +113,7 @@ fn to_json(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
_ => yield Err(ShellError::labeled_error(
"Expected a table with JSON-compatible structure from pipeline",
"requires JSON-compatible input",
name_tag))
&name_tag))
}
}
};

View File

@ -38,10 +38,10 @@ pub fn value_to_toml_value(v: &Tagged<Value>) -> Result<toml::Value, ShellError>
toml::Value::String("<Beginning of Stream>".to_string())
}
Value::Primitive(Primitive::Decimal(f)) => {
toml::Value::Float(f.tagged(v.tag).coerce_into("converting to TOML float")?)
toml::Value::Float(f.tagged(&v.tag).coerce_into("converting to TOML float")?)
}
Value::Primitive(Primitive::Int(i)) => {
toml::Value::Integer(i.tagged(v.tag).coerce_into("converting to TOML integer")?)
toml::Value::Integer(i.tagged(&v.tag).coerce_into("converting to TOML integer")?)
}
Value::Primitive(Primitive::Nothing) => toml::Value::String("<Nothing>".to_string()),
Value::Primitive(Primitive::Pattern(s)) => toml::Value::String(s.clone()),
@ -49,6 +49,7 @@ pub fn value_to_toml_value(v: &Tagged<Value>) -> Result<toml::Value, ShellError>
Value::Primitive(Primitive::Path(s)) => toml::Value::String(s.display().to_string()),
Value::Table(l) => toml::Value::Array(collect_values(l)?),
Value::Error(e) => return Err(e.clone()),
Value::Block(_) => toml::Value::String("<Block>".to_string()),
Value::Primitive(Primitive::Binary(b)) => {
toml::Value::Array(b.iter().map(|x| toml::Value::Integer(*x as i64)).collect())
@ -80,7 +81,7 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
let input: Vec<Tagged<Value>> = args.input.values.collect().await;
let to_process_input = if input.len() > 1 {
let tag = input[0].tag;
let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 {
input
@ -93,12 +94,12 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
Ok(toml_value) => {
match toml::to_string(&toml_value) {
Ok(x) => yield ReturnSuccess::value(
Value::Primitive(Primitive::String(x)).tagged(name_tag),
Value::Primitive(Primitive::String(x)).tagged(&name_tag),
),
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with TOML-compatible structure.tag() from pipeline",
"requires TOML-compatible input",
name_tag,
&name_tag,
"originates from here".to_string(),
value.tag(),
)),
@ -107,7 +108,7 @@ fn to_toml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
_ => yield Err(ShellError::labeled_error(
"Expected a table with TOML-compatible structure from pipeline",
"requires TOML-compatible input",
name_tag))
&name_tag))
}
}
};

View File

@ -16,7 +16,10 @@ impl WholeStreamCommand for ToTSV {
}
fn signature(&self) -> Signature {
Signature::build("to-tsv").switch("headerless")
Signature::build("to-tsv").switch(
"headerless",
"do not output the column names as the first row",
)
}
fn usage(&self) -> &str {
@ -32,7 +35,9 @@ impl WholeStreamCommand for ToTSV {
}
}
pub fn value_to_tsv_value(v: &Value) -> Value {
pub fn value_to_tsv_value(tagged_value: &Tagged<Value>) -> Tagged<Value> {
let v = &tagged_value.item;
match v {
Value::Primitive(Primitive::String(s)) => Value::Primitive(Primitive::String(s.clone())),
Value::Primitive(Primitive::Nothing) => Value::Primitive(Primitive::Nothing),
@ -47,20 +52,28 @@ pub fn value_to_tsv_value(v: &Value) -> Value {
Value::Block(_) => Value::Primitive(Primitive::Nothing),
_ => Value::Primitive(Primitive::Nothing),
}
.tagged(&tagged_value.tag)
}
fn to_string_helper(v: &Value) -> Result<String, ShellError> {
fn to_string_helper(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
let v = &tagged_value.item;
match v {
Value::Primitive(Primitive::Date(d)) => Ok(d.to_string()),
Value::Primitive(Primitive::Bytes(b)) => Ok(format!("{}", b)),
Value::Primitive(Primitive::Boolean(_)) => Ok(v.as_string()?),
Value::Primitive(Primitive::Decimal(_)) => Ok(v.as_string()?),
Value::Primitive(Primitive::Int(_)) => Ok(v.as_string()?),
Value::Primitive(Primitive::Path(_)) => Ok(v.as_string()?),
Value::Primitive(Primitive::Boolean(_)) => Ok(tagged_value.as_string()?),
Value::Primitive(Primitive::Decimal(_)) => Ok(tagged_value.as_string()?),
Value::Primitive(Primitive::Int(_)) => Ok(tagged_value.as_string()?),
Value::Primitive(Primitive::Path(_)) => Ok(tagged_value.as_string()?),
Value::Table(_) => return Ok(String::from("[table]")),
Value::Row(_) => return Ok(String::from("[row]")),
Value::Primitive(Primitive::String(s)) => return Ok(s.to_string()),
_ => return Err(ShellError::string("Unexpected value")),
_ => {
return Err(ShellError::labeled_error(
"Unexpected value",
"original value",
&tagged_value.tag,
))
}
}
}
@ -76,7 +89,9 @@ fn merge_descriptors(values: &[Tagged<Value>]) -> Vec<String> {
ret
}
pub fn to_string(v: &Value) -> Result<String, ShellError> {
pub fn to_string(tagged_value: &Tagged<Value>) -> Result<String, ShellError> {
let v = &tagged_value.item;
match v {
Value::Row(o) => {
let mut wtr = WriterBuilder::new().delimiter(b'\t').from_writer(vec![]);
@ -91,11 +106,20 @@ pub fn to_string(v: &Value) -> Result<String, ShellError> {
wtr.write_record(fields).expect("can not write.");
wtr.write_record(values).expect("can not write.");
return Ok(String::from_utf8(
wtr.into_inner()
.map_err(|_| ShellError::string("Could not convert record"))?,
)
.map_err(|_| ShellError::string("Could not convert record"))?);
return Ok(String::from_utf8(wtr.into_inner().map_err(|_| {
ShellError::labeled_error(
"Could not convert record",
"original value",
&tagged_value.tag,
)
})?)
.map_err(|_| {
ShellError::labeled_error(
"Could not convert record",
"original value",
&tagged_value.tag,
)
})?);
}
Value::Table(list) => {
let mut wtr = WriterBuilder::new().delimiter(b'\t').from_writer(vec![]);
@ -119,13 +143,22 @@ pub fn to_string(v: &Value) -> Result<String, ShellError> {
wtr.write_record(&row).expect("can not write");
}
return Ok(String::from_utf8(
wtr.into_inner()
.map_err(|_| ShellError::string("Could not convert record"))?,
)
.map_err(|_| ShellError::string("Could not convert record"))?);
return Ok(String::from_utf8(wtr.into_inner().map_err(|_| {
ShellError::labeled_error(
"Could not convert record",
"original value",
&tagged_value.tag,
)
})?)
.map_err(|_| {
ShellError::labeled_error(
"Could not convert record",
"original value",
&tagged_value.tag,
)
})?);
}
_ => return to_string_helper(&v),
_ => return to_string_helper(tagged_value),
}
}
@ -138,7 +171,7 @@ fn to_tsv(
let input: Vec<Tagged<Value>> = input.values.collect().await;
let to_process_input = if input.len() > 1 {
let tag = input[0].tag;
let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 {
input
@ -147,20 +180,20 @@ fn to_tsv(
};
for value in to_process_input {
match to_string(&value_to_tsv_value(&value.item)) {
match to_string(&value_to_tsv_value(&value)) {
Ok(x) => {
let converted = if headerless {
x.lines().skip(1).collect()
} else {
x
};
yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(name_tag))
yield ReturnSuccess::value(Value::Primitive(Primitive::String(converted)).tagged(&name_tag))
}
_ => {
yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with TSV-compatible structure.tag() from pipeline",
"requires TSV-compatible input",
name_tag,
&name_tag,
"originates from here".to_string(),
value.tag(),
))

View File

@ -47,7 +47,7 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
yield Err(ShellError::labeled_error_with_secondary(
"Expected table with string values",
"requires table with strings",
tag,
&tag,
"value originates from here",
v.tag,
))
@ -57,13 +57,13 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
match serde_urlencoded::to_string(row_vec) {
Ok(s) => {
yield ReturnSuccess::value(Value::string(s).tagged(tag));
yield ReturnSuccess::value(Value::string(s).tagged(&tag));
}
_ => {
yield Err(ShellError::labeled_error(
"Failed to convert to url-encoded",
"cannot url-encode",
tag,
&tag,
))
}
}
@ -72,7 +72,7 @@ fn to_url(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream,
yield Err(ShellError::labeled_error_with_secondary(
"Expected a table from pipeline",
"requires table input",
tag,
&tag,
"value originates from here",
value_tag,
))

View File

@ -39,7 +39,7 @@ pub fn value_to_yaml_value(v: &Tagged<Value>) -> Result<serde_yaml::Value, Shell
serde_yaml::Value::Number(serde_yaml::Number::from(f.to_f64().unwrap()))
}
Value::Primitive(Primitive::Int(i)) => serde_yaml::Value::Number(serde_yaml::Number::from(
CoerceInto::<i64>::coerce_into(i.tagged(v.tag), "converting to YAML number")?,
CoerceInto::<i64>::coerce_into(i.tagged(&v.tag), "converting to YAML number")?,
)),
Value::Primitive(Primitive::Nothing) => serde_yaml::Value::Null,
Value::Primitive(Primitive::Pattern(s)) => serde_yaml::Value::String(s.clone()),
@ -55,6 +55,7 @@ pub fn value_to_yaml_value(v: &Tagged<Value>) -> Result<serde_yaml::Value, Shell
serde_yaml::Value::Sequence(out)
}
Value::Error(e) => return Err(e.clone()),
Value::Block(_) => serde_yaml::Value::Null,
Value::Primitive(Primitive::Binary(b)) => serde_yaml::Value::Sequence(
b.iter()
@ -81,7 +82,7 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
let input: Vec<Tagged<Value>> = args.input.values.collect().await;
let to_process_input = if input.len() > 1 {
let tag = input[0].tag;
let tag = input[0].tag.clone();
vec![Tagged { item: Value::Table(input), tag } ]
} else if input.len() == 1 {
input
@ -94,12 +95,12 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
Ok(yaml_value) => {
match serde_yaml::to_string(&yaml_value) {
Ok(x) => yield ReturnSuccess::value(
Value::Primitive(Primitive::String(x)).tagged(name_tag),
Value::Primitive(Primitive::String(x)).tagged(&name_tag),
),
_ => yield Err(ShellError::labeled_error_with_secondary(
"Expected a table with YAML-compatible structure.tag() from pipeline",
"requires YAML-compatible input",
name_tag,
&name_tag,
"originates from here".to_string(),
value.tag(),
)),
@ -108,7 +109,7 @@ fn to_yaml(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream
_ => yield Err(ShellError::labeled_error(
"Expected a table with YAML-compatible structure from pipeline",
"requires YAML-compatible input",
name_tag))
&name_tag))
}
}
};

View File

@ -31,14 +31,14 @@ impl WholeStreamCommand for Version {
pub fn date(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
let args = args.evaluate_once(registry)?;
let tag = args.call_info.name_tag;
let tag = args.call_info.name_tag.clone();
let mut indexmap = IndexMap::new();
indexmap.insert(
"version".to_string(),
Value::string(clap::crate_version!()).tagged(tag),
Value::string(clap::crate_version!()).tagged(&tag),
);
let value = Value::Row(Dictionary::from(indexmap)).tagged(tag);
let value = Value::Row(Dictionary::from(indexmap)).tagged(&tag);
Ok(OutputStream::one(value))
}

View File

@ -12,7 +12,11 @@ impl PerItemCommand for Where {
}
fn signature(&self) -> registry::Signature {
Signature::build("where").required("condition", SyntaxShape::Block)
Signature::build("where").required(
"condition",
SyntaxShape::Block,
"the condition that must match",
)
}
fn usage(&self) -> &str {
@ -49,7 +53,7 @@ impl PerItemCommand for Where {
return Err(ShellError::labeled_error(
"Expected a condition",
"where needs a condition",
*tag,
tag,
))
}
};

View File

@ -13,7 +13,11 @@ impl WholeStreamCommand for Which {
}
fn signature(&self) -> Signature {
Signature::build("which").required("name", SyntaxShape::Any)
Signature::build("which").required(
"name",
SyntaxShape::Any,
"the name of the command to find the path to",
)
}
fn usage(&self) -> &str {
@ -33,7 +37,7 @@ pub fn which(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
let args = args.evaluate_once(registry)?;
let mut which_out = VecDeque::new();
let tag = args.call_info.name_tag;
let tag = args.call_info.name_tag.clone();
if let Some(v) = &args.call_info.args.positional {
if v.len() > 0 {
@ -52,7 +56,7 @@ pub fn which(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputStre
return Err(ShellError::labeled_error(
"Expected a filename to find",
"needs a filename",
*tag,
tag,
));
}
}

View File

@ -1,39 +1,20 @@
use crate::commands::{Command, UnevaluatedCallInfo};
use crate::parser::hir;
use crate::parser::{hir, hir::syntax_shape::ExpandContext};
use crate::prelude::*;
use derive_new::new;
use indexmap::IndexMap;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::error::Error;
use std::sync::Arc;
use uuid::Uuid;
use std::sync::atomic::AtomicBool;
use std::sync::{Arc, Mutex};
#[derive(Clone, Debug, Serialize, Deserialize)]
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum AnchorLocation {
Url(String),
File(String),
Source(Text),
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct SourceMap(HashMap<Uuid, AnchorLocation>);
impl SourceMap {
pub fn insert(&mut self, uuid: Uuid, anchor_location: AnchorLocation) {
self.0.insert(uuid, anchor_location);
}
pub fn get(&self, uuid: &Uuid) -> Option<&AnchorLocation> {
self.0.get(uuid)
}
pub fn new() -> SourceMap {
SourceMap(HashMap::new())
}
}
#[derive(Clone, new)]
pub struct CommandRegistry {
#[new(value = "Arc::new(Mutex::new(IndexMap::default()))")]
@ -53,13 +34,17 @@ impl CommandRegistry {
registry.get(name).map(|c| c.clone())
}
pub(crate) fn expect_command(&self, name: &str) -> Arc<Command> {
self.get_command(name).unwrap()
}
pub(crate) fn has(&self, name: &str) -> bool {
let registry = self.registry.lock().unwrap();
registry.contains_key(name)
}
fn insert(&mut self, name: impl Into<String>, command: Arc<Command>) {
pub(crate) fn insert(&mut self, name: impl Into<String>, command: Arc<Command>) {
let mut registry = self.registry.lock().unwrap();
registry.insert(name.into(), command);
}
@ -73,8 +58,8 @@ impl CommandRegistry {
#[derive(Clone)]
pub struct Context {
registry: CommandRegistry,
pub(crate) source_map: SourceMap,
host: Arc<Mutex<dyn Host + Send>>,
pub ctrl_c: Arc<AtomicBool>,
pub(crate) shell_manager: ShellManager,
}
@ -83,12 +68,19 @@ impl Context {
&self.registry
}
pub(crate) fn expand_context<'context>(
&'context self,
source: &'context Text,
) -> ExpandContext<'context> {
ExpandContext::new(&self.registry, source, self.shell_manager.homedir())
}
pub(crate) fn basic() -> Result<Context, Box<dyn Error>> {
let registry = CommandRegistry::new();
Ok(Context {
registry: registry.clone(),
source_map: SourceMap::new(),
host: Arc::new(Mutex::new(crate::env::host::BasicHost)),
ctrl_c: Arc::new(AtomicBool::new(false)),
shell_manager: ShellManager::basic(registry)?,
})
}
@ -105,43 +97,30 @@ impl Context {
}
}
pub fn add_anchor_location(&mut self, uuid: Uuid, anchor_location: AnchorLocation) {
self.source_map.insert(uuid, anchor_location);
pub(crate) fn get_command(&self, name: &str) -> Option<Arc<Command>> {
self.registry.get_command(name)
}
pub(crate) fn has_command(&self, name: &str) -> bool {
self.registry.has(name)
}
pub(crate) fn get_command(&self, name: &str) -> Arc<Command> {
self.registry.get_command(name).unwrap()
pub(crate) fn expect_command(&self, name: &str) -> Arc<Command> {
self.registry.expect_command(name)
}
pub(crate) fn run_command<'a>(
&mut self,
command: Arc<Command>,
name_tag: Tag,
source_map: SourceMap,
args: hir::Call,
source: &Text,
input: InputStream,
is_first_command: bool,
) -> OutputStream {
let command_args = self.command_args(args, input, source, source_map, name_tag);
command.run(command_args, self.registry(), is_first_command)
let command_args = self.command_args(args, input, source, name_tag);
command.run(command_args, self.registry())
}
fn call_info(
&self,
args: hir::Call,
source: &Text,
source_map: SourceMap,
name_tag: Tag,
) -> UnevaluatedCallInfo {
fn call_info(&self, args: hir::Call, source: &Text, name_tag: Tag) -> UnevaluatedCallInfo {
UnevaluatedCallInfo {
args,
source: source.clone(),
source_map,
name_tag,
}
}
@ -151,13 +130,13 @@ impl Context {
args: hir::Call,
input: InputStream,
source: &Text,
source_map: SourceMap,
name_tag: Tag,
) -> CommandArgs {
CommandArgs {
host: self.host.clone(),
ctrl_c: self.ctrl_c.clone(),
shell_manager: self.shell_manager.clone(),
call_info: self.call_info(args, source, source_map, name_tag),
call_info: self.call_info(args, source, name_tag),
input,
}
}
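A condensed sketch (hypothetical) of the call path after the SourceMap removal above: anchors now travel on each value's Tag, so `run_command` no longer threads a source map or an is_first_command flag.

    // Hypothetical caller; parameters match the new run_command signature above.
    // (context must be mutable: run_command takes &mut self.)
    let output = context.run_command(command, name_tag.clone(), call, &source_text, input);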

View File

@ -8,6 +8,8 @@ use crate::Text;
use chrono::{DateTime, Utc};
use chrono_humanize::Humanize;
use derive_new::new;
use indexmap::IndexMap;
use log::trace;
use serde::{Deserialize, Serialize};
use std::fmt;
use std::path::PathBuf;
@ -212,11 +214,19 @@ impl Block {
let scope = Scope::new(value.clone());
if self.expressions.len() == 0 {
return Ok(Value::nothing().tagged(self.tag));
return Ok(Value::nothing().tagged(&self.tag));
}
let mut last = None;
trace!(
"EXPRS = {:?}",
self.expressions
.iter()
.map(|e| format!("{}", e))
.collect::<Vec<_>>()
);
for expr in self.expressions.iter() {
last = Some(evaluate_baseline_expr(
&expr,
@ -236,6 +246,9 @@ pub enum Value {
Row(crate::data::Dictionary),
Table(Vec<Tagged<Value>>),
// Errors are a type of value too
Error(ShellError),
Block(Block),
}
@ -284,14 +297,15 @@ impl fmt::Debug for ValueDebug<'_> {
Value::Row(o) => o.debug(f),
Value::Table(l) => debug_list(l).fmt(f),
Value::Block(_) => write!(f, "[[block]]"),
Value::Error(_) => write!(f, "[[error]]"),
}
}
}
impl Tagged<Value> {
pub(crate) fn tagged_type_name(&self) -> Tagged<String> {
pub fn tagged_type_name(&self) -> Tagged<String> {
let name = self.type_name();
Tagged::from_item(name, self.tag())
name.tagged(self.tag())
}
}
@ -303,7 +317,7 @@ impl std::convert::TryFrom<&Tagged<Value>> for Block {
Value::Block(block) => Ok(block.clone()),
v => Err(ShellError::type_error(
"Block",
value.copy_tag(v.type_name()),
v.type_name().tagged(value.tag()),
)),
}
}
@ -315,11 +329,11 @@ impl std::convert::TryFrom<&Tagged<Value>> for i64 {
fn try_from(value: &Tagged<Value>) -> Result<i64, ShellError> {
match value.item() {
Value::Primitive(Primitive::Int(int)) => {
int.tagged(value.tag).coerce_into("converting to i64")
int.tagged(&value.tag).coerce_into("converting to i64")
}
v => Err(ShellError::type_error(
"Integer",
value.copy_tag(v.type_name()),
v.type_name().tagged(value.tag()),
)),
}
}
@ -333,7 +347,7 @@ impl std::convert::TryFrom<&Tagged<Value>> for String {
Value::Primitive(Primitive::String(s)) => Ok(s.clone()),
v => Err(ShellError::type_error(
"String",
value.copy_tag(v.type_name()),
v.type_name().tagged(value.tag()),
)),
}
}
@ -347,7 +361,7 @@ impl std::convert::TryFrom<&Tagged<Value>> for Vec<u8> {
Value::Primitive(Primitive::Binary(b)) => Ok(b.clone()),
v => Err(ShellError::type_error(
"Binary",
value.copy_tag(v.type_name()),
v.type_name().tagged(value.tag()),
)),
}
}
@ -361,7 +375,7 @@ impl<'a> std::convert::TryFrom<&'a Tagged<Value>> for &'a crate::data::Dictionar
Value::Row(d) => Ok(d),
v => Err(ShellError::type_error(
"Dictionary",
value.copy_tag(v.type_name()),
v.type_name().tagged(value.tag()),
)),
}
}
@ -383,7 +397,7 @@ impl std::convert::TryFrom<Option<&Tagged<Value>>> for Switch {
Value::Primitive(Primitive::Boolean(true)) => Ok(Switch::Present),
v => Err(ShellError::type_error(
"Boolean",
value.copy_tag(v.type_name()),
v.type_name().tagged(value.tag()),
)),
},
}
@ -394,23 +408,54 @@ impl Tagged<Value> {
pub(crate) fn debug(&self) -> ValueDebug<'_> {
ValueDebug { value: self }
}
}
impl Value {
pub(crate) fn type_name(&self) -> String {
match self {
Value::Primitive(p) => p.type_name(),
Value::Row(_) => format!("object"),
Value::Table(_) => format!("list"),
Value::Block(_) => format!("block"),
pub fn as_column_path(&self) -> Result<Tagged<Vec<Tagged<Value>>>, ShellError> {
match &self.item {
Value::Primitive(Primitive::String(s)) => {
Ok(vec![Value::string(s).tagged(&self.tag)].tagged(&self.tag))
}
Value::Table(table) => Ok(table.to_vec().tagged(&self.tag)),
other => Err(ShellError::type_error(
"column name",
other.type_name().tagged(&self.tag),
)),
}
}
pub fn as_string(&self) -> Result<String, ShellError> {
match &self.item {
Value::Primitive(Primitive::String(s)) => Ok(s.clone()),
Value::Primitive(Primitive::Boolean(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Decimal(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Int(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Path(x)) => Ok(format!("{}", x.display())),
Value::Primitive(Primitive::Date(x)) => Ok(format!("{}", x.to_rfc3339())),
// TODO: this should definitely be more general with better errors
other => Err(ShellError::labeled_error(
"Expected string",
other.type_name(),
&self.tag,
)),
}
}
}
impl Value {
pub fn type_name(&self) -> String {
match self {
Value::Primitive(p) => p.type_name(),
Value::Row(_) => format!("row"),
Value::Table(_) => format!("table"),
Value::Block(_) => format!("block"),
Value::Error(_) => format!("error"),
}
}
// TODO: This is basically a legacy construct, I think
pub fn data_descriptors(&self) -> Vec<String> {
match self {
Value::Primitive(_) => vec![],
Value::Row(o) => o
Value::Row(columns) => columns
.entries
.keys()
.into_iter()
@ -418,6 +463,14 @@ impl Value {
.collect(),
Value::Block(_) => vec![],
Value::Table(_) => vec![],
Value::Error(_) => vec![],
}
}
pub(crate) fn get_data_by_index(&self, idx: usize) -> Option<&Tagged<Value>> {
match self {
Value::Table(value_set) => value_set.get(idx),
_ => None,
}
}
@ -443,27 +496,88 @@ impl Value {
}
}
pub fn get_data_by_path(&self, tag: Tag, path: &str) -> Option<Tagged<&Value>> {
pub(crate) fn get_mut_data_by_key(&mut self, name: &str) -> Option<&mut Tagged<Value>> {
match self {
Value::Row(ref mut o) => o.get_mut_data_by_key(name),
Value::Table(ref mut l) => {
for item in l {
match item {
Tagged {
item: Value::Row(ref mut o),
..
} => match o.get_mut_data_by_key(name) {
Some(v) => return Some(v),
None => {}
},
_ => {}
}
}
None
}
_ => None,
}
}
pub fn get_data_by_column_path(
&self,
tag: Tag,
path: &Vec<Tagged<Value>>,
callback: Box<dyn FnOnce((Value, Tagged<Value>)) -> ShellError>,
) -> Result<Option<Tagged<&Value>>, ShellError> {
let mut column_path = vec![];
for value in path {
column_path.push(
Value::string(value.as_string().unwrap_or("".to_string())).tagged(&value.tag),
);
}
let path = column_path;
let mut current = self;
for p in path.split(".") {
match current.get_data_by_key(p) {
for p in path {
let value = p.as_string().unwrap_or("".to_string());
let value = match value.parse::<usize>() {
Ok(number) => match current {
Value::Table(_) => current.get_data_by_index(number),
Value::Row(_) => current.get_data_by_key(&value),
_ => None,
},
Err(_) => match self {
Value::Table(_) | Value::Row(_) => current.get_data_by_key(&value),
_ => None,
},
};
match value {
Some(v) => current = v,
None => return None,
None => return Err(callback((current.clone(), p.clone()))),
}
}
Some(Tagged::from_item(current, tag))
Ok(Some(current.tagged(tag)))
}
pub fn insert_data_at_path(
pub fn insert_data_at_column_path(
&self,
tag: Tag,
path: &str,
split_path: &Vec<Tagged<Value>>,
new_value: Value,
) -> Option<Tagged<Value>> {
let mut new_obj = self.clone();
let split_path = split_path
.into_iter()
.map(|p| match p {
Tagged {
item: Value::Primitive(Primitive::String(s)),
tag,
} => Ok(s.clone().tagged(tag)),
o => Err(o),
})
.filter_map(Result::ok)
.collect::<Vec<Tagged<String>>>();
let split_path: Vec<_> = path.split(".").collect();
let mut new_obj = self.clone();
if let Value::Row(ref mut o) = new_obj {
let mut current = o;
@ -472,25 +586,25 @@ impl Value {
// Special case for inserting at the top level
current
.entries
.insert(path.to_string(), Tagged::from_item(new_value, tag));
return Some(Tagged::from_item(new_obj, tag));
.insert(split_path[0].item.clone(), new_value.tagged(&tag));
return Some(new_obj.tagged(&tag));
}
for idx in 0..split_path.len() {
match current.entries.get_mut(split_path[idx]) {
match current.entries.get_mut(&split_path[idx].item) {
Some(next) => {
if idx == (split_path.len() - 2) {
match &mut next.item {
Value::Row(o) => {
o.entries.insert(
split_path[idx + 1].to_string(),
Tagged::from_item(new_value, tag),
new_value.tagged(&tag),
);
}
_ => {}
}
return Some(Tagged::from_item(new_obj, tag));
return Some(new_obj.tagged(&tag));
} else {
match next.item {
Value::Row(ref mut o) => {
@ -508,34 +622,39 @@ impl Value {
None
}
pub fn replace_data_at_path(
pub fn replace_data_at_column_path(
&self,
tag: Tag,
path: &str,
split_path: &Vec<Tagged<Value>>,
replaced_value: Value,
) -> Option<Tagged<Value>> {
let split_path = split_path
.into_iter()
.map(|p| match p {
Tagged {
item: Value::Primitive(Primitive::String(s)),
tag,
} => Ok(s.clone().tagged(tag)),
o => Err(o),
})
.filter_map(Result::ok)
.collect::<Vec<Tagged<String>>>();
let mut new_obj = self.clone();
let mut current = &mut new_obj;
let split_path: Vec<_> = path.split(".").collect();
if let Value::Row(ref mut o) = new_obj {
let mut current = o;
for idx in 0..split_path.len() {
match current.entries.get_mut(split_path[idx]) {
Some(next) => {
if idx == (split_path.len() - 1) {
*next = Tagged::from_item(replaced_value, tag);
return Some(Tagged::from_item(new_obj, tag));
} else {
match next.item {
Value::Row(ref mut o) => {
current = o;
}
_ => return None,
}
}
for idx in 0..split_path.len() {
match current.get_mut_data_by_key(&split_path[idx].item) {
Some(next) => {
if idx == (split_path.len() - 1) {
*next = replaced_value.tagged(&tag);
return Some(new_obj.tagged(&tag));
} else {
current = &mut next.item;
}
_ => return None,
}
None => {
return None;
}
}
}
@ -549,6 +668,7 @@ impl Value {
Value::Row(o) => o.get_data(desc),
Value::Block(_) => MaybeOwned::Owned(Value::nothing()),
Value::Table(_) => MaybeOwned::Owned(Value::nothing()),
Value::Error(_) => MaybeOwned::Owned(Value::nothing()),
}
}
@ -558,7 +678,7 @@ impl Value {
Value::Block(b) => itertools::join(
b.expressions
.iter()
.map(|e| e.source(&b.source).to_string()),
.map(|e| e.span.slice(&b.source).to_string()),
"; ",
),
Value::Row(_) => format!("[table: 1 row]"),
@ -567,6 +687,7 @@ impl Value {
l.len(),
if l.len() == 1 { "row" } else { "rows" }
),
Value::Error(_) => format!("[error]"),
}
}
@ -607,22 +728,6 @@ impl Value {
}
}
pub(crate) fn as_string(&self) -> Result<String, ShellError> {
match self {
Value::Primitive(Primitive::String(s)) => Ok(s.clone()),
Value::Primitive(Primitive::Boolean(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Decimal(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Int(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Bytes(x)) => Ok(format!("{}", x)),
Value::Primitive(Primitive::Path(x)) => Ok(format!("{}", x.display())),
// TODO: this should definitely be more general with better errors
other => Err(ShellError::string(format!(
"Expected string, got {:?}",
other
))),
}
}
pub(crate) fn is_true(&self) -> bool {
match self {
Value::Primitive(Primitive::Boolean(true)) => true,
@ -630,6 +735,15 @@ impl Value {
}
}
#[allow(unused)]
pub fn row(entries: IndexMap<String, Tagged<Value>>) -> Value {
Value::Row(entries.into())
}
pub fn table(list: &Vec<Tagged<Value>>) -> Value {
Value::Table(list.to_vec())
}
pub fn string(s: impl Into<String>) -> Value {
Value::Primitive(Primitive::String(s.into()))
}
@ -675,9 +789,14 @@ impl Value {
Value::Primitive(Primitive::Date(s.into()))
}
pub fn date_from_str(s: &str) -> Result<Value, ShellError> {
let date = DateTime::parse_from_rfc3339(s)
.map_err(|err| ShellError::string(&format!("Date parse error: {}", err)))?;
pub fn date_from_str(s: Tagged<&str>) -> Result<Value, ShellError> {
let date = DateTime::parse_from_rfc3339(s.item).map_err(|err| {
ShellError::labeled_error(
&format!("Date parse error: {}", err),
"original value",
s.tag,
)
})?;
let date = date.with_timezone(&chrono::offset::Utc);
@ -693,6 +812,7 @@ impl Tagged<Value> {
pub(crate) fn as_path(&self) -> Result<PathBuf, ShellError> {
match self.item() {
Value::Primitive(Primitive::Path(path)) => Ok(path.clone()),
Value::Primitive(Primitive::String(path_str)) => Ok(PathBuf::from(&path_str).clone()),
other => Err(ShellError::type_error(
"Path",
other.type_name().tagged(self.tag()),
@ -736,6 +856,7 @@ enum CompareValues {
Ints(BigInt, BigInt),
Decimals(BigDecimal, BigDecimal),
String(String, String),
Date(DateTime<Utc>, DateTime<Utc>),
}
impl CompareValues {
@ -744,6 +865,7 @@ impl CompareValues {
CompareValues::Ints(left, right) => left.cmp(right),
CompareValues::Decimals(left, right) => left.cmp(right),
CompareValues::String(left, right) => left.cmp(right),
CompareValues::Date(left, right) => right.cmp(left),
}
}
}
@ -780,6 +902,256 @@ fn coerce_compare_primitive(
CompareValues::Decimals(BigDecimal::from(*left), right.clone())
}
(String(left), String(right)) => CompareValues::String(left.clone(), right.clone()),
(Date(left), Date(right)) => CompareValues::Date(left.clone(), right.clone()),
_ => return Err((left.type_name(), right.type_name())),
})
}
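Before the tests, a brief sketch (hypothetical) of the new column-path accessor added above: `get_data_by_column_path` walks a `Vec<Tagged<Value>>` path, treating numeric segments as table indices, and asks the supplied closure to build the error for a missing segment.

    // Hypothetical call; the signature matches get_data_by_column_path above.
    let found = value.get_data_by_column_path(
        tag,
        &field_path,
        Box::new(|(_obj_source, column_path_tried)| {
            ShellError::type_error("a row or table", column_path_tried.tagged_type_name())
        }),
    )?;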
#[cfg(test)]
mod tests {
use crate::data::meta::*;
use crate::ShellError;
use crate::Value;
use indexmap::IndexMap;
fn string(input: impl Into<String>) -> Tagged<Value> {
Value::string(input.into()).tagged_unknown()
}
fn number(n: i64) -> Tagged<Value> {
Value::number(n).tagged_unknown()
}
fn row(entries: IndexMap<String, Tagged<Value>>) -> Tagged<Value> {
Value::row(entries).tagged_unknown()
}
fn table(list: &Vec<Tagged<Value>>) -> Tagged<Value> {
Value::table(list).tagged_unknown()
}
fn error_callback() -> impl FnOnce((Value, Tagged<Value>)) -> ShellError {
move |(_obj_source, _column_path_tried)| ShellError::unimplemented("will never be called.")
}
fn column_path(paths: &Vec<Tagged<Value>>) -> Vec<Tagged<Value>> {
table(paths).as_column_path().unwrap().item
}
#[test]
fn gets_matching_field_from_a_row() {
let row = Value::row(indexmap! {
"amigos".into() => table(&vec![string("andres"),string("jonathan"),string("yehuda")])
});
assert_eq!(
*row.get_data_by_key("amigos").unwrap(),
table(&vec![
string("andres"),
string("jonathan"),
string("yehuda")
])
);
}
#[test]
fn gets_matching_field_from_nested_rows_inside_a_row() {
let field_path = column_path(&vec![string("package"), string("version")]);
let (version, tag) = string("0.4.0").into_parts();
let value = Value::row(indexmap! {
"package".into() =>
row(indexmap! {
"name".into() => string("nu"),
"version".into() => string("0.4.0")
})
});
assert_eq!(
**value
.get_data_by_column_path(tag, &field_path, Box::new(error_callback()))
.unwrap()
.unwrap(),
version
)
}
#[test]
fn column_path_that_contains_just_a_number_gets_a_row_from_a_table() {
let field_path = column_path(&vec![string("package"), string("authors"), number(0)]);
let (_, tag) = string("Andrés N. Robalino").into_parts();
let value = Value::row(indexmap! {
"package".into() => row(indexmap! {
"name".into() => string("nu"),
"version".into() => string("0.4.0"),
"authors".into() => table(&vec![
row(indexmap!{"name".into() => string("Andrés N. Robalino")}),
row(indexmap!{"name".into() => string("Jonathan Turner")}),
row(indexmap!{"name".into() => string("Yehuda Katz")})
])
})
});
assert_eq!(
**value
.get_data_by_column_path(tag, &field_path, Box::new(error_callback()))
.unwrap()
.unwrap(),
Value::row(indexmap! {
"name".into() => string("Andrés N. Robalino")
})
);
}
#[test]
fn column_path_that_contains_just_a_number_gets_a_row_from_a_row() {
let field_path = column_path(&vec![string("package"), string("authors"), string("0")]);
let (_, tag) = string("Andrés N. Robalino").into_parts();
let value = Value::row(indexmap! {
"package".into() => row(indexmap! {
"name".into() => string("nu"),
"version".into() => string("0.4.0"),
"authors".into() => row(indexmap! {
"0".into() => row(indexmap!{"name".into() => string("Andrés N. Robalino")}),
"1".into() => row(indexmap!{"name".into() => string("Jonathan Turner")}),
"2".into() => row(indexmap!{"name".into() => string("Yehuda Katz")}),
})
})
});
assert_eq!(
**value
.get_data_by_column_path(tag, &field_path, Box::new(error_callback()))
.unwrap()
.unwrap(),
Value::row(indexmap! {
"name".into() => string("Andrés N. Robalino")
})
);
}
#[test]
fn replaces_matching_field_from_a_row() {
let field_path = column_path(&vec![string("amigos")]);
let sample = Value::row(indexmap! {
"amigos".into() => table(&vec![
string("andres"),
string("jonathan"),
string("yehuda"),
]),
});
let (replacement, tag) = string("jonas").into_parts();
let actual = sample
.replace_data_at_column_path(tag, &field_path, replacement)
.unwrap();
assert_eq!(actual, row(indexmap! {"amigos".into() => string("jonas")}));
}
#[test]
fn replaces_matching_field_from_nested_rows_inside_a_row() {
let field_path = column_path(&vec![
string("package"),
string("authors"),
string("los.3.caballeros"),
]);
let sample = Value::row(indexmap! {
"package".into() => row(indexmap! {
"authors".into() => row(indexmap! {
"los.3.mosqueteros".into() => table(&vec![string("andres::yehuda::jonathan")]),
"los.3.amigos".into() => table(&vec![string("andres::yehuda::jonathan")]),
"los.3.caballeros".into() => table(&vec![string("andres::yehuda::jonathan")])
})
})
});
let (replacement, tag) = table(&vec![string("yehuda::jonathan::andres")]).into_parts();
let actual = sample
.replace_data_at_column_path(tag.clone(), &field_path, replacement.clone())
.unwrap();
assert_eq!(
actual,
Value::row(indexmap! {
"package".into() => row(indexmap! {
"authors".into() => row(indexmap! {
"los.3.mosqueteros".into() => table(&vec![string("andres::yehuda::jonathan")]),
"los.3.amigos".into() => table(&vec![string("andres::yehuda::jonathan")]),
"los.3.caballeros".into() => replacement.tagged(&tag)})})})
.tagged(tag)
);
}
#[test]
fn replaces_matching_field_from_rows_inside_a_table() {
let field_path = column_path(&vec![
string("shell_policy"),
string("releases"),
string("nu.version.arepa"),
]);
let sample = Value::row(indexmap! {
"shell_policy".into() => row(indexmap! {
"releases".into() => table(&vec![
row(indexmap! {
"nu.version.arepa".into() => row(indexmap! {
"code".into() => string("0.4.0"), "tag_line".into() => string("GitHub-era")
})
}),
row(indexmap! {
"nu.version.taco".into() => row(indexmap! {
"code".into() => string("0.3.0"), "tag_line".into() => string("GitHub-era")
})
}),
row(indexmap! {
"nu.version.stable".into() => row(indexmap! {
"code".into() => string("0.2.0"), "tag_line".into() => string("GitHub-era")
})
})
])
})
});
let (replacement, tag) = row(indexmap! {
"code".into() => string("0.5.0"),
"tag_line".into() => string("CABALLEROS")
})
.into_parts();
let actual = sample
.replace_data_at_column_path(tag.clone(), &field_path, replacement.clone())
.unwrap();
assert_eq!(
actual,
Value::row(indexmap! {
"shell_policy".into() => row(indexmap! {
"releases".into() => table(&vec![
row(indexmap! {
"nu.version.arepa".into() => replacement.tagged(&tag)
}),
row(indexmap! {
"nu.version.taco".into() => row(indexmap! {
"code".into() => string("0.3.0"), "tag_line".into() => string("GitHub-era")
})
}),
row(indexmap! {
"nu.version.stable".into() => row(indexmap! {
"code".into() => string("0.2.0"), "tag_line".into() => string("GitHub-era")
})
})
])
})
}).tagged(&tag)
);
}
}

View File

@ -7,7 +7,7 @@ use std::ops::Deref;
pub(crate) fn command_dict(command: Arc<Command>, tag: impl Into<Tag>) -> Tagged<Value> {
let tag = tag.into();
let mut cmd_dict = TaggedDictBuilder::new(tag);
let mut cmd_dict = TaggedDictBuilder::new(&tag);
cmd_dict.insert("name", Value::string(command.name()));
@ -42,27 +42,27 @@ fn for_spec(name: &str, ty: &str, required: bool, tag: impl Into<Tag>) -> Tagged
fn signature_dict(signature: Signature, tag: impl Into<Tag>) -> Tagged<Value> {
let tag = tag.into();
let mut sig = TaggedListBuilder::new(tag);
let mut sig = TaggedListBuilder::new(&tag);
for arg in signature.positional.iter() {
let is_required = match arg {
let is_required = match arg.0 {
PositionalType::Mandatory(_, _) => true,
PositionalType::Optional(_, _) => false,
};
sig.insert_tagged(for_spec(arg.name(), "argument", is_required, tag));
sig.insert_tagged(for_spec(arg.0.name(), "argument", is_required, &tag));
}
if let Some(_) = signature.rest_positional {
let is_required = false;
sig.insert_tagged(for_spec("rest", "argument", is_required, tag));
sig.insert_tagged(for_spec("rest", "argument", is_required, &tag));
}
for (name, ty) in signature.named.iter() {
match ty {
NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, tag)),
NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, tag)),
NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, tag)),
match ty.0 {
NamedType::Mandatory(_) => sig.insert_tagged(for_spec(name, "flag", true, &tag)),
NamedType::Optional(_) => sig.insert_tagged(for_spec(name, "flag", false, &tag)),
NamedType::Switch => sig.insert_tagged(for_spec(name, "switch", false, &tag)),
}
}

View File

@ -51,8 +51,9 @@ pub fn user_data() -> Result<PathBuf, ShellError> {
}
pub fn app_path(app_data_type: AppDataType, display: &str) -> Result<PathBuf, ShellError> {
let path = app_root(app_data_type, &APP_INFO)
.map_err(|err| ShellError::string(&format!("Couldn't open {} path:\n{}", display, err)))?;
let path = app_root(app_data_type, &APP_INFO).map_err(|err| {
ShellError::untagged_runtime_error(&format!("Couldn't open {} path:\n{}", display, err))
})?;
Ok(path)
}
@ -74,11 +75,22 @@ pub fn read(
let tag = tag.into();
let contents = fs::read_to_string(filename)
.map(|v| v.tagged(tag))
.map_err(|err| ShellError::string(&format!("Couldn't read config file:\n{}", err)))?;
.map(|v| v.tagged(&tag))
.map_err(|err| {
ShellError::labeled_error(
&format!("Couldn't read config file:\n{}", err),
"file name",
&tag,
)
})?;
let parsed: toml::Value = toml::from_str(&contents)
.map_err(|err| ShellError::string(&format!("Couldn't parse config file:\n{}", err)))?;
let parsed: toml::Value = toml::from_str(&contents).map_err(|err| {
ShellError::labeled_error(
&format!("Couldn't parse config file:\n{}", err),
"file name",
&tag,
)
})?;
let value = convert_toml_value_to_nu_value(&parsed, tag);
let tag = value.tag();
@ -86,7 +98,7 @@ pub fn read(
Value::Row(Dictionary { entries }) => Ok(entries),
other => Err(ShellError::type_error(
"Dictionary",
other.type_name().tagged(tag),
other.type_name().tagged(&tag),
)),
}
}

View File

@ -89,6 +89,17 @@ impl Dictionary {
}
}
pub(crate) fn get_mut_data_by_key(&mut self, name: &str) -> Option<&mut Tagged<Value>> {
match self
.entries
.iter_mut()
.find(|(desc_name, _)| *desc_name == name)
{
Some((_, v)) => Some(v),
None => None,
}
}
pub(crate) fn debug(&self, f: &mut fmt::Formatter) -> fmt::Result {
let mut debug = f.debug_struct("Dictionary");
@ -103,7 +114,7 @@ impl Dictionary {
#[derive(Debug)]
pub struct TaggedListBuilder {
tag: Tag,
list: Vec<Tagged<Value>>,
pub list: Vec<Tagged<Value>>,
}
impl TaggedListBuilder {
@ -115,7 +126,7 @@ impl TaggedListBuilder {
}
pub fn push(&mut self, value: impl Into<Value>) {
self.list.push(value.into().tagged(self.tag));
self.list.push(value.into().tagged(&self.tag));
}
pub fn insert_tagged(&mut self, value: impl Into<Tagged<Value>>) {
@ -155,7 +166,7 @@ impl TaggedDictBuilder {
}
pub fn insert(&mut self, key: impl Into<String>, value: impl Into<Value>) {
self.dict.insert(key.into(), value.into().tagged(self.tag));
self.dict.insert(key.into(), value.into().tagged(&self.tag));
}
pub fn insert_tagged(&mut self, key: impl Into<String>, value: impl Into<Tagged<Value>>) {

View File

@ -1,14 +1,53 @@
use crate::context::{AnchorLocation, SourceMap};
use crate::context::AnchorLocation;
use crate::parser::parse::parser::TracableContext;
use crate::prelude::*;
use crate::Text;
use derive_new::new;
use getset::Getters;
use serde::Deserialize;
use serde::Serialize;
use std::fmt;
use std::path::{Path, PathBuf};
use uuid::Uuid;
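// Spanned<T> carries only a source Span, while Tagged<T> below wraps a full Tag (a Span plus an optional AnchorLocation).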
#[derive(new, Debug, Clone, Copy, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub struct Spanned<T> {
pub span: Span,
pub item: T,
}
impl<T> Spanned<T> {
pub fn map<U>(self, input: impl FnOnce(T) -> U) -> Spanned<U> {
let span = self.span;
let mapped = input(self.item);
mapped.spanned(span)
}
}
pub trait SpannedItem: Sized {
fn spanned(self, span: impl Into<Span>) -> Spanned<Self> {
Spanned {
item: self,
span: span.into(),
}
}
fn spanned_unknown(self) -> Spanned<Self> {
Spanned {
item: self,
span: Span::unknown(),
}
}
}
impl<T> SpannedItem for T {}
impl<T> std::ops::Deref for Spanned<T> {
type Target = T;
fn deref(&self) -> &T {
&self.item
}
}
#[derive(new, Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub struct Tagged<T> {
pub tag: Tag,
pub item: T,
@ -16,7 +55,7 @@ pub struct Tagged<T> {
impl<T> HasTag for Tagged<T> {
fn tag(&self) -> Tag {
self.tag
self.tag.clone()
}
}
@ -28,20 +67,23 @@ impl AsRef<Path> for Tagged<PathBuf> {
pub trait TaggedItem: Sized {
fn tagged(self, tag: impl Into<Tag>) -> Tagged<Self> {
Tagged::from_item(self, tag.into())
Tagged {
item: self,
tag: tag.into(),
}
}
// For now, this is a temporary facility. In many cases, there are other useful spans that we
// could be using, such as the original source spans of JSON or Toml files, but we don't yet
// have the infrastructure to make that work.
fn tagged_unknown(self) -> Tagged<Self> {
Tagged::from_item(
self,
Tag {
Tagged {
item: self,
tag: Tag {
span: Span::unknown(),
anchor: uuid::Uuid::nil(),
anchor: None,
},
)
}
}
}
@ -56,48 +98,40 @@ impl<T> std::ops::Deref for Tagged<T> {
}
impl<T> Tagged<T> {
pub fn with_tag(self, tag: impl Into<Tag>) -> Tagged<T> {
Tagged::from_item(self.item, tag)
}
pub fn from_item(item: T, tag: impl Into<Tag>) -> Tagged<T> {
Tagged {
item,
tag: tag.into(),
}
}
pub fn map<U>(self, input: impl FnOnce(T) -> U) -> Tagged<U> {
let tag = self.tag();
let mapped = input(self.item);
Tagged::from_item(mapped, tag)
mapped.tagged(tag)
}
pub(crate) fn copy_tag<U>(&self, output: U) -> Tagged<U> {
Tagged::from_item(output, self.tag())
}
pub fn map_anchored(self, anchor: &Option<AnchorLocation>) -> Tagged<T> {
let mut tag = self.tag;
pub fn source(&self, source: &Text) -> Text {
Text::from(self.tag().slice(source))
tag.anchor = anchor.clone();
Tagged {
item: self.item,
tag: tag,
}
}
pub fn tag(&self) -> Tag {
self.tag
self.tag.clone()
}
pub fn span(&self) -> Span {
self.tag.span
}
pub fn anchor(&self) -> uuid::Uuid {
self.tag.anchor
pub fn anchor(&self) -> Option<AnchorLocation> {
self.tag.anchor.clone()
}
pub fn anchor_name(&self, source_map: &SourceMap) -> Option<String> {
match source_map.get(&self.tag.anchor) {
Some(AnchorLocation::File(file)) => Some(file.clone()),
Some(AnchorLocation::Url(url)) => Some(url.clone()),
pub fn anchor_name(&self) -> Option<String> {
match self.tag.anchor {
Some(AnchorLocation::File(ref file)) => Some(file.clone()),
Some(AnchorLocation::Url(ref url)) => Some(url.clone()),
_ => None,
}
}
@ -113,29 +147,32 @@ impl<T> Tagged<T> {
impl From<&Tag> for Tag {
fn from(input: &Tag) -> Tag {
*input
input.clone()
}
}
impl From<nom_locate::LocatedSpanEx<&str, Uuid>> for Span {
fn from(input: nom_locate::LocatedSpanEx<&str, Uuid>) -> Span {
Span {
start: input.offset,
end: input.offset + input.fragment.len(),
}
impl From<nom_locate::LocatedSpanEx<&str, TracableContext>> for Span {
fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Span {
Span::new(input.offset, input.offset + input.fragment.len())
}
}
impl From<nom_locate::LocatedSpanEx<&str, u64>> for Span {
fn from(input: nom_locate::LocatedSpanEx<&str, u64>) -> Span {
Span::new(input.offset, input.offset + input.fragment.len())
}
}
impl<T>
From<(
nom_locate::LocatedSpanEx<T, Uuid>,
nom_locate::LocatedSpanEx<T, Uuid>,
nom_locate::LocatedSpanEx<T, u64>,
nom_locate::LocatedSpanEx<T, u64>,
)> for Span
{
fn from(
input: (
nom_locate::LocatedSpanEx<T, Uuid>,
nom_locate::LocatedSpanEx<T, Uuid>,
nom_locate::LocatedSpanEx<T, u64>,
nom_locate::LocatedSpanEx<T, u64>,
),
) -> Span {
Span {
@ -147,10 +184,7 @@ impl<T>
impl From<(usize, usize)> for Span {
fn from(input: (usize, usize)) -> Span {
Span {
start: input.0,
end: input.1,
}
Span::new(input.0, input.1)
}
}
@ -164,61 +198,60 @@ impl From<&std::ops::Range<usize>> for Span {
}
#[derive(
Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters,
Debug, Clone, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash, Getters, new,
)]
pub struct Tag {
pub anchor: Uuid,
pub anchor: Option<AnchorLocation>,
pub span: Span,
}
impl From<Span> for Tag {
fn from(span: Span) -> Self {
Tag {
anchor: uuid::Uuid::nil(),
span,
}
Tag { anchor: None, span }
}
}
impl From<&Span> for Tag {
fn from(span: &Span) -> Self {
Tag {
anchor: uuid::Uuid::nil(),
anchor: None,
span: *span,
}
}
}
impl From<(usize, usize, Uuid)> for Tag {
fn from((start, end, anchor): (usize, usize, Uuid)) -> Self {
impl From<(usize, usize, TracableContext)> for Tag {
fn from((start, end, _context): (usize, usize, TracableContext)) -> Self {
Tag {
anchor: None,
span: Span::new(start, end),
}
}
}
impl From<(usize, usize, AnchorLocation)> for Tag {
fn from((start, end, anchor): (usize, usize, AnchorLocation)) -> Self {
Tag {
anchor: Some(anchor),
span: Span::new(start, end),
}
}
}
impl From<(usize, usize, Option<AnchorLocation>)> for Tag {
fn from((start, end, anchor): (usize, usize, Option<AnchorLocation>)) -> Self {
Tag {
anchor,
span: Span { start, end },
span: Span::new(start, end),
}
}
}
impl From<(usize, usize, Option<Uuid>)> for Tag {
fn from((start, end, anchor): (usize, usize, Option<Uuid>)) -> Self {
impl From<nom_locate::LocatedSpanEx<&str, TracableContext>> for Tag {
fn from(input: nom_locate::LocatedSpanEx<&str, TracableContext>) -> Tag {
Tag {
anchor: if let Some(uuid) = anchor {
uuid
} else {
uuid::Uuid::nil()
},
span: Span { start, end },
}
}
}
impl From<nom_locate::LocatedSpanEx<&str, Uuid>> for Tag {
fn from(input: nom_locate::LocatedSpanEx<&str, Uuid>) -> Tag {
Tag {
anchor: input.extra,
span: Span {
start: input.offset,
end: input.offset + input.fragment.len(),
},
anchor: None,
span: Span::new(input.offset, input.offset + input.fragment.len()),
}
}
}
@ -237,22 +270,29 @@ impl From<&Tag> for Span {
impl Tag {
pub fn unknown_anchor(span: Span) -> Tag {
Tag { anchor: None, span }
}
pub fn for_char(pos: usize, anchor: AnchorLocation) -> Tag {
Tag {
anchor: uuid::Uuid::nil(),
span,
anchor: Some(anchor),
span: Span {
start: pos,
end: pos + 1,
},
}
}
pub fn unknown_span(anchor: Uuid) -> Tag {
pub fn unknown_span(anchor: AnchorLocation) -> Tag {
Tag {
anchor,
anchor: Some(anchor),
span: Span::unknown(),
}
}
pub fn unknown() -> Tag {
Tag {
anchor: uuid::Uuid::nil(),
anchor: None,
span: Span::unknown(),
}
}
@ -265,29 +305,73 @@ impl Tag {
);
Tag {
span: Span {
start: self.span.start,
end: other.span.end,
},
anchor: self.anchor,
span: Span::new(self.span.start, other.span.end),
anchor: self.anchor.clone(),
}
}
pub fn until_option(&self, other: Option<impl Into<Tag>>) -> Tag {
match other {
Some(other) => {
let other = other.into();
debug_assert!(
self.anchor == other.anchor,
"Can only merge two tags with the same anchor"
);
Tag {
span: Span::new(self.span.start, other.span.end),
anchor: self.anchor.clone(),
}
}
None => self.clone(),
}
}
pub fn slice<'a>(&self, source: &'a str) -> &'a str {
self.span.slice(source)
}
pub fn string<'a>(&self, source: &'a str) -> String {
self.span.slice(source).to_string()
}
pub fn tagged_slice<'a>(&self, source: &'a str) -> Tagged<&'a str> {
self.span.slice(source).tagged(self)
}
pub fn tagged_string<'a>(&self, source: &'a str) -> Tagged<String> {
self.span.slice(source).to_string().tagged(self)
}
}
#[allow(unused)]
pub fn tag_for_tagged_list(mut iter: impl Iterator<Item = Tag>) -> Tag {
let first = iter.next();
let first = match first {
None => return Tag::unknown(),
Some(first) => first,
};
let last = iter.last();
match last {
None => first,
Some(last) => first.until(last),
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Ord, PartialOrd, Serialize, Deserialize, Hash)]
pub struct Span {
pub(crate) start: usize,
pub(crate) end: usize,
start: usize,
end: usize,
}
impl From<Option<Span>> for Span {
fn from(input: Option<Span>) -> Span {
match input {
None => Span { start: 0, end: 0 },
None => Span::new(0, 0),
Some(span) => span,
}
}
@ -295,18 +379,63 @@ impl From<Option<Span>> for Span {
impl Span {
pub fn unknown() -> Span {
Span { start: 0, end: 0 }
Span::new(0, 0)
}
/*
pub fn unknown_with_uuid(uuid: Uuid) -> Span {
pub fn new(start: usize, end: usize) -> Span {
assert!(
end >= start,
"Can't create a Span whose end < start, start={}, end={}",
start,
end
);
Span { start, end }
}
pub fn for_char(pos: usize) -> Span {
Span {
start: 0,
end: 0,
source: Some(uuid),
start: pos,
end: pos + 1,
}
}
*/
pub fn until(&self, other: impl Into<Span>) -> Span {
let other = other.into();
Span::new(self.start, other.end)
}
pub fn until_option(&self, other: Option<impl Into<Span>>) -> Span {
match other {
Some(other) => {
let other = other.into();
Span::new(self.start, other.end)
}
None => *self,
}
}
pub fn string<'a>(&self, source: &'a str) -> String {
self.slice(source).to_string()
}
pub fn spanned_slice<'a>(&self, source: &'a str) -> Spanned<&'a str> {
self.slice(source).spanned(*self)
}
pub fn spanned_string<'a>(&self, source: &'a str) -> Spanned<String> {
self.slice(source).to_string().spanned(*self)
}
pub fn start(&self) -> usize {
self.start
}
pub fn end(&self) -> usize {
self.end
}
pub fn is_unknown(&self) -> bool {
self.start == 0 && self.end == 0
@ -319,17 +448,11 @@ impl Span {
impl language_reporting::ReportingSpan for Span {
fn with_start(&self, start: usize) -> Self {
Span {
start,
end: self.end,
}
Span::new(start, self.end)
}
fn with_end(&self, end: usize) -> Self {
Span {
start: self.start,
end,
}
Span::new(self.start, end)
}
fn start(&self) -> usize {
@ -341,32 +464,133 @@ impl language_reporting::ReportingSpan for Span {
}
}
impl language_reporting::ReportingSpan for Tag {
fn with_start(&self, start: usize) -> Self {
Tag {
span: Span {
start,
end: self.span.end,
},
anchor: self.anchor,
}
}
pub trait HasSpan: ToDebug {
fn span(&self) -> Span;
}
fn with_end(&self, end: usize) -> Self {
Tag {
span: Span {
start: self.span.start,
end,
},
anchor: self.anchor,
}
}
pub trait HasFallibleSpan: ToDebug {
fn maybe_span(&self) -> Option<Span>;
}
fn start(&self) -> usize {
self.span.start
}
fn end(&self) -> usize {
self.span.end
impl<T: HasSpan> HasFallibleSpan for T {
fn maybe_span(&self) -> Option<Span> {
Some(HasSpan::span(self))
}
}
impl<T> HasSpan for Spanned<T>
where
Spanned<T>: ToDebug,
{
fn span(&self) -> Span {
self.span
}
}
impl HasFallibleSpan for Option<Span> {
fn maybe_span(&self) -> Option<Span> {
*self
}
}
impl FormatDebug for Option<Span> {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
match self {
Option::None => write!(f, "no span"),
Option::Some(span) => FormatDebug::fmt_debug(span, f, source),
}
}
}
impl FormatDebug for Span {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
write!(f, "{:?}", self.slice(source))
}
}
impl HasSpan for Span {
fn span(&self) -> Span {
*self
}
}
impl<T> FormatDebug for Option<Spanned<T>>
where
Spanned<T>: ToDebug,
{
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
match self {
Option::None => write!(f, "nothing"),
Option::Some(spanned) => FormatDebug::fmt_debug(spanned, f, source),
}
}
}
impl<T> HasFallibleSpan for Option<Spanned<T>>
where
Spanned<T>: ToDebug,
{
fn maybe_span(&self) -> Option<Span> {
match self {
None => None,
Some(value) => Some(value.span),
}
}
}
impl<T> FormatDebug for Option<Tagged<T>>
where
Tagged<T>: ToDebug,
{
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
match self {
Option::None => write!(f, "nothing"),
Option::Some(item) => FormatDebug::fmt_debug(item, f, source),
}
}
}
impl<T> HasFallibleSpan for Option<Tagged<T>>
where
Tagged<T>: ToDebug,
{
fn maybe_span(&self) -> Option<Span> {
match self {
None => None,
Some(value) => Some(value.tag.span),
}
}
}
impl<T> HasSpan for Tagged<T>
where
Tagged<T>: ToDebug,
{
fn span(&self) -> Span {
self.tag.span
}
}
impl<T: ToDebug> FormatDebug for Vec<T> {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
write!(f, "[ ")?;
write!(
f,
"{}",
self.iter().map(|item| item.debug(source)).join(" ")
)?;
write!(f, " ]")
}
}
impl FormatDebug for String {
fn fmt_debug(&self, f: &mut DebugFormatter, _source: &str) -> fmt::Result {
write!(f, "{}", self)
}
}
impl FormatDebug for Spanned<String> {
fn fmt_debug(&self, f: &mut DebugFormatter, _source: &str) -> fmt::Result {
write!(f, "{}", self.item)
}
}

View File

@ -54,7 +54,7 @@ impl ExtractType for i64 {
&Tagged {
item: Value::Primitive(Primitive::Int(int)),
..
} => Ok(int.tagged(value.tag).coerce_into("converting to i64")?),
} => Ok(int.tagged(&value.tag).coerce_into("converting to i64")?),
other => Err(ShellError::type_error("Integer", other.tagged_type_name())),
}
}
@ -68,7 +68,7 @@ impl ExtractType for u64 {
&Tagged {
item: Value::Primitive(Primitive::Int(int)),
..
} => Ok(int.tagged(value.tag).coerce_into("converting to u64")?),
} => Ok(int.tagged(&value.tag).coerce_into("converting to u64")?),
other => Err(ShellError::type_error("Integer", other.tagged_type_name())),
}
}

View File

@ -1,5 +1,6 @@
use crate::prelude::*;
use crate::parser::parse::parser::TracableContext;
use ansi_term::Color;
use derive_new::new;
use language_reporting::{Diagnostic, Label, Severity};
@ -13,12 +14,96 @@ pub enum Description {
}
impl Description {
fn into_label(self) -> Result<Label<Tag>, String> {
fn into_label(self) -> Result<Label<Span>, String> {
match self {
Description::Source(s) => Ok(Label::new_primary(s.tag()).with_message(s.item)),
Description::Source(s) => Ok(Label::new_primary(s.span()).with_message(s.item)),
Description::Synthetic(s) => Err(s),
}
}
#[allow(unused)]
fn tag(&self) -> Tag {
match self {
Description::Source(tagged) => tagged.tag.clone(),
Description::Synthetic(_) => Tag::unknown(),
}
}
}
#[derive(Debug, Clone)]
pub enum ParseErrorReason {
Eof {
expected: &'static str,
},
Mismatch {
expected: &'static str,
actual: Tagged<String>,
},
ArgumentError {
command: String,
error: ArgumentError,
tag: Tag,
},
}
#[derive(Debug, Clone)]
pub struct ParseError {
reason: ParseErrorReason,
tag: Tag,
}
impl ParseError {
pub fn unexpected_eof(expected: &'static str, span: Span) -> ParseError {
ParseError {
reason: ParseErrorReason::Eof { expected },
tag: span.into(),
}
}
pub fn mismatch(expected: &'static str, actual: Tagged<impl Into<String>>) -> ParseError {
let Tagged { tag, item } = actual;
ParseError {
reason: ParseErrorReason::Mismatch {
expected,
actual: item.into().tagged(tag.clone()),
},
tag,
}
}
pub fn argument_error(
command: impl Into<String>,
kind: ArgumentError,
tag: impl Into<Tag>,
) -> ParseError {
let tag = tag.into();
ParseError {
reason: ParseErrorReason::ArgumentError {
command: command.into(),
error: kind,
tag: tag.clone(),
},
tag: tag.clone(),
}
}
}
impl From<ParseError> for ShellError {
fn from(error: ParseError) -> ShellError {
match error.reason {
ParseErrorReason::Eof { expected } => ShellError::unexpected_eof(expected, error.tag),
ParseErrorReason::Mismatch { actual, expected } => {
ShellError::type_error(expected, actual.clone())
}
ParseErrorReason::ArgumentError {
command,
error,
tag,
} => ShellError::argument_error(command, error, tag),
}
}
}
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)]
@ -35,8 +120,15 @@ pub struct ShellError {
cause: Option<Box<ProximateShellError>>,
}
impl ToDebug for ShellError {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
impl ShellError {
#[allow(unused)]
pub(crate) fn tag(&self) -> Option<Tag> {
self.error.tag()
}
}
impl FormatDebug for ShellError {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
self.error.fmt_debug(f, source)
}
}
@ -46,12 +138,12 @@ impl serde::de::Error for ShellError {
where
T: std::fmt::Display,
{
ShellError::string(msg.to_string())
ShellError::untagged_runtime_error(msg.to_string())
}
}
impl ShellError {
pub(crate) fn type_error(
pub fn type_error(
expected: impl Into<String>,
actual: Tagged<impl Into<String>>,
) -> ShellError {
@ -62,6 +154,21 @@ impl ShellError {
.start()
}
pub fn untagged_runtime_error(error: impl Into<String>) -> ShellError {
ProximateShellError::UntaggedRuntimeError {
reason: error.into(),
}
.start()
}
pub(crate) fn unexpected_eof(expected: impl Into<String>, tag: impl Into<Tag>) -> ShellError {
ProximateShellError::UnexpectedEof {
expected: expected.into(),
tag: tag.into(),
}
.start()
}
pub(crate) fn range_error(
expected: impl Into<ExpectedRange>,
actual: &Tagged<impl fmt::Debug>,
@ -69,7 +176,7 @@ impl ShellError {
) -> ShellError {
ProximateShellError::RangeError {
kind: expected.into(),
actual_kind: actual.copy_tag(format!("{:?}", actual.item)),
actual_kind: format!("{:?}", actual.item).tagged(actual.tag()),
operation,
}
.start()
@ -82,6 +189,7 @@ impl ShellError {
.start()
}
#[allow(unused)]
pub(crate) fn invalid_command(problem: impl Into<Tag>) -> ShellError {
ProximateShellError::InvalidCommand {
command: problem.into(),
@ -111,29 +219,19 @@ impl ShellError {
pub(crate) fn argument_error(
command: impl Into<String>,
kind: ArgumentError,
tag: Tag,
tag: impl Into<Tag>,
) -> ShellError {
ProximateShellError::ArgumentError {
command: command.into(),
error: kind,
tag,
}
.start()
}
pub(crate) fn invalid_external_word(tag: Tag) -> ShellError {
ProximateShellError::ArgumentError {
command: "Invalid argument to Nu command (did you mean to call an external command?)"
.into(),
error: ArgumentError::InvalidExternalWord,
tag,
tag: tag.into(),
}
.start()
}
pub(crate) fn parse_error(
error: nom::Err<(
nom_locate::LocatedSpanEx<&str, uuid::Uuid>,
nom_locate::LocatedSpanEx<&str, TracableContext>,
nom::error::ErrorKind,
)>,
) -> ShellError {
@ -151,25 +249,22 @@ impl ShellError {
}
nom::Err::Failure(span) | nom::Err::Error(span) => {
let diagnostic = Diagnostic::new(Severity::Error, format!("Parse Error"))
.with_label(Label::new_primary(Tag::from(span.0)));
.with_label(Label::new_primary(Span::from(span.0)));
ShellError::diagnostic(diagnostic)
}
}
}
pub(crate) fn diagnostic(diagnostic: Diagnostic<Tag>) -> ShellError {
pub(crate) fn diagnostic(diagnostic: Diagnostic<Span>) -> ShellError {
ProximateShellError::Diagnostic(ShellDiagnostic { diagnostic }).start()
}
pub(crate) fn to_diagnostic(self) -> Diagnostic<Tag> {
pub(crate) fn to_diagnostic(self) -> Diagnostic<Span> {
match self.error {
ProximateShellError::String(StringError { title, .. }) => {
Diagnostic::new(Severity::Error, title)
}
ProximateShellError::InvalidCommand { command } => {
Diagnostic::new(Severity::Error, "Invalid command")
.with_label(Label::new_primary(command))
.with_label(Label::new_primary(command.span))
}
ProximateShellError::MissingValue { tag, reason } => {
let mut d = Diagnostic::new(
@ -178,7 +273,7 @@ impl ShellError {
);
if let Some(tag) = tag {
d = d.with_label(Label::new_primary(tag));
d = d.with_label(Label::new_primary(tag.span));
}
d
@ -191,7 +286,7 @@ impl ShellError {
ArgumentError::InvalidExternalWord => Diagnostic::new(
Severity::Error,
format!("Invalid bare word for Nu command (did you intend to invoke an external command?)"))
.with_label(Label::new_primary(tag)),
.with_label(Label::new_primary(tag.span)),
ArgumentError::MissingMandatoryFlag(name) => Diagnostic::new(
Severity::Error,
format!(
@ -201,7 +296,7 @@ impl ShellError {
Color::Black.bold().paint(name)
),
)
.with_label(Label::new_primary(tag)),
.with_label(Label::new_primary(tag.span)),
ArgumentError::MissingMandatoryPositional(name) => Diagnostic::new(
Severity::Error,
format!(
@ -211,7 +306,7 @@ impl ShellError {
),
)
.with_label(
Label::new_primary(tag).with_message(format!("requires {} parameter", name)),
Label::new_primary(tag.span).with_message(format!("requires {} parameter", name)),
),
ArgumentError::MissingValueForName(name) => Diagnostic::new(
Severity::Error,
@ -222,7 +317,7 @@ impl ShellError {
Color::Black.bold().paint(name)
),
)
.with_label(Label::new_primary(tag)),
.with_label(Label::new_primary(tag.span)),
},
ProximateShellError::TypeError {
expected,
@ -232,10 +327,9 @@ impl ShellError {
tag,
},
} => Diagnostic::new(Severity::Error, "Type Error").with_label(
Label::new_primary(tag)
Label::new_primary(tag.span)
.with_message(format!("Expected {}, found {}", expected, actual)),
),
ProximateShellError::TypeError {
expected,
actual:
@ -244,7 +338,12 @@ impl ShellError {
tag
},
} => Diagnostic::new(Severity::Error, "Type Error")
.with_label(Label::new_primary(tag).with_message(expected)),
.with_label(Label::new_primary(tag.span).with_message(expected)),
ProximateShellError::UnexpectedEof {
expected, tag
} => Diagnostic::new(Severity::Error, format!("Unexpected end of input"))
.with_label(Label::new_primary(tag.span).with_message(format!("Expected {}", expected))),
ProximateShellError::RangeError {
kind,
@ -255,7 +354,7 @@ impl ShellError {
tag
},
} => Diagnostic::new(Severity::Error, "Range Error").with_label(
Label::new_primary(tag).with_message(format!(
Label::new_primary(tag.span).with_message(format!(
"Expected to convert {} to {} while {}, but it was out of range",
item,
kind.desc(),
@ -267,12 +366,12 @@ impl ShellError {
problem:
Tagged {
tag,
..
item
},
} => Diagnostic::new(Severity::Error, "Syntax Error")
.with_label(Label::new_primary(tag).with_message("Unexpected external command")),
.with_label(Label::new_primary(tag.span).with_message(item)),
ProximateShellError::MissingProperty { subpath, expr } => {
ProximateShellError::MissingProperty { subpath, expr, .. } => {
let subpath = subpath.into_label();
let expr = expr.into_label();
@ -293,9 +392,11 @@ impl ShellError {
ProximateShellError::Diagnostic(diag) => diag.diagnostic,
ProximateShellError::CoerceError { left, right } => {
Diagnostic::new(Severity::Error, "Coercion error")
.with_label(Label::new_primary(left.tag()).with_message(left.item))
.with_label(Label::new_secondary(right.tag()).with_message(right.item))
.with_label(Label::new_primary(left.tag().span).with_message(left.item))
.with_label(Label::new_secondary(right.tag().span).with_message(right.item))
}
ProximateShellError::UntaggedRuntimeError { reason } => Diagnostic::new(Severity::Error, format!("Error: {}", reason))
}
}
@ -306,7 +407,7 @@ impl ShellError {
) -> ShellError {
ShellError::diagnostic(
Diagnostic::new(Severity::Error, msg.into())
.with_label(Label::new_primary(tag.into()).with_message(label.into())),
.with_label(Label::new_primary(tag.into().span).with_message(label.into())),
)
}
@ -320,25 +421,29 @@ impl ShellError {
ShellError::diagnostic(
Diagnostic::new_error(msg.into())
.with_label(
Label::new_primary(primary_span.into()).with_message(primary_label.into()),
Label::new_primary(primary_span.into().span).with_message(primary_label.into()),
)
.with_label(
Label::new_secondary(secondary_span.into())
Label::new_secondary(secondary_span.into().span)
.with_message(secondary_label.into()),
),
)
}
pub fn string(title: impl Into<String>) -> ShellError {
ProximateShellError::String(StringError::new(title.into(), Value::nothing())).start()
}
// pub fn string(title: impl Into<String>) -> ShellError {
// ProximateShellError::String(StringError::new(title.into(), String::new())).start()
// }
//
// pub(crate) fn unreachable(title: impl Into<String>) -> ShellError {
// ShellError::untagged_runtime_error(&format!("BUG: Unreachable: {}", title.into()))
// }
pub(crate) fn unimplemented(title: impl Into<String>) -> ShellError {
ShellError::string(&format!("Unimplemented: {}", title.into()))
ShellError::untagged_runtime_error(&format!("Unimplemented: {}", title.into()))
}
pub(crate) fn unexpected(title: impl Into<String>) -> ShellError {
ShellError::string(&format!("Unexpected: {}", title.into()))
ShellError::untagged_runtime_error(&format!("Unexpected: {}", title.into()))
}
}
@ -383,10 +488,13 @@ impl ExpectedRange {
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Serialize, Deserialize)]
pub enum ProximateShellError {
String(StringError),
SyntaxError {
problem: Tagged<String>,
},
UnexpectedEof {
expected: String,
tag: Tag,
},
InvalidCommand {
command: Tag,
},
@ -397,6 +505,7 @@ pub enum ProximateShellError {
MissingProperty {
subpath: Description,
expr: Description,
tag: Tag,
},
MissingValue {
tag: Option<Tag>,
@ -417,6 +526,9 @@ pub enum ProximateShellError {
left: Tagged<String>,
right: Tagged<String>,
},
UntaggedRuntimeError {
reason: String,
},
}
impl ProximateShellError {
@ -426,10 +538,26 @@ impl ProximateShellError {
error: self,
}
}
pub(crate) fn tag(&self) -> Option<Tag> {
Some(match self {
ProximateShellError::SyntaxError { problem } => problem.tag(),
ProximateShellError::UnexpectedEof { tag, .. } => tag.clone(),
ProximateShellError::InvalidCommand { command } => command.clone(),
ProximateShellError::TypeError { actual, .. } => actual.tag.clone(),
ProximateShellError::MissingProperty { tag, .. } => tag.clone(),
ProximateShellError::MissingValue { tag, .. } => return tag.clone(),
ProximateShellError::ArgumentError { tag, .. } => tag.clone(),
ProximateShellError::RangeError { actual_kind, .. } => actual_kind.tag.clone(),
ProximateShellError::Diagnostic(..) => return None,
ProximateShellError::UntaggedRuntimeError { .. } => return None,
ProximateShellError::CoerceError { left, right } => left.tag.until(&right.tag),
})
}
}
impl ToDebug for ProximateShellError {
fn fmt_debug(&self, f: &mut fmt::Formatter, _source: &str) -> fmt::Result {
impl FormatDebug for ProximateShellError {
fn fmt_debug(&self, f: &mut DebugFormatter, _source: &str) -> fmt::Result {
// TODO: Custom debug for inner spans
write!(f, "{:?}", self)
}
@ -437,7 +565,7 @@ impl ToDebug for ProximateShellError {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ShellDiagnostic {
pub(crate) diagnostic: Diagnostic<Tag>,
pub(crate) diagnostic: Diagnostic<Span>,
}
impl PartialEq for ShellDiagnostic {
@ -463,22 +591,23 @@ impl std::cmp::Ord for ShellDiagnostic {
#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, new, Clone, Serialize, Deserialize)]
pub struct StringError {
title: String,
error: Value,
error: String,
}
impl std::fmt::Display for ShellError {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match &self.error {
ProximateShellError::String(s) => write!(f, "{}", &s.title),
ProximateShellError::MissingValue { .. } => write!(f, "MissingValue"),
ProximateShellError::InvalidCommand { .. } => write!(f, "InvalidCommand"),
ProximateShellError::TypeError { .. } => write!(f, "TypeError"),
ProximateShellError::UnexpectedEof { .. } => write!(f, "UnexpectedEof"),
ProximateShellError::RangeError { .. } => write!(f, "RangeError"),
ProximateShellError::SyntaxError { .. } => write!(f, "SyntaxError"),
ProximateShellError::MissingProperty { .. } => write!(f, "MissingProperty"),
ProximateShellError::ArgumentError { .. } => write!(f, "ArgumentError"),
ProximateShellError::Diagnostic(_) => write!(f, "<diagnostic>"),
ProximateShellError::CoerceError { .. } => write!(f, "CoerceError"),
ProximateShellError::UntaggedRuntimeError { .. } => write!(f, "UntaggedRuntimeError"),
}
}
}
@ -487,71 +616,43 @@ impl std::error::Error for ShellError {}
impl std::convert::From<Box<dyn std::error::Error>> for ShellError {
fn from(input: Box<dyn std::error::Error>) -> ShellError {
ProximateShellError::String(StringError {
title: format!("{}", input),
error: Value::nothing(),
})
.start()
ShellError::untagged_runtime_error(format!("{}", input))
}
}
impl std::convert::From<std::io::Error> for ShellError {
fn from(input: std::io::Error) -> ShellError {
ProximateShellError::String(StringError {
title: format!("{}", input),
error: Value::nothing(),
})
.start()
ShellError::untagged_runtime_error(format!("{}", input))
}
}
impl std::convert::From<subprocess::PopenError> for ShellError {
fn from(input: subprocess::PopenError) -> ShellError {
ProximateShellError::String(StringError {
title: format!("{}", input),
error: Value::nothing(),
})
.start()
ShellError::untagged_runtime_error(format!("{}", input))
}
}
impl std::convert::From<serde_yaml::Error> for ShellError {
fn from(input: serde_yaml::Error) -> ShellError {
ProximateShellError::String(StringError {
title: format!("{:?}", input),
error: Value::nothing(),
})
.start()
ShellError::untagged_runtime_error(format!("{:?}", input))
}
}
impl std::convert::From<toml::ser::Error> for ShellError {
fn from(input: toml::ser::Error) -> ShellError {
ProximateShellError::String(StringError {
title: format!("{:?}", input),
error: Value::nothing(),
})
.start()
ShellError::untagged_runtime_error(format!("{:?}", input))
}
}
impl std::convert::From<serde_json::Error> for ShellError {
fn from(input: serde_json::Error) -> ShellError {
ProximateShellError::String(StringError {
title: format!("{:?}", input),
error: Value::nothing(),
})
.start()
ShellError::untagged_runtime_error(format!("{:?}", input))
}
}
impl std::convert::From<Box<dyn std::error::Error + Send + Sync>> for ShellError {
fn from(input: Box<dyn std::error::Error + Send + Sync>) -> ShellError {
ProximateShellError::String(StringError {
title: format!("{:?}", input),
error: Value::nothing(),
})
.start()
ShellError::untagged_runtime_error(format!("{:?}", input))
}
}
@ -567,7 +668,6 @@ impl<T> ShellErrorUtils<Tagged<T>> for Option<Tagged<T>> {
}
}
}
pub trait CoerceInto<U> {
fn coerce_into(self, operation: impl Into<String>) -> Result<U, ShellError>;
}

View File

@ -5,8 +5,11 @@ use crate::parser::{
CommandRegistry, Text,
};
use crate::prelude::*;
use crate::TaggedDictBuilder;
use derive_new::new;
use indexmap::IndexMap;
use log::trace;
use std::fmt;
#[derive(new)]
pub struct Scope {
@ -15,6 +18,15 @@ pub struct Scope {
vars: IndexMap<String, Tagged<Value>>,
}
impl fmt::Display for Scope {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_map()
.entry(&"$it", &format!("{:?}", self.it.item))
.entries(self.vars.iter().map(|(k, v)| (k, &v.item)))
.finish()
}
}
impl Scope {
pub(crate) fn empty() -> Scope {
Scope {
@ -37,28 +49,41 @@ pub(crate) fn evaluate_baseline_expr(
scope: &Scope,
source: &Text,
) -> Result<Tagged<Value>, ShellError> {
let tag = Tag {
span: expr.span,
anchor: None,
};
match &expr.item {
RawExpression::Literal(literal) => Ok(evaluate_literal(expr.copy_tag(literal), source)),
RawExpression::Literal(literal) => Ok(evaluate_literal(literal.tagged(tag), source)),
RawExpression::ExternalWord => Err(ShellError::argument_error(
"Invalid external word",
ArgumentError::InvalidExternalWord,
expr.tag(),
tag,
)),
RawExpression::FilePath(path) => Ok(Value::path(path.clone()).tagged(expr.tag())),
RawExpression::FilePath(path) => Ok(Value::path(path.clone()).tagged(tag)),
RawExpression::Synthetic(hir::Synthetic::String(s)) => {
Ok(Value::string(s).tagged_unknown())
}
RawExpression::Variable(var) => evaluate_reference(var, scope, source),
RawExpression::Variable(var) => evaluate_reference(var, scope, source, tag),
RawExpression::Command(_) => evaluate_command(tag, scope, source),
RawExpression::ExternalCommand(external) => evaluate_external(external, scope, source),
RawExpression::Binary(binary) => {
let left = evaluate_baseline_expr(binary.left(), registry, scope, source)?;
let right = evaluate_baseline_expr(binary.right(), registry, scope, source)?;
trace!("left={:?} right={:?}", left.item, right.item);
match left.compare(binary.op(), &*right) {
Ok(result) => Ok(Value::boolean(result).tagged(expr.tag())),
Ok(result) => Ok(Value::boolean(result).tagged(tag)),
Err((left_type, right_type)) => Err(ShellError::coerce_error(
binary.left().copy_tag(left_type),
binary.right().copy_tag(right_type),
left_type.tagged(Tag {
span: binary.left().span,
anchor: None,
}),
right_type.tagged(Tag {
span: binary.right().span,
anchor: None,
}),
)),
}
}
@ -70,13 +95,10 @@ pub(crate) fn evaluate_baseline_expr(
exprs.push(expr);
}
Ok(Value::Table(exprs).tagged(expr.tag()))
Ok(Value::Table(exprs).tagged(tag))
}
RawExpression::Block(block) => {
Ok(
Value::Block(Block::new(block.clone(), source.clone(), expr.tag()))
.tagged(expr.tag()),
)
Ok(Value::Block(Block::new(block.clone(), source.clone(), tag.clone())).tagged(&tag))
}
RawExpression::Path(path) => {
let value = evaluate_baseline_expr(path.head(), registry, scope, source)?;
@ -96,19 +118,27 @@ pub(crate) fn evaluate_baseline_expr(
possible_matches.sort();
return Err(ShellError::labeled_error(
"Unknown column",
format!("did you mean '{}'?", possible_matches[0].1),
expr.tag(),
));
if possible_matches.len() > 0 {
return Err(ShellError::labeled_error(
"Unknown column",
format!("did you mean '{}'?", possible_matches[0].1),
&tag,
));
} else {
return Err(ShellError::labeled_error(
"Unknown column",
"row does not have this column",
&tag,
));
}
}
Some(next) => {
item = next.clone().item.tagged(expr.tag());
item = next.clone().item.tagged(&tag);
}
};
}
Ok(item.item().clone().tagged(expr.tag()))
Ok(item.item().clone().tagged(tag))
}
RawExpression::Boolean(_boolean) => unimplemented!(),
}
@ -119,7 +149,7 @@ fn evaluate_literal(literal: Tagged<&hir::Literal>, source: &Text) -> Tagged<Val
hir::Literal::Number(int) => int.into(),
hir::Literal::Size(int, unit) => unit.compute(int),
hir::Literal::String(tag) => Value::string(tag.slice(source)),
hir::Literal::GlobPattern => Value::pattern(literal.tag().slice(source)),
hir::Literal::GlobPattern(pattern) => Value::pattern(pattern),
hir::Literal::Bare => Value::string(literal.tag().slice(source)),
};
@ -130,14 +160,43 @@ fn evaluate_reference(
name: &hir::Variable,
scope: &Scope,
source: &Text,
tag: Tag,
) -> Result<Tagged<Value>, ShellError> {
trace!("Evaluating {} with Scope {}", name, scope);
match name {
hir::Variable::It(tag) => Ok(scope.it.item.clone().tagged(*tag)),
hir::Variable::Other(tag) => Ok(scope
.vars
.get(tag.slice(source))
.map(|v| v.clone())
.unwrap_or_else(|| Value::nothing().tagged(*tag))),
hir::Variable::It(_) => Ok(scope.it.item.clone().tagged(tag)),
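// Reserved names nu:env, nu:config and nu:path resolve to the environment variables, the loaded config,
// and the PATH entries respectively; any other name is looked up in the scope's variables.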
hir::Variable::Other(inner) => match inner.slice(source) {
x if x == "nu:env" => {
let mut dict = TaggedDictBuilder::new(&tag);
for v in std::env::vars() {
if v.0 != "PATH" && v.0 != "Path" {
dict.insert(v.0, Value::string(v.1));
}
}
Ok(dict.into_tagged_value())
}
x if x == "nu:config" => {
let config = crate::data::config::read(tag.clone(), &None)?;
Ok(Value::row(config).tagged(tag))
}
x if x == "nu:path" => {
let mut table = vec![];
match std::env::var_os("PATH") {
Some(paths) => {
for path in std::env::split_paths(&paths) {
table.push(Value::path(path).tagged(&tag));
}
}
_ => {}
}
Ok(Value::table(&table).tagged(tag))
}
x => Ok(scope
.vars
.get(x)
.map(|v| v.clone())
.unwrap_or_else(|| Value::nothing().tagged(tag))),
},
}
}
@ -150,3 +209,7 @@ fn evaluate_external(
"Unexpected external command".tagged(*external.name()),
))
}
fn evaluate_command(tag: Tag, _scope: &Scope, _source: &Text) -> Result<Tagged<Value>, ShellError> {
Err(ShellError::syntax_error("Unexpected command".tagged(tag)))
}

View File

@ -14,7 +14,7 @@ impl RenderView for GenericView<'_> {
match self.value {
Value::Primitive(p) => Ok(host.stdout(&p.format(None))),
Value::Table(l) => {
let view = TableView::from_list(l);
let view = TableView::from_list(l, 0);
if let Some(view) = view {
view.render_view(host)?;
@ -35,6 +35,8 @@ impl RenderView for GenericView<'_> {
view.render_view(host)?;
Ok(())
}
Value::Error(e) => Err(e.clone()),
}
}
}

View File

@ -23,18 +23,27 @@ enum TableMode {
impl TableView {
fn merge_descriptors(values: &[Tagged<Value>]) -> Vec<String> {
let mut ret = vec![];
let mut ret: Vec<String> = vec![];
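// Values that expose no named columns are collected under a synthetic "<value>" column.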
let value_column = "<value>".to_string();
for value in values {
for desc in value.data_descriptors() {
if !ret.contains(&desc) {
ret.push(desc);
let descs = value.data_descriptors();
if descs.len() == 0 {
if !ret.contains(&value_column) {
ret.push("<value>".to_string());
}
} else {
for desc in value.data_descriptors() {
if !ret.contains(&desc) {
ret.push(desc);
}
}
}
}
ret
}
pub fn from_list(values: &[Tagged<Value>]) -> Option<TableView> {
pub fn from_list(values: &[Tagged<Value>], starting_idx: usize) -> Option<TableView> {
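// starting_idx offsets the numeric index column so row numbering can continue across successive tables.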
if values.len() == 0 {
return None;
}
@ -42,33 +51,69 @@ impl TableView {
let mut headers = TableView::merge_descriptors(values);
if headers.len() == 0 {
headers.push("value".to_string());
headers.push("<value>".to_string());
}
let mut entries = vec![];
for (idx, value) in values.iter().enumerate() {
let mut row: Vec<(String, &'static str)> = match value {
Tagged {
item: Value::Row(..),
..
} => headers
.iter()
.enumerate()
.map(|(i, d)| {
let data = value.get_data(d);
return (
data.borrow().format_leaf(Some(&headers[i])),
data.borrow().style_leaf(),
);
})
.collect(),
x => vec![(x.format_leaf(None), x.style_leaf())],
};
// let mut row: Vec<(String, &'static str)> = match value {
// Tagged {
// item: Value::Row(..),
// ..
// } => headers
// .iter()
// .enumerate()
// .map(|(i, d)| {
// let data = value.get_data(d);
// return (
// data.borrow().format_leaf(Some(&headers[i])),
// data.borrow().style_leaf(),
// );
// })
// .collect(),
// x => vec![(x.format_leaf(None), x.style_leaf())],
// };
let mut row: Vec<(String, &'static str)> = headers
.iter()
.enumerate()
.map(|(i, d)| {
if d == "<value>" {
match value {
Tagged {
item: Value::Row(..),
..
} => (
Value::nothing().format_leaf(None),
Value::nothing().style_leaf(),
),
_ => (value.format_leaf(None), value.style_leaf()),
}
} else {
match value {
Tagged {
item: Value::Row(..),
..
} => {
let data = value.get_data(d);
(
data.borrow().format_leaf(Some(&headers[i])),
data.borrow().style_leaf(),
)
}
_ => (
Value::nothing().format_leaf(None),
Value::nothing().style_leaf(),
),
}
}
})
.collect();
if values.len() > 1 {
// Indices are black, bold, right-aligned:
row.insert(0, (format!("{}", idx.to_string()), "Fdbr"));
row.insert(0, (format!("{}", (starting_idx + idx).to_string()), "Fdbr"));
}
entries.push(row);

View File

@ -73,9 +73,7 @@ pub fn interactive_fuzzy_search(lines: &Vec<&str>, max_results: usize) -> Select
searchinput.pop();
selected = 0;
}
_ => {
// println!("OTHER InputEvent: {:?}", k);
}
_ => {}
},
_ => {}
}

View File

@ -1,4 +1,8 @@
#![recursion_limit = "512"]
#![recursion_limit = "1024"]
#[cfg(test)]
#[macro_use]
extern crate indexmap;
#[macro_use]
mod prelude;
@ -21,17 +25,21 @@ mod traits;
mod utils;
pub use crate::commands::command::{CallInfo, ReturnSuccess, ReturnValue};
pub use crate::context::{AnchorLocation, SourceMap};
pub use crate::context::AnchorLocation;
pub use crate::env::host::BasicHost;
pub use crate::parser::hir::SyntaxShape;
pub use crate::parser::parse::token_tree_builder::TokenTreeBuilder;
pub use crate::plugin::{serve_plugin, Plugin};
pub use crate::utils::{AbsoluteFile, AbsolutePath, RelativePath};
pub use crate::traits::{DebugFormatter, FormatDebug, ToDebug};
pub use crate::utils::{did_you_mean, AbsoluteFile, AbsolutePath, RelativePath};
pub use cli::cli;
pub use data::base::{Primitive, Value};
pub use data::config::{config_path, APP_INFO};
pub use data::dict::{Dictionary, TaggedDictBuilder};
pub use data::meta::{Tag, Tagged, TaggedItem};
pub use data::meta::{
tag_for_tagged_list, HasFallibleSpan, HasSpan, Span, Spanned, SpannedItem, Tag, Tagged,
TaggedItem,
};
pub use errors::{CoerceInto, ShellError};
pub use num_traits::cast::ToPrimitive;
pub use parser::parse::text::Text;

View File

@ -19,6 +19,12 @@ fn main() -> Result<(), Box<dyn Error>> {
.multiple(true)
.takes_value(true),
)
.arg(
Arg::with_name("debug")
.long("debug")
.multiple(true)
.takes_value(true),
)
.get_matches();
let loglevel = match matches.value_of("loglevel") {
@ -48,6 +54,15 @@ fn main() -> Result<(), Box<dyn Error>> {
}
}
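// Each --debug <module> value enables debug-level logging for the corresponding nu::<module> logger.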
match matches.values_of("debug") {
None => {}
Some(values) => {
for item in values {
builder.filter_module(&format!("nu::{}", item), LevelFilter::Debug);
}
}
}
builder.try_init()?;
futures::executor::block_on(nu::cli())?;

View File

@ -7,24 +7,23 @@ pub(crate) mod registry;
use crate::errors::ShellError;
pub(crate) use deserializer::ConfigDeserializer;
pub(crate) use hir::baseline_parse_tokens::baseline_parse_tokens;
pub(crate) use hir::syntax_shape::flat_shape::FlatShape;
pub(crate) use hir::TokensIterator;
pub(crate) use parse::call_node::CallNode;
pub(crate) use parse::files::Files;
pub(crate) use parse::flag::Flag;
pub(crate) use parse::flag::{Flag, FlagKind};
pub(crate) use parse::operator::Operator;
pub(crate) use parse::parser::{nom_input, pipeline};
pub(crate) use parse::pipeline::{Pipeline, PipelineElement};
pub(crate) use parse::text::Text;
pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, PathNode, TokenNode};
pub(crate) use parse::tokens::{RawToken, Token};
pub(crate) use parse::token_tree::{DelimitedNode, Delimiter, TokenNode};
pub(crate) use parse::tokens::{RawNumber, RawToken};
pub(crate) use parse::unit::Unit;
pub(crate) use parse_command::parse_command;
pub(crate) use registry::CommandRegistry;
pub fn parse(input: &str, anchor: uuid::Uuid) -> Result<TokenNode, ShellError> {
pub fn parse(input: &str) -> Result<TokenNode, ShellError> {
let _ = pretty_env_logger::try_init();
match pipeline(nom_input(input, anchor)) {
match pipeline(nom_input(input)) {
Ok((_rest, val)) => Ok(val),
Err(err) => Err(ShellError::parse_error(err)),
}

View File

@ -52,7 +52,7 @@ impl<'de> ConfigDeserializer<'de> {
self.stack.push(DeserializerItem {
key_struct_field: Some((name.to_string(), name)),
val: value.unwrap_or_else(|| Value::nothing().tagged(self.call.name_tag)),
val: value.unwrap_or_else(|| Value::nothing().tagged(&self.call.name_tag)),
});
Ok(())
@ -61,7 +61,7 @@ impl<'de> ConfigDeserializer<'de> {
pub fn top(&mut self) -> &DeserializerItem {
let value = self.stack.last();
trace!("inspecting top value :: {:?}", value);
value.expect("Can't get top elemant of an empty stack")
value.expect("Can't get top element of an empty stack")
}
pub fn pop(&mut self) -> DeserializerItem {
@ -310,9 +310,10 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> {
return Ok(r);
}
trace!(
"deserializing struct {:?} {:?} (stack={:?})",
"deserializing struct {:?} {:?} (saw_root={} stack={:?})",
name,
fields,
self.saw_root,
self.stack
);
@ -326,6 +327,12 @@ impl<'de, 'a> de::Deserializer<'de> for &'a mut ConfigDeserializer<'de> {
let type_name = std::any::type_name::<V::Value>();
let tagged_val_name = std::any::type_name::<Tagged<Value>>();
trace!(
"type_name={} tagged_val_name={}",
type_name,
tagged_val_name
);
if type_name == tagged_val_name {
return visit::<Tagged<Value>, _>(value.val, name, fields, visitor);
}
@ -479,8 +486,8 @@ mod tests {
// is unspecified and change is likely.
// This test makes sure that such change is detected
// by this test failing, and not things silently breaking.
// Specifically, we rely on this behaviour further above
// in the file to special case Tagged<Value> parsing.
// Specifically, we rely on this behavior further above
// in the file for the Tagged<Value> special case parsing.
let tuple = type_name::<()>();
let tagged_tuple = type_name::<Tagged<()>>();
let tagged_value = type_name::<Tagged<Value>>();

View File

@ -1,11 +1,13 @@
pub(crate) mod baseline_parse;
pub(crate) mod baseline_parse_tokens;
pub(crate) mod binary;
pub(crate) mod expand_external_tokens;
pub(crate) mod external_command;
pub(crate) mod named;
pub(crate) mod path;
pub(crate) mod syntax_shape;
pub(crate) mod tokens_iterator;
use crate::parser::{registry, Unit};
use crate::parser::{registry, Operator, Unit};
use crate::prelude::*;
use derive_new::new;
use getset::Getters;
@ -14,27 +16,17 @@ use std::fmt;
use std::path::PathBuf;
use crate::evaluate::Scope;
use crate::parser::parse::tokens::RawNumber;
use crate::traits::ToDebug;
pub(crate) use self::baseline_parse::{
baseline_parse_single_token, baseline_parse_token_as_number, baseline_parse_token_as_path,
baseline_parse_token_as_pattern, baseline_parse_token_as_string,
};
pub(crate) use self::baseline_parse_tokens::{baseline_parse_next_expr, TokensIterator};
pub(crate) use self::binary::Binary;
pub(crate) use self::external_command::ExternalCommand;
pub(crate) use self::named::NamedArguments;
pub(crate) use self::path::Path;
pub(crate) use self::syntax_shape::ExpandContext;
pub(crate) use self::tokens_iterator::TokensIterator;
pub use self::baseline_parse_tokens::SyntaxShape;
pub fn path(head: impl Into<Expression>, tail: Vec<Tagged<impl Into<String>>>) -> Path {
Path::new(
head.into(),
tail.into_iter()
.map(|item| item.map(|string| string.into()))
.collect(),
)
}
pub use self::syntax_shape::SyntaxShape;
#[derive(Debug, Clone, Eq, PartialEq, Getters, Serialize, Deserialize, new)]
pub struct Call {
@ -57,8 +49,8 @@ impl Call {
}
}
impl ToDebug for Call {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
impl FormatDebug for Call {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
write!(f, "({}", self.head.debug(source))?;
if let Some(positional) = &self.positional {
@ -93,6 +85,7 @@ pub enum RawExpression {
FilePath(PathBuf),
ExternalCommand(ExternalCommand),
Command(Span),
Boolean(bool),
}
@ -115,79 +108,158 @@ impl RawExpression {
match self {
RawExpression::Literal(literal) => literal.type_name(),
RawExpression::Synthetic(synthetic) => synthetic.type_name(),
RawExpression::ExternalWord => "externalword",
RawExpression::FilePath(..) => "filepath",
RawExpression::Command(..) => "command",
RawExpression::ExternalWord => "external word",
RawExpression::FilePath(..) => "file path",
RawExpression::Variable(..) => "variable",
RawExpression::List(..) => "list",
RawExpression::Binary(..) => "binary",
RawExpression::Block(..) => "block",
RawExpression::Path(..) => "path",
RawExpression::Path(..) => "variable path",
RawExpression::Boolean(..) => "boolean",
RawExpression::ExternalCommand(..) => "external",
}
}
}
pub type Expression = Tagged<RawExpression>;
pub type Expression = Spanned<RawExpression>;
impl std::fmt::Display for Expression {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let span = self.span;
match &self.item {
RawExpression::Literal(literal) => write!(f, "{}", literal.tagged(self.span)),
RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{}", s),
RawExpression::Command(_) => write!(f, "Command{{ {}..{} }}", span.start(), span.end()),
RawExpression::ExternalWord => {
write!(f, "ExternalWord{{ {}..{} }}", span.start(), span.end())
}
RawExpression::FilePath(file) => write!(f, "Path{{ {} }}", file.display()),
RawExpression::Variable(variable) => write!(f, "{}", variable),
RawExpression::List(list) => f
.debug_list()
.entries(list.iter().map(|e| format!("{}", e)))
.finish(),
RawExpression::Binary(binary) => write!(f, "{}", binary),
RawExpression::Block(items) => {
write!(f, "Block")?;
f.debug_set()
.entries(items.iter().map(|i| format!("{}", i)))
.finish()
}
RawExpression::Path(path) => write!(f, "{}", path),
RawExpression::Boolean(b) => write!(f, "${}", b),
RawExpression::ExternalCommand(..) => {
write!(f, "ExternalCommand{{ {}..{} }}", span.start(), span.end())
}
}
}
}
impl Expression {
pub(crate) fn number(i: impl Into<Number>, tag: impl Into<Tag>) -> Expression {
RawExpression::Literal(Literal::Number(i.into())).tagged(tag.into())
pub(crate) fn number(i: impl Into<Number>, span: impl Into<Span>) -> Expression {
RawExpression::Literal(Literal::Number(i.into())).spanned(span.into())
}
pub(crate) fn size(
i: impl Into<Number>,
unit: impl Into<Unit>,
tag: impl Into<Tag>,
span: impl Into<Span>,
) -> Expression {
RawExpression::Literal(Literal::Size(i.into(), unit.into())).tagged(tag.into())
RawExpression::Literal(Literal::Size(i.into(), unit.into())).spanned(span.into())
}
pub(crate) fn synthetic_string(s: impl Into<String>) -> Expression {
RawExpression::Synthetic(Synthetic::String(s.into())).tagged_unknown()
RawExpression::Synthetic(Synthetic::String(s.into())).spanned_unknown()
}
pub(crate) fn string(inner: impl Into<Tag>, outer: impl Into<Tag>) -> Expression {
RawExpression::Literal(Literal::String(inner.into())).tagged(outer.into())
pub(crate) fn string(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
RawExpression::Literal(Literal::String(inner.into())).spanned(outer.into())
}
pub(crate) fn file_path(path: impl Into<PathBuf>, outer: impl Into<Tag>) -> Expression {
RawExpression::FilePath(path.into()).tagged(outer)
pub(crate) fn path(
head: Expression,
tail: Vec<Spanned<impl Into<String>>>,
span: impl Into<Span>,
) -> Expression {
let tail = tail.into_iter().map(|t| t.map(|s| s.into())).collect();
RawExpression::Path(Box::new(Path::new(head, tail))).spanned(span.into())
}
pub(crate) fn bare(tag: impl Into<Tag>) -> Expression {
RawExpression::Literal(Literal::Bare).tagged(tag)
pub(crate) fn dot_member(head: Expression, next: Spanned<impl Into<String>>) -> Expression {
let Spanned { item, span } = head;
let new_span = span.until(next.span);
match item {
RawExpression::Path(path) => {
let (head, mut tail) = path.parts();
tail.push(next.map(|i| i.into()));
Expression::path(head, tail, new_span)
}
other => Expression::path(other.spanned(span), vec![next], new_span),
}
}
pub(crate) fn pattern(tag: impl Into<Tag>) -> Expression {
RawExpression::Literal(Literal::GlobPattern).tagged(tag.into())
pub(crate) fn infix(
left: Expression,
op: Spanned<impl Into<Operator>>,
right: Expression,
) -> Expression {
let new_span = left.span.until(right.span);
RawExpression::Binary(Box::new(Binary::new(left, op.map(|o| o.into()), right)))
.spanned(new_span)
}
pub(crate) fn variable(inner: impl Into<Tag>, outer: impl Into<Tag>) -> Expression {
RawExpression::Variable(Variable::Other(inner.into())).tagged(outer)
pub(crate) fn file_path(path: impl Into<PathBuf>, outer: impl Into<Span>) -> Expression {
RawExpression::FilePath(path.into()).spanned(outer)
}
pub(crate) fn external_command(inner: impl Into<Tag>, outer: impl Into<Tag>) -> Expression {
RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).tagged(outer)
pub(crate) fn list(list: Vec<Expression>, span: impl Into<Span>) -> Expression {
RawExpression::List(list).spanned(span)
}
pub(crate) fn it_variable(inner: impl Into<Tag>, outer: impl Into<Tag>) -> Expression {
RawExpression::Variable(Variable::It(inner.into())).tagged(outer)
pub(crate) fn bare(span: impl Into<Span>) -> Expression {
RawExpression::Literal(Literal::Bare).spanned(span)
}
pub(crate) fn pattern(inner: impl Into<String>, outer: impl Into<Span>) -> Expression {
RawExpression::Literal(Literal::GlobPattern(inner.into())).spanned(outer.into())
}
pub(crate) fn variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
RawExpression::Variable(Variable::Other(inner.into())).spanned(outer)
}
pub(crate) fn external_command(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
RawExpression::ExternalCommand(ExternalCommand::new(inner.into())).spanned(outer)
}
pub(crate) fn it_variable(inner: impl Into<Span>, outer: impl Into<Span>) -> Expression {
RawExpression::Variable(Variable::It(inner.into())).spanned(outer)
}
pub(crate) fn tagged_type_name(&self) -> Tagged<&'static str> {
self.item.type_name().tagged(self.span)
}
}
impl ToDebug for Expression {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
match self.item() {
RawExpression::Literal(l) => l.tagged(self.tag()).fmt_debug(f, source),
impl FormatDebug for Spanned<RawExpression> {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
match &self.item {
RawExpression::Literal(l) => l.spanned(self.span).fmt_debug(f, source),
RawExpression::FilePath(p) => write!(f, "{}", p.display()),
RawExpression::ExternalWord => write!(f, "{}", self.tag().slice(source)),
RawExpression::ExternalWord => write!(f, "{}", self.span.slice(source)),
RawExpression::Command(tag) => write!(f, "{}", tag.slice(source)),
RawExpression::Synthetic(Synthetic::String(s)) => write!(f, "{:?}", s),
RawExpression::Variable(Variable::It(_)) => write!(f, "$it"),
RawExpression::Variable(Variable::Other(s)) => write!(f, "${}", s.slice(source)),
RawExpression::Binary(b) => write!(f, "{}", b.debug(source)),
RawExpression::ExternalCommand(c) => write!(f, "^{}", c.name().slice(source)),
RawExpression::Block(exprs) => {
RawExpression::Block(exprs) => f.say_block("block", |f| {
write!(f, "{{ ")?;
for expr in exprs {
@ -195,8 +267,8 @@ impl ToDebug for Expression {
}
write!(f, "}}")
}
RawExpression::List(exprs) => {
}),
RawExpression::List(exprs) => f.say_block("list", |f| {
write!(f, "[ ")?;
for expr in exprs {
@ -204,7 +276,7 @@ impl ToDebug for Expression {
}
write!(f, "]")
}
}),
RawExpression::Path(p) => write!(f, "{}", p.debug(source)),
RawExpression::Boolean(true) => write!(f, "$yes"),
RawExpression::Boolean(false) => write!(f, "$no"),
@ -212,8 +284,8 @@ impl ToDebug for Expression {
}
}
impl From<Tagged<Path>> for Expression {
fn from(path: Tagged<Path>) -> Expression {
impl From<Spanned<Path>> for Expression {
fn from(path: Spanned<Path>) -> Expression {
path.map(|p| RawExpression::Path(Box::new(p)))
}
}
@ -227,19 +299,39 @@ impl From<Tagged<Path>> for Expression {
pub enum Literal {
Number(Number),
Size(Number, Unit),
String(Tag),
GlobPattern,
String(Span),
GlobPattern(String),
Bare,
}
impl ToDebug for Tagged<&Literal> {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
match self.item() {
Literal::Number(number) => write!(f, "{:?}", *number),
Literal::Size(number, unit) => write!(f, "{:?}{:?}", *number, unit),
Literal::String(tag) => write!(f, "{}", tag.slice(source)),
Literal::GlobPattern => write!(f, "{}", self.tag().slice(source)),
Literal::Bare => write!(f, "{}", self.tag().slice(source)),
impl std::fmt::Display for Tagged<Literal> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", Tagged::new(self.tag.clone(), &self.item))
}
}
impl std::fmt::Display for Tagged<&Literal> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let span = self.tag.span;
match &self.item {
Literal::Number(number) => write!(f, "{}", number),
Literal::Size(number, unit) => write!(f, "{}{}", number, unit.as_str()),
Literal::String(_) => write!(f, "String{{ {}..{} }}", span.start(), span.end()),
Literal::GlobPattern(_) => write!(f, "Glob{{ {}..{} }}", span.start(), span.end()),
Literal::Bare => write!(f, "Bare{{ {}..{} }}", span.start(), span.end()),
}
}
}
impl FormatDebug for Spanned<&Literal> {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
match self.item {
Literal::Number(..) => f.say_str("number", self.span.slice(source)),
Literal::Size(..) => f.say_str("size", self.span.slice(source)),
Literal::String(..) => f.say_str("string", self.span.slice(source)),
Literal::GlobPattern(..) => f.say_str("glob", self.span.slice(source)),
Literal::Bare => f.say_str("word", self.span.slice(source)),
}
}
}
@ -251,13 +343,28 @@ impl Literal {
Literal::Size(..) => "size",
Literal::String(..) => "string",
Literal::Bare => "string",
Literal::GlobPattern => "pattern",
Literal::GlobPattern(_) => "pattern",
}
}
}
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)]
pub enum Variable {
It(Tag),
Other(Tag),
It(Span),
Other(Span),
}
impl std::fmt::Display for Variable {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Variable::It(_) => write!(f, "$it"),
Variable::Other(span) => write!(f, "${{ {}..{} }}", span.start(), span.end()),
}
}
}
impl FormatDebug for Spanned<Variable> {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
write!(f, "{}", self.span.slice(source))
}
}
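To make the span arithmetic above concrete, here is a minimal standalone sketch of the Spanned<T> pattern: an item plus the source range it covers, with until joining two spans the way dot_member and infix do when they combine sub-expressions. Span, Spanned, and SpannedItem below are simplified stand-ins for illustration, not nu's real types.

// Simplified stand-ins for illustration only; not the real nu Span/Spanned types.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Span {
    start: usize,
    end: usize,
}

impl Span {
    fn new(start: usize, end: usize) -> Span {
        Span { start, end }
    }

    // Join two spans into one covering both, as `until` does above.
    fn until(self, other: Span) -> Span {
        Span::new(self.start, other.end)
    }

    fn slice<'a>(&self, source: &'a str) -> &'a str {
        &source[self.start..self.end]
    }
}

#[derive(Debug, Clone, PartialEq)]
struct Spanned<T> {
    item: T,
    span: Span,
}

trait SpannedItem: Sized {
    // Attach a span to any value, mirroring the `.spanned(...)` calls above.
    fn spanned(self, span: Span) -> Spanned<Self> {
        Spanned { item: self, span }
    }
}

impl<T> SpannedItem for T {}

fn main() {
    let source = "$it.cpu";
    let head = Span::new(0, 3); // `$it`
    let member = Span::new(4, 7); // `cpu`

    // A combined node covers everything from the head to the last member.
    let whole = "path".spanned(head.until(member));
    assert_eq!(whole.span.slice(source), "$it.cpu");
    println!("{:?}", whole);
}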

View File

@ -1,140 +1,2 @@
use crate::context::Context;
use crate::errors::ShellError;
use crate::parser::{hir, RawToken, Token};
use crate::TaggedItem;
use crate::Text;
use std::path::PathBuf;
pub fn baseline_parse_single_token(
token: &Token,
source: &Text,
) -> Result<hir::Expression, ShellError> {
Ok(match *token.item() {
RawToken::Number(number) => hir::Expression::number(number.to_number(source), token.tag()),
RawToken::Size(int, unit) => {
hir::Expression::size(int.to_number(source), unit, token.tag())
}
RawToken::String(tag) => hir::Expression::string(tag, token.tag()),
RawToken::Variable(tag) if tag.slice(source) == "it" => {
hir::Expression::it_variable(tag, token.tag())
}
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()),
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
RawToken::GlobPattern => hir::Expression::pattern(token.tag()),
RawToken::Bare => hir::Expression::bare(token.tag()),
})
}
pub fn baseline_parse_token_as_number(
token: &Token,
source: &Text,
) -> Result<hir::Expression, ShellError> {
Ok(match *token.item() {
RawToken::Variable(tag) if tag.slice(source) == "it" => {
hir::Expression::it_variable(tag, token.tag())
}
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()),
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
RawToken::Number(number) => hir::Expression::number(number.to_number(source), token.tag()),
RawToken::Size(number, unit) => {
hir::Expression::size(number.to_number(source), unit, token.tag())
}
RawToken::Bare => hir::Expression::bare(token.tag()),
RawToken::GlobPattern => {
return Err(ShellError::type_error(
"Number",
"glob pattern".to_string().tagged(token.tag()),
))
}
RawToken::String(tag) => hir::Expression::string(tag, token.tag()),
})
}
pub fn baseline_parse_token_as_string(
token: &Token,
source: &Text,
) -> Result<hir::Expression, ShellError> {
Ok(match *token.item() {
RawToken::Variable(tag) if tag.slice(source) == "it" => {
hir::Expression::it_variable(tag, token.tag())
}
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()),
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
RawToken::Number(_) => hir::Expression::bare(token.tag()),
RawToken::Size(_, _) => hir::Expression::bare(token.tag()),
RawToken::Bare => hir::Expression::bare(token.tag()),
RawToken::GlobPattern => {
return Err(ShellError::type_error(
"String",
"glob pattern".tagged(token.tag()),
))
}
RawToken::String(tag) => hir::Expression::string(tag, token.tag()),
})
}
pub fn baseline_parse_token_as_path(
token: &Token,
context: &Context,
source: &Text,
) -> Result<hir::Expression, ShellError> {
Ok(match *token.item() {
RawToken::Variable(tag) if tag.slice(source) == "it" => {
hir::Expression::it_variable(tag, token.tag())
}
RawToken::ExternalCommand(tag) => hir::Expression::external_command(tag, token.tag()),
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
RawToken::Number(_) => hir::Expression::bare(token.tag()),
RawToken::Size(_, _) => hir::Expression::bare(token.tag()),
RawToken::Bare => {
hir::Expression::file_path(expand_path(token.tag().slice(source), context), token.tag())
}
RawToken::GlobPattern => {
return Err(ShellError::type_error(
"Path",
"glob pattern".tagged(token.tag()),
))
}
RawToken::String(tag) => {
hir::Expression::file_path(expand_path(tag.slice(source), context), token.tag())
}
})
}
pub fn baseline_parse_token_as_pattern(
token: &Token,
context: &Context,
source: &Text,
) -> Result<hir::Expression, ShellError> {
Ok(match *token.item() {
RawToken::Variable(tag) if tag.slice(source) == "it" => {
hir::Expression::it_variable(tag, token.tag())
}
RawToken::ExternalCommand(_) => {
return Err(ShellError::syntax_error(
"Invalid external command".to_string().tagged(token.tag()),
))
}
RawToken::ExternalWord => return Err(ShellError::invalid_external_word(token.tag())),
RawToken::Variable(tag) => hir::Expression::variable(tag, token.tag()),
RawToken::Number(_) => hir::Expression::bare(token.tag()),
RawToken::Size(_, _) => hir::Expression::bare(token.tag()),
RawToken::GlobPattern => hir::Expression::pattern(token.tag()),
RawToken::Bare => {
hir::Expression::file_path(expand_path(token.tag().slice(source), context), token.tag())
}
RawToken::String(tag) => {
hir::Expression::file_path(expand_path(tag.slice(source), context), token.tag())
}
})
}
pub fn expand_path(string: &str, context: &Context) -> PathBuf {
let expanded = shellexpand::tilde_with_context(string, || context.shell_manager.homedir());
PathBuf::from(expanded.as_ref())
}
#[cfg(test)]
mod tests;

View File

@ -0,0 +1,120 @@
use crate::commands::classified::InternalCommand;
use crate::commands::ClassifiedCommand;
use crate::env::host::BasicHost;
use crate::parser::hir;
use crate::parser::hir::syntax_shape::*;
use crate::parser::hir::TokensIterator;
use crate::parser::parse::token_tree_builder::{CurriedToken, TokenTreeBuilder as b};
use crate::parser::TokenNode;
use crate::{HasSpan, Span, SpannedItem, Tag, Text};
use pretty_assertions::assert_eq;
use std::fmt::Debug;
#[test]
fn test_parse_string() {
parse_tokens(StringShape, vec![b::string("hello")], |tokens| {
hir::Expression::string(inner_string_span(tokens[0].span()), tokens[0].span())
});
}
#[test]
fn test_parse_path() {
parse_tokens(
VariablePathShape,
vec![b::var("it"), b::op("."), b::bare("cpu")],
|tokens| {
let (outer_var, inner_var) = tokens[0].expect_var();
let bare = tokens[2].expect_bare();
hir::Expression::path(
hir::Expression::it_variable(inner_var, outer_var),
vec!["cpu".spanned(bare)],
outer_var.until(bare),
)
},
);
parse_tokens(
VariablePathShape,
vec![
b::var("cpu"),
b::op("."),
b::bare("amount"),
b::op("."),
b::string("max ghz"),
],
|tokens| {
let (outer_var, inner_var) = tokens[0].expect_var();
let amount = tokens[2].expect_bare();
let (outer_max_ghz, _) = tokens[4].expect_string();
hir::Expression::path(
hir::Expression::variable(inner_var, outer_var),
vec!["amount".spanned(amount), "max ghz".spanned(outer_max_ghz)],
outer_var.until(outer_max_ghz),
)
},
);
}
#[test]
fn test_parse_command() {
parse_tokens(
ClassifiedCommandShape,
vec![b::bare("ls"), b::sp(), b::pattern("*.txt")],
|tokens| {
let bare = tokens[0].expect_bare();
let pat = tokens[2].expect_pattern();
eprintln!("{:?} {:?} {:?}", bare, pat, bare.until(pat));
ClassifiedCommand::Internal(InternalCommand::new(
"ls".to_string(),
Tag {
span: bare,
anchor: None,
},
hir::Call {
head: Box::new(hir::RawExpression::Command(bare).spanned(bare)),
positional: Some(vec![hir::Expression::pattern("*.txt", pat)]),
named: None,
}
.spanned(bare.until(pat)),
))
// hir::Expression::path(
// hir::Expression::variable(inner_var, outer_var),
// vec!["cpu".tagged(bare)],
// outer_var.until(bare),
// )
},
);
}
fn parse_tokens<T: Eq + HasSpan + Clone + Debug + 'static>(
shape: impl ExpandSyntax<Output = T>,
tokens: Vec<CurriedToken>,
expected: impl FnOnce(&[TokenNode]) -> T,
) {
let tokens = b::token_list(tokens);
let (tokens, source) = b::build(tokens);
ExpandContext::with_empty(&Text::from(source), |context| {
let tokens = tokens.expect_list();
let mut iterator = TokensIterator::all(tokens.item, tokens.span);
let expr = expand_syntax(&shape, &mut iterator, &context);
let expr = match expr {
Ok(expr) => expr,
Err(err) => {
crate::cli::print_err(err.into(), &BasicHost, context.source().clone());
panic!("Parse failed");
}
};
assert_eq!(expr, expected(tokens.item));
})
}
fn inner_string_span(span: Span) -> Span {
Span::new(span.start() + 1, span.end() - 1)
}
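The tests above build the expected expression from the same tokens they parse, and inner_string_span trims the surrounding quotes so a string literal's inner span excludes them while the outer span includes them. A tiny illustration of that inner/outer convention, using plain offsets instead of the real Span type:

// Plain-offset illustration of the inner/outer string span convention above.
fn inner_string_span(start: usize, end: usize) -> (usize, usize) {
    // Drop the opening and closing quote from the covered range.
    (start + 1, end - 1)
}

fn main() {
    let source = r#""hello""#; // the token text, quotes included
    let outer = (0, source.len()); // outer span: includes the quotes
    let inner = inner_string_span(outer.0, outer.1); // inner span: contents only
    assert_eq!(&source[inner.0..inner.1], "hello");
}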

View File

@ -1,459 +0,0 @@
use crate::context::Context;
use crate::errors::ShellError;
use crate::parser::{
hir,
hir::{
baseline_parse_single_token, baseline_parse_token_as_number, baseline_parse_token_as_path,
baseline_parse_token_as_pattern, baseline_parse_token_as_string,
},
DelimitedNode, Delimiter, PathNode, RawToken, TokenNode,
};
use crate::{Tag, Tagged, TaggedItem, Text};
use derive_new::new;
use log::trace;
use serde::{Deserialize, Serialize};
pub fn baseline_parse_tokens(
token_nodes: &mut TokensIterator<'_>,
context: &Context,
source: &Text,
syntax_type: SyntaxShape,
) -> Result<Vec<hir::Expression>, ShellError> {
let mut exprs: Vec<hir::Expression> = vec![];
loop {
if token_nodes.at_end() {
break;
}
let expr = baseline_parse_next_expr(token_nodes, context, source, syntax_type)?;
exprs.push(expr);
}
Ok(exprs)
}
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub enum SyntaxShape {
Any,
List,
Literal,
String,
Member,
Variable,
Number,
Path,
Pattern,
Binary,
Block,
Boolean,
}
impl std::fmt::Display for SyntaxShape {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self {
SyntaxShape::Any => write!(f, "Any"),
SyntaxShape::List => write!(f, "List"),
SyntaxShape::Literal => write!(f, "Literal"),
SyntaxShape::String => write!(f, "String"),
SyntaxShape::Member => write!(f, "Member"),
SyntaxShape::Variable => write!(f, "Variable"),
SyntaxShape::Number => write!(f, "Number"),
SyntaxShape::Path => write!(f, "Path"),
SyntaxShape::Pattern => write!(f, "Pattern"),
SyntaxShape::Binary => write!(f, "Binary"),
SyntaxShape::Block => write!(f, "Block"),
SyntaxShape::Boolean => write!(f, "Boolean"),
}
}
}
pub fn baseline_parse_next_expr(
tokens: &mut TokensIterator,
context: &Context,
source: &Text,
syntax_type: SyntaxShape,
) -> Result<hir::Expression, ShellError> {
let next = tokens
.next()
.ok_or_else(|| ShellError::string("Expected token, found none"))?;
trace!(target: "nu::parser::parse_one_expr", "syntax_type={:?}, token={:?}", syntax_type, next);
match (syntax_type, next) {
(SyntaxShape::Path, TokenNode::Token(token)) => {
return baseline_parse_token_as_path(token, context, source)
}
(SyntaxShape::Path, token) => {
return Err(ShellError::type_error(
"Path",
token.type_name().tagged(token.tag()),
))
}
(SyntaxShape::Pattern, TokenNode::Token(token)) => {
return baseline_parse_token_as_pattern(token, context, source)
}
(SyntaxShape::Pattern, token) => {
return Err(ShellError::type_error(
"Path",
token.type_name().tagged(token.tag()),
))
}
(SyntaxShape::String, TokenNode::Token(token)) => {
return baseline_parse_token_as_string(token, source);
}
(SyntaxShape::String, token) => {
return Err(ShellError::type_error(
"String",
token.type_name().tagged(token.tag()),
))
}
(SyntaxShape::Number, TokenNode::Token(token)) => {
return Ok(baseline_parse_token_as_number(token, source)?);
}
(SyntaxShape::Number, token) => {
return Err(ShellError::type_error(
"Numeric",
token.type_name().tagged(token.tag()),
))
}
// TODO: More legit member processing
(SyntaxShape::Member, TokenNode::Token(token)) => {
return baseline_parse_token_as_string(token, source);
}
(SyntaxShape::Member, token) => {
return Err(ShellError::type_error(
"member",
token.type_name().tagged(token.tag()),
))
}
(SyntaxShape::Any, _) => {}
(SyntaxShape::List, _) => {}
(SyntaxShape::Literal, _) => {}
(SyntaxShape::Variable, _) => {}
(SyntaxShape::Binary, _) => {}
(SyntaxShape::Block, _) => {}
(SyntaxShape::Boolean, _) => {}
};
let first = baseline_parse_semantic_token(next, context, source)?;
let possible_op = tokens.peek();
let op = match possible_op {
Some(TokenNode::Operator(op)) => op.clone(),
_ => return Ok(first),
};
tokens.next();
let second = match tokens.next() {
None => {
return Err(ShellError::labeled_error(
"Expected something after an operator",
"operator",
op.tag(),
))
}
Some(token) => baseline_parse_semantic_token(token, context, source)?,
};
// We definitely have a binary expression here -- let's see if we should coerce it into a block
match syntax_type {
SyntaxShape::Any => {
let tag = first.tag().until(second.tag());
let binary = hir::Binary::new(first, op, second);
let binary = hir::RawExpression::Binary(Box::new(binary));
let binary = binary.tagged(tag);
Ok(binary)
}
SyntaxShape::Block => {
let tag = first.tag().until(second.tag());
let path: Tagged<hir::RawExpression> = match first {
Tagged {
item: hir::RawExpression::Literal(hir::Literal::Bare),
tag,
} => {
let string = tag.slice(source).to_string().tagged(tag);
let path = hir::Path::new(
// TODO: Deal with synthetic nodes that have no representation at all in source
hir::RawExpression::Variable(hir::Variable::It(Tag::unknown()))
.tagged(Tag::unknown()),
vec![string],
);
let path = hir::RawExpression::Path(Box::new(path));
path.tagged(first.tag())
}
Tagged {
item: hir::RawExpression::Literal(hir::Literal::String(inner)),
tag,
} => {
let string = inner.slice(source).to_string().tagged(tag);
let path = hir::Path::new(
// TODO: Deal with synthetic nodes that have no representation at all in source
hir::RawExpression::Variable(hir::Variable::It(Tag::unknown()))
.tagged_unknown(),
vec![string],
);
let path = hir::RawExpression::Path(Box::new(path));
path.tagged(first.tag())
}
Tagged {
item: hir::RawExpression::Variable(..),
..
} => first,
Tagged { tag, item } => {
return Err(ShellError::labeled_error(
"The first part of an un-braced block must be a column name",
item.type_name(),
tag,
))
}
};
let binary = hir::Binary::new(path, op, second);
let binary = hir::RawExpression::Binary(Box::new(binary));
let binary = binary.tagged(tag);
let block = hir::RawExpression::Block(vec![binary]);
let block = block.tagged(tag);
Ok(block)
}
other => Err(ShellError::unimplemented(format!(
"coerce hint {:?}",
other
))),
}
}
pub fn baseline_parse_semantic_token(
token: &TokenNode,
context: &Context,
source: &Text,
) -> Result<hir::Expression, ShellError> {
match token {
TokenNode::Token(token) => baseline_parse_single_token(token, source),
TokenNode::Call(_call) => unimplemented!(),
TokenNode::Delimited(delimited) => baseline_parse_delimited(delimited, context, source),
TokenNode::Pipeline(_pipeline) => unimplemented!(),
TokenNode::Operator(op) => Err(ShellError::syntax_error(
"Unexpected operator".tagged(op.tag),
)),
TokenNode::Flag(flag) => Err(ShellError::syntax_error("Unexpected flag".tagged(flag.tag))),
TokenNode::Member(tag) => Err(ShellError::syntax_error(
"BUG: Top-level member".tagged(*tag),
)),
TokenNode::Whitespace(tag) => Err(ShellError::syntax_error(
"BUG: Whitespace found during parse".tagged(*tag),
)),
TokenNode::Error(error) => Err(*error.item.clone()),
TokenNode::Path(path) => baseline_parse_path(path, context, source),
}
}
pub fn baseline_parse_delimited(
token: &Tagged<DelimitedNode>,
context: &Context,
source: &Text,
) -> Result<hir::Expression, ShellError> {
match token.delimiter() {
Delimiter::Brace => {
let children = token.children();
let exprs = baseline_parse_tokens(
&mut TokensIterator::new(children),
context,
source,
SyntaxShape::Any,
)?;
let expr = hir::RawExpression::Block(exprs);
Ok(expr.tagged(token.tag()))
}
Delimiter::Paren => unimplemented!(),
Delimiter::Square => {
let children = token.children();
let exprs = baseline_parse_tokens(
&mut TokensIterator::new(children),
context,
source,
SyntaxShape::Any,
)?;
let expr = hir::RawExpression::List(exprs);
Ok(expr.tagged(token.tag()))
}
}
}
pub fn baseline_parse_path(
token: &Tagged<PathNode>,
context: &Context,
source: &Text,
) -> Result<hir::Expression, ShellError> {
let head = baseline_parse_semantic_token(token.head(), context, source)?;
let mut tail = vec![];
for part in token.tail() {
let string = match part {
TokenNode::Token(token) => match token.item() {
RawToken::Bare => token.tag().slice(source),
RawToken::String(tag) => tag.slice(source),
RawToken::Number(_)
| RawToken::Size(..)
| RawToken::Variable(_)
| RawToken::ExternalCommand(_)
| RawToken::GlobPattern
| RawToken::ExternalWord => {
return Err(ShellError::type_error(
"String",
token.type_name().tagged(part.tag()),
))
}
},
TokenNode::Member(tag) => tag.slice(source),
// TODO: Make this impossible
other => {
return Err(ShellError::syntax_error(
format!("{} in path", other.type_name()).tagged(other.tag()),
))
}
}
.to_string();
tail.push(string.tagged(part.tag()));
}
Ok(hir::path(head, tail).tagged(token.tag()).into())
}
#[derive(Debug, new)]
pub struct TokensIterator<'a> {
tokens: &'a [TokenNode],
#[new(default)]
index: usize,
#[new(default)]
seen: indexmap::IndexSet<usize>,
}
impl TokensIterator<'_> {
pub fn remove(&mut self, position: usize) {
self.seen.insert(position);
}
pub fn len(&self) -> usize {
self.tokens.len()
}
pub fn at_end(&self) -> bool {
for index in self.index..self.tokens.len() {
if !self.seen.contains(&index) {
return false;
}
}
true
}
pub fn advance(&mut self) {
self.seen.insert(self.index);
self.index += 1;
}
pub fn extract<T>(&mut self, f: impl Fn(&TokenNode) -> Option<T>) -> Option<(usize, T)> {
for (i, item) in self.tokens.iter().enumerate() {
if self.seen.contains(&i) {
continue;
}
match f(item) {
None => {
continue;
}
Some(value) => {
self.seen.insert(i);
return Some((i, value));
}
}
}
None
}
pub fn move_to(&mut self, pos: usize) {
self.index = pos;
}
pub fn restart(&mut self) {
self.index = 0;
}
pub fn clone(&self) -> TokensIterator {
TokensIterator {
tokens: self.tokens,
index: self.index,
seen: self.seen.clone(),
}
}
pub fn peek(&self) -> Option<&TokenNode> {
let mut tokens = self.clone();
tokens.next()
}
pub fn debug_remaining(&self) -> Vec<TokenNode> {
let mut tokens = self.clone();
tokens.restart();
tokens.cloned().collect()
}
}
impl<'a> Iterator for TokensIterator<'a> {
type Item = &'a TokenNode;
fn next(&mut self) -> Option<&'a TokenNode> {
loop {
if self.index >= self.tokens.len() {
return None;
}
if self.seen.contains(&self.index) {
self.advance();
continue;
}
if self.index >= self.tokens.len() {
return None;
}
match &self.tokens[self.index] {
TokenNode::Whitespace(_) => {
self.advance();
}
other => {
self.advance();
return Some(other);
}
}
}
}
}
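The removed TokensIterator above combines two ideas: a seen set recording indices that were already consumed out of order, and silent skipping of whitespace during iteration. A self-contained sketch of that pattern (using std's HashSet rather than the IndexSet above, and a stand-in Token enum rather than TokenNode):

use std::collections::HashSet;

// Stand-in for TokenNode, for illustration only.
#[derive(Debug, Clone, PartialEq)]
enum Token {
    Word(String),
    Whitespace,
}

struct TokensIter<'a> {
    tokens: &'a [Token],
    index: usize,
    seen: HashSet<usize>,
}

impl<'a> TokensIter<'a> {
    fn new(tokens: &'a [Token]) -> Self {
        TokensIter { tokens, index: 0, seen: HashSet::new() }
    }

    // Mark a position as consumed out of order, like `remove` above.
    fn remove(&mut self, position: usize) {
        self.seen.insert(position);
    }
}

impl<'a> Iterator for TokensIter<'a> {
    type Item = &'a Token;

    fn next(&mut self) -> Option<&'a Token> {
        loop {
            if self.index >= self.tokens.len() {
                return None;
            }
            let i = self.index;
            self.index += 1;
            if self.seen.contains(&i) {
                continue; // already consumed elsewhere
            }
            match &self.tokens[i] {
                Token::Whitespace => continue, // whitespace is skipped silently
                other => return Some(other),
            }
        }
    }
}

fn main() {
    let tokens = vec![
        Token::Word("ls".into()),
        Token::Whitespace,
        Token::Word("*.txt".into()),
    ];
    let mut iter = TokensIter::new(&tokens);
    iter.remove(2); // pretend position 2 was already consumed by another rule
    let rest: Vec<_> = iter.collect();
    assert_eq!(rest.len(), 1);
    assert_eq!(rest[0], &Token::Word("ls".into()));
}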

View File

@ -1,6 +1,6 @@
use crate::parser::{hir::Expression, Operator};
use crate::prelude::*;
use crate::Tagged;
use derive_new::new;
use getset::Getters;
use serde::{Deserialize, Serialize};
@ -12,12 +12,18 @@ use std::fmt;
#[get = "pub(crate)"]
pub struct Binary {
left: Expression,
op: Tagged<Operator>,
op: Spanned<Operator>,
right: Expression,
}
impl ToDebug for Binary {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
impl fmt::Display for Binary {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "({} {} {})", self.op.as_str(), self.left, self.right)
}
}
impl FormatDebug for Binary {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
write!(f, "{}", self.left.debug(source))?;
write!(f, " {} ", self.op.debug(source))?;
write!(f, "{}", self.right.debug(source))?;

View File

@ -0,0 +1,374 @@
use crate::errors::ParseError;
#[cfg(not(coloring_in_tokens))]
use crate::parser::hir::syntax_shape::FlatShape;
use crate::parser::{
hir::syntax_shape::{
color_syntax, expand_atom, expand_expr, expand_syntax, AtomicToken, ColorSyntax,
ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, MaybeSpaceShape,
},
hir::Expression,
TokensIterator,
};
use crate::{DebugFormatter, FormatDebug, Span, Spanned, SpannedItem};
use std::fmt;
#[derive(Debug, Copy, Clone)]
pub struct ExternalTokensShape;
impl FormatDebug for Spanned<Vec<Spanned<String>>> {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
FormatDebug::fmt_debug(&self.item, f, source)
}
}
impl ExpandSyntax for ExternalTokensShape {
type Output = Spanned<Vec<Spanned<String>>>;
fn name(&self) -> &'static str {
"external command"
}
fn expand_syntax<'a, 'b>(
&self,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<Self::Output, ParseError> {
let mut out: Vec<Spanned<String>> = vec![];
let start = token_nodes.span_at_cursor();
loop {
match expand_syntax(&ExternalExpressionShape, token_nodes, context) {
Err(_) | Ok(None) => break,
Ok(Some(span)) => out.push(span.spanned_string(context.source())),
}
}
let end = token_nodes.span_at_cursor();
Ok(out.spanned(start.until(end)))
}
}
#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for ExternalTokensShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Self::Info {
loop {
// Allow a space
color_syntax(&MaybeSpaceShape, token_nodes, context, shapes);
// Process an external expression. External expressions are mostly words, with a
// few exceptions (like $variables and path expansion rules)
match color_syntax(&ExternalExpression, token_nodes, context, shapes).1 {
ExternalExpressionResult::Eof => break,
ExternalExpressionResult::Processed => continue,
}
}
}
}
#[cfg(coloring_in_tokens)]
impl ColorSyntax for ExternalTokensShape {
type Info = ();
type Input = ();
fn name(&self) -> &'static str {
"ExternalTokensShape"
}
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Self::Info {
loop {
// Allow a space
color_syntax(&MaybeSpaceShape, token_nodes, context);
// Process an external expression. External expressions are mostly words, with a
// few exceptions (like $variables and path expansion rules)
match color_syntax(&ExternalExpression, token_nodes, context).1 {
ExternalExpressionResult::Eof => break,
ExternalExpressionResult::Processed => continue,
}
}
}
}
#[derive(Debug, Copy, Clone)]
pub struct ExternalExpressionShape;
impl ExpandSyntax for ExternalExpressionShape {
type Output = Option<Span>;
fn name(&self) -> &'static str {
"external expression"
}
fn expand_syntax<'a, 'b>(
&self,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<Self::Output, ParseError> {
expand_syntax(&MaybeSpaceShape, token_nodes, context)?;
let first = expand_atom(
token_nodes,
"external command",
context,
ExpansionRule::new().allow_external_command(),
)?
.span;
let mut last = first;
loop {
let continuation = expand_expr(&ExternalContinuationShape, token_nodes, context);
if let Ok(continuation) = continuation {
last = continuation.span;
} else {
break;
}
}
Ok(Some(first.until(last)))
}
}
#[derive(Debug, Copy, Clone)]
struct ExternalExpression;
impl ExpandSyntax for ExternalExpression {
type Output = Option<Span>;
fn name(&self) -> &'static str {
"external expression"
}
fn expand_syntax<'a, 'b>(
&self,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<Self::Output, ParseError> {
expand_syntax(&MaybeSpaceShape, token_nodes, context)?;
let first = expand_syntax(&ExternalHeadShape, token_nodes, context)?.span;
let mut last = first;
loop {
let continuation = expand_syntax(&ExternalContinuationShape, token_nodes, context);
if let Ok(continuation) = continuation {
last = continuation.span;
} else {
break;
}
}
Ok(Some(first.until(last)))
}
}
#[derive(Debug, Copy, Clone)]
struct ExternalHeadShape;
impl ExpandExpression for ExternalHeadShape {
fn name(&self) -> &'static str {
"external argument"
}
fn expand_expr<'a, 'b>(
&self,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<Expression, ParseError> {
match expand_atom(
token_nodes,
"external argument",
context,
ExpansionRule::new()
.allow_external_word()
.treat_size_as_word(),
)? {
atom => match &atom {
Spanned { item, span } => Ok(match item {
AtomicToken::Eof { .. } => unreachable!("ExpansionRule doesn't allow EOF"),
AtomicToken::Error { .. } => unreachable!("ExpansionRule doesn't allow Error"),
AtomicToken::Size { .. } => unreachable!("ExpansionRule treats size as word"),
AtomicToken::Whitespace { .. } => {
unreachable!("ExpansionRule doesn't allow Whitespace")
}
AtomicToken::ShorthandFlag { .. }
| AtomicToken::LonghandFlag { .. }
| AtomicToken::SquareDelimited { .. }
| AtomicToken::ParenDelimited { .. }
| AtomicToken::BraceDelimited { .. }
| AtomicToken::Pipeline { .. } => {
return Err(ParseError::mismatch(
"external command name",
atom.tagged_type_name(),
))
}
AtomicToken::ExternalCommand { command } => {
Expression::external_command(*command, *span)
}
AtomicToken::Number { number } => {
Expression::number(number.to_number(context.source()), *span)
}
AtomicToken::String { body } => Expression::string(*body, *span),
AtomicToken::ItVariable { name } => Expression::it_variable(*name, *span),
AtomicToken::Variable { name } => Expression::variable(*name, *span),
AtomicToken::ExternalWord { .. }
| AtomicToken::GlobPattern { .. }
| AtomicToken::FilePath { .. }
| AtomicToken::Word { .. }
| AtomicToken::Dot { .. }
| AtomicToken::Operator { .. } => Expression::external_command(*span, *span),
}),
},
}
}
}
#[derive(Debug, Copy, Clone)]
struct ExternalContinuationShape;
impl ExpandExpression for ExternalContinuationShape {
fn name(&self) -> &'static str {
"external argument"
}
fn expand_expr<'a, 'b>(
&self,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<Expression, ParseError> {
match expand_atom(
token_nodes,
"external argument",
context,
ExpansionRule::new()
.allow_external_word()
.treat_size_as_word(),
)? {
atom => match &atom {
Spanned { item, span } => Ok(match item {
AtomicToken::Eof { .. } => unreachable!("ExpansionRule doesn't allow EOF"),
AtomicToken::Error { .. } => unreachable!("ExpansionRule doesn't allow Error"),
AtomicToken::Number { number } => {
Expression::number(number.to_number(context.source()), *span)
}
AtomicToken::Size { .. } => unreachable!("ExpansionRule treats size as word"),
AtomicToken::ExternalCommand { .. } => {
unreachable!("ExpansionRule doesn't allow ExternalCommand")
}
AtomicToken::Whitespace { .. } => {
unreachable!("ExpansionRule doesn't allow Whitespace")
}
AtomicToken::String { body } => Expression::string(*body, *span),
AtomicToken::ItVariable { name } => Expression::it_variable(*name, *span),
AtomicToken::Variable { name } => Expression::variable(*name, *span),
AtomicToken::ExternalWord { .. }
| AtomicToken::GlobPattern { .. }
| AtomicToken::FilePath { .. }
| AtomicToken::Word { .. }
| AtomicToken::ShorthandFlag { .. }
| AtomicToken::LonghandFlag { .. }
| AtomicToken::Dot { .. }
| AtomicToken::Operator { .. } => Expression::bare(*span),
AtomicToken::SquareDelimited { .. }
| AtomicToken::ParenDelimited { .. }
| AtomicToken::BraceDelimited { .. }
| AtomicToken::Pipeline { .. } => {
return Err(ParseError::mismatch(
"external argument",
atom.tagged_type_name(),
))
}
}),
},
}
}
}
#[cfg(coloring_in_tokens)]
impl ColorSyntax for ExternalExpression {
type Info = ExternalExpressionResult;
type Input = ();
fn name(&self) -> &'static str {
"ExternalExpression"
}
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> ExternalExpressionResult {
let atom = match expand_atom(
token_nodes,
"external word",
context,
ExpansionRule::permissive(),
) {
Err(_) => unreachable!("TODO: separate infallible expand_atom"),
Ok(Spanned {
item: AtomicToken::Eof { .. },
..
}) => return ExternalExpressionResult::Eof,
Ok(atom) => atom,
};
token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes));
return ExternalExpressionResult::Processed;
}
}
#[must_use]
enum ExternalExpressionResult {
Eof,
Processed,
}
#[cfg(not(coloring_in_tokens))]
impl ColorSyntax for ExternalExpression {
type Info = ExternalExpressionResult;
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> ExternalExpressionResult {
let atom = match expand_atom(
token_nodes,
"external word",
context,
ExpansionRule::permissive(),
) {
Err(_) => unreachable!("TODO: separate infallible expand_atom"),
Ok(Spanned {
item: AtomicToken::Eof { .. },
..
}) => return ExternalExpressionResult::Eof,
Ok(atom) => atom,
};
atom.color_tokens(shapes);
return ExternalExpressionResult::Processed;
}
}
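ExternalExpressionShape above follows a pattern this parser uses repeatedly: expand one mandatory head, keep folding in continuations while they succeed, and report the span stretching from the head to the last continuation. A simplified standalone sketch of that loop, where "continuation fails" is modeled as simply running out of tokens:

// Simplified stand-ins; not the parser's real Span or TokensIterator.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Span {
    start: usize,
    end: usize,
}

impl Span {
    fn until(self, other: Span) -> Span {
        Span { start: self.start, end: other.end }
    }
}

struct Tokens {
    spans: Vec<Span>,
    cursor: usize,
}

impl Tokens {
    fn next(&mut self) -> Option<Span> {
        let span = self.spans.get(self.cursor).copied();
        if span.is_some() {
            self.cursor += 1;
        }
        span
    }
}

// One external expression: a mandatory head, then zero or more continuations.
fn expand_external(tokens: &mut Tokens) -> Option<Span> {
    let first = tokens.next()?; // the head is required
    let mut last = first;
    while let Some(continuation) = tokens.next() {
        last = continuation; // each continuation extends the expression
    }
    Some(first.until(last))
}

fn main() {
    let mut tokens = Tokens {
        spans: vec![
            Span { start: 0, end: 3 },  // `git`
            Span { start: 4, end: 10 }, // `status`
        ],
        cursor: 0,
    };
    assert_eq!(expand_external(&mut tokens), Some(Span { start: 0, end: 10 }));
}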

View File

@ -9,11 +9,11 @@ use std::fmt;
)]
#[get = "pub(crate)"]
pub struct ExternalCommand {
name: Tag,
pub(crate) name: Span,
}
impl ToDebug for ExternalCommand {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
impl FormatDebug for ExternalCommand {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
write!(f, "{}", self.name.slice(source))?;
Ok(())

View File

@ -21,8 +21,8 @@ pub struct NamedArguments {
pub(crate) named: IndexMap<String, NamedValue>,
}
impl ToDebug for NamedArguments {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
impl FormatDebug for NamedArguments {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
for (name, value) in &self.named {
match value {
NamedValue::AbsentSwitch => continue,
@ -43,9 +43,13 @@ impl NamedArguments {
match switch {
None => self.named.insert(name.into(), NamedValue::AbsentSwitch),
Some(flag) => self
.named
.insert(name, NamedValue::PresentSwitch(*flag.name())),
Some(flag) => self.named.insert(
name,
NamedValue::PresentSwitch(Tag {
span: *flag.name(),
anchor: None,
}),
),
};
}

View File

@ -1,26 +1,55 @@
use crate::parser::hir::Expression;
use crate::prelude::*;
use crate::Tagged;
use derive_new::new;
use getset::Getters;
use getset::{Getters, MutGetters};
use serde::{Deserialize, Serialize};
use std::fmt;
#[derive(
Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, Getters, Serialize, Deserialize, new,
Debug,
Clone,
Eq,
PartialEq,
Ord,
PartialOrd,
Hash,
Getters,
MutGetters,
Serialize,
Deserialize,
new,
)]
#[get = "pub(crate)"]
pub struct Path {
head: Expression,
tail: Vec<Tagged<String>>,
#[get_mut = "pub(crate)"]
tail: Vec<Spanned<String>>,
}
impl ToDebug for Path {
fn fmt_debug(&self, f: &mut fmt::Formatter, source: &str) -> fmt::Result {
write!(f, "{}", self.head.debug(source))?;
impl fmt::Display for Path {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.head)?;
for part in &self.tail {
write!(f, ".{}", part.item())?;
for entry in &self.tail {
write!(f, ".{}", entry.item)?;
}
Ok(())
}
}
impl Path {
pub(crate) fn parts(self) -> (Expression, Vec<Spanned<String>>) {
(self.head, self.tail)
}
}
impl FormatDebug for Path {
fn fmt_debug(&self, f: &mut DebugFormatter, source: &str) -> fmt::Result {
write!(f, "{}", self.head.debug(source))?;
for part in &self.tail {
write!(f, ".{}", part.item)?;
}
Ok(())
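Path above is a head expression plus a list of spanned member names, and its Display joins the members onto the head with dots. A minimal standalone version with plain String members (spans omitted) shows the shape of the output:

use std::fmt;

// Simplified Path with plain String members instead of Spanned<String>.
struct Path {
    head: String,
    tail: Vec<String>,
}

impl fmt::Display for Path {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.head)?;
        for entry in &self.tail {
            // Each member is joined onto the head with a dot, as above.
            write!(f, ".{}", entry)?;
        }
        Ok(())
    }
}

fn main() {
    let path = Path {
        head: "$it".to_string(),
        tail: vec!["cpu".to_string(), "max ghz".to_string()],
    };
    assert_eq!(path.to_string(), "$it.cpu.max ghz");
}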

File diff suppressed because it is too large

View File

@ -0,0 +1,526 @@
use crate::errors::ShellError;
#[cfg(not(coloring_in_tokens))]
use crate::parser::hir::syntax_shape::FlatShape;
use crate::parser::{
hir,
hir::syntax_shape::{
color_fallible_syntax, color_syntax_with, continue_expression, expand_expr, expand_syntax,
DelimitedShape, ExpandContext, ExpandExpression, ExpressionContinuationShape,
ExpressionListShape, FallibleColorSyntax, MemberShape, ParseError, PathTailShape,
VariablePathShape,
},
hir::tokens_iterator::TokensIterator,
parse::token_tree::Delimiter,
RawToken, TokenNode,
};
use crate::{Span, Spanned, SpannedItem};
#[derive(Debug, Copy, Clone)]
pub struct AnyBlockShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for AnyBlockShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
let block = token_nodes.peek_non_ws().not_eof("block");
let block = match block {
Err(_) => return Ok(()),
Ok(block) => block,
};
// is it just a block?
let block = block.node.as_block();
match block {
// If so, color it as a block
Some((children, spans)) => {
let mut token_nodes = TokensIterator::new(children.item, children.span, false);
color_syntax_with(
&DelimitedShape,
&(Delimiter::Brace, spans.0, spans.1),
&mut token_nodes,
context,
shapes,
);
return Ok(());
}
_ => {}
}
// Otherwise, look for a shorthand block. If none found, fail
color_fallible_syntax(&ShorthandBlock, token_nodes, context, shapes)
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for AnyBlockShape {
type Info = ();
type Input = ();
fn name(&self) -> &'static str {
"AnyBlockShape"
}
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
let block = token_nodes.peek_non_ws().not_eof("block");
let block = match block {
Err(_) => return Ok(()),
Ok(block) => block,
};
// is it just a block?
let block = block.node.as_block();
match block {
// If so, color it as a block
Some((children, spans)) => {
token_nodes.child(children, |token_nodes| {
color_syntax_with(
&DelimitedShape,
&(Delimiter::Brace, spans.0, spans.1),
token_nodes,
context,
);
});
return Ok(());
}
_ => {}
}
// Otherwise, look for a shorthand block. If none found, fail
color_fallible_syntax(&ShorthandBlock, token_nodes, context)
}
}
impl ExpandExpression for AnyBlockShape {
fn name(&self) -> &'static str {
"any block"
}
fn expand_expr<'a, 'b>(
&self,
token_nodes: &mut TokensIterator<'_>,
context: &ExpandContext,
) -> Result<hir::Expression, ParseError> {
let block = token_nodes.peek_non_ws().not_eof("block")?;
// is it just a block?
let block = block.node.as_block();
match block {
Some((block, _tags)) => {
let mut iterator = TokensIterator::new(&block.item, block.span, false);
let exprs = expand_syntax(&ExpressionListShape, &mut iterator, context)?;
return Ok(hir::RawExpression::Block(exprs.item).spanned(block.span));
}
_ => {}
}
expand_syntax(&ShorthandBlock, token_nodes, context)
}
}
#[derive(Debug, Copy, Clone)]
pub struct ShorthandBlock;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for ShorthandBlock {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
// Try to find a shorthand head. If none found, fail
color_fallible_syntax(&ShorthandPath, token_nodes, context, shapes)?;
loop {
// Check to see whether there's any continuation after the head expression
let result =
color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes);
match result {
// if no continuation was found, we're done
Err(_) => break,
// if a continuation was found, look for another one
Ok(_) => continue,
}
}
Ok(())
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for ShorthandBlock {
type Info = ();
type Input = ();
fn name(&self) -> &'static str {
"ShorthandBlock"
}
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
// Try to find a shorthand head. If none found, fail
color_fallible_syntax(&ShorthandPath, token_nodes, context)?;
loop {
// Check to see whether there's any continuation after the head expression
let result = color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context);
match result {
// if no continuation was found, we're done
Err(_) => break,
// if a continuation was found, look for another one
Ok(_) => continue,
}
}
Ok(())
}
}
impl ExpandExpression for ShorthandBlock {
fn name(&self) -> &'static str {
"shorthand block"
}
fn expand_expr<'a, 'b>(
&self,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<hir::Expression, ParseError> {
let path = expand_expr(&ShorthandPath, token_nodes, context)?;
let start = path.span;
let expr = continue_expression(path, token_nodes, context);
let end = expr.span;
let block = hir::RawExpression::Block(vec![expr]).spanned(start.until(end));
Ok(block)
}
}
/// A shorthand for `$it.foo."bar"`, used inside of a shorthand block
#[derive(Debug, Copy, Clone)]
pub struct ShorthandPath;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for ShorthandPath {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
token_nodes.atomic(|token_nodes| {
let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context, shapes);
match variable {
Ok(_) => {
// if it's a variable path, that's the head part
return Ok(());
}
Err(_) => {
// otherwise, we'll try to find a member path
}
}
// look for a member (`<member>` -> `$it.<member>`)
color_fallible_syntax(&MemberShape, token_nodes, context, shapes)?;
// Now that we've synthesized the head of the path, proceed to expand the tail of the path
// like any other path.
let tail = color_fallible_syntax(&PathTailShape, token_nodes, context, shapes);
match tail {
Ok(_) => {}
Err(_) => {
// It's ok if there's no path tail; a single member is sufficient
}
}
Ok(())
})
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for ShorthandPath {
type Info = ();
type Input = ();
fn name(&self) -> &'static str {
"ShorthandPath"
}
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
token_nodes.atomic(|token_nodes| {
let variable = color_fallible_syntax(&VariablePathShape, token_nodes, context);
match variable {
Ok(_) => {
// if it's a variable path, that's the head part
return Ok(());
}
Err(_) => {
// otherwise, we'll try to find a member path
}
}
// look for a member (`<member>` -> `$it.<member>`)
color_fallible_syntax(&MemberShape, token_nodes, context)?;
// Now that we've synthesized the head of the path, proceed to expand the tail of the path
// like any other path.
let tail = color_fallible_syntax(&PathTailShape, token_nodes, context);
match tail {
Ok(_) => {}
Err(_) => {
// It's ok if there's no path tail; a single member is sufficient
}
}
Ok(())
})
}
}
impl ExpandExpression for ShorthandPath {
fn name(&self) -> &'static str {
"shorthand path"
}
fn expand_expr<'a, 'b>(
&self,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<hir::Expression, ParseError> {
// if it's a variable path, that's the head part
let path = expand_expr(&VariablePathShape, token_nodes, context);
match path {
Ok(path) => return Ok(path),
Err(_) => {}
}
// Synthesize the head of the shorthand path (`<member>` -> `$it.<member>`)
let mut head = expand_expr(&ShorthandHeadShape, token_nodes, context)?;
// Now that we've synthesized the head of the path, proceed to expand the tail of the path
// like any other path.
let tail = expand_syntax(&PathTailShape, token_nodes, context);
match tail {
Err(_) => return Ok(head),
Ok(Spanned { item: tail, .. }) => {
// For each member that `PathTailShape` expanded, join it onto the existing expression
// to form a new path
for member in tail {
head = hir::Expression::dot_member(head, member);
}
Ok(head)
}
}
}
}
/// The head of a shorthand path (`<member>` -> `$it.<member>`), used inside of a shorthand block
#[derive(Debug, Copy, Clone)]
pub struct ShorthandHeadShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for ShorthandHeadShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
// A shorthand path must not be at EOF
let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?;
match peeked.node {
// If the head of a shorthand path is a bare token, it expands to `$it.bare`
TokenNode::Token(Spanned {
item: RawToken::Bare,
span,
}) => {
peeked.commit();
shapes.push(FlatShape::BareMember.spanned(*span));
Ok(())
}
// If the head of a shorthand path is a string, it expands to `$it."some string"`
TokenNode::Token(Spanned {
item: RawToken::String(_),
span: outer,
}) => {
peeked.commit();
shapes.push(FlatShape::StringMember.spanned(*outer));
Ok(())
}
other => Err(ShellError::type_error(
"shorthand head",
other.tagged_type_name(),
)),
}
}
}
#[cfg(coloring_in_tokens)]
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for ShorthandHeadShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
_context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
// A shorthand path must not be at EOF
let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?;
match peeked.node {
// If the head of a shorthand path is a bare token, it expands to `$it.bare`
TokenNode::Token(Spanned {
item: RawToken::Bare,
span,
}) => {
peeked.commit();
shapes.push(FlatShape::BareMember.spanned(*span));
Ok(())
}
// If the head of a shorthand path is a string, it expands to `$it."some string"`
TokenNode::Token(Spanned {
item: RawToken::String(_),
span: outer,
}) => {
peeked.commit();
shapes.push(FlatShape::StringMember.spanned(*outer));
Ok(())
}
other => Err(ShellError::type_error(
"shorthand head",
other.tagged_type_name(),
)),
}
}
}
impl ExpandExpression for ShorthandHeadShape {
fn name(&self) -> &'static str {
"shorthand head"
}
fn expand_expr<'a, 'b>(
&self,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<hir::Expression, ParseError> {
// A shorthand path must not be at EOF
let peeked = token_nodes.peek_non_ws().not_eof("shorthand path")?;
match peeked.node {
// If the head of a shorthand path is a bare token, it expands to `$it.bare`
TokenNode::Token(Spanned {
item: RawToken::Bare,
span,
}) => {
// Commit the peeked token
peeked.commit();
// Synthesize an `$it` expression
let it = synthetic_it();
// Make a path out of `$it` and the bare token as a member
Ok(hir::Expression::path(
it,
vec![span.spanned_string(context.source)],
*span,
))
}
// If the head of a shorthand path is a string, it expands to `$it."some string"`
TokenNode::Token(Spanned {
item: RawToken::String(inner),
span: outer,
}) => {
// Commit the peeked token
peeked.commit();
// Synthesize an `$it` expression
let it = synthetic_it();
// Make a path out of `$it` and the string as a member
Ok(hir::Expression::path(
it,
vec![inner.string(context.source).spanned(*outer)],
*outer,
))
}
// Any other token is not a valid bare head
other => {
return Err(ParseError::mismatch(
"shorthand path",
other.tagged_type_name(),
))
}
}
}
}
fn synthetic_it() -> hir::Expression {
hir::Expression::it_variable(Span::unknown(), Span::unknown())
}
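ShorthandHeadShape and ShorthandPath above implement the rewrite that lets `{ cpu.usage > 10 }` be written without `$it`: a bare or string member at the head of a shorthand block becomes a path rooted at a synthetic `$it`, and further `.member` continuations are pushed onto that path. A simplified sketch of the two rewrites, with Expr as a stand-in for hir::Expression:

// Expr is a simplified stand-in for hir::Expression; spans are omitted.
#[derive(Debug, PartialEq)]
enum Expr {
    ItVariable,
    Path { head: Box<Expr>, tail: Vec<String> },
}

// `<member>` -> `$it.<member>`, as ShorthandHeadShape does above.
fn shorthand_head(member: &str) -> Expr {
    Expr::Path {
        head: Box::new(Expr::ItVariable),
        tail: vec![member.to_string()],
    }
}

// Join a further `.member` onto an existing expression, as dot_member does above.
fn dot_member(expr: Expr, next: &str) -> Expr {
    match expr {
        // An existing path just grows its tail...
        Expr::Path { head, mut tail } => {
            tail.push(next.to_string());
            Expr::Path { head, tail }
        }
        // ...anything else becomes the head of a new path.
        other => Expr::Path {
            head: Box::new(other),
            tail: vec![next.to_string()],
        },
    }
}

fn main() {
    // `cpu.usage` at the head of a shorthand block becomes `$it.cpu.usage`.
    let expr = dot_member(shorthand_head("cpu"), "usage");
    assert_eq!(
        expr,
        Expr::Path {
            head: Box::new(Expr::ItVariable),
            tail: vec!["cpu".to_string(), "usage".to_string()],
        }
    );
}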

View File

@ -0,0 +1,495 @@
pub(crate) mod atom;
pub(crate) mod delimited;
pub(crate) mod file_path;
pub(crate) mod list;
pub(crate) mod number;
pub(crate) mod pattern;
pub(crate) mod string;
pub(crate) mod unit;
pub(crate) mod variable_path;
use crate::parser::hir::syntax_shape::{
color_delimited_square, color_fallible_syntax, color_fallible_syntax_with, expand_atom,
expand_delimited_square, expand_expr, expand_syntax, AtomicToken, BareShape, ColorableDotShape,
DotShape, ExpandContext, ExpandExpression, ExpandSyntax, ExpansionRule, ExpressionContinuation,
ExpressionContinuationShape, FallibleColorSyntax, FlatShape, ParseError,
};
use crate::parser::{
hir,
hir::{Expression, TokensIterator},
};
use crate::prelude::*;
use std::path::PathBuf;
#[derive(Debug, Copy, Clone)]
pub struct AnyExpressionShape;
impl ExpandExpression for AnyExpressionShape {
fn name(&self) -> &'static str {
"any expression"
}
fn expand_expr<'a, 'b>(
&self,
token_nodes: &mut TokensIterator<'_>,
context: &ExpandContext,
) -> Result<hir::Expression, ParseError> {
// Look for an expression at the cursor
let head = expand_expr(&AnyExpressionStartShape, token_nodes, context)?;
Ok(continue_expression(head, token_nodes, context))
}
}
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for AnyExpressionShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
// Look for an expression at the cursor
color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context, shapes)?;
match continue_coloring_expression(token_nodes, context, shapes) {
Err(_) => {
// it's fine for there to be no continuation
}
Ok(()) => {}
}
Ok(())
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for AnyExpressionShape {
type Info = ();
type Input = ();
fn name(&self) -> &'static str {
"AnyExpressionShape"
}
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
// Look for an expression at the cursor
color_fallible_syntax(&AnyExpressionStartShape, token_nodes, context)?;
match continue_coloring_expression(token_nodes, context) {
Err(_) => {
// it's fine for there to be no continuation
}
Ok(()) => {}
}
Ok(())
}
}
pub(crate) fn continue_expression(
mut head: hir::Expression,
token_nodes: &mut TokensIterator<'_>,
context: &ExpandContext,
) -> hir::Expression {
loop {
// Check to see whether there's any continuation after the head expression
let continuation = expand_syntax(&ExpressionContinuationShape, token_nodes, context);
match continuation {
// If there's no continuation, return the head
Err(_) => return head,
// Otherwise, form a new expression by combining the head with the continuation
Ok(continuation) => match continuation {
// If the continuation is a `.member`, form a path with the new member
ExpressionContinuation::DotSuffix(_dot, member) => {
head = Expression::dot_member(head, member);
}
// Otherwise, if the continuation is an infix suffix, form an infix expression
ExpressionContinuation::InfixSuffix(op, expr) => {
head = Expression::infix(head, op, expr);
}
},
}
}
}
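continue_expression above folds continuations onto a head expression: a `.member` continuation extends the head into a path, while an infix continuation wraps it in a binary expression. A standalone sketch of that fold, with simplified stand-in types and no spans:

// Simplified stand-ins for hir::Expression and ExpressionContinuation; no spans.
#[derive(Debug, PartialEq)]
enum Expr {
    Var(String),
    Num(i64),
    Member(Box<Expr>, String),
    Infix(Box<Expr>, String, Box<Expr>),
}

enum Continuation {
    DotSuffix(String),
    InfixSuffix(String, Expr),
}

// Fold each continuation onto the head, as continue_expression does above.
fn fold_continuations(mut head: Expr, continuations: Vec<Continuation>) -> Expr {
    for continuation in continuations {
        head = match continuation {
            // `.member` extends the head into a path member access.
            Continuation::DotSuffix(member) => Expr::Member(Box::new(head), member),
            // An infix suffix wraps the head in a binary expression.
            Continuation::InfixSuffix(op, right) => {
                Expr::Infix(Box::new(head), op, Box::new(right))
            }
        };
    }
    head
}

fn main() {
    // `$it.size > 10`
    let expr = fold_continuations(
        Expr::Var("it".to_string()),
        vec![
            Continuation::DotSuffix("size".to_string()),
            Continuation::InfixSuffix(">".to_string(), Expr::Num(10)),
        ],
    );
    assert_eq!(
        expr,
        Expr::Infix(
            Box::new(Expr::Member(
                Box::new(Expr::Var("it".to_string())),
                "size".to_string()
            )),
            ">".to_string(),
            Box::new(Expr::Num(10)),
        )
    );
}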
#[cfg(not(coloring_in_tokens))]
pub(crate) fn continue_coloring_expression(
token_nodes: &mut TokensIterator<'_>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
// if there's not even one expression continuation, fail
color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes)?;
loop {
// Check to see whether there's any continuation after the head expression
let result =
color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context, shapes);
match result {
Err(_) => {
// We already saw one continuation, so just return
return Ok(());
}
Ok(_) => {}
}
}
}
#[cfg(coloring_in_tokens)]
pub(crate) fn continue_coloring_expression(
token_nodes: &mut TokensIterator<'_>,
context: &ExpandContext,
) -> Result<(), ShellError> {
// if there's not even one expression continuation, fail
color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context)?;
loop {
// Check to see whether there's any continuation after the head expression
let result = color_fallible_syntax(&ExpressionContinuationShape, token_nodes, context);
match result {
Err(_) => {
// We already saw one continuation, so just return
return Ok(());
}
Ok(_) => {}
}
}
}
#[derive(Debug, Copy, Clone)]
pub struct AnyExpressionStartShape;
impl ExpandExpression for AnyExpressionStartShape {
fn name(&self) -> &'static str {
"any expression start"
}
fn expand_expr<'a, 'b>(
&self,
token_nodes: &mut TokensIterator<'_>,
context: &ExpandContext,
) -> Result<hir::Expression, ParseError> {
let atom = expand_atom(token_nodes, "expression", context, ExpansionRule::new())?;
match atom.item {
AtomicToken::Size { number, unit } => {
return Ok(hir::Expression::size(
number.to_number(context.source),
unit.item,
Tag {
span: atom.span,
anchor: None,
},
))
}
AtomicToken::SquareDelimited { nodes, .. } => {
expand_delimited_square(&nodes, atom.span.into(), context)
}
AtomicToken::Word { .. } | AtomicToken::Dot { .. } => {
let end = expand_syntax(&BareTailShape, token_nodes, context)?;
Ok(hir::Expression::bare(atom.span.until_option(end)))
}
other => return other.spanned(atom.span).into_hir(context, "expression"),
}
}
}
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for AnyExpressionStartShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
let atom = token_nodes.spanned(|token_nodes| {
expand_atom(
token_nodes,
"expression",
context,
ExpansionRule::permissive(),
)
});
let atom = match atom {
Spanned {
item: Err(_err),
span,
} => {
shapes.push(FlatShape::Error.spanned(span));
return Ok(());
}
Spanned {
item: Ok(value), ..
} => value,
};
match atom.item {
AtomicToken::Size { number, unit } => shapes.push(
FlatShape::Size {
number: number.span.into(),
unit: unit.span.into(),
}
.spanned(atom.span),
),
AtomicToken::SquareDelimited { nodes, spans } => {
color_delimited_square(spans, &nodes, atom.span.into(), context, shapes)
}
AtomicToken::Word { .. } | AtomicToken::Dot { .. } => {
shapes.push(FlatShape::Word.spanned(atom.span));
}
_ => atom.color_tokens(shapes),
}
Ok(())
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for AnyExpressionStartShape {
type Info = ();
type Input = ();
fn name(&self) -> &'static str {
"AnyExpressionStartShape"
}
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
let atom = token_nodes.spanned(|token_nodes| {
expand_atom(
token_nodes,
"expression",
context,
ExpansionRule::permissive(),
)
});
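        // If the atom couldn't be expanded, color the whole span as an error and succeed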
let atom = match atom {
Spanned {
item: Err(_err),
span,
} => {
token_nodes.color_shape(FlatShape::Error.spanned(span));
return Ok(());
}
Spanned {
item: Ok(value), ..
} => value,
};
match atom.item {
AtomicToken::Size { number, unit } => token_nodes.color_shape(
FlatShape::Size {
number: number.span.into(),
unit: unit.span.into(),
}
.spanned(atom.span),
),
AtomicToken::SquareDelimited { nodes, spans } => {
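                // Color the delimited contents in a child iterator scoped to the
                // square-bracketed nodes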
token_nodes.child((&nodes[..]).spanned(atom.span), |tokens| {
color_delimited_square(spans, tokens, atom.span.into(), context);
});
}
AtomicToken::Word { .. } | AtomicToken::Dot { .. } => {
token_nodes.color_shape(FlatShape::Word.spanned(atom.span));
}
_ => token_nodes.mutate_shapes(|shapes| atom.color_tokens(shapes)),
}
Ok(())
}
}
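/// The continuation of a bare word: any additional bare words and dots that
/// extend the initial bare token.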
#[derive(Debug, Copy, Clone)]
pub struct BareTailShape;
#[cfg(not(coloring_in_tokens))]
impl FallibleColorSyntax for BareTailShape {
type Info = ();
type Input = ();
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
shapes: &mut Vec<Spanned<FlatShape>>,
) -> Result<(), ShellError> {
let len = shapes.len();
        loop {
            // try to find a word
            let word = color_fallible_syntax_with(
                &BareShape,
                &FlatShape::Word,
                token_nodes,
                context,
                shapes,
            );

            // if a word was found, look for another word
            if word.is_ok() {
                continue;
            }

            // if a word wasn't found, try to find a dot
            let dot = color_fallible_syntax_with(
                &ColorableDotShape,
                &FlatShape::Word,
                token_nodes,
                context,
                shapes,
            );

            // if a dot was found, try to find another word; otherwise, we're done
            if dot.is_err() {
                break;
            }
        }
if shapes.len() > len {
Ok(())
} else {
Err(ShellError::syntax_error(
"No tokens matched BareTailShape".tagged_unknown(),
))
}
}
}
#[cfg(coloring_in_tokens)]
impl FallibleColorSyntax for BareTailShape {
type Info = ();
type Input = ();
fn name(&self) -> &'static str {
"BareTailShape"
}
fn color_syntax<'a, 'b>(
&self,
_input: &(),
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<(), ShellError> {
let len = token_nodes.state().shapes().len();
        loop {
            // try to find a word
            let word =
                color_fallible_syntax_with(&BareShape, &FlatShape::Word, token_nodes, context);

            // if a word was found, look for another word
            if word.is_ok() {
                continue;
            }

            // if a word wasn't found, try to find a dot
            let dot = color_fallible_syntax_with(
                &ColorableDotShape,
                &FlatShape::Word,
                token_nodes,
                context,
            );

            // if a dot was found, try to find another word; otherwise, we're done
            if dot.is_err() {
                break;
            }
        }
if token_nodes.state().shapes().len() > len {
Ok(())
} else {
Err(ShellError::syntax_error(
"No tokens matched BareTailShape".tagged_unknown(),
))
}
}
}
impl ExpandSyntax for BareTailShape {
fn name(&self) -> &'static str {
"word continuation"
}
type Output = Option<Span>;
fn expand_syntax<'a, 'b>(
&self,
token_nodes: &'b mut TokensIterator<'a>,
context: &ExpandContext,
) -> Result<Option<Span>, ParseError> {
let mut end: Option<Span> = None;
loop {
match expand_syntax(&BareShape, token_nodes, context) {
Ok(bare) => {
end = Some(bare.span);
continue;
}
Err(_) => match expand_syntax(&DotShape, token_nodes, context) {
Ok(dot) => {
end = Some(dot);
continue;
}
Err(_) => break,
},
}
}
Ok(end)
}
}
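/// Expand a leading `~` in `string` to the home directory (when one is known
/// to the `ExpandContext`) and return the result as a `PathBuf`.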
pub fn expand_file_path(string: &str, context: &ExpandContext) -> PathBuf {
let expanded = shellexpand::tilde_with_context(string, || context.homedir());
PathBuf::from(expanded.as_ref())
}
