Mirror of https://github.com/nushell/nushell.git (synced 2025-08-09 17:25:15 +02:00)

Commit: Merge branch 'main' into no_export_env
@@ -5,25 +5,25 @@ repository = "https://github.com/nushell/nushell/tree/main/crates/nu-command"
 edition = "2021"
 license = "MIT"
 name = "nu-command"
-version = "0.67.1"
+version = "0.68.2"
 build = "build.rs"
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
 
 [dependencies]
-nu-color-config = { path = "../nu-color-config", version = "0.67.1" }
-nu-engine = { path = "../nu-engine", version = "0.67.1" }
-nu-glob = { path = "../nu-glob", version = "0.67.1" }
-nu-json = { path = "../nu-json", version = "0.67.1" }
-nu-parser = { path = "../nu-parser", version = "0.67.1" }
-nu-path = { path = "../nu-path", version = "0.67.1" }
-nu-pretty-hex = { path = "../nu-pretty-hex", version = "0.67.1" }
-nu-protocol = { path = "../nu-protocol", version = "0.67.1" }
-nu-system = { path = "../nu-system", version = "0.67.1" }
-nu-table = { path = "../nu-table", version = "0.67.1" }
-nu-term-grid = { path = "../nu-term-grid", version = "0.67.1" }
-nu-test-support = { path = "../nu-test-support", version = "0.67.1" }
-nu-utils = { path = "../nu-utils", version = "0.67.1" }
+nu-color-config = { path = "../nu-color-config", version = "0.68.2" }
+nu-engine = { path = "../nu-engine", version = "0.68.2" }
+nu-glob = { path = "../nu-glob", version = "0.68.2" }
+nu-json = { path = "../nu-json", version = "0.68.2" }
+nu-parser = { path = "../nu-parser", version = "0.68.2" }
+nu-path = { path = "../nu-path", version = "0.68.2" }
+nu-pretty-hex = { path = "../nu-pretty-hex", version = "0.68.2" }
+nu-protocol = { path = "../nu-protocol", version = "0.68.2" }
+nu-system = { path = "../nu-system", version = "0.68.2" }
+nu-table = { path = "../nu-table", version = "0.68.2" }
+nu-term-grid = { path = "../nu-term-grid", version = "0.68.2" }
+nu-test-support = { path = "../nu-test-support", version = "0.68.2" }
+nu-utils = { path = "../nu-utils", version = "0.68.2" }
 nu-ansi-term = "0.46.0"
 num-format = { version = "0.4.0" }
 
@@ -78,7 +78,7 @@ sha2 = "0.10.0"
 # Disable default features b/c the default features build Git (very slow to compile)
 shadow-rs = { version = "0.16.1", default-features = false }
 strip-ansi-escapes = "0.1.1"
-sysinfo = "0.25.2"
+sysinfo = "0.26.2"
 terminal_size = "0.2.1"
 thiserror = "1.0.31"
 titlecase = "2.0.0"
@@ -87,10 +87,10 @@ unicode-segmentation = "1.8.0"
 url = "2.2.1"
 uuid = { version = "1.1.2", features = ["v4"] }
 which = { version = "4.3.0", optional = true }
-reedline = { version = "0.10.0", features = ["bashisms", "sqlite"]}
+reedline = { version = "0.11.0", features = ["bashisms", "sqlite"]}
 wax = { version = "0.5.0", features = ["diagnostics"] }
 rusqlite = { version = "0.28.0", features = ["bundled"], optional = true }
-sqlparser = { version = "0.16.0", features = ["serde"], optional = true }
+sqlparser = { version = "0.23.0", features = ["serde"], optional = true }
 
 [target.'cfg(unix)'.dependencies]
 umask = "2.0.0"
@@ -115,6 +115,7 @@ features = [
 "dtype-struct",
 "dtype-categorical",
 "dynamic_groupby",
+"ipc",
 "is_in",
 "json",
 "lazy",
crates/nu-command/src/core_commands/commandline.rs (new file, 84 lines)
@@ -0,0 +1,84 @@
use nu_engine::CallExt;
use nu_protocol::ast::Call;
use nu_protocol::engine::ReplOperation;
use nu_protocol::engine::{Command, EngineState, Stack};
use nu_protocol::Category;
use nu_protocol::IntoPipelineData;
use nu_protocol::{PipelineData, ShellError, Signature, SyntaxShape, Value};

#[derive(Clone)]
pub struct Commandline;

impl Command for Commandline {
    fn name(&self) -> &str {
        "commandline"
    }

    fn signature(&self) -> Signature {
        Signature::build("commandline")
            .switch(
                "append",
                "appends the string to the end of the buffer",
                Some('a'),
            )
            .switch(
                "insert",
                "inserts the string into the buffer at the cursor position",
                Some('i'),
            )
            .switch(
                "replace",
                "replaces the current contents of the buffer (default)",
                Some('r'),
            )
            .optional(
                "cmd",
                SyntaxShape::String,
                "the string to perform the operation with",
            )
            .category(Category::Core)
    }

    fn usage(&self) -> &str {
        "View or modify the current command line input buffer"
    }

    fn search_terms(&self) -> Vec<&str> {
        vec!["repl", "interactive"]
    }

    fn run(
        &self,
        engine_state: &EngineState,
        stack: &mut Stack,
        call: &Call,
        _input: PipelineData,
    ) -> Result<PipelineData, ShellError> {
        if let Some(cmd) = call.opt::<Value>(engine_state, stack, 0)? {
            let mut ops = engine_state
                .repl_operation_queue
                .lock()
                .expect("repl op queue mutex");
            ops.push_back(if call.has_flag("append") {
                ReplOperation::Append(cmd.as_string()?)
            } else if call.has_flag("insert") {
                ReplOperation::Insert(cmd.as_string()?)
            } else {
                ReplOperation::Replace(cmd.as_string()?)
            });
            Ok(Value::Nothing { span: call.head }.into_pipeline_data())
        } else if let Some(ref cmd) = *engine_state
            .repl_buffer_state
            .lock()
            .expect("repl buffer state mutex")
        {
            Ok(Value::String {
                val: cmd.clone(),
                span: call.head,
            }
            .into_pipeline_data())
        } else {
            Ok(Value::Nothing { span: call.head }.into_pipeline_data())
        }
    }
}
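For orientation, a minimal sketch (not part of this commit) of how a REPL loop could drain the queue that `commandline` fills. Only `repl_operation_queue` and the `ReplOperation` variants come from the code above; the buffer/cursor handling and the assumption that the queue is a `VecDeque` behind a `Mutex` are illustrative.

use nu_protocol::engine::{EngineState, ReplOperation};

// Hypothetical helper: apply queued edits to the line editor's buffer.
fn apply_repl_ops(engine_state: &EngineState, buffer: &mut String, cursor: &mut usize) {
    let mut ops = engine_state
        .repl_operation_queue
        .lock()
        .expect("repl op queue mutex");
    while let Some(op) = ops.pop_front() {
        match op {
            // --append: add to the end of the buffer
            ReplOperation::Append(s) => buffer.push_str(&s),
            // --insert: splice in at the cursor and advance it
            ReplOperation::Insert(s) => {
                buffer.insert_str(*cursor, &s);
                *cursor += s.len();
            }
            // default / --replace: overwrite the whole buffer
            ReplOperation::Replace(s) => {
                *buffer = s;
                *cursor = buffer.len();
            }
        }
    }
}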
@@ -1,5 +1,6 @@
 mod alias;
 mod ast;
+mod commandline;
 mod debug;
 mod def;
 mod def_env;
@@ -29,6 +30,7 @@ mod version;
 
 pub use alias::Alias;
 pub use ast::Ast;
+pub use commandline::Commandline;
 pub use debug::Debug;
 pub use def::Def;
 pub use def_env::DefEnv;
|
@@ -1,4 +1,5 @@
 use nu_engine::{eval_block, find_in_dirs_env, redirect_env, CallExt};
+use nu_parser::trim_quotes_str;
 use nu_protocol::ast::{Call, Expr};
 use nu_protocol::engine::{Command, EngineState, Stack};
 use nu_protocol::{
@@ -55,7 +56,8 @@ impl Command for OverlayUse {
         call: &Call,
         input: PipelineData,
     ) -> Result<PipelineData, ShellError> {
-        let name_arg: Spanned<String> = call.req(engine_state, caller_stack, 0)?;
+        let mut name_arg: Spanned<String> = call.req(engine_state, caller_stack, 0)?;
+        name_arg.item = trim_quotes_str(&name_arg.item).to_string();
 
         let origin_module_id = if let Some(overlay_expr) = call.positional_nth(0) {
             if let Expr::Overlay(module_id) = overlay_expr.expr {
@@ -191,6 +193,13 @@ impl Command for OverlayUse {
                 description: "Create an overlay from a module",
                 example: r#"module spam { export def foo [] { "foo" } }
    overlay use spam
    foo"#,
                 result: None,
             },
+            Example {
+                description: "Create an overlay from a module and rename it",
+                example: r#"module spam { export def foo [] { "foo" } }
+    overlay use spam as spam_new
+    foo"#,
+                result: None,
+            },
|
||||
|
@@ -21,12 +21,6 @@ impl Command for Register {
                 SyntaxShape::Filepath,
                 "path of executable for plugin",
             )
-            .required_named(
-                "encoding",
-                SyntaxShape::String,
-                "Encoding used to communicate with plugin. Options: [json, msgpack]",
-                Some('e'),
-            )
             .optional(
                 "signature",
                 SyntaxShape::Any,
@@ -64,12 +58,12 @@ impl Command for Register {
         vec![
             Example {
                 description: "Register `nu_plugin_query` plugin from ~/.cargo/bin/ dir",
-                example: r#"register -e json ~/.cargo/bin/nu_plugin_query"#,
+                example: r#"register ~/.cargo/bin/nu_plugin_query"#,
                 result: None,
             },
             Example {
                 description: "Register `nu_plugin_query` plugin from `nu -c`(plugin will be available in that nu session only)",
-                example: r#"let plugin = ((which nu).path.0 | path dirname | path join 'nu_plugin_query'); nu -c $'register -e json ($plugin); version'"#,
+                example: r#"let plugin = ((which nu).path.0 | path dirname | path join 'nu_plugin_query'); nu -c $'register ($plugin); version'"#,
                 result: None,
             },
         ]
|
||||
|
@@ -1,4 +1,4 @@
-use nu_engine::eval_block;
+use nu_engine::{eval_block, find_in_dirs_env, redirect_env};
 use nu_protocol::ast::{Call, Expr, Expression, ImportPatternMember};
 use nu_protocol::engine::{Command, EngineState, Stack};
 use nu_protocol::{
@@ -35,9 +35,9 @@ impl Command for Use {
     fn run(
         &self,
         engine_state: &EngineState,
-        stack: &mut Stack,
+        caller_stack: &mut Stack,
         call: &Call,
-        _input: PipelineData,
+        input: PipelineData,
     ) -> Result<PipelineData, ShellError> {
         let import_pattern = if let Some(Expression {
             expr: Expr::ImportPattern(pat),
@@ -107,7 +107,7 @@ impl Command for Use {
 
                 let val = eval_block(
                     engine_state,
-                    stack,
+                    caller_stack,
                     block,
                     PipelineData::new(call.head),
                     false,
@@ -115,11 +115,50 @@ impl Command for Use {
                 )?
                 .into_value(call.head);

-                stack.add_env_var(name, val);
+                caller_stack.add_env_var(name, val);
             }

+            // Evaluate the export-env block if there is one
+            if let Some(block_id) = module.env_block {
+                let block = engine_state.get_block(block_id);
+
+                // See if the module is a file
+                let module_arg_str = String::from_utf8_lossy(
+                    engine_state.get_span_contents(&import_pattern.head.span),
+                );
+                let maybe_parent = if let Some(path) =
+                    find_in_dirs_env(&module_arg_str, engine_state, caller_stack)?
+                {
+                    path.parent().map(|p| p.to_path_buf()).or(None)
+                } else {
+                    None
+                };
+
+                let mut callee_stack = caller_stack.gather_captures(&block.captures);
+
+                // If so, set the currently evaluated directory (file-relative PWD)
+                if let Some(parent) = maybe_parent {
+                    let file_pwd = Value::String {
+                        val: parent.to_string_lossy().to_string(),
+                        span: call.head,
+                    };
+                    callee_stack.add_env_var("FILE_PWD".to_string(), file_pwd);
+                }
+
+                // Run the block (discard the result)
+                let _ = eval_block(
+                    engine_state,
+                    &mut callee_stack,
+                    block,
+                    input,
+                    call.redirect_stdout,
+                    call.redirect_stderr,
+                )?;
+
+                // Merge the block's environment to the current stack
+                redirect_env(engine_state, caller_stack, &callee_stack);
+            }
         } else {
             // TODO: This is a workaround since call.positional[0].span points at 0 for some reason
             // when this error is triggered
             return Err(ShellError::GenericError(
                 format!(
                     "Could not import from '{}'",
|
||||
|
@ -125,7 +125,7 @@ impl Command for AndDb {
|
||||
}
|
||||
|
||||
fn modify_query(query: &mut Box<Query>, expression: Expr, span: Span) -> Result<(), ShellError> {
|
||||
match query.body {
|
||||
match *query.body {
|
||||
SetExpr::Select(ref mut select) => modify_select(select, expression, span)?,
|
||||
_ => {
|
||||
return Err(ShellError::GenericError(
|
||||
|
@ -113,7 +113,7 @@ fn alias_db(
|
||||
Vec::new(),
|
||||
)),
|
||||
Some(statement) => match statement {
|
||||
Statement::Query(query) => match &mut query.body {
|
||||
Statement::Query(query) => match &mut *query.body {
|
||||
SetExpr::Select(select) => {
|
||||
select.as_mut().from.iter_mut().for_each(|table| {
|
||||
let new_alias = Some(TableAlias {
|
@ -17,7 +17,7 @@ pub fn value_into_table_factor(
|
||||
Ok(TableFactor::Table {
|
||||
name: ObjectName(vec![ident]),
|
||||
alias,
|
||||
args: Vec::new(),
|
||||
args: None,
|
||||
with_hints: Vec::new(),
|
||||
})
|
||||
}
|
||||
|
@ -96,12 +96,12 @@ fn create_statement(
|
||||
) -> Result<Statement, ShellError> {
|
||||
let query = Query {
|
||||
with: None,
|
||||
body: SetExpr::Select(Box::new(create_select(
|
||||
body: Box::new(SetExpr::Select(Box::new(create_select(
|
||||
connection,
|
||||
engine_state,
|
||||
stack,
|
||||
call,
|
||||
)?)),
|
||||
)?))),
|
||||
order_by: Vec::new(),
|
||||
limit: None,
|
||||
offset: None,
|
||||
@ -121,18 +121,18 @@ fn modify_statement(
|
||||
) -> Result<Statement, ShellError> {
|
||||
match statement {
|
||||
Statement::Query(ref mut query) => {
|
||||
match query.body {
|
||||
match *query.body {
|
||||
SetExpr::Select(ref mut select) => {
|
||||
let table = create_table(connection, engine_state, stack, call)?;
|
||||
select.from.push(table);
|
||||
}
|
||||
_ => {
|
||||
query.as_mut().body = SetExpr::Select(Box::new(create_select(
|
||||
query.as_mut().body = Box::new(SetExpr::Select(Box::new(create_select(
|
||||
connection,
|
||||
engine_state,
|
||||
stack,
|
||||
call,
|
||||
)?));
|
||||
)?)));
|
||||
}
|
||||
};
|
||||
|
||||
@ -167,6 +167,7 @@ fn create_select(
|
||||
distribute_by: Vec::new(),
|
||||
sort_by: Vec::new(),
|
||||
having: None,
|
||||
qualify: None,
|
||||
})
|
||||
}
|
||||
|
@ -104,7 +104,7 @@ impl Command for GroupByDb {
|
||||
let mut db = SQLiteDatabase::try_from_pipeline(input, call.head)?;
|
||||
match db.statement.as_mut() {
|
||||
Some(statement) => match statement {
|
||||
Statement::Query(ref mut query) => match &mut query.body {
|
||||
Statement::Query(ref mut query) => match &mut *query.body {
|
||||
SetExpr::Select(ref mut select) => select.group_by = expressions,
|
||||
s => {
|
||||
return Err(ShellError::GenericError(
|
||||
|
@ -146,7 +146,7 @@ fn modify_statement(
|
||||
) -> Result<Statement, ShellError> {
|
||||
match statement {
|
||||
Statement::Query(ref mut query) => {
|
||||
match &mut query.body {
|
||||
match &mut *query.body {
|
||||
SetExpr::Select(ref mut select) => {
|
||||
modify_from(connection, select, engine_state, stack, call)?
|
||||
}
|
||||
|
@ -1,45 +1,45 @@
|
||||
// Conversions between value and sqlparser objects
|
||||
pub mod conversions;
|
||||
|
||||
mod alias;
|
||||
mod and;
|
||||
mod as_;
|
||||
mod collect;
|
||||
mod describe;
|
||||
mod from;
|
||||
mod from_table;
|
||||
mod group_by;
|
||||
mod into_db;
|
||||
mod into_sqlite;
|
||||
mod join;
|
||||
mod limit;
|
||||
mod open;
|
||||
mod open_db;
|
||||
mod or;
|
||||
mod order_by;
|
||||
mod query;
|
||||
mod query_db;
|
||||
mod schema;
|
||||
mod select;
|
||||
mod to_db;
|
||||
mod where_;
|
||||
|
||||
// Temporal module to create Query objects
|
||||
mod testing;
|
||||
use testing::TestingDb;
|
||||
mod testing_db;
|
||||
use testing_db::TestingDb;
|
||||
|
||||
use alias::AliasDb;
|
||||
use and::AndDb;
|
||||
use as_::AliasDb;
|
||||
use collect::CollectDb;
|
||||
pub(crate) use describe::DescribeDb;
|
||||
pub(crate) use from::FromDb;
|
||||
pub(crate) use from_table::FromDb;
|
||||
use group_by::GroupByDb;
|
||||
pub(crate) use into_db::ToDataBase;
|
||||
use into_sqlite::IntoSqliteDb;
|
||||
use join::JoinDb;
|
||||
use limit::LimitDb;
|
||||
use nu_protocol::engine::StateWorkingSet;
|
||||
use open::OpenDb;
|
||||
use open_db::OpenDb;
|
||||
use or::OrDb;
|
||||
use order_by::OrderByDb;
|
||||
use query::QueryDb;
|
||||
use query_db::QueryDb;
|
||||
use schema::SchemaDb;
|
||||
pub(crate) use select::ProjectionDb;
|
||||
pub(crate) use to_db::ToDataBase;
|
||||
use where_::WhereDb;
|
||||
|
||||
pub fn add_commands_decls(working_set: &mut StateWorkingSet) {
|
||||
|
@ -125,7 +125,7 @@ impl Command for OrDb {
|
||||
}
|
||||
|
||||
fn modify_query(query: &mut Box<Query>, expression: Expr, span: Span) -> Result<(), ShellError> {
|
||||
match query.body {
|
||||
match *query.body {
|
||||
SetExpr::Select(ref mut select) => modify_select(select, expression, span)?,
|
||||
_ => {
|
||||
return Err(ShellError::GenericError(
|
||||
|
@ -108,7 +108,7 @@ impl Command for ProjectionDb {
|
||||
fn create_statement(expressions: Vec<SelectItem>) -> Statement {
|
||||
let query = Query {
|
||||
with: None,
|
||||
body: SetExpr::Select(Box::new(create_select(expressions))),
|
||||
body: Box::new(SetExpr::Select(Box::new(create_select(expressions)))),
|
||||
order_by: Vec::new(),
|
||||
limit: None,
|
||||
offset: None,
|
||||
@ -126,10 +126,11 @@ fn modify_statement(
|
||||
) -> Result<Statement, ShellError> {
|
||||
match statement {
|
||||
Statement::Query(ref mut query) => {
|
||||
match query.body {
|
||||
match *query.body {
|
||||
SetExpr::Select(ref mut select) => select.as_mut().projection = expressions,
|
||||
_ => {
|
||||
query.as_mut().body = SetExpr::Select(Box::new(create_select(expressions)));
|
||||
query.as_mut().body =
|
||||
Box::new(SetExpr::Select(Box::new(create_select(expressions))));
|
||||
}
|
||||
};
|
||||
|
||||
@ -159,6 +160,7 @@ fn create_select(projection: Vec<SelectItem>) -> Select {
|
||||
distribute_by: Vec::new(),
|
||||
sort_by: Vec::new(),
|
||||
having: None,
|
||||
qualify: None,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -99,10 +99,10 @@ impl Command for WhereDb {
|
||||
}
|
||||
|
||||
fn modify_query(query: &mut Box<Query>, expression: Expr) {
|
||||
match query.body {
|
||||
match *query.body {
|
||||
SetExpr::Select(ref mut select) => modify_select(select, expression),
|
||||
_ => {
|
||||
query.as_mut().body = SetExpr::Select(Box::new(create_select(expression)));
|
||||
query.as_mut().body = Box::new(SetExpr::Select(Box::new(create_select(expression))));
|
||||
}
|
||||
};
|
||||
}
|
||||
@ -125,6 +125,7 @@ fn create_select(expression: Expr) -> Select {
|
||||
distribute_by: Vec::new(),
|
||||
sort_by: Vec::new(),
|
||||
having: None,
|
||||
qualify: None,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -132,6 +132,7 @@ impl Command for FunctionExpr {
|
||||
args,
|
||||
over: None,
|
||||
distinct: call.has_flag("distinct"),
|
||||
special: false,
|
||||
})
|
||||
.into();
|
||||
|
||||
|
@ -339,7 +339,7 @@ impl ExprDb {
|
||||
Expr::TypedString { .. } => todo!(),
|
||||
Expr::MapAccess { .. } => todo!(),
|
||||
Expr::Case { .. } => todo!(),
|
||||
Expr::Exists(_) => todo!(),
|
||||
Expr::Exists { .. } => todo!(),
|
||||
Expr::Subquery(_) => todo!(),
|
||||
Expr::ListAgg(_) => todo!(),
|
||||
Expr::GroupingSets(_) => todo!(),
|
||||
@ -348,6 +348,25 @@ impl ExprDb {
|
||||
Expr::Tuple(_) => todo!(),
|
||||
Expr::ArrayIndex { .. } => todo!(),
|
||||
Expr::Array(_) => todo!(),
|
||||
Expr::JsonAccess { .. } => todo!(),
|
||||
Expr::CompositeAccess { .. } => todo!(),
|
||||
Expr::IsFalse(_) => todo!(),
|
||||
Expr::IsNotFalse(_) => todo!(),
|
||||
Expr::IsTrue(_) => todo!(),
|
||||
Expr::IsNotTrue(_) => todo!(),
|
||||
Expr::IsUnknown(_) => todo!(),
|
||||
Expr::IsNotUnknown(_) => todo!(),
|
||||
Expr::Like { .. } => todo!(),
|
||||
Expr::ILike { .. } => todo!(),
|
||||
Expr::SimilarTo { .. } => todo!(),
|
||||
Expr::AnyOp(_) => todo!(),
|
||||
Expr::AllOp(_) => todo!(),
|
||||
Expr::SafeCast { .. } => todo!(),
|
||||
Expr::AtTimeZone { .. } => todo!(),
|
||||
Expr::Position { .. } => todo!(),
|
||||
Expr::Overlay { .. } => todo!(),
|
||||
Expr::AggregateExpressionWithFilter { .. } => todo!(),
|
||||
Expr::ArraySubquery(_) => todo!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -32,7 +32,7 @@ impl Command for AppendDF {
|
||||
vec![
|
||||
Example {
|
||||
description: "Appends a dataframe as new columns",
|
||||
example: r#"let a = ([['a' 'b']; [1 2] [3 4]] | into df);
|
||||
example: r#"let a = ([[a b]; [1 2] [3 4]] | into df);
|
||||
$a | append $a"#,
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
@ -59,7 +59,7 @@ impl Command for AppendDF {
|
||||
},
|
||||
Example {
|
||||
description: "Appends a dataframe merging at the end of columns",
|
||||
example: r#"let a = ([['a' 'b']; [1 2] [3 4]] | into df);
|
||||
example: r#"let a = ([[a b]; [1 2] [3 4]] | into df);
|
||||
$a | append $a --col"#,
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
|
@ -36,9 +36,9 @@ impl Command for DropNulls {
|
||||
vec![
|
||||
Example {
|
||||
description: "drop null values in dataframe",
|
||||
example: r#"let my_df = ([[a b]; [1 2] [3 0] [1 2]] | into df);
|
||||
let res = ($my_df.b / $my_df.b);
|
||||
let a = ($my_df | with-column $res --name 'res');
|
||||
example: r#"let df = ([[a b]; [1 2] [3 0] [1 2]] | into df);
|
||||
let res = ($df.b / $df.b);
|
||||
let a = ($df | with-column $res --name res);
|
||||
$a | drop-nulls"#,
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
|
@ -13,11 +13,15 @@ mod last;
|
||||
mod list;
|
||||
mod melt;
|
||||
mod open;
|
||||
mod query_dfr;
|
||||
mod rename;
|
||||
mod sample;
|
||||
mod shape;
|
||||
mod slice;
|
||||
mod sql_context;
|
||||
mod sql_expr;
|
||||
mod take;
|
||||
mod to_arrow;
|
||||
mod to_csv;
|
||||
mod to_df;
|
||||
mod to_nu;
|
||||
@ -41,11 +45,15 @@ pub use last::LastDF;
|
||||
pub use list::ListDF;
|
||||
pub use melt::MeltDF;
|
||||
pub use open::OpenDataFrame;
|
||||
pub use query_dfr::QueryDfr;
|
||||
pub use rename::RenameDF;
|
||||
pub use sample::SampleDF;
|
||||
pub use shape::ShapeDF;
|
||||
pub use slice::SliceDF;
|
||||
pub use sql_context::SQLContext;
|
||||
pub use sql_expr::parse_sql_expr;
|
||||
pub use take::TakeDF;
|
||||
pub use to_arrow::ToArrow;
|
||||
pub use to_csv::ToCSV;
|
||||
pub use to_df::ToDataFrame;
|
||||
pub use to_nu::ToNu;
|
||||
@ -79,11 +87,13 @@ pub fn add_eager_decls(working_set: &mut StateWorkingSet) {
|
||||
ListDF,
|
||||
MeltDF,
|
||||
OpenDataFrame,
|
||||
QueryDfr,
|
||||
RenameDF,
|
||||
SampleDF,
|
||||
ShapeDF,
|
||||
SliceDF,
|
||||
TakeDF,
|
||||
ToArrow,
|
||||
ToCSV,
|
||||
ToDataFrame,
|
||||
ToNu,
|
||||
|
@ -9,8 +9,8 @@ use nu_protocol::{
|
||||
use std::{fs::File, io::BufReader, path::PathBuf};
|
||||
|
||||
use polars::prelude::{
|
||||
CsvEncoding, CsvReader, JsonReader, LazyCsvReader, LazyFrame, ParallelStrategy, ParquetReader,
|
||||
ScanArgsParquet, SerReader,
|
||||
CsvEncoding, CsvReader, IpcReader, JsonReader, LazyCsvReader, LazyFrame, ParallelStrategy,
|
||||
ParquetReader, ScanArgsIpc, ScanArgsParquet, SerReader,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
@ -22,7 +22,7 @@ impl Command for OpenDataFrame {
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Opens csv, json or parquet file to create dataframe"
|
||||
"Opens csv, json, arrow, or parquet file to create dataframe"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
@ -33,6 +33,12 @@ impl Command for OpenDataFrame {
|
||||
"file path to load values from",
|
||||
)
|
||||
.switch("lazy", "creates a lazy dataframe", Some('l'))
|
||||
.named(
|
||||
"type",
|
||||
SyntaxShape::String,
|
||||
"File type: csv, tsv, json, parquet, arrow. If omitted, derive from file extension",
|
||||
Some('t'),
|
||||
)
|
||||
.named(
|
||||
"delimiter",
|
||||
SyntaxShape::String,
|
||||
@ -93,15 +99,33 @@ fn command(
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
|
||||
|
||||
match file.item.extension() {
|
||||
Some(e) => match e.to_str() {
|
||||
Some("csv") | Some("tsv") => from_csv(engine_state, stack, call),
|
||||
Some("parquet") => from_parquet(engine_state, stack, call),
|
||||
Some("json") => from_json(engine_state, stack, call),
|
||||
_ => Err(ShellError::FileNotFoundCustom(
|
||||
"Not a csv, tsv, parquet or json file".into(),
|
||||
let type_option: Option<Spanned<String>> = call.get_flag(engine_state, stack, "type")?;
|
||||
|
||||
let type_id = match &type_option {
|
||||
Some(ref t) => Some((t.item.to_owned(), "Invalid type", t.span)),
|
||||
None => match file.item.extension() {
|
||||
Some(e) => Some((
|
||||
e.to_string_lossy().into_owned(),
|
||||
"Invalid extension",
|
||||
file.span,
|
||||
)),
|
||||
None => None,
|
||||
},
|
||||
};
|
||||
|
||||
match type_id {
|
||||
Some((e, msg, blamed)) => match e.as_str() {
|
||||
"csv" | "tsv" => from_csv(engine_state, stack, call),
|
||||
"parquet" => from_parquet(engine_state, stack, call),
|
||||
"ipc" | "arrow" => from_ipc(engine_state, stack, call),
|
||||
"json" => from_json(engine_state, stack, call),
|
||||
_ => Err(ShellError::FileNotFoundCustom(
|
||||
format!(
|
||||
"{}. Supported values: csv, tsv, parquet, ipc, arrow, json",
|
||||
msg
|
||||
),
|
||||
blamed,
|
||||
)),
|
||||
},
|
||||
None => Err(ShellError::FileNotFoundCustom(
|
||||
"File without extension".into(),
|
||||
@ -177,6 +201,70 @@ fn from_parquet(
|
||||
}
|
||||
}
|
||||
|
||||
fn from_ipc(
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
) -> Result<Value, ShellError> {
|
||||
if call.has_flag("lazy") {
|
||||
let file: String = call.req(engine_state, stack, 0)?;
|
||||
let args = ScanArgsIpc {
|
||||
n_rows: None,
|
||||
cache: true,
|
||||
rechunk: false,
|
||||
row_count: None,
|
||||
};
|
||||
|
||||
let df: NuLazyFrame = LazyFrame::scan_ipc(file, args)
|
||||
.map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"IPC reader error".into(),
|
||||
format!("{:?}", e),
|
||||
Some(call.head),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
})?
|
||||
.into();
|
||||
|
||||
df.into_value(call.head)
|
||||
} else {
|
||||
let file: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
|
||||
let columns: Option<Vec<String>> = call.get_flag(engine_state, stack, "columns")?;
|
||||
|
||||
let r = File::open(&file.item).map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error opening file".into(),
|
||||
e.to_string(),
|
||||
Some(file.span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
})?;
|
||||
let reader = IpcReader::new(r);
|
||||
|
||||
let reader = match columns {
|
||||
None => reader,
|
||||
Some(columns) => reader.with_columns(Some(columns)),
|
||||
};
|
||||
|
||||
let df: NuDataFrame = reader
|
||||
.finish()
|
||||
.map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"IPC reader error".into(),
|
||||
format!("{:?}", e),
|
||||
Some(call.head),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
})?
|
||||
.into();
|
||||
|
||||
Ok(df.into_value(call.head))
|
||||
}
|
||||
}
|
||||
|
||||
fn from_json(
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
|
106
crates/nu-command/src/dataframe/eager/query_dfr.rs
Normal file
106
crates/nu-command/src/dataframe/eager/query_dfr.rs
Normal file
@ -0,0 +1,106 @@
|
||||
use super::super::values::NuDataFrame;
|
||||
use crate::dataframe::values::Column;
|
||||
use crate::dataframe::{eager::SQLContext, values::NuLazyFrame};
|
||||
use nu_engine::CallExt;
|
||||
use nu_protocol::{
|
||||
ast::Call,
|
||||
engine::{Command, EngineState, Stack},
|
||||
Category, Example, PipelineData, ShellError, Signature, Span, SyntaxShape, Type, Value,
|
||||
};
|
||||
|
||||
// attribution:
|
||||
// sql_context.rs, and sql_expr.rs were copied from polars-sql. thank you.
|
||||
// maybe we should just use the crate at some point but it's not published yet.
|
||||
// https://github.com/pola-rs/polars/tree/master/polars-sql
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct QueryDfr;
|
||||
|
||||
impl Command for QueryDfr {
|
||||
fn name(&self) -> &str {
|
||||
"query dfr"
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Query dataframe using SQL. Note: The dataframe is always named 'df' in your query's from clause."
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(self.name())
|
||||
.required("sql", SyntaxShape::String, "sql query")
|
||||
.input_type(Type::Custom("dataframe".into()))
|
||||
.output_type(Type::Custom("dataframe".into()))
|
||||
.category(Category::Custom("dataframe".into()))
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["dataframe", "sql", "search"]
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![Example {
|
||||
description: "Query dataframe using SQL",
|
||||
example: "[[a b]; [1 2] [3 4]] | into df | query dfr 'select a from df'",
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![Column::new(
|
||||
"a".to_string(),
|
||||
vec![Value::test_int(1), Value::test_int(3)],
|
||||
)])
|
||||
.expect("simple df for test should not fail")
|
||||
.into_value(Span::test_data()),
|
||||
),
|
||||
}]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
command(engine_state, stack, call, input)
|
||||
}
|
||||
}
|
||||
|
||||
fn command(
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let sql_query: String = call.req(engine_state, stack, 0)?;
|
||||
let df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
||||
|
||||
let mut ctx = SQLContext::new();
|
||||
ctx.register("df", &df.df);
|
||||
let df_sql = ctx.execute(&sql_query).map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Dataframe Error".into(),
|
||||
e.to_string(),
|
||||
Some(call.head),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
})?;
|
||||
let lazy = NuLazyFrame::new(false, df_sql);
|
||||
|
||||
let eager = lazy.collect(call.head)?;
|
||||
let value = Value::CustomValue {
|
||||
val: Box::new(eager),
|
||||
span: call.head,
|
||||
};
|
||||
|
||||
Ok(PipelineData::Value(value, None))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::super::super::test_dataframe::test_dataframe;
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_examples() {
|
||||
test_dataframe(vec![Box::new(QueryDfr {})])
|
||||
}
|
||||
}
|
220
crates/nu-command/src/dataframe/eager/sql_context.rs
Normal file
@ -0,0 +1,220 @@
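The new SQLContext below registers DataFrames by name as lazy frames and executes a parsed SELECT against them. As orientation before the implementation, a hedged usage sketch (not part of this commit; it assumes the polars `df!` macro for building a test frame and that `SQLContext` is in scope):

use polars::prelude::*;

// Hypothetical demo: register a frame under the name "df" and run a query.
fn demo() -> std::result::Result<DataFrame, PolarsError> {
    let df = df!["a" => [1i64, 3], "b" => [2i64, 4]]?;
    let mut ctx = SQLContext::new();
    ctx.register("df", &df);
    // execute() returns a LazyFrame; collect() materializes the result
    ctx.execute("SELECT a FROM df")?.collect()
}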
|
||||
use crate::dataframe::eager::sql_expr::parse_sql_expr;
|
||||
use polars::error::PolarsError;
|
||||
use polars::prelude::{col, DataFrame, DataType, IntoLazy, LazyFrame};
|
||||
use sqlparser::ast::{
|
||||
Expr as SqlExpr, Select, SelectItem, SetExpr, Statement, TableFactor, Value as SQLValue,
|
||||
};
|
||||
use sqlparser::dialect::GenericDialect;
|
||||
use sqlparser::parser::Parser;
|
||||
use std::collections::HashMap;
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct SQLContext {
|
||||
table_map: HashMap<String, LazyFrame>,
|
||||
dialect: GenericDialect,
|
||||
}
|
||||
|
||||
impl SQLContext {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
table_map: HashMap::new(),
|
||||
dialect: GenericDialect::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn register(&mut self, name: &str, df: &DataFrame) {
|
||||
self.table_map.insert(name.to_owned(), df.clone().lazy());
|
||||
}
|
||||
|
||||
fn execute_select(&self, select_stmt: &Select) -> Result<LazyFrame, PolarsError> {
|
||||
// Determine involved dataframe
|
||||
// Implicit join require some more work in query parsers, Explicit join are preferred for now.
|
||||
let tbl = select_stmt.from.get(0).ok_or_else(|| {
|
||||
PolarsError::NotFound("No table found in select statement".to_string())
|
||||
})?;
|
||||
let mut alias_map = HashMap::new();
|
||||
let tbl_name = match &tbl.relation {
|
||||
TableFactor::Table { name, alias, .. } => {
|
||||
let tbl_name = name
|
||||
.0
|
||||
.get(0)
|
||||
.ok_or_else(|| {
|
||||
PolarsError::NotFound("No table found in select statement".to_string())
|
||||
})?
|
||||
.value
|
||||
.to_string();
|
||||
if self.table_map.contains_key(&tbl_name) {
|
||||
if let Some(alias) = alias {
|
||||
alias_map.insert(alias.name.value.clone(), tbl_name.to_owned());
|
||||
};
|
||||
tbl_name
|
||||
} else {
|
||||
return Err(PolarsError::ComputeError(
|
||||
format!("Table name {tbl_name} was not found").into(),
|
||||
));
|
||||
}
|
||||
}
|
||||
// Support bare table, optional with alias for now
|
||||
_ => return Err(PolarsError::ComputeError("Not implemented".into())),
|
||||
};
|
||||
let df = &self.table_map[&tbl_name];
|
||||
let mut raw_projection_before_alias: HashMap<String, usize> = HashMap::new();
|
||||
let mut contain_wildcard = false;
|
||||
// Filter Expression
|
||||
let df = match select_stmt.selection.as_ref() {
|
||||
Some(expr) => {
|
||||
let filter_expression = parse_sql_expr(expr)?;
|
||||
df.clone().filter(filter_expression)
|
||||
}
|
||||
None => df.clone(),
|
||||
};
|
||||
// Column Projections
|
||||
let projection = select_stmt
|
||||
.projection
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(i, select_item)| {
|
||||
Ok(match select_item {
|
||||
SelectItem::UnnamedExpr(expr) => {
|
||||
let expr = parse_sql_expr(expr)?;
|
||||
raw_projection_before_alias.insert(format!("{:?}", expr), i);
|
||||
expr
|
||||
}
|
||||
SelectItem::ExprWithAlias { expr, alias } => {
|
||||
let expr = parse_sql_expr(expr)?;
|
||||
raw_projection_before_alias.insert(format!("{:?}", expr), i);
|
||||
expr.alias(&alias.value)
|
||||
}
|
||||
SelectItem::QualifiedWildcard(_) | SelectItem::Wildcard => {
|
||||
contain_wildcard = true;
|
||||
col("*")
|
||||
}
|
||||
})
|
||||
})
|
||||
.collect::<Result<Vec<_>, PolarsError>>()?;
|
||||
// Check for group by
|
||||
// After projection since there might be number.
|
||||
let group_by = select_stmt
|
||||
.group_by
|
||||
.iter()
|
||||
.map(
|
||||
|e|match e {
|
||||
SqlExpr::Value(SQLValue::Number(idx, _)) => {
|
||||
let idx = match idx.parse::<usize>() {
|
||||
Ok(0)| Err(_) => Err(
|
||||
PolarsError::ComputeError(
|
||||
format!("Group By Error: Only positive number or expression are supported, got {idx}").into()
|
||||
)),
|
||||
Ok(idx) => Ok(idx)
|
||||
}?;
|
||||
Ok(projection[idx].clone())
|
||||
}
|
||||
SqlExpr::Value(_) => Err(
|
||||
PolarsError::ComputeError("Group By Error: Only positive number or expression are supported".into())
|
||||
),
|
||||
_ => parse_sql_expr(e)
|
||||
}
|
||||
)
|
||||
.collect::<Result<Vec<_>, PolarsError>>()?;
|
||||
|
||||
let df = if group_by.is_empty() {
|
||||
df.select(projection)
|
||||
} else {
|
||||
// check groupby and projection due to difference between SQL and polars
|
||||
// Return error on wild card, shouldn't process this
|
||||
if contain_wildcard {
|
||||
return Err(PolarsError::ComputeError(
|
||||
"Group By Error: Can't processed wildcard in groupby".into(),
|
||||
));
|
||||
}
|
||||
// Default polars group by will have group by columns at the front
|
||||
// need some container to contain position of group by columns and its position
|
||||
// at the final agg projection, check the schema for the existence of group by column
|
||||
// and its projections columns, keeping the original index
|
||||
let (exclude_expr, groupby_pos): (Vec<_>, Vec<_>) = group_by
|
||||
.iter()
|
||||
.map(|expr| raw_projection_before_alias.get(&format!("{:?}", expr)))
|
||||
.enumerate()
|
||||
.filter(|(_, proj_p)| proj_p.is_some())
|
||||
.map(|(gb_p, proj_p)| (*proj_p.unwrap_or(&0), (*proj_p.unwrap_or(&0), gb_p)))
|
||||
.unzip();
|
||||
let (agg_projection, agg_proj_pos): (Vec<_>, Vec<_>) = projection
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter(|(i, _)| !exclude_expr.contains(i))
|
||||
.enumerate()
|
||||
.map(|(agg_pj, (proj_p, expr))| (expr.clone(), (proj_p, agg_pj + group_by.len())))
|
||||
.unzip();
|
||||
let agg_df = df.groupby(group_by).agg(agg_projection);
|
||||
let mut final_proj_pos = groupby_pos
|
||||
.into_iter()
|
||||
.chain(agg_proj_pos.into_iter())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
final_proj_pos.sort_by(|(proj_pa, _), (proj_pb, _)| proj_pa.cmp(proj_pb));
|
||||
let final_proj = final_proj_pos
|
||||
.into_iter()
|
||||
.map(|(_, shm_p)| {
|
||||
col(agg_df
|
||||
.clone()
|
||||
// FIXME: had to do this mess to get get_index to work, not sure why. need help
|
||||
.collect()
|
||||
.unwrap_or_default()
|
||||
.schema()
|
||||
.get_index(shm_p)
|
||||
.unwrap_or((&"".to_string(), &DataType::Null))
|
||||
.0)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
agg_df.select(final_proj)
|
||||
};
|
||||
Ok(df)
|
||||
}
|
||||
|
||||
pub fn execute(&self, query: &str) -> Result<LazyFrame, PolarsError> {
|
||||
let ast = Parser::parse_sql(&self.dialect, query)
|
||||
.map_err(|e| PolarsError::ComputeError(format!("{:?}", e).into()))?;
|
||||
if ast.len() != 1 {
|
||||
Err(PolarsError::ComputeError(
|
||||
"One and only one statement at a time please".into(),
|
||||
))
|
||||
} else {
|
||||
let ast = ast
|
||||
.get(0)
|
||||
.ok_or_else(|| PolarsError::NotFound("No statement found".to_string()))?;
|
||||
Ok(match ast {
|
||||
Statement::Query(query) => {
|
||||
let rs = match &*query.body {
|
||||
SetExpr::Select(select_stmt) => self.execute_select(select_stmt)?,
|
||||
_ => {
|
||||
return Err(PolarsError::ComputeError(
|
||||
"INSERT, UPDATE is not supported for polars".into(),
|
||||
))
|
||||
}
|
||||
};
|
||||
match &query.limit {
|
||||
Some(SqlExpr::Value(SQLValue::Number(nrow, _))) => {
|
||||
let nrow = nrow.parse().map_err(|err| {
|
||||
PolarsError::ComputeError(
|
||||
format!("Conversion Error: {:?}", err).into(),
|
||||
)
|
||||
})?;
|
||||
rs.limit(nrow)
|
||||
}
|
||||
None => rs,
|
||||
_ => {
|
||||
return Err(PolarsError::ComputeError(
|
||||
"Only support number argument to LIMIT clause".into(),
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
return Err(PolarsError::ComputeError(
|
||||
format!("Statement type {:?} is not supported", ast).into(),
|
||||
))
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
191
crates/nu-command/src/dataframe/eager/sql_expr.rs
Normal file
@ -0,0 +1,191 @@
|
||||
use polars::error::PolarsError;
|
||||
use polars::prelude::{col, lit, DataType, Expr, LiteralValue, Result, TimeUnit};
|
||||
|
||||
use sqlparser::ast::{
|
||||
BinaryOperator as SQLBinaryOperator, DataType as SQLDataType, Expr as SqlExpr,
|
||||
Function as SQLFunction, Value as SqlValue, WindowSpec,
|
||||
};
|
||||
|
||||
fn map_sql_polars_datatype(data_type: &SQLDataType) -> Result<DataType> {
|
||||
Ok(match data_type {
|
||||
SQLDataType::Char(_)
|
||||
| SQLDataType::Varchar(_)
|
||||
| SQLDataType::Uuid
|
||||
| SQLDataType::Clob(_)
|
||||
| SQLDataType::Text
|
||||
| SQLDataType::String => DataType::Utf8,
|
||||
SQLDataType::Float(_) => DataType::Float32,
|
||||
SQLDataType::Real => DataType::Float32,
|
||||
SQLDataType::Double => DataType::Float64,
|
||||
SQLDataType::TinyInt(_) => DataType::Int8,
|
||||
SQLDataType::UnsignedTinyInt(_) => DataType::UInt8,
|
||||
SQLDataType::SmallInt(_) => DataType::Int16,
|
||||
SQLDataType::UnsignedSmallInt(_) => DataType::UInt16,
|
||||
SQLDataType::Int(_) => DataType::Int32,
|
||||
SQLDataType::UnsignedInt(_) => DataType::UInt32,
|
||||
SQLDataType::BigInt(_) => DataType::Int64,
|
||||
SQLDataType::UnsignedBigInt(_) => DataType::UInt64,
|
||||
|
||||
SQLDataType::Boolean => DataType::Boolean,
|
||||
SQLDataType::Date => DataType::Date,
|
||||
SQLDataType::Time => DataType::Time,
|
||||
SQLDataType::Timestamp => DataType::Datetime(TimeUnit::Milliseconds, None),
|
||||
SQLDataType::Interval => DataType::Duration(TimeUnit::Milliseconds),
|
||||
SQLDataType::Array(inner_type) => {
|
||||
DataType::List(Box::new(map_sql_polars_datatype(inner_type)?))
|
||||
}
|
||||
_ => {
|
||||
return Err(PolarsError::ComputeError(
|
||||
format!(
|
||||
"SQL Datatype {:?} was not supported in polars-sql yet!",
|
||||
data_type
|
||||
)
|
||||
.into(),
|
||||
))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn cast_(expr: Expr, data_type: &SQLDataType) -> Result<Expr> {
|
||||
let polars_type = map_sql_polars_datatype(data_type)?;
|
||||
Ok(expr.cast(polars_type))
|
||||
}
|
||||
|
||||
fn binary_op_(left: Expr, right: Expr, op: &SQLBinaryOperator) -> Result<Expr> {
|
||||
Ok(match op {
|
||||
SQLBinaryOperator::Plus => left + right,
|
||||
SQLBinaryOperator::Minus => left - right,
|
||||
SQLBinaryOperator::Multiply => left * right,
|
||||
SQLBinaryOperator::Divide => left / right,
|
||||
SQLBinaryOperator::Modulo => left % right,
|
||||
SQLBinaryOperator::StringConcat => left.cast(DataType::Utf8) + right.cast(DataType::Utf8),
|
||||
SQLBinaryOperator::Gt => left.gt(right),
|
||||
SQLBinaryOperator::Lt => left.lt(right),
|
||||
SQLBinaryOperator::GtEq => left.gt_eq(right),
|
||||
SQLBinaryOperator::LtEq => left.lt_eq(right),
|
||||
SQLBinaryOperator::Eq => left.eq(right),
|
||||
SQLBinaryOperator::NotEq => left.eq(right).not(),
|
||||
SQLBinaryOperator::And => left.and(right),
|
||||
SQLBinaryOperator::Or => left.or(right),
|
||||
SQLBinaryOperator::Xor => left.xor(right),
|
||||
_ => {
|
||||
return Err(PolarsError::ComputeError(
|
||||
format!("SQL Operator {:?} was not supported in polars-sql yet!", op).into(),
|
||||
))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn literal_expr(value: &SqlValue) -> Result<Expr> {
|
||||
Ok(match value {
|
||||
SqlValue::Number(s, _) => {
|
||||
// Check for existence of decimal separator dot
|
||||
if s.contains('.') {
|
||||
s.parse::<f64>().map(lit).map_err(|_| {
|
||||
PolarsError::ComputeError(format!("Can't parse literal {:?}", s).into())
|
||||
})
|
||||
} else {
|
||||
s.parse::<i64>().map(lit).map_err(|_| {
|
||||
PolarsError::ComputeError(format!("Can't parse literal {:?}", s).into())
|
||||
})
|
||||
}?
|
||||
}
|
||||
SqlValue::SingleQuotedString(s) => lit(s.clone()),
|
||||
SqlValue::NationalStringLiteral(s) => lit(s.clone()),
|
||||
SqlValue::HexStringLiteral(s) => lit(s.clone()),
|
||||
SqlValue::DoubleQuotedString(s) => lit(s.clone()),
|
||||
SqlValue::Boolean(b) => lit(*b),
|
||||
SqlValue::Null => Expr::Literal(LiteralValue::Null),
|
||||
_ => {
|
||||
return Err(PolarsError::ComputeError(
|
||||
format!(
|
||||
"Parsing SQL Value {:?} was not supported in polars-sql yet!",
|
||||
value
|
||||
)
|
||||
.into(),
|
||||
))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn parse_sql_expr(expr: &SqlExpr) -> Result<Expr> {
|
||||
Ok(match expr {
|
||||
SqlExpr::Identifier(e) => col(&e.value),
|
||||
SqlExpr::BinaryOp { left, op, right } => {
|
||||
let left = parse_sql_expr(left)?;
|
||||
let right = parse_sql_expr(right)?;
|
||||
binary_op_(left, right, op)?
|
||||
}
|
||||
SqlExpr::Function(sql_function) => parse_sql_function(sql_function)?,
|
||||
SqlExpr::Cast { expr, data_type } => cast_(parse_sql_expr(expr)?, data_type)?,
|
||||
SqlExpr::Nested(expr) => parse_sql_expr(expr)?,
|
||||
SqlExpr::Value(value) => literal_expr(value)?,
|
||||
_ => {
|
||||
return Err(PolarsError::ComputeError(
|
||||
format!(
|
||||
"Expression: {:?} was not supported in polars-sql yet!",
|
||||
expr
|
||||
)
|
||||
.into(),
|
||||
))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn apply_window_spec(expr: Expr, window_spec: &Option<WindowSpec>) -> Result<Expr> {
|
||||
Ok(match &window_spec {
|
||||
Some(window_spec) => {
|
||||
// Process for simple window specification, partition by first
|
||||
let partition_by = window_spec
|
||||
.partition_by
|
||||
.iter()
|
||||
.map(parse_sql_expr)
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
expr.over(partition_by)
|
||||
// Order by and Row range may not be supported at the moment
|
||||
}
|
||||
None => expr,
|
||||
})
|
||||
}
|
||||
|
||||
fn parse_sql_function(sql_function: &SQLFunction) -> Result<Expr> {
|
||||
use sqlparser::ast::{FunctionArg, FunctionArgExpr};
|
||||
// Function name mostly do not have name space, so it mostly take the first args
|
||||
let function_name = sql_function.name.0[0].value.to_lowercase();
|
||||
let args = sql_function
|
||||
.args
|
||||
.iter()
|
||||
.map(|arg| match arg {
|
||||
FunctionArg::Named { arg, .. } => arg,
|
||||
FunctionArg::Unnamed(arg) => arg,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
Ok(
|
||||
match (
|
||||
function_name.as_str(),
|
||||
args.as_slice(),
|
||||
sql_function.distinct,
|
||||
) {
|
||||
("sum", [FunctionArgExpr::Expr(expr)], false) => {
|
||||
apply_window_spec(parse_sql_expr(expr)?, &sql_function.over)?.sum()
|
||||
}
|
||||
("count", [FunctionArgExpr::Expr(expr)], false) => {
|
||||
apply_window_spec(parse_sql_expr(expr)?, &sql_function.over)?.count()
|
||||
}
|
||||
("count", [FunctionArgExpr::Expr(expr)], true) => {
|
||||
apply_window_spec(parse_sql_expr(expr)?, &sql_function.over)?.n_unique()
|
||||
}
|
||||
// Special case for wildcard args to count function.
|
||||
("count", [FunctionArgExpr::Wildcard], false) => lit(1i32).count(),
|
||||
_ => {
|
||||
return Err(PolarsError::ComputeError(
|
||||
format!(
|
||||
"Function {:?} with args {:?} was not supported in polars-sql yet!",
|
||||
function_name, args
|
||||
)
|
||||
.into(),
|
||||
))
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
@ -38,9 +38,9 @@ impl Command for TakeDF {
|
||||
vec![
|
||||
Example {
|
||||
description: "Takes selected rows from dataframe",
|
||||
example: r#"let my_df = ([[a b]; [4 1] [5 2] [4 3]] | into df);
|
||||
example: r#"let df = ([[a b]; [4 1] [5 2] [4 3]] | into df);
|
||||
let indices = ([0 2] | into df);
|
||||
$my_df | take $indices"#,
|
||||
$df | take $indices"#,
|
||||
result: Some(
|
||||
NuDataFrame::try_from_columns(vec![
|
||||
Column::new(
|
||||
|
94
crates/nu-command/src/dataframe/eager/to_arrow.rs
Normal file
@ -0,0 +1,94 @@
|
||||
use std::{fs::File, path::PathBuf};
|
||||
|
||||
use nu_engine::CallExt;
|
||||
use nu_protocol::{
|
||||
ast::Call,
|
||||
engine::{Command, EngineState, Stack},
|
||||
Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Type, Value,
|
||||
};
|
||||
use polars::prelude::{IpcWriter, SerWriter};
|
||||
|
||||
use super::super::values::NuDataFrame;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct ToArrow;
|
||||
|
||||
impl Command for ToArrow {
|
||||
fn name(&self) -> &str {
|
||||
"to arrow"
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Saves dataframe to arrow file"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build(self.name())
|
||||
.required("file", SyntaxShape::Filepath, "file path to save dataframe")
|
||||
.input_type(Type::Custom("dataframe".into()))
|
||||
.output_type(Type::Any)
|
||||
.category(Category::Custom("dataframe".into()))
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![Example {
|
||||
description: "Saves dataframe to arrow file",
|
||||
example: "[[a b]; [1 2] [3 4]] | into df | to arrow test.arrow",
|
||||
result: None,
|
||||
}]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
command(engine_state, stack, call, input)
|
||||
}
|
||||
}
|
||||
|
||||
fn command(
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let file_name: Spanned<PathBuf> = call.req(engine_state, stack, 0)?;
|
||||
|
||||
let mut df = NuDataFrame::try_from_pipeline(input, call.head)?;
|
||||
|
||||
let mut file = File::create(&file_name.item).map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error with file name".into(),
|
||||
e.to_string(),
|
||||
Some(file_name.span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
})?;
|
||||
|
||||
IpcWriter::new(&mut file).finish(df.as_mut()).map_err(|e| {
|
||||
ShellError::GenericError(
|
||||
"Error saving file".into(),
|
||||
e.to_string(),
|
||||
Some(file_name.span),
|
||||
None,
|
||||
Vec::new(),
|
||||
)
|
||||
})?;
|
||||
|
||||
let file_value = Value::String {
|
||||
val: format!("saved {:?}", &file_name.item),
|
||||
span: file_name.span,
|
||||
};
|
||||
|
||||
Ok(PipelineData::Value(
|
||||
Value::List {
|
||||
vals: vec![file_value],
|
||||
span: call.head,
|
||||
},
|
||||
None,
|
||||
))
|
||||
}
|
@ -30,6 +30,7 @@ pub fn create_default_context() -> EngineState {
|
||||
bind_command! {
|
||||
Alias,
|
||||
Ast,
|
||||
Commandline,
|
||||
Debug,
|
||||
Def,
|
||||
DefEnv,
|
||||
@ -199,6 +200,7 @@ pub fn create_default_context() -> EngineState {
|
||||
StrDistance,
|
||||
StrDowncase,
|
||||
StrEndswith,
|
||||
StrJoin,
|
||||
StrReplace,
|
||||
StrIndexOf,
|
||||
StrKebabCase,
|
||||
|
@@ -5,11 +5,14 @@ use std::collections::HashMap;
 /// subcommands like `foo bar` where `foo` is still a valid command.
 /// For those, it's currently easiest to have a "stub" command that just returns an error.
 pub fn deprecated_commands() -> HashMap<String, String> {
-    let mut commands = HashMap::new();
-    commands.insert("keep".to_string(), "take".to_string());
-    commands.insert("match".to_string(), "find".to_string());
-    commands.insert("nth".to_string(), "select".to_string());
-    commands.insert("pivot".to_string(), "transpose".to_string());
-    commands.insert("unalias".to_string(), "hide".to_string());
-    commands
+    HashMap::from([
+        ("keep".to_string(), "take".to_string()),
+        ("match".to_string(), "find".to_string()),
+        ("nth".to_string(), "select".to_string()),
+        ("pivot".to_string(), "transpose".to_string()),
+        ("unalias".to_string(), "hide".to_string()),
+        ("all?".to_string(), "all".to_string()),
+        ("any?".to_string(), "any".to_string()),
+        ("empty?".to_string(), "is-empty".to_string()),
+    ])
|
||||
}
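A small illustration of how this lookup table might be consumed when a user types a deprecated name. This is a hedged sketch, not code from nushell; `deprecation_hint` is a hypothetical helper.

// Hypothetical helper: turn a deprecated command name into a suggestion.
fn deprecation_hint(name: &str) -> Option<String> {
    deprecated_commands()
        .get(name)
        .map(|new_name| format!("`{}` is deprecated; use `{}` instead", name, new_name))
}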
|
||||
|
2
crates/nu-command/src/env/env_command.rs
vendored
@ -75,7 +75,7 @@ impl Command for Env {
|
||||
},
|
||||
Example {
|
||||
description: "Check whether the env variable `MY_ENV_ABC` exists",
|
||||
example: r#"env | any? name == MY_ENV_ABC"#,
|
||||
example: r#"env | any name == MY_ENV_ABC"#,
|
||||
result: Some(Value::test_bool(false)),
|
||||
},
|
||||
Example {
|
||||
|
132
crates/nu-command/src/env/source_env.rs
vendored
@ -1,11 +1,10 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use nu_engine::{eval_block, find_in_dirs_env, redirect_env, CallExt};
|
||||
use nu_parser::parse;
|
||||
use nu_protocol::ast::Call;
|
||||
use nu_protocol::engine::{Command, EngineState, Stack, StateWorkingSet};
|
||||
use nu_protocol::engine::{Command, EngineState, Stack};
|
||||
use nu_protocol::{
|
||||
Category, CliError, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Value,
|
||||
Category, Example, PipelineData, ShellError, Signature, Spanned, SyntaxShape, Value,
|
||||
};
|
||||
|
||||
/// Source a file for environment variables.
|
||||
@ -40,96 +39,47 @@ impl Command for SourceEnv {
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let source_filename: Spanned<String> = call.req(engine_state, caller_stack, 0)?;
|
||||
|
||||
if let Some(path) = find_in_dirs_env(&source_filename.item, engine_state, caller_stack)? {
|
||||
if let Ok(content) = std::fs::read_to_string(&path) {
|
||||
let mut parent = PathBuf::from(&path);
|
||||
parent.pop();
|
||||
// Note: this hidden positional is the block_id that corresponded to the 0th position
|
||||
// it is put here by the parser
|
||||
let block_id: i64 = call.req(engine_state, caller_stack, 1)?;
|
||||
|
||||
let mut new_engine_state = engine_state.clone();
|
||||
|
||||
let (block, delta) = {
|
||||
let mut working_set = StateWorkingSet::new(&new_engine_state);
|
||||
|
||||
// Set the currently parsed directory
|
||||
working_set.currently_parsed_cwd = Some(parent.clone());
|
||||
|
||||
let (block, err) = parse(&mut working_set, None, content.as_bytes(), true, &[]);
|
||||
|
||||
if let Some(err) = err {
|
||||
// Because the error span points at new_engine_state, we must create the error message now
|
||||
let msg = format!(
|
||||
r#"Found this parser error: {:?}"#,
|
||||
CliError(&err, &working_set)
|
||||
);
|
||||
|
||||
return Err(ShellError::GenericError(
|
||||
"Failed to parse content".to_string(),
|
||||
"cannot parse this file".to_string(),
|
||||
Some(source_filename.span),
|
||||
Some(msg),
|
||||
vec![],
|
||||
));
|
||||
} else {
|
||||
(block, working_set.render())
|
||||
}
|
||||
};
|
||||
|
||||
// Merge parser changes to a temporary engine state
|
||||
new_engine_state.merge_delta(delta)?;
|
||||
|
||||
// Set the currently evaluated directory
|
||||
let file_pwd = Value::String {
|
||||
val: parent.to_string_lossy().to_string(),
|
||||
span: call.head,
|
||||
};
|
||||
|
||||
caller_stack.add_env_var("FILE_PWD".to_string(), file_pwd);
|
||||
|
||||
// Evaluate the parsed file's block
|
||||
let mut callee_stack = caller_stack.gather_captures(&block.captures);
|
||||
|
||||
let result = eval_block(
|
||||
&new_engine_state,
|
||||
&mut callee_stack,
|
||||
&block,
|
||||
input,
|
||||
true,
|
||||
true,
|
||||
);
|
||||
|
||||
let result = if let Err(err) = result {
|
||||
// Because the error span points at new_engine_state, we must create the error message now
|
||||
let working_set = StateWorkingSet::new(&new_engine_state);
|
||||
|
||||
let msg = format!(
|
||||
r#"Found this shell error: {:?}"#,
|
||||
CliError(&err, &working_set)
|
||||
);
|
||||
|
||||
Err(ShellError::GenericError(
|
||||
"Failed to evaluate content".to_string(),
|
||||
"cannot evaluate this file".to_string(),
|
||||
Some(source_filename.span),
|
||||
Some(msg),
|
||||
vec![],
|
||||
))
|
||||
} else {
|
||||
result
|
||||
};
|
||||
|
||||
// Merge the block's environment to the current stack
|
||||
redirect_env(engine_state, caller_stack, &callee_stack);
|
||||
|
||||
// Remove the file-relative PWD
|
||||
caller_stack.remove_env_var(engine_state, "FILE_PWD");
|
||||
|
||||
result
|
||||
} else {
|
||||
Err(ShellError::FileNotFound(source_filename.span))
|
||||
}
|
||||
// Set the currently evaluated directory (file-relative PWD)
|
||||
let mut parent = if let Some(path) =
|
||||
find_in_dirs_env(&source_filename.item, engine_state, caller_stack)?
|
||||
{
|
||||
PathBuf::from(&path)
|
||||
} else {
|
||||
Err(ShellError::FileNotFound(source_filename.span))
|
||||
}
|
||||
return Err(ShellError::FileNotFound(source_filename.span));
|
||||
};
|
||||
parent.pop();
|
||||
|
||||
let file_pwd = Value::String {
|
||||
val: parent.to_string_lossy().to_string(),
|
||||
span: call.head,
|
||||
};
|
||||
|
||||
caller_stack.add_env_var("FILE_PWD".to_string(), file_pwd);
|
||||
|
||||
// Evaluate the block
|
||||
let block = engine_state.get_block(block_id as usize).clone();
|
||||
let mut callee_stack = caller_stack.gather_captures(&block.captures);
|
||||
|
||||
let result = eval_block(
|
||||
engine_state,
|
||||
&mut callee_stack,
|
||||
&block,
|
||||
input,
|
||||
call.redirect_stdout,
|
||||
call.redirect_stderr,
|
||||
);
|
||||
|
||||
// Merge the block's environment to the current stack
|
||||
redirect_env(engine_state, caller_stack, &callee_stack);
|
||||
|
||||
// Remove the file-relative PWD
|
||||
caller_stack.remove_env_var(engine_state, "FILE_PWD");
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -14,7 +14,7 @@ use crate::To;
|
||||
#[cfg(test)]
|
||||
use super::{
|
||||
Ansi, Date, From, If, Into, LetEnv, Math, Path, Random, Split, SplitColumn, SplitRow, Str,
|
||||
StrCollect, StrLength, StrReplace, Url, Wrap,
|
||||
StrJoin, StrLength, StrReplace, Url, Wrap,
|
||||
};
|
||||
|
||||
#[cfg(test)]
|
||||
@ -29,7 +29,7 @@ pub fn test_examples(cmd: impl Command + 'static) {
|
||||
// Try to keep this working set small to keep tests running as fast as possible
|
||||
let mut working_set = StateWorkingSet::new(&*engine_state);
|
||||
working_set.add_decl(Box::new(Str));
|
||||
working_set.add_decl(Box::new(StrCollect));
|
||||
working_set.add_decl(Box::new(StrJoin));
|
||||
working_set.add_decl(Box::new(StrLength));
|
||||
working_set.add_decl(Box::new(StrReplace));
|
||||
working_set.add_decl(Box::new(BuildString));
|
||||
|
@ -36,7 +36,7 @@ impl Command for Save {
|
||||
Signature::build("save")
|
||||
.required("filename", SyntaxShape::Filepath, "the filename to use")
|
||||
.switch("raw", "save file as raw binary", Some('r'))
|
||||
.switch("append", "append input to the end of the file", None)
|
||||
.switch("append", "append input to the end of the file", Some('a'))
|
||||
.category(Category::FileSystem)
|
||||
}
|
||||
|
||||
|
@ -10,7 +10,7 @@ pub struct All;
|
||||
|
||||
impl Command for All {
|
||||
fn name(&self) -> &str {
|
||||
"all?"
|
||||
"all"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
@ -18,29 +18,29 @@ impl Command for All {
|
||||
.required(
|
||||
"predicate",
|
||||
SyntaxShape::RowCondition,
|
||||
"the predicate that must match",
|
||||
"the predicate expression that must evaluate to a boolean",
|
||||
)
|
||||
.category(Category::Filters)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Test if every element of the input matches a predicate."
|
||||
"Test if every element of the input fulfills a predicate expression."
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["every"]
|
||||
vec!["every", "and"]
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![
|
||||
Example {
|
||||
description: "Find if services are running",
|
||||
example: "echo [[status]; [UP] [UP]] | all? status == UP",
|
||||
example: "echo [[status]; [UP] [UP]] | all status == UP",
|
||||
result: Some(Value::test_bool(true)),
|
||||
},
|
||||
Example {
|
||||
description: "Check that all values are even",
|
||||
example: "echo [2 4 6 8] | all? ($it mod 2) == 0",
|
||||
example: "echo [2 4 6 8] | all ($it mod 2) == 0",
|
||||
result: Some(Value::test_bool(true)),
|
||||
},
|
||||
]
|
||||
|
@ -10,7 +10,7 @@ pub struct Any;
|
||||
|
||||
impl Command for Any {
|
||||
fn name(&self) -> &str {
|
||||
"any?"
|
||||
"any"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
@ -18,29 +18,29 @@ impl Command for Any {
|
||||
.required(
|
||||
"predicate",
|
||||
SyntaxShape::RowCondition,
|
||||
"the predicate that must match",
|
||||
"the predicate expression that should return a boolean",
|
||||
)
|
||||
.category(Category::Filters)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Tests if any element of the input matches a predicate."
|
||||
"Tests if any element of the input fulfills a predicate expression."
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["some"]
|
||||
vec!["some", "or"]
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![
|
||||
Example {
|
||||
description: "Find if a service is not running",
|
||||
example: "echo [[status]; [UP] [DOWN] [UP]] | any? status == DOWN",
|
||||
example: "echo [[status]; [UP] [DOWN] [UP]] | any status == DOWN",
|
||||
result: Some(Value::test_bool(true)),
|
||||
},
|
||||
Example {
|
||||
description: "Check if any of the values is odd",
|
||||
example: "echo [2 4 1 6 8] | any? ($it mod 2) == 1",
|
||||
example: "echo [2 4 1 6 8] | any ($it mod 2) == 1",
|
||||
result: Some(Value::test_bool(true)),
|
||||
},
|
||||
]
|
||||
|
@ -10,11 +10,11 @@ pub struct Empty;
|
||||
|
||||
impl Command for Empty {
|
||||
fn name(&self) -> &str {
|
||||
"empty?"
|
||||
"is-empty"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("empty?")
|
||||
Signature::build("is-empty")
|
||||
.rest(
|
||||
"rest",
|
||||
SyntaxShape::CellPath,
|
||||
@ -41,7 +41,7 @@ impl Command for Empty {
|
||||
vec![
|
||||
Example {
|
||||
description: "Check if a string is empty",
|
||||
example: "'' | empty?",
|
||||
example: "'' | is-empty",
|
||||
result: Some(Value::Bool {
|
||||
val: true,
|
||||
span: Span::test_data(),
|
||||
@ -49,7 +49,7 @@ impl Command for Empty {
|
||||
},
|
||||
Example {
|
||||
description: "Check if a list is empty",
|
||||
example: "[] | empty?",
|
||||
example: "[] | is-empty",
|
||||
result: Some(Value::Bool {
|
||||
val: true,
|
||||
span: Span::test_data(),
|
||||
@ -58,7 +58,7 @@ impl Command for Empty {
|
||||
Example {
|
||||
// TODO: revisit empty cell path semantics for a record.
|
||||
description: "Check if more than one column are empty",
|
||||
example: "[[meal size]; [arepa small] [taco '']] | empty? meal size",
|
||||
example: "[[meal size]; [arepa small] [taco '']] | is-empty meal size",
|
||||
result: Some(Value::Bool {
|
||||
val: false,
|
||||
span: Span::test_data(),
|
||||
|
@ -1,7 +1,7 @@
|
||||
use crate::help::highlight_search_string;
|
||||
use fancy_regex::Regex;
|
||||
use lscolors::Style as LsColors_Style;
|
||||
use nu_ansi_term::{Color::Default, Style};
|
||||
use lscolors::{Color as LsColors_Color, Style as LsColors_Style};
|
||||
use nu_ansi_term::{Color, Color::Default, Style};
|
||||
use nu_color_config::get_color_config;
|
||||
use nu_engine::{env_to_string, eval_block, CallExt};
|
||||
use nu_protocol::{
|
||||
@ -384,10 +384,15 @@ fn find_with_rest_and_highlight(
|
||||
|
||||
let ls_colored_val =
|
||||
ansi_style.apply(&val_str).to_string();
|
||||
|
||||
let ansi_term_style = style
|
||||
.map(to_nu_ansi_term_style)
|
||||
.unwrap_or_else(|| string_style);
|
||||
|
||||
let hi = match highlight_search_string(
|
||||
&ls_colored_val,
|
||||
&term_str,
|
||||
&string_style,
|
||||
&ansi_term_style,
|
||||
) {
|
||||
Ok(hi) => hi,
|
||||
Err(_) => string_style
|
||||
@ -535,6 +540,47 @@ fn find_with_rest_and_highlight(
|
||||
}
|
||||
}
|
||||
|
||||
fn to_nu_ansi_term_style(style: &LsColors_Style) -> Style {
|
||||
fn to_nu_ansi_term_color(color: &LsColors_Color) -> Color {
|
||||
match *color {
|
||||
LsColors_Color::Fixed(n) => Color::Fixed(n),
|
||||
LsColors_Color::RGB(r, g, b) => Color::Rgb(r, g, b),
|
||||
LsColors_Color::Black => Color::Black,
|
||||
LsColors_Color::Red => Color::Red,
|
||||
LsColors_Color::Green => Color::Green,
|
||||
LsColors_Color::Yellow => Color::Yellow,
|
||||
LsColors_Color::Blue => Color::Blue,
|
||||
LsColors_Color::Magenta => Color::Magenta,
|
||||
LsColors_Color::Cyan => Color::Cyan,
|
||||
LsColors_Color::White => Color::White,
|
||||
|
||||
// The items below are rough translations to 256 colors as
// nu-ansi-term does not have bright variants
|
||||
LsColors_Color::BrightBlack => Color::Fixed(8),
|
||||
LsColors_Color::BrightRed => Color::Fixed(9),
|
||||
LsColors_Color::BrightGreen => Color::Fixed(10),
|
||||
LsColors_Color::BrightYellow => Color::Fixed(11),
|
||||
LsColors_Color::BrightBlue => Color::Fixed(12),
|
||||
LsColors_Color::BrightMagenta => Color::Fixed(13),
|
||||
LsColors_Color::BrightCyan => Color::Fixed(14),
|
||||
LsColors_Color::BrightWhite => Color::Fixed(15),
|
||||
}
|
||||
}
|
||||
|
||||
Style {
|
||||
foreground: style.foreground.as_ref().map(to_nu_ansi_term_color),
|
||||
background: style.background.as_ref().map(to_nu_ansi_term_color),
|
||||
is_bold: style.font_style.bold,
|
||||
is_dimmed: style.font_style.dimmed,
|
||||
is_italic: style.font_style.italic,
|
||||
is_underline: style.font_style.underline,
|
||||
is_blink: style.font_style.slow_blink || style.font_style.rapid_blink,
|
||||
is_reverse: style.font_style.reverse,
|
||||
is_hidden: style.font_style.hidden,
|
||||
is_strikethrough: style.font_style.strikethrough,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
@ -28,7 +28,7 @@ impl Command for Get {
|
||||
.rest("rest", SyntaxShape::CellPath, "additional cell paths")
|
||||
.switch(
|
||||
"ignore-errors",
|
||||
"return nothing if path can't be found",
|
||||
"when there are empty cells, instead of erroring out, replace them with nothing",
|
||||
Some('i'),
|
||||
)
|
||||
.switch(
|
||||
|
@ -17,6 +17,11 @@ impl Command for Select {
|
||||
// FIXME: also add support for --skip
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("select")
|
||||
.switch(
|
||||
"ignore-errors",
|
||||
"when a column has empty cells, instead of erroring out, replace them with nothing",
|
||||
Some('i'),
|
||||
)
|
||||
.rest(
|
||||
"rest",
|
||||
SyntaxShape::CellPath,
|
||||
@ -42,8 +47,9 @@ impl Command for Select {
|
||||
) -> Result<nu_protocol::PipelineData, nu_protocol::ShellError> {
|
||||
let columns: Vec<CellPath> = call.rest(engine_state, stack, 0)?;
|
||||
let span = call.head;
|
||||
let ignore_empty = call.has_flag("ignore-errors");
|
||||
|
||||
select(engine_state, span, columns, input)
|
||||
select(engine_state, span, columns, input, ignore_empty)
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
@ -67,6 +73,7 @@ fn select(
|
||||
span: Span,
|
||||
columns: Vec<CellPath>,
|
||||
input: PipelineData,
|
||||
ignore_empty: bool,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let mut rows = vec![];
|
||||
|
||||
@ -121,6 +128,7 @@ fn select(
|
||||
..,
|
||||
) => {
|
||||
let mut output = vec![];
|
||||
let mut columns_with_value = Vec::new();
|
||||
|
||||
for input_val in input_vals {
|
||||
if !columns.is_empty() {
|
||||
@ -128,10 +136,25 @@ fn select(
|
||||
let mut vals = vec![];
|
||||
for path in &columns {
|
||||
//FIXME: improve implementation to not clone
|
||||
let fetcher = input_val.clone().follow_cell_path(&path.members, false)?;
|
||||
if ignore_empty {
|
||||
let fetcher = input_val.clone().follow_cell_path(&path.members, false);
|
||||
|
||||
cols.push(path.into_string().replace('.', "_"));
|
||||
vals.push(fetcher);
|
||||
cols.push(path.into_string().replace('.', "_"));
|
||||
if let Ok(fetcher) = fetcher {
|
||||
vals.push(fetcher);
|
||||
if !columns_with_value.contains(&path) {
|
||||
columns_with_value.push(path);
|
||||
}
|
||||
} else {
|
||||
vals.push(Value::nothing(span));
|
||||
}
|
||||
} else {
|
||||
let fetcher =
|
||||
input_val.clone().follow_cell_path(&path.members, false)?;
|
||||
|
||||
cols.push(path.into_string().replace('.', "_"));
|
||||
vals.push(fetcher);
|
||||
}
|
||||
}
|
||||
|
||||
output.push(Value::Record { cols, vals, span })
|
||||
|
@ -14,23 +14,31 @@ impl Command for Uniq {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("uniq")
|
||||
.switch("count", "Count the unique rows", Some('c'))
|
||||
.switch(
|
||||
"count",
|
||||
"Return a table containing the distinct input values together with their counts",
|
||||
Some('c'),
|
||||
)
|
||||
.switch(
|
||||
"repeated",
|
||||
"Count the rows that has more than one value",
|
||||
"Return the input values that occur more than once",
|
||||
Some('d'),
|
||||
)
|
||||
.switch(
|
||||
"ignore-case",
|
||||
"Ignore differences in case when comparing",
|
||||
"Ignore differences in case when comparing input values",
|
||||
Some('i'),
|
||||
)
|
||||
.switch("unique", "Only return unique values", Some('u'))
|
||||
.switch(
|
||||
"unique",
|
||||
"Return the input values that occur once only",
|
||||
Some('u'),
|
||||
)
|
||||
.category(Category::Filters)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Return the unique rows."
|
||||
"Return the distinct values in the input."
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
@ -50,7 +58,7 @@ impl Command for Uniq {
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![
|
||||
Example {
|
||||
description: "Remove duplicate rows of a list/table",
|
||||
description: "Return the distinct values of a list/table (remove duplicates so that each value occurs once only)",
|
||||
example: "[2 3 3 4] | uniq",
|
||||
result: Some(Value::List {
|
||||
vals: vec![Value::test_int(2), Value::test_int(3), Value::test_int(4)],
|
||||
@ -58,7 +66,7 @@ impl Command for Uniq {
|
||||
}),
|
||||
},
|
||||
Example {
|
||||
description: "Only print duplicate lines, one for each group",
|
||||
description: "Return the input values that occur more than once",
|
||||
example: "[1 2 2] | uniq -d",
|
||||
result: Some(Value::List {
|
||||
vals: vec![Value::test_int(2)],
|
||||
@ -66,7 +74,7 @@ impl Command for Uniq {
|
||||
}),
|
||||
},
|
||||
Example {
|
||||
description: "Only print unique lines lines",
|
||||
description: "Return the input values that occur once only",
|
||||
example: "[1 2 2] | uniq -u",
|
||||
result: Some(Value::List {
|
||||
vals: vec![Value::test_int(1)],
|
||||
@ -74,7 +82,7 @@ impl Command for Uniq {
|
||||
}),
|
||||
},
|
||||
Example {
|
||||
description: "Ignore differences in case when comparing",
|
||||
description: "Ignore differences in case when comparing input values",
|
||||
example: "['hello' 'goodbye' 'Hello'] | uniq -i",
|
||||
result: Some(Value::List {
|
||||
vals: vec![Value::test_string("hello"), Value::test_string("goodbye")],
|
||||
@ -82,7 +90,7 @@ impl Command for Uniq {
|
||||
}),
|
||||
},
|
||||
Example {
|
||||
description: "Remove duplicate rows and show counts of a list/table",
|
||||
description: "Return a table containing the distinct input values together with their counts",
|
||||
example: "[1 2 2] | uniq -c",
|
||||
result: Some(Value::List {
|
||||
vals: vec![
|
||||
|
@ -62,7 +62,7 @@ impl Command for Upsert {
|
||||
result: Some(Value::List { vals: vec![Value::Record { cols: vec!["count".into(), "fruit".into()], vals: vec![Value::test_int(2), Value::test_string("apple")], span: Span::test_data()}], span: Span::test_data()}),
|
||||
}, Example {
|
||||
description: "Use in block form for more involved updating logic",
|
||||
example: "echo [[project, authors]; ['nu', ['Andrés', 'JT', 'Yehuda']]] | upsert authors {|a| $a.authors | str collect ','}",
|
||||
example: "echo [[project, authors]; ['nu', ['Andrés', 'JT', 'Yehuda']]] | upsert authors {|a| $a.authors | str join ','}",
|
||||
result: Some(Value::List { vals: vec![Value::Record { cols: vec!["project".into(), "authors".into()], vals: vec![Value::test_string("nu"), Value::test_string("Andrés,JT,Yehuda")], span: Span::test_data()}], span: Span::test_data()}),
|
||||
}]
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
use std::path::Path;
|
||||
use std::path::{Component, Path};
|
||||
|
||||
use nu_engine::CallExt;
|
||||
use nu_protocol::{engine::Command, Example, ShellError, Signature, Span, SyntaxShape, Value};
|
||||
@ -62,8 +62,7 @@ impl Command for SubCommand {
|
||||
example: r"'C:\Users\viking\spam.txt' | path split",
|
||||
result: Some(Value::List {
|
||||
vals: vec![
|
||||
Value::test_string("C:"),
|
||||
Value::test_string(r"\"),
|
||||
Value::test_string(r"C:\"),
|
||||
Value::test_string("Users"),
|
||||
Value::test_string("viking"),
|
||||
Value::test_string("spam.txt"),
|
||||
@ -108,15 +107,33 @@ fn split(path: &Path, span: Span, _: &Arguments) -> Value {
|
||||
Value::List {
|
||||
vals: path
|
||||
.components()
|
||||
.map(|comp| {
|
||||
let s = comp.as_os_str().to_string_lossy();
|
||||
Value::string(s, span)
|
||||
.filter_map(|comp| {
|
||||
let comp = process_component(comp);
|
||||
comp.map(|s| Value::string(s, span))
|
||||
})
|
||||
.collect(),
|
||||
span,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn process_component(comp: Component) -> Option<String> {
|
||||
match comp {
|
||||
Component::RootDir => None,
|
||||
Component::Prefix(_) => {
|
||||
let mut s = comp.as_os_str().to_string_lossy().to_string();
|
||||
s.push('\\');
|
||||
Some(s)
|
||||
}
|
||||
comp => Some(comp.as_os_str().to_string_lossy().to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(windows))]
|
||||
fn process_component(comp: Component) -> Option<String> {
|
||||
Some(comp.as_os_str().to_string_lossy().to_string())
|
||||
}
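A quick sketch of the behavior change above (illustrative only; the Windows output follows the updated `path split` example earlier in this diff, where the drive prefix and root separator are now merged into a single `C:\` component):

    'C:\Users\viking\spam.txt' | path split
    # => [C:\, Users, viking, spam.txt]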
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
@ -141,7 +141,7 @@ lazy_static! {
|
||||
// Reference for ansi codes https://gist.github.com/fnky/458719343aabd01cfb17a3a4f7296797
|
||||
// Another good reference http://ascii-table.com/ansi-escape-sequences.php
|
||||
|
||||
// For setting title like `echo [(char title) (pwd) (char bel)] | str collect`
|
||||
// For setting title like `echo [(char title) (pwd) (char bel)] | str join`
|
||||
AnsiCode{short_name: None, long_name:"title", code: "\x1b]2;".to_string()}, // ESC]2; xterm sets window title using OSC syntax escapes
|
||||
|
||||
// Ansi Erase Sequences
|
||||
@ -258,7 +258,7 @@ following values:
|
||||
https://en.wikipedia.org/wiki/ANSI_escape_code
|
||||
|
||||
OSC: '\x1b]' is not required for --osc parameter
|
||||
Example: echo [(ansi -o '0') 'some title' (char bel)] | str collect
|
||||
Example: echo [(ansi -o '0') 'some title' (char bel)] | str join
|
||||
Format: #
|
||||
0 Set window title and icon name
|
||||
1 Set icon name
|
||||
@ -285,14 +285,14 @@ Format: #
|
||||
Example {
|
||||
description:
|
||||
"Use ansi to color text (rb = red bold, gb = green bold, pb = purple bold)",
|
||||
example: r#"echo [(ansi rb) Hello " " (ansi gb) Nu " " (ansi pb) World (ansi reset)] | str collect"#,
|
||||
example: r#"echo [(ansi rb) Hello " " (ansi gb) Nu " " (ansi pb) World (ansi reset)] | str join"#,
|
||||
result: Some(Value::test_string(
|
||||
"\u{1b}[1;31mHello \u{1b}[1;32mNu \u{1b}[1;35mWorld\u{1b}[0m",
|
||||
)),
|
||||
},
|
||||
Example {
|
||||
description: "Use ansi to color text (italic bright yellow on red 'Hello' with green bold 'Nu' and purple bold 'World')",
|
||||
example: r#"echo [(ansi -e '3;93;41m') Hello (ansi reset) " " (ansi gb) Nu " " (ansi pb) World (ansi reset)] | str collect"#,
|
||||
example: r#"echo [(ansi -e '3;93;41m') Hello (ansi reset) " " (ansi gb) Nu " " (ansi pb) World (ansi reset)] | str join"#,
|
||||
result: Some(Value::test_string(
|
||||
"\u{1b}[3;93;41mHello\u{1b}[0m \u{1b}[1;32mNu \u{1b}[1;35mWorld\u{1b}[0m",
|
||||
)),
|
||||
|
@ -40,7 +40,7 @@ impl Command for SubCommand {
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![Example {
|
||||
description: "Strip ANSI escape sequences from a string",
|
||||
example: r#"echo [ (ansi green) (ansi cursor_on) "hello" ] | str collect | ansi strip"#,
|
||||
example: r#"echo [ (ansi green) (ansi cursor_on) "hello" ] | str join | ansi strip"#,
|
||||
result: Some(Value::test_string("hello")),
|
||||
}]
|
||||
}
|
||||
|
@ -23,24 +23,31 @@ impl Command for Clear {
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
_engine_state: &EngineState,
|
||||
_stack: &mut Stack,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
_input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let span = call.head;
|
||||
|
||||
if cfg!(windows) {
|
||||
CommandSys::new("cmd")
|
||||
.args(["/C", "cls"])
|
||||
.status()
|
||||
.expect("failed to execute process");
|
||||
.map_err(|e| ShellError::IOErrorSpanned(e.to_string(), span))?;
|
||||
} else if cfg!(unix) {
|
||||
CommandSys::new("/bin/sh")
|
||||
.args(["-c", "clear"])
|
||||
let mut cmd = CommandSys::new("/bin/sh");
|
||||
|
||||
if let Some(Value::String { val, .. }) = stack.get_env_var(engine_state, "TERM") {
|
||||
cmd.env("TERM", val);
|
||||
}
|
||||
|
||||
cmd.args(["-c", "clear"])
|
||||
.status()
|
||||
.expect("failed to execute process");
|
||||
.map_err(|e| ShellError::IOErrorSpanned(e.to_string(), span))?;
|
||||
}
|
||||
|
||||
Ok(Value::Nothing { span: call.head }.into_pipeline_data())
|
||||
Ok(Value::Nothing { span }.into_pipeline_data())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -185,7 +185,7 @@ impl Command for Char {
|
||||
},
|
||||
Example {
|
||||
description: "Output prompt character, newline and a hamburger character",
|
||||
example: r#"echo [(char prompt) (char newline) (char hamburger)] | str collect"#,
|
||||
example: r#"echo [(char prompt) (char newline) (char hamburger)] | str join"#,
|
||||
result: Some(Value::test_string("\u{25b6}\n\u{2261}")),
|
||||
},
|
||||
Example {
|
||||
|
@ -25,11 +25,7 @@ impl Command for StrCollect {
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Concatenate multiple strings into a single string, with an optional separator between each"
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["join", "concatenate"]
|
||||
"'str collect' is deprecated. Please use 'str join' instead."
|
||||
}
|
||||
|
||||
fn run(
|
||||
|
@ -30,7 +30,7 @@ impl Command for SubCommand {
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"compare to strings and return the edit distance/levenshtein distance"
|
||||
"compare two strings and return the edit distance/levenshtein distance"
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
|
crates/nu-command/src/strings/str_/join.rs (new file, 106 lines)
@ -0,0 +1,106 @@
|
||||
use nu_engine::CallExt;
|
||||
use nu_protocol::ast::Call;
|
||||
use nu_protocol::engine::{Command, EngineState, Stack};
|
||||
use nu_protocol::{
|
||||
Category, Example, IntoPipelineData, PipelineData, ShellError, Signature, Span, SyntaxShape,
|
||||
Value,
|
||||
};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct StrJoin;
|
||||
|
||||
impl Command for StrJoin {
|
||||
fn name(&self) -> &str {
|
||||
"str join"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("str join")
|
||||
.optional(
|
||||
"separator",
|
||||
SyntaxShape::String,
|
||||
"optional separator to use when creating string",
|
||||
)
|
||||
.category(Category::Strings)
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Concatenate multiple strings into a single string, with an optional separator between each"
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["collect", "concatenate"]
|
||||
}
|
||||
|
||||
fn run(
|
||||
&self,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let separator: Option<String> = call.opt(engine_state, stack, 0)?;
|
||||
|
||||
let config = engine_state.get_config();
|
||||
|
||||
// let output = input.collect_string(&separator.unwrap_or_default(), &config)?;
|
||||
// Hmm, not sure what we actually want. If you don't use debug_string, Date comes out as human readable
|
||||
// which feels funny
|
||||
let mut strings: Vec<String> = vec![];
|
||||
|
||||
for value in input {
|
||||
match value {
|
||||
Value::Error { error } => {
|
||||
return Err(error);
|
||||
}
|
||||
value => {
|
||||
strings.push(value.debug_string("\n", config));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let output = if let Some(separator) = separator {
|
||||
strings.join(&separator)
|
||||
} else {
|
||||
strings.join("")
|
||||
};
|
||||
|
||||
Ok(Value::String {
|
||||
val: output,
|
||||
span: call.head,
|
||||
}
|
||||
.into_pipeline_data())
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![
|
||||
Example {
|
||||
description: "Create a string from input",
|
||||
example: "['nu', 'shell'] | str join",
|
||||
result: Some(Value::String {
|
||||
val: "nushell".to_string(),
|
||||
span: Span::test_data(),
|
||||
}),
|
||||
},
|
||||
Example {
|
||||
description: "Create a string from input with a separator",
|
||||
example: "['nu', 'shell'] | str join '-'",
|
||||
result: Some(Value::String {
|
||||
val: "nu-shell".to_string(),
|
||||
span: Span::test_data(),
|
||||
}),
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
#[test]
|
||||
fn test_examples() {
|
||||
use crate::test_examples;
|
||||
|
||||
test_examples(StrJoin {})
|
||||
}
|
||||
}
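A minimal usage sketch of the new `str join` command (illustrative only; it takes an optional separator and otherwise concatenates the input values directly, as the examples above show):

    ['nu', 'shell'] | str join '-'   # => nu-shell
    1..3 | into string | str join    # => 123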
|
@ -74,10 +74,10 @@ impl Command for SubCommand {
|
||||
}),
|
||||
},
|
||||
Example {
|
||||
description: "Use lpad to truncate a string",
|
||||
description: "Use lpad to truncate a string to its last three characters",
|
||||
example: "'123456789' | str lpad -l 3 -c '0'",
|
||||
result: Some(Value::String {
|
||||
val: "123".to_string(),
|
||||
val: "789".to_string(),
|
||||
span: Span::test_data(),
|
||||
}),
|
||||
},
|
||||
@ -105,6 +105,13 @@ fn operate(
|
||||
column_paths: call.rest(engine_state, stack, 0)?,
|
||||
});
|
||||
|
||||
if options.length.expect("this exists") < 0 {
|
||||
return Err(ShellError::UnsupportedInput(
|
||||
String::from("The length of the string cannot be negative"),
|
||||
call.head,
|
||||
));
|
||||
}
|
||||
|
||||
let head = call.head;
|
||||
input.map(
|
||||
move |v| {
|
||||
@ -142,7 +149,14 @@ fn action(
|
||||
let s = *x as usize;
|
||||
if s < val.len() {
|
||||
Value::String {
|
||||
val: val.chars().take(s).collect::<String>(),
|
||||
val: val
|
||||
.chars()
|
||||
.rev()
|
||||
.take(s)
|
||||
.collect::<String>()
|
||||
.chars()
|
||||
.rev()
|
||||
.collect::<String>(),
|
||||
span: head,
|
||||
}
|
||||
} else {
|
||||
|
@ -4,6 +4,7 @@ mod contains;
|
||||
mod distance;
|
||||
mod ends_with;
|
||||
mod index_of;
|
||||
mod join;
|
||||
mod length;
|
||||
mod lpad;
|
||||
mod replace;
|
||||
@ -19,6 +20,7 @@ pub use contains::SubCommand as StrContains;
|
||||
pub use distance::SubCommand as StrDistance;
|
||||
pub use ends_with::SubCommand as StrEndswith;
|
||||
pub use index_of::SubCommand as StrIndexOf;
|
||||
pub use join::*;
|
||||
pub use length::SubCommand as StrLength;
|
||||
pub use lpad::SubCommand as StrLpad;
|
||||
pub use replace::SubCommand as StrReplace;
|
||||
|
@ -74,7 +74,7 @@ impl Command for SubCommand {
|
||||
}),
|
||||
},
|
||||
Example {
|
||||
description: "Use rpad to truncate a string",
|
||||
description: "Use rpad to truncate a string to its first three characters",
|
||||
example: "'123456789' | str rpad -l 3 -c '0'",
|
||||
result: Some(Value::String {
|
||||
val: "123".to_string(),
|
||||
@ -105,6 +105,13 @@ fn operate(
|
||||
column_paths: call.rest(engine_state, stack, 0)?,
|
||||
});
|
||||
|
||||
if options.length.expect("this exists") < 0 {
|
||||
return Err(ShellError::UnsupportedInput(
|
||||
String::from("The length of the string cannot be negative"),
|
||||
call.head,
|
||||
));
|
||||
}
|
||||
|
||||
let head = call.head;
|
||||
input.map(
|
||||
move |v| {
|
||||
|
@ -7,7 +7,6 @@ use nu_protocol::did_you_mean;
|
||||
use nu_protocol::engine::{EngineState, Stack};
|
||||
use nu_protocol::{ast::Call, engine::Command, ShellError, Signature, SyntaxShape, Value};
|
||||
use nu_protocol::{Category, Example, ListStream, PipelineData, RawStream, Span, Spanned};
|
||||
use nu_system::ForegroundProcess;
|
||||
use pathdiff::diff_paths;
|
||||
use std::collections::HashMap;
|
||||
use std::io::{BufRead, BufReader, Write};
|
||||
@ -142,7 +141,7 @@ impl ExternalCommand {
|
||||
|
||||
let ctrlc = engine_state.ctrlc.clone();
|
||||
|
||||
let mut fg_process = ForegroundProcess::new(self.create_process(&input, false, head)?);
|
||||
let mut process = self.create_process(&input, false, head)?;
|
||||
// mut is used in the windows branch only, suppress warning on other platforms
|
||||
#[allow(unused_mut)]
|
||||
let mut child;
|
||||
@ -157,7 +156,8 @@ impl ExternalCommand {
|
||||
// fails to be run as a normal executable:
|
||||
// 1. "shell out" to cmd.exe if the command is a known cmd.exe internal command
|
||||
// 2. Otherwise, use `which-rs` to look for batch files etc. then run those in cmd.exe
|
||||
match fg_process.spawn() {
|
||||
|
||||
match process.spawn() {
|
||||
Err(err) => {
|
||||
// set the default value, maybe we'll override it later
|
||||
child = Err(err);
|
||||
@ -174,8 +174,7 @@ impl ExternalCommand {
|
||||
.any(|&cmd| command_name_upper == cmd);
|
||||
|
||||
if looks_like_cmd_internal {
|
||||
let mut cmd_process =
|
||||
ForegroundProcess::new(self.create_process(&input, true, head)?);
|
||||
let mut cmd_process = self.create_process(&input, true, head)?;
|
||||
child = cmd_process.spawn();
|
||||
} else {
|
||||
#[cfg(feature = "which-support")]
|
||||
@ -203,10 +202,8 @@ impl ExternalCommand {
|
||||
item: file_name.to_string_lossy().to_string(),
|
||||
span: self.name.span,
|
||||
};
|
||||
let mut cmd_process = ForegroundProcess::new(
|
||||
new_command
|
||||
.create_process(&input, true, head)?,
|
||||
);
|
||||
let mut cmd_process = new_command
|
||||
.create_process(&input, true, head)?;
|
||||
child = cmd_process.spawn();
|
||||
}
|
||||
}
|
||||
@ -224,7 +221,7 @@ impl ExternalCommand {
|
||||
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
child = fg_process.spawn()
|
||||
child = process.spawn()
|
||||
}
|
||||
|
||||
match child {
|
||||
@ -276,7 +273,7 @@ impl ExternalCommand {
|
||||
engine_state.config.use_ansi_coloring = false;
|
||||
|
||||
// if there is a string or a stream, that is sent to the pipe std
|
||||
if let Some(mut stdin_write) = child.as_mut().stdin.take() {
|
||||
if let Some(mut stdin_write) = child.stdin.take() {
|
||||
std::thread::spawn(move || {
|
||||
let input = crate::Table::run(
|
||||
&crate::Table,
|
||||
@ -317,7 +314,7 @@ impl ExternalCommand {
|
||||
// and we create a ListStream that can be consumed
|
||||
|
||||
if redirect_stderr {
|
||||
let stderr = child.as_mut().stderr.take().ok_or_else(|| {
|
||||
let stderr = child.stderr.take().ok_or_else(|| {
|
||||
ShellError::ExternalCommand(
|
||||
"Error taking stderr from external".to_string(),
|
||||
"Redirects need access to stderr of an external command"
|
||||
@ -356,7 +353,7 @@ impl ExternalCommand {
|
||||
}
|
||||
|
||||
if redirect_stdout {
|
||||
let stdout = child.as_mut().stdout.take().ok_or_else(|| {
|
||||
let stdout = child.stdout.take().ok_or_else(|| {
|
||||
ShellError::ExternalCommand(
|
||||
"Error taking stdout from external".to_string(),
|
||||
"Redirects need access to stdout of an external command"
|
||||
@ -394,7 +391,7 @@ impl ExternalCommand {
|
||||
}
|
||||
}
|
||||
|
||||
match child.as_mut().wait() {
|
||||
match child.wait() {
|
||||
Err(err) => Err(ShellError::ExternalCommand(
|
||||
"External command exited with error".into(),
|
||||
err.to_string(),
|
||||
|
@ -1,4 +1,4 @@
|
||||
use lscolors::Style;
|
||||
use lscolors::{LsColors, Style};
|
||||
use nu_color_config::{get_color_config, style_primitive};
|
||||
use nu_engine::{column::get_columns, env_to_string, CallExt};
|
||||
use nu_protocol::{
|
||||
@ -261,10 +261,6 @@ fn handle_row_stream(
|
||||
};
|
||||
let ls_colors = get_ls_colors(ls_colors_env_str);
|
||||
|
||||
// clickable links don't work in remote SSH sessions
|
||||
let in_ssh_session = std::env::var("SSH_CLIENT").is_ok();
|
||||
let show_clickable_links = config.show_clickable_links_in_ls && !in_ssh_session;
|
||||
|
||||
ListStream::from_stream(
|
||||
stream.map(move |mut x| match &mut x {
|
||||
Value::Record { cols, vals, .. } => {
|
||||
@ -273,62 +269,10 @@ fn handle_row_stream(
|
||||
while idx < cols.len() {
|
||||
if cols[idx] == "name" {
|
||||
if let Some(Value::String { val: path, span }) = vals.get(idx) {
|
||||
match std::fs::symlink_metadata(&path) {
|
||||
Ok(metadata) => {
|
||||
let style = ls_colors.style_for_path_with_metadata(
|
||||
path.clone(),
|
||||
Some(&metadata),
|
||||
);
|
||||
let ansi_style = style
|
||||
.map(Style::to_crossterm_style)
|
||||
// .map(ToNuAnsiStyle::to_nu_ansi_style)
|
||||
.unwrap_or_default();
|
||||
let use_ls_colors = config.use_ls_colors;
|
||||
|
||||
let full_path = PathBuf::from(path.clone())
|
||||
.canonicalize()
|
||||
.unwrap_or_else(|_| PathBuf::from(path));
|
||||
let full_path_link = make_clickable_link(
|
||||
full_path.display().to_string(),
|
||||
Some(&path.clone()),
|
||||
show_clickable_links,
|
||||
);
|
||||
|
||||
if use_ls_colors {
|
||||
vals[idx] = Value::String {
|
||||
val: ansi_style
|
||||
.apply(full_path_link)
|
||||
.to_string(),
|
||||
span: *span,
|
||||
};
|
||||
}
|
||||
}
|
||||
Err(_) => {
|
||||
let style = ls_colors.style_for_path(path.clone());
|
||||
let ansi_style = style
|
||||
.map(Style::to_crossterm_style)
|
||||
// .map(ToNuAnsiStyle::to_nu_ansi_style)
|
||||
.unwrap_or_default();
|
||||
let use_ls_colors = config.use_ls_colors;
|
||||
|
||||
let full_path = PathBuf::from(path.clone())
|
||||
.canonicalize()
|
||||
.unwrap_or_else(|_| PathBuf::from(path));
|
||||
let full_path_link = make_clickable_link(
|
||||
full_path.display().to_string(),
|
||||
Some(&path.clone()),
|
||||
show_clickable_links,
|
||||
);
|
||||
|
||||
if use_ls_colors {
|
||||
vals[idx] = Value::String {
|
||||
val: ansi_style
|
||||
.apply(full_path_link)
|
||||
.to_string(),
|
||||
span: *span,
|
||||
};
|
||||
}
|
||||
}
|
||||
if let Some(val) =
|
||||
render_path_name(path, &config, &ls_colors, *span)
|
||||
{
|
||||
vals[idx] = val;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -629,3 +573,49 @@ fn load_theme_from_config(config: &Config) -> TableTheme {
|
||||
_ => nu_table::TableTheme::rounded(),
|
||||
}
|
||||
}
|
||||
|
||||
fn render_path_name(
|
||||
path: &String,
|
||||
config: &Config,
|
||||
ls_colors: &LsColors,
|
||||
span: Span,
|
||||
) -> Option<Value> {
|
||||
if !config.use_ls_colors {
|
||||
return None;
|
||||
}
|
||||
|
||||
let stripped_path = match strip_ansi_escapes::strip(path) {
|
||||
Ok(v) => String::from_utf8(v).unwrap_or_else(|_| path.to_owned()),
|
||||
Err(_) => path.to_owned(),
|
||||
};
|
||||
|
||||
let (style, has_metadata) = match std::fs::symlink_metadata(&stripped_path) {
|
||||
Ok(metadata) => (
|
||||
ls_colors.style_for_path_with_metadata(&stripped_path, Some(&metadata)),
|
||||
true,
|
||||
),
|
||||
Err(_) => (ls_colors.style_for_path(&stripped_path), false),
|
||||
};
|
||||
|
||||
// clickable links don't work in remote SSH sessions
|
||||
let in_ssh_session = std::env::var("SSH_CLIENT").is_ok();
|
||||
let show_clickable_links = config.show_clickable_links_in_ls && !in_ssh_session && has_metadata;
|
||||
|
||||
let ansi_style = style
|
||||
.map(Style::to_crossterm_style)
|
||||
// .map(ToNuAnsiStyle::to_nu_ansi_style)
|
||||
.unwrap_or_default();
|
||||
|
||||
let full_path = PathBuf::from(&stripped_path)
|
||||
.canonicalize()
|
||||
.unwrap_or_else(|_| PathBuf::from(&stripped_path));
|
||||
|
||||
let full_path_link = make_clickable_link(
|
||||
full_path.display().to_string(),
|
||||
Some(path),
|
||||
show_clickable_links,
|
||||
);
|
||||
|
||||
let val = ansi_style.apply(full_path_link).to_string();
|
||||
Some(Value::String { val, span })
|
||||
}
|
||||
|
@ -6,7 +6,7 @@ fn checks_all_rows_are_true() {
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
echo [ "Andrés", "Andrés", "Andrés" ]
|
||||
| all? $it == "Andrés"
|
||||
| all $it == "Andrés"
|
||||
"#
|
||||
));
|
||||
|
||||
@ -18,7 +18,7 @@ fn checks_all_rows_are_false_with_param() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
[1, 2, 3, 4] | all? { |a| $a >= 5 }
|
||||
[1, 2, 3, 4] | all { |a| $a >= 5 }
|
||||
"#
|
||||
));
|
||||
|
||||
@ -30,7 +30,7 @@ fn checks_all_rows_are_true_with_param() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
[1, 2, 3, 4] | all? { |a| $a < 5 }
|
||||
[1, 2, 3, 4] | all { |a| $a < 5 }
|
||||
"#
|
||||
));
|
||||
|
||||
@ -49,7 +49,7 @@ fn checks_all_columns_of_a_table_is_true() {
|
||||
[ Darren, Schroeder, 10/11/2013, 1 ]
|
||||
[ Yehuda, Katz, 10/11/2013, 1 ]
|
||||
]
|
||||
| all? likes > 0
|
||||
| all likes > 0
|
||||
"#
|
||||
));
|
||||
|
||||
@ -61,7 +61,7 @@ fn checks_if_all_returns_error_with_invalid_command() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
[red orange yellow green blue purple] | all? ($it | st length) > 4
|
||||
[red orange yellow green blue purple] | all ($it | st length) > 4
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -6,7 +6,7 @@ fn checks_any_row_is_true() {
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
echo [ "Ecuador", "USA", "New Zealand" ]
|
||||
| any? $it == "New Zealand"
|
||||
| any $it == "New Zealand"
|
||||
"#
|
||||
));
|
||||
|
||||
@ -25,7 +25,7 @@ fn checks_any_column_of_a_table_is_true() {
|
||||
[ Darren, Schroeder, 10/11/2013, 1 ]
|
||||
[ Yehuda, Katz, 10/11/2013, 1 ]
|
||||
]
|
||||
| any? rusty_at == 10/12/2013
|
||||
| any rusty_at == 10/12/2013
|
||||
"#
|
||||
));
|
||||
|
||||
@ -37,7 +37,7 @@ fn checks_if_any_returns_error_with_invalid_command() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
[red orange yellow green blue purple] | any? ($it | st length) > 4
|
||||
[red orange yellow green blue purple] | any ($it | st length) > 4
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -36,7 +36,7 @@ fn more_columns_than_table_has() {
|
||||
[3, white]
|
||||
[8, yellow]
|
||||
[4, white]
|
||||
] | drop column 3 | columns | empty?
|
||||
] | drop column 3 | columns | is-empty
|
||||
"#)
|
||||
);
|
||||
|
||||
|
@ -65,7 +65,7 @@ fn each_implicit_it_in_block() {
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats", pipeline(
|
||||
r#"
|
||||
echo [[foo bar]; [a b] [c d] [e f]] | each { |it| nu --testbin cococo $it.foo } | str collect
|
||||
echo [[foo bar]; [a b] [c d] [e f]] | each { |it| nu --testbin cococo $it.foo } | str join
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -11,8 +11,8 @@ fn reports_emptiness() {
|
||||
[([[check]; [{}] ])]
|
||||
]
|
||||
| get are_empty
|
||||
| all? {
|
||||
empty? check
|
||||
| all {
|
||||
is-empty check
|
||||
}
|
||||
"#
|
||||
));
|
||||
|
@ -11,7 +11,7 @@ fn flatten_nested_tables_with_columns() {
|
||||
[[origin, people]; [Nu, ('nuno' | wrap name)]]
|
||||
| flatten --all | flatten --all
|
||||
| get name
|
||||
| str collect ','
|
||||
| str join ','
|
||||
"#
|
||||
));
|
||||
|
||||
@ -27,7 +27,7 @@ fn flatten_nested_tables_that_have_many_columns() {
|
||||
[[origin, people]; [USA, (echo [[name, meal]; ['Katz', 'nurepa']])]]
|
||||
| flatten --all | flatten --all
|
||||
| get meal
|
||||
| str collect ','
|
||||
| str join ','
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -26,7 +26,7 @@ fn moves_a_column_before() {
|
||||
| rename chars
|
||||
| get chars
|
||||
| str trim
|
||||
| str collect
|
||||
| str join
|
||||
"#
|
||||
));
|
||||
|
||||
@ -59,9 +59,9 @@ fn moves_columns_before() {
|
||||
| move column99 column3 --before column2
|
||||
| rename _ chars_1 chars_2
|
||||
| select chars_2 chars_1
|
||||
| upsert new_col {|f| $f | transpose | get column1 | str trim | str collect}
|
||||
| upsert new_col {|f| $f | transpose | get column1 | str trim | str join}
|
||||
| get new_col
|
||||
| str collect
|
||||
| str join
|
||||
"#
|
||||
));
|
||||
|
||||
@ -95,9 +95,9 @@ fn moves_a_column_after() {
|
||||
| move letters and_more --before column2
|
||||
| rename _ chars_1 chars_2
|
||||
| select chars_1 chars_2
|
||||
| upsert new_col {|f| $f | transpose | get column1 | str trim | str collect}
|
||||
| upsert new_col {|f| $f | transpose | get column1 | str trim | str join}
|
||||
| get new_col
|
||||
| str collect
|
||||
| str join
|
||||
"#
|
||||
));
|
||||
|
||||
@ -130,7 +130,7 @@ fn moves_columns_after() {
|
||||
| move letters and_more --after column1
|
||||
| columns
|
||||
| select 1 2
|
||||
| str collect
|
||||
| str join
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -373,7 +373,7 @@ fn parse_script_success_with_complex_internal_stream() {
|
||||
#ls **/* | some_filter | grep-nu search
|
||||
#open file.txt | grep-nu search
|
||||
] {
|
||||
if ($entrada | empty?) {
|
||||
if ($entrada | is-empty) {
|
||||
if ($in | column? name) {
|
||||
grep -ihHn $search ($in | get name)
|
||||
} else {
|
||||
@ -422,7 +422,7 @@ fn parse_script_failure_with_complex_internal_stream() {
|
||||
#ls **/* | some_filter | grep-nu search
|
||||
#open file.txt | grep-nu search
|
||||
]
|
||||
if ($entrada | empty?) {
|
||||
if ($entrada | is-empty) {
|
||||
if ($in | column? name) {
|
||||
grep -ihHn $search ($in | get name)
|
||||
} else {
|
||||
@ -471,7 +471,7 @@ fn parse_script_success_with_complex_external_stream() {
|
||||
#ls **/* | some_filter | grep-nu search
|
||||
#open file.txt | grep-nu search
|
||||
] {
|
||||
if ($entrada | empty?) {
|
||||
if ($entrada | is-empty) {
|
||||
if ($in | column? name) {
|
||||
grep -ihHn $search ($in | get name)
|
||||
} else {
|
||||
@ -520,7 +520,7 @@ fn parse_module_success_with_complex_external_stream() {
|
||||
#ls **/* | some_filter | grep-nu search
|
||||
#open file.txt | grep-nu search
|
||||
] {
|
||||
if ($entrada | empty?) {
|
||||
if ($entrada | is-empty) {
|
||||
if ($in | column? name) {
|
||||
grep -ihHn $search ($in | get name)
|
||||
} else {
|
||||
@ -569,7 +569,7 @@ fn parse_with_flag_all_success_for_complex_external_stream() {
|
||||
#ls **/* | some_filter | grep-nu search
|
||||
#open file.txt | grep-nu search
|
||||
] {
|
||||
if ($entrada | empty?) {
|
||||
if ($entrada | is-empty) {
|
||||
if ($in | column? name) {
|
||||
grep -ihHn $search ($in | get name)
|
||||
} else {
|
||||
@ -618,7 +618,7 @@ fn parse_with_flag_all_failure_for_complex_external_stream() {
|
||||
#ls **/* | some_filter | grep-nu search
|
||||
#open file.txt | grep-nu search
|
||||
] {
|
||||
if ($entrada | empty?) {
|
||||
if ($entrada | is-empty) {
|
||||
if ($in | column? name) {
|
||||
grep -ihHn $search ($in | get name)
|
||||
} else {
|
||||
@ -667,7 +667,7 @@ fn parse_with_flag_all_failure_for_complex_list_stream() {
|
||||
#ls **/* | some_filter | grep-nu search
|
||||
#open file.txt | grep-nu search
|
||||
] {
|
||||
if ($entrada | empty?) {
|
||||
if ($entrada | is-empty) {
|
||||
if ($in | column? name) {
|
||||
grep -ihHn $search ($in | get name)
|
||||
} else {
|
||||
|
@ -208,6 +208,22 @@ fn parses_utf16_ini() {
|
||||
assert_eq!(actual.out, "-236")
|
||||
}
|
||||
|
||||
#[cfg(feature = "database")]
|
||||
#[test]
|
||||
fn parses_arrow_ipc() {
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats", pipeline(
|
||||
r#"
|
||||
open-df caco3_plastics.arrow
|
||||
| into nu
|
||||
| first 1
|
||||
| get origin
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "SPAIN")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn errors_if_file_not_found() {
|
||||
let actual = nu!(
|
||||
|
@ -13,7 +13,7 @@ fn regular_columns() {
|
||||
]
|
||||
| reject type first_name
|
||||
| columns
|
||||
| str collect ", "
|
||||
| str join ", "
|
||||
"#
|
||||
));
|
||||
|
||||
@ -56,7 +56,7 @@ fn complex_nested_columns() {
|
||||
| reject nu."0xATYKARNU" nu.committers
|
||||
| get nu
|
||||
| columns
|
||||
| str collect ", "
|
||||
| str join ", "
|
||||
"#,
|
||||
));
|
||||
|
||||
@ -75,7 +75,7 @@ fn ignores_duplicate_columns_rejected() {
|
||||
]
|
||||
| reject "first name" "first name"
|
||||
| columns
|
||||
| str collect ", "
|
||||
| str join ", "
|
||||
"#
|
||||
));
|
||||
|
||||
|
@ -69,7 +69,7 @@ mod columns {
|
||||
format!("{} | {}", table(), pipeline(r#"
|
||||
roll left
|
||||
| columns
|
||||
| str collect "-"
|
||||
| str join "-"
|
||||
"#)));
|
||||
|
||||
assert_eq!(actual.out, "origin-stars-commit_author");
|
||||
@ -82,7 +82,7 @@ mod columns {
|
||||
format!("{} | {}", table(), pipeline(r#"
|
||||
roll right --by 2
|
||||
| columns
|
||||
| str collect "-"
|
||||
| str join "-"
|
||||
"#)));
|
||||
|
||||
assert_eq!(actual.out, "origin-stars-commit_author");
|
||||
@ -97,7 +97,7 @@ mod columns {
|
||||
|
||||
let actual = nu!(
|
||||
cwd: ".",
|
||||
format!("{} | roll right --by 3 --cells-only | columns | str collect '-' ", four_bitstring)
|
||||
format!("{} | roll right --by 3 --cells-only | columns | str join '-' ", four_bitstring)
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, expected_value.1);
|
||||
|
@ -25,7 +25,7 @@ fn counter_clockwise() {
|
||||
]
|
||||
| where column0 == EXPECTED
|
||||
| get column1 column2 column3
|
||||
| str collect "-"
|
||||
| str join "-"
|
||||
"#,
|
||||
));
|
||||
|
||||
@ -35,7 +35,7 @@ fn counter_clockwise() {
|
||||
rotate --ccw
|
||||
| where column0 == EXPECTED
|
||||
| get column1 column2 column3
|
||||
| str collect "-"
|
||||
| str join "-"
|
||||
"#)));
|
||||
|
||||
assert_eq!(actual.out, expected.out);
|
||||
@ -66,7 +66,7 @@ fn clockwise() {
|
||||
]
|
||||
| where column3 == EXPECTED
|
||||
| get column0 column1 column2
|
||||
| str collect "-"
|
||||
| str join "-"
|
||||
"#,
|
||||
));
|
||||
|
||||
@ -76,7 +76,7 @@ fn clockwise() {
|
||||
rotate
|
||||
| where column3 == EXPECTED
|
||||
| get column0 column1 column2
|
||||
| str collect "-"
|
||||
| str join "-"
|
||||
"#)));
|
||||
|
||||
assert_eq!(actual.out, expected.out);
|
||||
|
@ -186,6 +186,20 @@ fn external_arg_with_variable_name() {
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn external_command_escape_args() {
|
||||
Playground::setup("external failed command with semicolon", |dirs, _| {
|
||||
let actual = nu!(
|
||||
cwd: dirs.test(), pipeline(
|
||||
r#"
|
||||
^echo "\"abcd"
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, r#""abcd"#);
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
#[test]
|
||||
fn explicit_glob_windows() {
|
||||
@ -244,72 +258,6 @@ fn failed_command_with_semicolon_will_not_execute_following_cmds_windows() {
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
#[test]
|
||||
#[ignore = "fails on local Windows machines"]
|
||||
// This test case might fail based on the running shell on Windows - CMD vs PowerShell. The reason is:
//
// Test command 1 - `dir * `
// Test command 2 - `dir '*'`
// Test command 3 - `dir "*"`
//
// In CMD, commands 2 and 3 will give you a 'File Not Found' error
// In PowerShell, all three commands will do the path expansion without any errors whatsoever
//
// With the current Windows CI build (Microsoft Windows 2022, version 10.0.20348),
// the unit test runs against PowerShell
|
||||
fn double_quote_does_not_expand_path_glob_windows() {
|
||||
Playground::setup("double quote do not run the expansion", |dirs, sandbox| {
|
||||
sandbox.with_files(vec![
|
||||
EmptyFile("D&D_volume_1.txt"),
|
||||
EmptyFile("D&D_volume_2.txt"),
|
||||
EmptyFile("foo.sh"),
|
||||
]);
|
||||
|
||||
let actual = nu!(
|
||||
cwd: dirs.test(), pipeline(
|
||||
r#"
|
||||
dir "*.txt"
|
||||
"#
|
||||
));
|
||||
assert!(actual.out.contains("D&D_volume_1.txt"));
|
||||
assert!(actual.out.contains("D&D_volume_2.txt"));
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
#[test]
|
||||
#[ignore = "fails on local Windows machines"]
|
||||
// This test case might fail based on the running shell on Windows - CMD vs PowerShell. The reason is:
//
// Test command 1 - `dir * `
// Test command 2 - `dir '*'`
// Test command 3 - `dir "*"`
//
// In CMD, commands 2 and 3 will give you a 'File Not Found' error
// In PowerShell, all three commands will do the path expansion without any errors whatsoever
//
// With the current Windows CI build (Microsoft Windows 2022, version 10.0.20348),
// the unit test runs against PowerShell
|
||||
fn single_quote_does_not_expand_path_glob_windows() {
|
||||
Playground::setup("single quote do not run the expansion", |dirs, sandbox| {
|
||||
sandbox.with_files(vec![
|
||||
EmptyFile("D&D_volume_1.txt"),
|
||||
EmptyFile("D&D_volume_2.txt"),
|
||||
EmptyFile("foo.sh"),
|
||||
]);
|
||||
|
||||
let actual = nu!(
|
||||
cwd: dirs.test(), pipeline(
|
||||
r#"
|
||||
dir '*.txt'
|
||||
"#
|
||||
));
|
||||
assert!(actual.out.contains("D&D_volume_1.txt"));
|
||||
assert!(actual.out.contains("D&D_volume_2.txt"));
|
||||
});
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
#[test]
|
||||
fn can_run_batch_files() {
|
||||
|
@ -96,7 +96,7 @@ fn column_names_with_spaces() {
|
||||
]
|
||||
| select "last name"
|
||||
| get "last name"
|
||||
| str collect " "
|
||||
| str join " "
|
||||
"#
|
||||
));
|
||||
|
||||
@ -115,7 +115,7 @@ fn ignores_duplicate_columns_selected() {
|
||||
]
|
||||
| select "first name" "last name" "first name"
|
||||
| columns
|
||||
| str collect " "
|
||||
| str join " "
|
||||
"#
|
||||
));
|
||||
|
||||
@ -159,3 +159,27 @@ fn selects_many_rows() {
|
||||
assert_eq!(actual.out, "2");
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn select_ignores_errors_succesfully1() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
[{a: 1, b: 2} {a: 3, b: 5} {a: 3}] | select -i b
|
||||
"#
|
||||
));
|
||||
|
||||
assert!(actual.err.is_empty());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn select_ignores_errors_succesfully2() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
[{a: 1} {a: 2} {a: 3}] | select -i b
|
||||
"#
|
||||
));
|
||||
|
||||
assert!(actual.err.is_empty());
|
||||
}
|
||||
|
@ -36,7 +36,7 @@ fn condition_is_met() {
|
||||
| lines
|
||||
| skip 2
|
||||
| str trim
|
||||
| str collect (char nl)
|
||||
| str join (char nl)
|
||||
| from csv
|
||||
| skip until "Chicken Collection" == "Red Chickens"
|
||||
| skip 1
|
||||
|
@ -36,7 +36,7 @@ fn condition_is_met() {
|
||||
| lines
|
||||
| skip 2
|
||||
| str trim
|
||||
| str collect (char nl)
|
||||
| str join (char nl)
|
||||
| from csv
|
||||
| skip while "Chicken Collection" != "Red Chickens"
|
||||
| skip 1
|
||||
|
@ -143,6 +143,7 @@ fn sources_unicode_file_in_non_utf8_dir() {
|
||||
// How do I create non-UTF-8 path???
|
||||
}
|
||||
|
||||
#[ignore]
|
||||
#[test]
|
||||
fn can_source_dynamic_path() {
|
||||
Playground::setup("can_source_dynamic_path", |dirs, sandbox| {
|
||||
@ -269,39 +270,26 @@ fn source_env_dont_cd_overlay() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn source_env_nice_parse_error() {
|
||||
Playground::setup("source_env_nice_parse_error", |dirs, sandbox| {
|
||||
fn source_env_is_scoped() {
|
||||
Playground::setup("source_env_is_scoped", |dirs, sandbox| {
|
||||
sandbox.with_files(vec![FileWithContentToBeTrimmed(
|
||||
"spam.nu",
|
||||
r#"
|
||||
let x
|
||||
"#,
|
||||
def foo [] { 'foo' }
|
||||
alias bar = 'bar'
|
||||
"#,
|
||||
)]);
|
||||
|
||||
let inp = &[r#"source-env spam.nu"#];
|
||||
let inp = &[r#"source-env spam.nu"#, r#"foo"#];
|
||||
|
||||
let actual = nu!(cwd: dirs.test(), pipeline(&inp.join("; ")));
|
||||
|
||||
assert!(actual.err.contains("cannot parse this file"));
|
||||
assert!(actual.err.contains("───"));
|
||||
})
|
||||
}
|
||||
assert!(actual.err.contains("did you mean"));
|
||||
|
||||
#[test]
|
||||
fn source_env_nice_shell_error() {
|
||||
Playground::setup("source_env_nice_shell_error", |dirs, sandbox| {
|
||||
sandbox.with_files(vec![FileWithContentToBeTrimmed(
|
||||
"spam.nu",
|
||||
r#"
|
||||
let-env FILE_PWD = 'foo'
|
||||
"#,
|
||||
)]);
|
||||
|
||||
let inp = &[r#"source-env spam.nu"#];
|
||||
let inp = &[r#"source-env spam.nu"#, r#"bar"#];
|
||||
|
||||
let actual = nu!(cwd: dirs.test(), pipeline(&inp.join("; ")));
|
||||
|
||||
assert!(actual.err.contains("cannot evaluate this file"));
|
||||
assert!(actual.err.contains("───"));
|
||||
assert!(actual.err.contains("did you mean"));
|
||||
})
|
||||
}
|
||||
|
@ -5,7 +5,7 @@ fn test_1() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
echo 1..5 | into string | str collect
|
||||
echo 1..5 | into string | str join
|
||||
"#
|
||||
)
|
||||
);
|
||||
@ -18,7 +18,7 @@ fn test_2() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
echo [a b c d] | str collect "<sep>"
|
||||
echo [a b c d] | str join "<sep>"
|
||||
"#
|
||||
)
|
||||
);
|
||||
@ -31,7 +31,7 @@ fn construct_a_path() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
echo [sample txt] | str collect "."
|
||||
echo [sample txt] | str join "."
|
||||
"#
|
||||
)
|
||||
);
|
||||
@ -44,7 +44,7 @@ fn sum_one_to_four() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
1..4 | each { |it| $it } | into string | str collect "+" | math eval
|
||||
1..4 | each { |it| $it } | into string | str join "+" | math eval
|
||||
"#
|
||||
)
|
||||
);
|
||||
|
@ -36,7 +36,7 @@ fn condition_is_met() {
|
||||
| lines
|
||||
| skip 2
|
||||
| str trim
|
||||
| str collect (char nl)
|
||||
| str join (char nl)
|
||||
| from csv
|
||||
| skip while "Chicken Collection" != "Blue Chickens"
|
||||
| take until "Chicken Collection" == "Red Chickens"
|
||||
|
@ -36,7 +36,7 @@ fn condition_is_met() {
|
||||
| lines
|
||||
| skip 2
|
||||
| str trim
|
||||
| str collect (char nl)
|
||||
| str join (char nl)
|
||||
| from csv
|
||||
| skip 1
|
||||
| take while "Chicken Collection" != "Blue Chickens"
|
||||
|
@ -1,4 +1,5 @@
|
||||
use nu_test_support::fs::{AbsolutePath, Stub::FileWithContent};
|
||||
use nu_test_support::fs::AbsolutePath;
|
||||
use nu_test_support::fs::Stub::{FileWithContent, FileWithContentToBeTrimmed};
|
||||
use nu_test_support::nu;
|
||||
use nu_test_support::pipeline;
|
||||
use nu_test_support::playground::Playground;
|
||||
@ -63,3 +64,122 @@ fn use_keeps_doc_comments() {
|
||||
assert!(actual.out.contains("this is an x parameter"));
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn use_eval_export_env() {
|
||||
Playground::setup("use_eval_export_env", |dirs, sandbox| {
|
||||
sandbox.with_files(vec![FileWithContentToBeTrimmed(
|
||||
"spam.nu",
|
||||
r#"
|
||||
export-env { let-env FOO = 'foo' }
|
||||
"#,
|
||||
)]);
|
||||
|
||||
let inp = &[r#"use spam.nu"#, r#"$env.FOO"#];
|
||||
|
||||
let actual = nu!(cwd: dirs.test(), pipeline(&inp.join("; ")));
|
||||
|
||||
assert_eq!(actual.out, "foo");
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn use_eval_export_env_hide() {
|
||||
Playground::setup("use_eval_export_env", |dirs, sandbox| {
|
||||
sandbox.with_files(vec![FileWithContentToBeTrimmed(
|
||||
"spam.nu",
|
||||
r#"
|
||||
export-env { hide-env FOO }
|
||||
"#,
|
||||
)]);
|
||||
|
||||
let inp = &[r#"let-env FOO = 'foo'"#, r#"use spam.nu"#, r#"$env.FOO"#];
|
||||
|
||||
let actual = nu!(cwd: dirs.test(), pipeline(&inp.join("; ")));
|
||||
|
||||
assert!(actual.err.contains("did you mean"));
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn use_do_cd() {
|
||||
Playground::setup("use_do_cd", |dirs, sandbox| {
|
||||
sandbox
|
||||
.mkdir("test1/test2")
|
||||
.with_files(vec![FileWithContentToBeTrimmed(
|
||||
"test1/test2/spam.nu",
|
||||
r#"
|
||||
export-env { cd test1/test2 }
|
||||
"#,
|
||||
)]);
|
||||
|
||||
let inp = &[r#"use test1/test2/spam.nu"#, r#"$env.PWD | path basename"#];
|
||||
|
||||
let actual = nu!(cwd: dirs.test(), pipeline(&inp.join("; ")));
|
||||
|
||||
assert_eq!(actual.out, "test2");
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn use_do_cd_file_relative() {
|
||||
Playground::setup("use_do_cd_file_relative", |dirs, sandbox| {
|
||||
sandbox
|
||||
.mkdir("test1/test2")
|
||||
.with_files(vec![FileWithContentToBeTrimmed(
|
||||
"test1/test2/spam.nu",
|
||||
r#"
|
||||
export-env { cd ($env.FILE_PWD | path join '..') }
|
||||
"#,
|
||||
)]);
|
||||
|
||||
let inp = &[r#"use test1/test2/spam.nu"#, r#"$env.PWD | path basename"#];
|
||||
|
||||
let actual = nu!(cwd: dirs.test(), pipeline(&inp.join("; ")));
|
||||
|
||||
assert_eq!(actual.out, "test1");
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn use_dont_cd_overlay() {
|
||||
Playground::setup("use_dont_cd_overlay", |dirs, sandbox| {
|
||||
sandbox
|
||||
.mkdir("test1/test2")
|
||||
.with_files(vec![FileWithContentToBeTrimmed(
|
||||
"test1/test2/spam.nu",
|
||||
r#"
|
||||
export-env {
|
||||
overlay new spam
|
||||
cd test1/test2
|
||||
overlay hide spam
|
||||
}
|
||||
"#,
|
||||
)]);
|
||||
|
||||
let inp = &[r#"use test1/test2/spam.nu"#, r#"$env.PWD | path basename"#];
|
||||
|
||||
let actual = nu!(cwd: dirs.test(), pipeline(&inp.join("; ")));
|
||||
|
||||
assert_eq!(actual.out, "use_dont_cd_overlay");
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn use_export_env_combined() {
|
||||
Playground::setup("use_is_scoped", |dirs, sandbox| {
|
||||
sandbox.with_files(vec![FileWithContentToBeTrimmed(
|
||||
"spam.nu",
|
||||
r#"
|
||||
alias bar = foo
|
||||
export-env { let-env FOO = bar }
|
||||
def foo [] { 'foo' }
|
||||
"#,
|
||||
)]);
|
||||
|
||||
let inp = &[r#"use spam.nu"#, r#"$env.FOO"#];
|
||||
|
||||
let actual = nu!(cwd: dirs.test(), pipeline(&inp.join("; ")));
|
||||
assert_eq!(actual.out, "foo");
|
||||
})
|
||||
}
|
||||
|
@ -8,7 +8,7 @@ export def expect [
|
||||
--to-eq,
|
||||
right
|
||||
] {
|
||||
$left | zip $right | all? {|row|
|
||||
$left | zip $right | all {|row|
|
||||
$row.name.0 == $row.name.1 && $row.commits.0 == $row.commits.1
|
||||
}
|
||||
}
|
||||
@ -51,7 +51,7 @@ fn zips_two_lists() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
echo [0 2 4 6 8] | zip [1 3 5 7 9] | flatten | into string | str collect '-'
|
||||
echo [0 2 4 6 8] | zip [1 3 5 7 9] | flatten | into string | str join '-'
|
||||
"#
|
||||
));
|
||||
|
||||
|