Mirror of https://github.com/nushell/nushell.git (synced 2025-07-01 07:00:37 +02:00)
Compare commits
18 Commits
| Author | SHA1 | Date |
|---|---|---|
| | e299e76fcf | |
| | c857e18c4a | |
| | 5fb3df4054 | |
| | 8b597187fc | |
| | 930f9f0063 | |
| | 63d4df9810 | |
| | 13ba533fc4 | |
| | 6d60bab2fd | |
| | 5be774b2e5 | |
| | b412ff92c0 | |
| | 5a75e11b0e | |
| | e66bf70589 | |
| | 3924e9d50a | |
| | 8df748463d | |
| | 0113661c81 | |
| | 0ee054b14d | |
| | 80b39454ff | |
| | 97f3671e2c | |
Cargo.lock (generated): 2105 lines changed. File diff suppressed because it is too large.

Cargo.toml: 71 lines changed.
@@ -10,7 +10,7 @@ license = "MIT"
name = "nu"
readme = "README.md"
repository = "https://github.com/nushell/nushell"
version = "0.22.0"
version = "0.23.0"
[workspace]
members = ["crates/*/"]
@@ -18,32 +18,33 @@ members = ["crates/*/"]
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
nu-cli = {version = "0.22.0", path = "./crates/nu-cli"}
nu-data = {version = "0.22.0", path = "./crates/nu-data"}
nu-errors = {version = "0.22.0", path = "./crates/nu-errors"}
nu-parser = {version = "0.22.0", path = "./crates/nu-parser"}
nu-plugin = {version = "0.22.0", path = "./crates/nu-plugin"}
nu-protocol = {version = "0.22.0", path = "./crates/nu-protocol"}
nu-source = {version = "0.22.0", path = "./crates/nu-source"}
nu-value-ext = {version = "0.22.0", path = "./crates/nu-value-ext"}
nu-cli = {version = "0.23.0", path = "./crates/nu-cli"}
nu-data = {version = "0.23.0", path = "./crates/nu-data"}
nu-errors = {version = "0.23.0", path = "./crates/nu-errors"}
nu-parser = {version = "0.23.0", path = "./crates/nu-parser"}
nu-plugin = {version = "0.23.0", path = "./crates/nu-plugin"}
nu-protocol = {version = "0.23.0", path = "./crates/nu-protocol"}
nu-source = {version = "0.23.0", path = "./crates/nu-source"}
nu-value-ext = {version = "0.23.0", path = "./crates/nu-value-ext"}
nu_plugin_binaryview = {version = "0.22.0", path = "./crates/nu_plugin_binaryview", optional = true}
nu_plugin_chart = {version = "0.22.0", path = "./crates/nu_plugin_chart", optional = true}
nu_plugin_fetch = {version = "0.22.0", path = "./crates/nu_plugin_fetch", optional = true}
nu_plugin_from_bson = {version = "0.22.0", path = "./crates/nu_plugin_from_bson", optional = true}
nu_plugin_from_sqlite = {version = "0.22.0", path = "./crates/nu_plugin_from_sqlite", optional = true}
nu_plugin_inc = {version = "0.22.0", path = "./crates/nu_plugin_inc", optional = true}
nu_plugin_match = {version = "0.22.0", path = "./crates/nu_plugin_match", optional = true}
nu_plugin_post = {version = "0.22.0", path = "./crates/nu_plugin_post", optional = true}
nu_plugin_ps = {version = "0.22.0", path = "./crates/nu_plugin_ps", optional = true}
nu_plugin_s3 = {version = "0.22.0", path = "./crates/nu_plugin_s3", optional = true}
nu_plugin_start = {version = "0.22.0", path = "./crates/nu_plugin_start", optional = true}
nu_plugin_sys = {version = "0.22.0", path = "./crates/nu_plugin_sys", optional = true}
nu_plugin_textview = {version = "0.22.0", path = "./crates/nu_plugin_textview", optional = true}
nu_plugin_to_bson = {version = "0.22.0", path = "./crates/nu_plugin_to_bson", optional = true}
nu_plugin_to_sqlite = {version = "0.22.0", path = "./crates/nu_plugin_to_sqlite", optional = true}
nu_plugin_tree = {version = "0.22.0", path = "./crates/nu_plugin_tree", optional = true}
nu_plugin_xpath = {version = "0.22.0", path = "./crates/nu_plugin_xpath", optional = true}
nu_plugin_binaryview = {version = "0.23.0", path = "./crates/nu_plugin_binaryview", optional = true}
nu_plugin_chart = {version = "0.23.0", path = "./crates/nu_plugin_chart", optional = true}
nu_plugin_fetch = {version = "0.23.0", path = "./crates/nu_plugin_fetch", optional = true}
nu_plugin_from_bson = {version = "0.23.0", path = "./crates/nu_plugin_from_bson", optional = true}
nu_plugin_from_sqlite = {version = "0.23.0", path = "./crates/nu_plugin_from_sqlite", optional = true}
nu_plugin_inc = {version = "0.23.0", path = "./crates/nu_plugin_inc", optional = true}
nu_plugin_match = {version = "0.23.0", path = "./crates/nu_plugin_match", optional = true}
nu_plugin_post = {version = "0.23.0", path = "./crates/nu_plugin_post", optional = true}
nu_plugin_ps = {version = "0.23.0", path = "./crates/nu_plugin_ps", optional = true}
nu_plugin_s3 = {version = "0.23.0", path = "./crates/nu_plugin_s3", optional = true}
nu_plugin_start = {version = "0.23.0", path = "./crates/nu_plugin_start", optional = true}
nu_plugin_sys = {version = "0.23.0", path = "./crates/nu_plugin_sys", optional = true}
nu_plugin_textview = {version = "0.23.0", path = "./crates/nu_plugin_textview", optional = true}
nu_plugin_to_bson = {version = "0.23.0", path = "./crates/nu_plugin_to_bson", optional = true}
nu_plugin_to_sqlite = {version = "0.23.0", path = "./crates/nu_plugin_to_sqlite", optional = true}
nu_plugin_tree = {version = "0.23.0", path = "./crates/nu_plugin_tree", optional = true}
nu_plugin_xpath = {version = "0.23.0", path = "./crates/nu_plugin_xpath", optional = true}
nu_plugin_selector = {version = "0.23.0", path = "./crates/nu_plugin_selector", optional = true}
# Required to bootstrap the main binary
clap = "2.33.3"
@@ -55,7 +56,7 @@ itertools = "0.9.0"
[dev-dependencies]
dunce = "1.0.1"
nu-test-support = {version = "0.22.0", path = "./crates/nu-test-support"}
nu-test-support = {version = "0.23.0", path = "./crates/nu-test-support"}
[build-dependencies]
@@ -87,8 +88,9 @@ default = [
"post",
"fetch",
"rich-benchmark",
"zip-support"
]
extra = ["default", "binaryview", "tree", "clipboard-cli", "trash-support", "start", "bson", "sqlite", "s3", "chart", "xpath"]
extra = ["default", "binaryview", "tree", "clipboard-cli", "trash-support", "start", "bson", "sqlite", "s3", "chart", "xpath", "selector"]
stable = ["default"]
wasi = ["inc", "match", "directories-support", "ptree-support", "match", "tree", "rustyline-support"]
@@ -103,6 +105,7 @@ post = ["nu_plugin_post"]
ps = ["nu_plugin_ps"]
sys = ["nu_plugin_sys"]
textview = ["nu_plugin_textview"]
zip-support = ["nu-cli/zip"]
# Extra
binaryview = ["nu_plugin_binaryview"]
@@ -115,6 +118,13 @@ start = ["nu_plugin_start"]
trash-support = ["nu-cli/trash-support"]
tree = ["nu_plugin_tree"]
xpath = ["nu_plugin_xpath"]
selector = ["nu_plugin_selector"]
[profile.release]
#strip = "symbols" #Couldn't get working +nightly
opt-level = 'z' #Optimize for size
lto = true #Link Time Optimization
codegen-units = 1 #Reduce parallel codegen units
# Core plugins that ship with `cargo install nu` by default
# Currently, Cargo limits us to installing only one binary
@@ -191,6 +201,11 @@ name = "nu_plugin_extra_xpath"
path = "src/plugins/nu_plugin_extra_xpath.rs"
required-features = ["xpath"]
[[bin]]
name = "nu_plugin_extra_selector"
path = "src/plugins/nu_plugin_extra_selector.rs"
required-features = ["selector"]
[[bin]]
name = "nu_plugin_extra_from_bson"
path = "src/plugins/nu_plugin_extra_from_bson.rs"
@@ -7,7 +7,7 @@
[](https://changelog.com/podcast/363)
[](https://twitter.com/nu_shell)
## Nu Shell
## Nushell
A new type of shell.
@@ -34,7 +34,7 @@ There are also [good first issues](https://github.com/nushell/nushell/issues?q=i
We also have an active [Discord](https://discord.gg/NtAbbGn) and [Twitter](https://twitter.com/nu_shell) if you'd like to come and chat with us.
You can also find more learning resources in our [documentation](https://www.nushell.sh/documentation.html) site.
You can also find information on more specific topics in our [cookbook](https://www.nushell.sh/cookbook/).
Try it in Gitpod.
@@ -44,7 +44,7 @@ Try it in Gitpod.
### Local
Up-to-date installation instructions can be found in the [installation chapter of the book](https://www.nushell.sh/book/en/installation.html). **Windows users**: please note that Nu works on Windows 10 and does not currently have Windows 7/8.1 support.
Up-to-date installation instructions can be found in the [installation chapter of the book](https://www.nushell.sh/book/installation.html). **Windows users**: please note that Nu works on Windows 10 and does not currently have Windows 7/8.1 support.
To build Nu, you will need to use the **latest stable (1.47 or later)** version of the compiler.
@@ -307,7 +307,7 @@ Nu is in heavy development, and will naturally change as it matures and people u
## Current Roadmap
We've added a `Roadmap Board` to help collaboratively capture the direction we're going for the current release as well as capture some important issues we'd like to see in NuShell. You can find the Roadmap [here](https://github.com/nushell/nushell/projects/2).
We've added a `Roadmap Board` to help collaboratively capture the direction we're going for the current release as well as capture some important issues we'd like to see in Nushell. You can find the Roadmap [here](https://github.com/nushell/nushell/projects/2).
## Contributing
@@ -4,21 +4,22 @@ description = "CLI for nushell"
edition = "2018"
license = "MIT"
name = "nu-cli"
version = "0.22.0"
version = "0.23.0"
[lib]
doctest = false
[dependencies]
nu-data = {version = "0.22.0", path = "../nu-data"}
nu-errors = {version = "0.22.0", path = "../nu-errors"}
nu-parser = {version = "0.22.0", path = "../nu-parser"}
nu-plugin = {version = "0.22.0", path = "../nu-plugin"}
nu-protocol = {version = "0.22.0", path = "../nu-protocol"}
nu-source = {version = "0.22.0", path = "../nu-source"}
nu-table = {version = "0.22.0", path = "../nu-table"}
nu-test-support = {version = "0.22.0", path = "../nu-test-support"}
nu-value-ext = {version = "0.22.0", path = "../nu-value-ext"}
nu-data = {version = "0.23.0", path = "../nu-data"}
nu-errors = {version = "0.23.0", path = "../nu-errors"}
nu-json = {version = "0.23.0", path = "../nu-json"}
nu-parser = {version = "0.23.0", path = "../nu-parser"}
nu-plugin = {version = "0.23.0", path = "../nu-plugin"}
nu-protocol = {version = "0.23.0", path = "../nu-protocol"}
nu-source = {version = "0.23.0", path = "../nu-source"}
nu-table = {version = "0.23.0", path = "../nu-table"}
nu-test-support = {version = "0.23.0", path = "../nu-test-support"}
nu-value-ext = {version = "0.23.0", path = "../nu-value-ext"}
ansi_term = "0.12.1"
async-recursion = "0.3.1"
@@ -70,7 +71,6 @@ roxmltree = "0.13.0"
rust-embed = "5.6.0"
rustyline = {version = "6.3.0", optional = true}
serde = {version = "1.0.115", features = ["derive"]}
serde-hjson = "0.9.1"
serde_bytes = "0.11.5"
serde_ini = "0.2.0"
serde_json = "1.0.57"
@@ -263,6 +263,7 @@ pub fn create_default_context(interactive: bool) -> Result<EvaluationContext, Bo
#[cfg(feature = "uuid_crate")]
whole_stream_command(RandomUUID),
whole_stream_command(RandomInteger),
whole_stream_command(RandomDecimal),
// Path
whole_stream_command(PathBasename),
whole_stream_command(PathCommand),
@@ -279,6 +280,7 @@ pub fn create_default_context(interactive: bool) -> Result<EvaluationContext, Bo
whole_stream_command(UrlHost),
whole_stream_command(UrlQuery),
whole_stream_command(Seq),
whole_stream_command(SeqDates),
]);
#[cfg(feature = "clipboard-cli")]
@@ -323,6 +325,7 @@ pub async fn run_vec_of_pipelines(
#[cfg(feature = "rustyline-support")]
fn convert_rustyline_result_to_string(input: Result<String, ReadlineError>) -> LineResult {
match input {
Ok(s) if s == "history -c" || s == "history --clear" => LineResult::ClearHistory,
Ok(s) => LineResult::Success(s),
Err(ReadlineError::Interrupted) => LineResult::CtrlC,
Err(ReadlineError::Eof) => LineResult::CtrlD,
@@ -392,52 +395,56 @@ pub async fn cli(mut context: EvaluationContext) -> Result<(), Box<dyn Error>> {
if let Some(prompt) = configuration.var("prompt") {
let prompt_line = prompt.as_string()?;
match nu_parser::lite_parse(&prompt_line, 0).map_err(ShellError::from) {
Ok(result) => {
let prompt_block = nu_parser::classify_block(&result, context.registry());
let (result, err) = nu_parser::lite_parse(&prompt_line, 0);
let env = context.get_env();
if err.is_some() {
use crate::git::current_branch;
format!(
"\x1b[32m{}{}\x1b[m> ",
cwd,
match current_branch() {
Some(s) => format!("({})", s),
None => "".to_string(),
}
)
} else {
let prompt_block = nu_parser::classify_block(&result, context.registry());
match run_block(
&prompt_block.block,
&mut context,
InputStream::empty(),
Scope::from_env(env),
)
.await
{
Ok(result) => match result.collect_string(Tag::unknown()).await {
Ok(string_result) => {
let errors = context.get_errors();
context.maybe_print_errors(Text::from(prompt_line));
context.clear_errors();
let env = context.get_env();
if !errors.is_empty() {
"> ".to_string()
} else {
string_result.item
}
}
Err(e) => {
crate::cli::print_err(e, &Text::from(prompt_line));
context.clear_errors();
match run_block(
&prompt_block.block,
&mut context,
InputStream::empty(),
Scope::from_env(env),
)
.await
{
Ok(result) => match result.collect_string(Tag::unknown()).await {
Ok(string_result) => {
let errors = context.get_errors();
context.maybe_print_errors(Text::from(prompt_line));
context.clear_errors();
if !errors.is_empty() {
"> ".to_string()
} else {
string_result.item
}
},
}
Err(e) => {
crate::cli::print_err(e, &Text::from(prompt_line));
context.clear_errors();
"> ".to_string()
}
}
}
Err(e) => {
crate::cli::print_err(e, &Text::from(prompt_line));
context.clear_errors();
},
Err(e) => {
crate::cli::print_err(e, &Text::from(prompt_line));
context.clear_errors();
"> ".to_string()
"> ".to_string()
}
}
}
} else {
@@ -499,6 +506,11 @@ pub async fn cli(mut context: EvaluationContext) -> Result<(), Box<dyn Error>> {
context.maybe_print_errors(Text::from(line));
}
LineResult::ClearHistory => {
rl.clear_history();
let _ = rl.save_history(&history_path);
}
LineResult::Error(line, err) => {
rl.add_history_entry(&line);
let _ = rl.save_history(&history_path);
@@ -834,8 +846,8 @@ fn rustyline_hinter(config: &dyn nu_data::config::Conf) -> Option<rustyline::hin
}
fn chomp_newline(s: &str) -> &str {
if s.ends_with('\n') {
&s[..s.len() - 1]
if let Some(s) = s.strip_suffix('\n') {
s
} else {
s
}
@@ -848,16 +860,20 @@ pub enum LineResult {
Break,
CtrlC,
CtrlD,
ClearHistory,
}
pub async fn parse_and_eval(line: &str, ctx: &mut EvaluationContext) -> Result<String, ShellError> {
let line = if line.ends_with('\n') {
&line[..line.len() - 1]
let line = if let Some(s) = line.strip_suffix('\n') {
s
} else {
line
};
let lite_result = nu_parser::lite_parse(&line, 0)?;
let (lite_result, err) = nu_parser::lite_parse(&line, 0);
if let Some(err) = err {
return Err(err.into());
}
// TODO ensure the command whose examples we're testing is actually in the pipeline
let classified_block = nu_parser::classify_block(&lite_result, ctx.registry());
@@ -890,13 +906,11 @@ pub async fn process_line(
let line = chomp_newline(line);
ctx.raw_input = line.to_string();
let result = match nu_parser::lite_parse(&line, 0) {
Err(err) => {
return LineResult::Error(line.to_string(), err.into());
}
let (result, err) = nu_parser::lite_parse(&line, 0);
Ok(val) => val,
};
if let Some(err) = err {
return LineResult::Error(line.to_string(), err.into());
}
debug!("=== Parsed ===");
debug!("{:#?}", result);
@@ -1093,7 +1107,8 @@ pub fn print_err(err: ShellError, source: &Text) {
mod tests {
#[quickcheck]
fn quickcheck_parse(data: String) -> bool {
if let Ok(lite_block) = nu_parser::lite_parse(&data, 0) {
let (lite_block, err) = nu_parser::lite_parse(&data, 0);
if err.is_none() {
let context = crate::evaluation_context::EvaluationContext::basic().unwrap();
let _ = nu_parser::classify_block(&lite_block, context.registry());
}
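The hunks above all follow from one signature change: `nu_parser::lite_parse` now returns a pair of the parsed block and an optional error rather than a `Result`, so each call site decides whether to bail out early or fall back (the prompt code falls back to the default `cwd(branch)> ` prompt when the error is present). A minimal, self-contained sketch of that calling pattern; `parse`, `Block`, and `ParseError` are stand-ins for the nu-parser types, not the real API:

```rust
// Sketch only: stand-ins for nu-parser's LiteBlock/ParseError types.
#[derive(Debug)]
struct Block(String);
#[derive(Debug)]
struct ParseError(String);

// Best-effort parser: always returns what it managed to build,
// plus an optional error describing what went wrong.
fn parse(line: &str) -> (Block, Option<ParseError>) {
    if line.contains('\u{0}') {
        (Block(String::new()), Some(ParseError("unexpected NUL".into())))
    } else {
        (Block(line.to_string()), None)
    }
}

fn process_line(line: &str) -> Result<String, ParseError> {
    let (block, err) = parse(line);
    // Mirrors the new call sites: turn the optional error into an early return.
    if let Some(err) = err {
        return Err(err);
    }
    Ok(format!("classified: {:?}", block))
}

fn main() {
    println!("{:?}", process_line("ls | where size > 1kb"));
    println!("{:?}", process_line("bad\u{0}input"));
}
```

Returning the pair keeps whatever was parsed available alongside the error, which is what lets the prompt code choose a fallback instead of propagating a failure.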
@ -99,6 +99,7 @@ pub(crate) mod run_external;
|
||||
pub(crate) mod save;
|
||||
pub(crate) mod select;
|
||||
pub(crate) mod seq;
|
||||
pub(crate) mod seq_dates;
|
||||
pub(crate) mod shells;
|
||||
pub(crate) mod shuffle;
|
||||
pub(crate) mod size;
|
||||
@ -229,7 +230,7 @@ pub(crate) use prev::Previous;
|
||||
pub(crate) use pwd::Pwd;
|
||||
#[cfg(feature = "uuid_crate")]
|
||||
pub(crate) use random::RandomUUID;
|
||||
pub(crate) use random::{Random, RandomBool, RandomDice, RandomInteger};
|
||||
pub(crate) use random::{Random, RandomBool, RandomDecimal, RandomDice, RandomInteger};
|
||||
pub(crate) use range::Range;
|
||||
pub(crate) use reduce::Reduce;
|
||||
pub(crate) use reject::Reject;
|
||||
@ -240,6 +241,7 @@ pub(crate) use run_external::RunExternalCommand;
|
||||
pub(crate) use save::Save;
|
||||
pub(crate) use select::Select;
|
||||
pub(crate) use seq::Seq;
|
||||
pub(crate) use seq_dates::SeqDates;
|
||||
pub(crate) use shells::Shells;
|
||||
pub(crate) use shuffle::Shuffle;
|
||||
pub(crate) use size::Size;
|
||||
|
@ -63,7 +63,7 @@ pub async fn clip(
|
||||
let mut first = true;
|
||||
for i in values.iter() {
|
||||
if !first {
|
||||
new_copy_data.push_str("\n");
|
||||
new_copy_data.push('\n');
|
||||
} else {
|
||||
first = false;
|
||||
}
|
||||
|
@ -37,26 +37,26 @@ impl WholeStreamCommand for FromJSON {
|
||||
}
|
||||
}
|
||||
|
||||
fn convert_json_value_to_nu_value(v: &serde_hjson::Value, tag: impl Into<Tag>) -> Value {
|
||||
fn convert_json_value_to_nu_value(v: &nu_json::Value, tag: impl Into<Tag>) -> Value {
|
||||
let tag = tag.into();
|
||||
let span = tag.span;
|
||||
|
||||
match v {
|
||||
serde_hjson::Value::Null => UntaggedValue::Primitive(Primitive::Nothing).into_value(&tag),
|
||||
serde_hjson::Value::Bool(b) => UntaggedValue::boolean(*b).into_value(&tag),
|
||||
serde_hjson::Value::F64(n) => UntaggedValue::decimal_from_float(*n, span).into_value(&tag),
|
||||
serde_hjson::Value::U64(n) => UntaggedValue::int(*n).into_value(&tag),
|
||||
serde_hjson::Value::I64(n) => UntaggedValue::int(*n).into_value(&tag),
|
||||
serde_hjson::Value::String(s) => {
|
||||
nu_json::Value::Null => UntaggedValue::Primitive(Primitive::Nothing).into_value(&tag),
|
||||
nu_json::Value::Bool(b) => UntaggedValue::boolean(*b).into_value(&tag),
|
||||
nu_json::Value::F64(n) => UntaggedValue::decimal_from_float(*n, span).into_value(&tag),
|
||||
nu_json::Value::U64(n) => UntaggedValue::int(*n).into_value(&tag),
|
||||
nu_json::Value::I64(n) => UntaggedValue::int(*n).into_value(&tag),
|
||||
nu_json::Value::String(s) => {
|
||||
UntaggedValue::Primitive(Primitive::String(String::from(s))).into_value(&tag)
|
||||
}
|
||||
serde_hjson::Value::Array(a) => UntaggedValue::Table(
|
||||
nu_json::Value::Array(a) => UntaggedValue::Table(
|
||||
a.iter()
|
||||
.map(|x| convert_json_value_to_nu_value(x, &tag))
|
||||
.collect(),
|
||||
)
|
||||
.into_value(tag),
|
||||
serde_hjson::Value::Object(o) => {
|
||||
nu_json::Value::Object(o) => {
|
||||
let mut collected = TaggedDictBuilder::new(&tag);
|
||||
for (k, v) in o.iter() {
|
||||
collected.insert_value(k.clone(), convert_json_value_to_nu_value(v, &tag));
|
||||
@ -67,8 +67,8 @@ fn convert_json_value_to_nu_value(v: &serde_hjson::Value, tag: impl Into<Tag>) -
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_json_string_to_value(s: String, tag: impl Into<Tag>) -> serde_hjson::Result<Value> {
|
||||
let v: serde_hjson::Value = serde_hjson::from_str(&s)?;
|
||||
pub fn from_json_string_to_value(s: String, tag: impl Into<Tag>) -> nu_json::Result<Value> {
|
||||
let v: nu_json::Value = nu_json::from_str(&s)?;
|
||||
Ok(convert_json_value_to_nu_value(&v, tag))
|
||||
}
|
||||
|
||||
@ -96,7 +96,7 @@ async fn from_json(
|
||||
Err(e) => {
|
||||
let mut message = "Could not parse as JSON (".to_string();
|
||||
message.push_str(&e.to_string());
|
||||
message.push_str(")");
|
||||
message.push(')');
|
||||
|
||||
Some(Err(ShellError::labeled_error_with_secondary(
|
||||
message,
|
||||
@ -125,7 +125,7 @@ async fn from_json(
|
||||
Err(e) => {
|
||||
let mut message = "Could not parse as JSON (".to_string();
|
||||
message.push_str(&e.to_string());
|
||||
message.push_str(")");
|
||||
message.push(')');
|
||||
|
||||
Ok(OutputStream::one(Err(
|
||||
ShellError::labeled_error_with_secondary(
|
||||
|
@ -68,13 +68,12 @@ fn convert_yaml_value_to_nu_value(
|
||||
Ok(match v {
|
||||
serde_yaml::Value::Bool(b) => UntaggedValue::boolean(*b).into_value(tag),
|
||||
serde_yaml::Value::Number(n) if n.is_i64() => {
|
||||
UntaggedValue::int(n.as_i64().ok_or_else(|| err_not_compatible_number)?).into_value(tag)
|
||||
UntaggedValue::int(n.as_i64().ok_or(err_not_compatible_number)?).into_value(tag)
|
||||
}
|
||||
serde_yaml::Value::Number(n) if n.is_f64() => {
|
||||
UntaggedValue::decimal_from_float(n.as_f64().ok_or(err_not_compatible_number)?, span)
|
||||
.into_value(tag)
|
||||
}
|
||||
serde_yaml::Value::Number(n) if n.is_f64() => UntaggedValue::decimal_from_float(
|
||||
n.as_f64().ok_or_else(|| err_not_compatible_number)?,
|
||||
span,
|
||||
)
|
||||
.into_value(tag),
|
||||
serde_yaml::Value::String(s) => UntaggedValue::string(s).into_value(tag),
|
||||
serde_yaml::Value::Sequence(a) => {
|
||||
let result: Result<Vec<Value>, ShellError> = a
|
||||
|
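One of the `from_yaml` hunks above trades `ok_or_else(|| err)` for `ok_or(err)`: when the error value already exists, eager construction is equivalent and reads more simply, while `ok_or_else` only pays off when the error is costly to build. A small illustration of the difference, using plain std types rather than nu's error machinery:

```rust
fn main() {
    let n: Option<i64> = None;

    // The error value already exists, so constructing it eagerly is fine.
    let cheap: Result<i64, &str> = n.ok_or("not a compatible number");

    // ok_or_else defers construction; worth it only when building the error is expensive.
    let lazy: Result<i64, String> = n.ok_or_else(|| format!("not a compatible number: {:?}", n));

    println!("{:?} {:?}", cheap, lazy);
}
```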
@ -27,6 +27,11 @@ pub fn history_path(config: &dyn Conf) -> PathBuf {
|
||||
})
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct Arguments {
|
||||
clear: Option<bool>,
|
||||
}
|
||||
|
||||
pub struct History;
|
||||
|
||||
#[async_trait]
|
||||
@ -36,7 +41,7 @@ impl WholeStreamCommand for History {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("history")
|
||||
Signature::build("history").switch("clear", "Clears out the history entries", Some('c'))
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
@ -48,31 +53,45 @@ impl WholeStreamCommand for History {
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
history(args, registry)
|
||||
history(args, registry).await
|
||||
}
|
||||
}
|
||||
|
||||
fn history(args: CommandArgs, _registry: &CommandRegistry) -> Result<OutputStream, ShellError> {
|
||||
async fn history(
|
||||
args: CommandArgs,
|
||||
_registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let config: Box<dyn Conf> = Box::new(NuConfig::new());
|
||||
let tag = args.call_info.name_tag;
|
||||
let path = history_path(&config);
|
||||
let file = File::open(path);
|
||||
if let Ok(file) = file {
|
||||
let reader = BufReader::new(file);
|
||||
let output = reader.lines().filter_map(move |line| match line {
|
||||
Ok(line) => Some(ReturnSuccess::value(
|
||||
UntaggedValue::string(line).into_value(tag.clone()),
|
||||
)),
|
||||
Err(_) => None,
|
||||
});
|
||||
let tag = args.call_info.name_tag.clone();
|
||||
let (Arguments { clear }, _) = args.process(&_registry).await?;
|
||||
|
||||
Ok(futures::stream::iter(output).to_output_stream())
|
||||
} else {
|
||||
Err(ShellError::labeled_error(
|
||||
"Could not open history",
|
||||
"history file could not be opened",
|
||||
tag,
|
||||
))
|
||||
let path = history_path(&config);
|
||||
|
||||
match clear {
|
||||
Some(_) => {
|
||||
// This is a NOOP, the logic to clear is handled in cli.rs
|
||||
Ok(OutputStream::empty())
|
||||
}
|
||||
None => {
|
||||
if let Ok(file) = File::open(path) {
|
||||
let reader = BufReader::new(file);
|
||||
// Skips the first line, which is a Rustyline internal
|
||||
let output = reader.lines().skip(1).filter_map(move |line| match line {
|
||||
Ok(line) => Some(ReturnSuccess::value(
|
||||
UntaggedValue::string(line).into_value(tag.clone()),
|
||||
)),
|
||||
Err(_) => None,
|
||||
});
|
||||
|
||||
Ok(futures::stream::iter(output).to_output_stream())
|
||||
} else {
|
||||
Err(ShellError::labeled_error(
|
||||
"Could not open history",
|
||||
"history file could not be opened",
|
||||
tag,
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
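The `history` command above gains a `--clear (-c)` switch: the signature declares the flag, the deserialized `Arguments { clear }` chooses between listing entries and a no-op, and the actual clearing (`rl.clear_history()` plus saving) happens in `cli.rs` when the input line matches `history -c` or `history --clear`. A stand-alone sketch of that "flag chooses between list and clear" shape, using plain std types instead of nu's `CommandArgs`/`OutputStream`; the `Args` struct is a hypothetical stand-in:

```rust
use std::fs::File;
use std::io::{BufRead, BufReader};

// Hypothetical stand-in for the command's `Arguments { clear }` struct;
// the real one is deserialized through args.process().
struct Args {
    clear: bool,
}

fn history(args: &Args, path: &str) -> std::io::Result<Vec<String>> {
    if args.clear {
        // NOOP here, mirroring the command: the interactive loop owns the
        // line editor and performs the actual clear + save.
        return Ok(Vec::new());
    }
    let file = File::open(path)?;
    // Skip the first line, which rustyline keeps for internal metadata.
    let lines = BufReader::new(file)
        .lines()
        .skip(1)
        .filter_map(Result::ok)
        .collect();
    Ok(lines)
}

fn main() {
    match history(&Args { clear: false }, "history.txt") {
        Ok(lines) => println!("{} entries", lines.len()),
        Err(e) => eprintln!("could not open history: {}", e),
    }
}
```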
@ -2,11 +2,18 @@ use super::{operate, DefaultArguments};
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{Signature, SyntaxShape, UntaggedValue, Value};
|
||||
use nu_protocol::{ColumnPath, Signature, SyntaxShape, UntaggedValue, Value};
|
||||
use nu_source::Tagged;
|
||||
use std::path::Path;
|
||||
|
||||
pub struct PathBasename;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct PathBasenameArguments {
|
||||
replace: Option<Tagged<String>>,
|
||||
rest: Vec<ColumnPath>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl WholeStreamCommand for PathBasename {
|
||||
fn name(&self) -> &str {
|
||||
@ -15,11 +22,17 @@ impl WholeStreamCommand for PathBasename {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("path basename")
|
||||
.rest(SyntaxShape::ColumnPath, "optionally operate by path")
|
||||
.named(
|
||||
"replace",
|
||||
SyntaxShape::String,
|
||||
"Return original path with basename replaced by this string",
|
||||
Some('r'),
|
||||
)
|
||||
.rest(SyntaxShape::ColumnPath, "Optionally operate by column path")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"gets the filename of a path"
|
||||
"Gets the final component of a path"
|
||||
}
|
||||
|
||||
async fn run(
|
||||
@ -28,24 +41,60 @@ impl WholeStreamCommand for PathBasename {
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let tag = args.call_info.name_tag.clone();
|
||||
let (DefaultArguments { rest }, input) = args.process(®istry).await?;
|
||||
operate(input, rest, &action, tag.span).await
|
||||
let (PathBasenameArguments { replace, rest }, input) = args.process(®istry).await?;
|
||||
let args = Arc::new(DefaultArguments {
|
||||
replace: replace.map(|v| v.item),
|
||||
prefix: None,
|
||||
suffix: None,
|
||||
num_levels: None,
|
||||
paths: rest,
|
||||
});
|
||||
operate(input, &action, tag.span, args).await
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![Example {
|
||||
description: "Get basename of a path",
|
||||
example: "echo '/home/joe/test.txt' | path basename",
|
||||
result: Some(vec![Value::from("test.txt")]),
|
||||
}]
|
||||
vec![
|
||||
Example {
|
||||
description: "Get basename of a path",
|
||||
example: "echo 'C:\\Users\\joe\\test.txt' | path basename",
|
||||
result: Some(vec![Value::from("test.txt")]),
|
||||
},
|
||||
Example {
|
||||
description: "Replace basename of a path",
|
||||
example: "echo 'C:\\Users\\joe\\test.txt' | path basename -r 'spam.png'",
|
||||
result: Some(vec![Value::from(UntaggedValue::path(
|
||||
"C:\\Users\\joe\\spam.png",
|
||||
))]),
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
#[cfg(not(windows))]
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![
|
||||
Example {
|
||||
description: "Get basename of a path",
|
||||
example: "echo '/home/joe/test.txt' | path basename",
|
||||
result: Some(vec![Value::from("test.txt")]),
|
||||
},
|
||||
Example {
|
||||
description: "Replace basename of a path",
|
||||
example: "echo '/home/joe/test.txt' | path basename -r 'spam.png'",
|
||||
result: Some(vec![Value::from(UntaggedValue::path("/home/joe/spam.png"))]),
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
fn action(path: &Path) -> UntaggedValue {
|
||||
UntaggedValue::string(match path.file_name() {
|
||||
Some(filename) => filename.to_string_lossy().to_string(),
|
||||
_ => "".to_string(),
|
||||
})
|
||||
fn action(path: &Path, args: Arc<DefaultArguments>) -> UntaggedValue {
|
||||
match args.replace {
|
||||
Some(ref basename) => UntaggedValue::path(path.with_file_name(basename)),
|
||||
None => UntaggedValue::string(match path.file_name() {
|
||||
Some(filename) => filename.to_string_lossy(),
|
||||
None => "".into(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
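The `path basename` change above extends the per-path `action` to take the shared argument bundle, so `-r/--replace` can either return the basename or rebuild the path with the basename swapped via `Path::with_file_name`. A hedged, stand-alone sketch of just that action logic; the `DefaultArguments` name matches the diff, everything else is simplified:

```rust
use std::path::Path;

// Simplified stand-in for nu's DefaultArguments; only the field used here.
struct DefaultArguments {
    replace: Option<String>,
}

// Mirrors the shape of the command's action: either the basename as a string,
// or the original path with its final component replaced.
fn basename_action(path: &Path, args: &DefaultArguments) -> String {
    match &args.replace {
        Some(new_name) => path.with_file_name(new_name).display().to_string(),
        None => path
            .file_name()
            .map(|n| n.to_string_lossy().into_owned())
            .unwrap_or_default(),
    }
}

fn main() {
    let p = Path::new("/home/joe/test.txt");
    let plain = DefaultArguments { replace: None };
    let swapped = DefaultArguments { replace: Some("spam.png".into()) };
    println!("{}", basename_action(p, &plain));   // test.txt
    println!("{}", basename_action(p, &swapped)); // /home/joe/spam.png (Unix-style paths)
}
```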
@ -16,7 +16,7 @@ impl WholeStreamCommand for Path {
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Apply path function"
|
||||
"Explore and manipulate paths"
|
||||
}
|
||||
|
||||
async fn run(
|
||||
|
@ -2,11 +2,20 @@ use super::{operate, DefaultArguments};
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{Signature, SyntaxShape, UntaggedValue, Value};
|
||||
use nu_protocol::{ColumnPath, Signature, SyntaxShape, UntaggedValue, Value};
|
||||
use nu_source::Tagged;
|
||||
use std::path::Path;
|
||||
|
||||
pub struct PathDirname;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct PathDirnameArguments {
|
||||
replace: Option<Tagged<String>>,
|
||||
#[serde(rename = "num-levels")]
|
||||
num_levels: Option<Tagged<u32>>,
|
||||
rest: Vec<ColumnPath>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl WholeStreamCommand for PathDirname {
|
||||
fn name(&self) -> &str {
|
||||
@ -14,11 +23,24 @@ impl WholeStreamCommand for PathDirname {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("path dirname").rest(SyntaxShape::ColumnPath, "optionally operate by path")
|
||||
Signature::build("path dirname")
|
||||
.named(
|
||||
"replace",
|
||||
SyntaxShape::String,
|
||||
"Return original path with dirname replaced by this string",
|
||||
Some('r'),
|
||||
)
|
||||
.named(
|
||||
"num-levels",
|
||||
SyntaxShape::Int,
|
||||
"Number of directories to walk up",
|
||||
Some('n'),
|
||||
)
|
||||
.rest(SyntaxShape::ColumnPath, "Optionally operate by column path")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"gets the dirname of a path"
|
||||
"Gets the parent directory of a path"
|
||||
}
|
||||
|
||||
async fn run(
|
||||
@ -27,24 +49,100 @@ impl WholeStreamCommand for PathDirname {
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let tag = args.call_info.name_tag.clone();
|
||||
let (DefaultArguments { rest }, input) = args.process(®istry).await?;
|
||||
operate(input, rest, &action, tag.span).await
|
||||
let (
|
||||
PathDirnameArguments {
|
||||
replace,
|
||||
num_levels,
|
||||
rest,
|
||||
},
|
||||
input,
|
||||
) = args.process(®istry).await?;
|
||||
let args = Arc::new(DefaultArguments {
|
||||
replace: replace.map(|v| v.item),
|
||||
prefix: None,
|
||||
suffix: None,
|
||||
num_levels: num_levels.map(|v| v.item),
|
||||
paths: rest,
|
||||
});
|
||||
operate(input, &action, tag.span, args).await
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![Example {
|
||||
description: "Get dirname of a path",
|
||||
example: "echo '/home/joe/test.txt' | path dirname",
|
||||
result: Some(vec![Value::from("/home/joe")]),
|
||||
}]
|
||||
vec![
|
||||
Example {
|
||||
description: "Get dirname of a path",
|
||||
example: "echo 'C:\\Users\\joe\\code\\test.txt' | path dirname",
|
||||
result: Some(vec![Value::from(UntaggedValue::path(
|
||||
"C:\\Users\\joe\\code",
|
||||
))]),
|
||||
},
|
||||
Example {
|
||||
description: "Set how many levels up to skip",
|
||||
example: "echo 'C:\\Users\\joe\\code\\test.txt' | path dirname -n 2",
|
||||
result: Some(vec![Value::from(UntaggedValue::path("C:\\Users\\joe"))]),
|
||||
},
|
||||
Example {
|
||||
description: "Replace the part that would be returned with custom string",
|
||||
example:
|
||||
"echo 'C:\\Users\\joe\\code\\test.txt' | path dirname -n 2 -r C:\\Users\\viking",
|
||||
result: Some(vec![Value::from(UntaggedValue::path(
|
||||
"C:\\Users\\viking\\code\\test.txt",
|
||||
))]),
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
#[cfg(not(windows))]
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![
|
||||
Example {
|
||||
description: "Get dirname of a path",
|
||||
example: "echo '/home/joe/code/test.txt' | path dirname",
|
||||
result: Some(vec![Value::from(UntaggedValue::path("/home/joe/code"))]),
|
||||
},
|
||||
Example {
|
||||
description: "Set how many levels up to skip",
|
||||
example: "echo '/home/joe/code/test.txt' | path dirname -n 2",
|
||||
result: Some(vec![Value::from(UntaggedValue::path("/home/joe"))]),
|
||||
},
|
||||
Example {
|
||||
description: "Replace the part that would be returned with custom string",
|
||||
example: "echo '/home/joe/code/test.txt' | path dirname -n 2 -r /home/viking",
|
||||
result: Some(vec![Value::from(UntaggedValue::path(
|
||||
"/home/viking/code/test.txt",
|
||||
))]),
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
fn action(path: &Path) -> UntaggedValue {
|
||||
UntaggedValue::string(match path.parent() {
|
||||
Some(dirname) => dirname.to_string_lossy().to_string(),
|
||||
_ => "".to_string(),
|
||||
})
|
||||
fn action(path: &Path, args: Arc<DefaultArguments>) -> UntaggedValue {
|
||||
let num_levels = args.num_levels.unwrap_or(1);
|
||||
|
||||
let mut dirname = path;
|
||||
let mut reached_top = false; // end early if somebody passes -n 99999999
|
||||
for _ in 0..num_levels {
|
||||
dirname = dirname.parent().unwrap_or_else(|| {
|
||||
reached_top = true;
|
||||
dirname
|
||||
});
|
||||
if reached_top {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
match args.replace {
|
||||
Some(ref newdir) => {
|
||||
let remainder = path.strip_prefix(dirname).unwrap_or(dirname);
|
||||
if !remainder.as_os_str().is_empty() {
|
||||
UntaggedValue::path(Path::new(newdir).join(remainder))
|
||||
} else {
|
||||
UntaggedValue::path(Path::new(newdir))
|
||||
}
|
||||
}
|
||||
None => UntaggedValue::path(dirname),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
@ -2,11 +2,16 @@ use super::{operate, DefaultArguments};
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{Signature, SyntaxShape, UntaggedValue, Value};
|
||||
use nu_protocol::{ColumnPath, Signature, SyntaxShape, UntaggedValue, Value};
|
||||
use std::path::Path;
|
||||
|
||||
pub struct PathExists;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct PathExistsArguments {
|
||||
rest: Vec<ColumnPath>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl WholeStreamCommand for PathExists {
|
||||
fn name(&self) -> &str {
|
||||
@ -14,11 +19,12 @@ impl WholeStreamCommand for PathExists {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("path exists").rest(SyntaxShape::ColumnPath, "optionally operate by path")
|
||||
Signature::build("path exists")
|
||||
.rest(SyntaxShape::ColumnPath, "Optionally operate by column path")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"checks whether the path exists"
|
||||
"Checks whether a path exists"
|
||||
}
|
||||
|
||||
async fn run(
|
||||
@ -27,10 +33,27 @@ impl WholeStreamCommand for PathExists {
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let tag = args.call_info.name_tag.clone();
|
||||
let (DefaultArguments { rest }, input) = args.process(®istry).await?;
|
||||
operate(input, rest, &action, tag.span).await
|
||||
let (PathExistsArguments { rest }, input) = args.process(®istry).await?;
|
||||
let args = Arc::new(DefaultArguments {
|
||||
replace: None,
|
||||
prefix: None,
|
||||
suffix: None,
|
||||
num_levels: None,
|
||||
paths: rest,
|
||||
});
|
||||
operate(input, &action, tag.span, args).await
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![Example {
|
||||
description: "Check if file exists",
|
||||
example: "echo 'C:\\Users\\joe\\todo.txt' | path exists",
|
||||
result: Some(vec![Value::from(UntaggedValue::boolean(false))]),
|
||||
}]
|
||||
}
|
||||
|
||||
#[cfg(not(windows))]
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![Example {
|
||||
description: "Check if file exists",
|
||||
@ -40,7 +63,7 @@ impl WholeStreamCommand for PathExists {
|
||||
}
|
||||
}
|
||||
|
||||
fn action(path: &Path) -> UntaggedValue {
|
||||
fn action(path: &Path, _args: Arc<DefaultArguments>) -> UntaggedValue {
|
||||
UntaggedValue::boolean(path.exists())
|
||||
}
|
||||
|
||||
|
@ -2,11 +2,16 @@ use super::{operate, DefaultArguments};
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{Signature, SyntaxShape, UntaggedValue};
|
||||
use std::path::Path;
|
||||
use nu_protocol::{ColumnPath, Signature, SyntaxShape, UntaggedValue};
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
pub struct PathExpand;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct PathExpandArguments {
|
||||
rest: Vec<ColumnPath>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl WholeStreamCommand for PathExpand {
|
||||
fn name(&self) -> &str {
|
||||
@ -14,11 +19,12 @@ impl WholeStreamCommand for PathExpand {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("path expand").rest(SyntaxShape::ColumnPath, "optionally operate by path")
|
||||
Signature::build("path expand")
|
||||
.rest(SyntaxShape::ColumnPath, "Optionally operate by column path")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"expands the path to its absolute form"
|
||||
"Expands a path to its absolute form"
|
||||
}
|
||||
|
||||
async fn run(
|
||||
@ -27,28 +33,43 @@ impl WholeStreamCommand for PathExpand {
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let tag = args.call_info.name_tag.clone();
|
||||
let (DefaultArguments { rest }, input) = args.process(®istry).await?;
|
||||
operate(input, rest, &action, tag.span).await
|
||||
let (PathExpandArguments { rest }, input) = args.process(®istry).await?;
|
||||
let args = Arc::new(DefaultArguments {
|
||||
replace: None,
|
||||
prefix: None,
|
||||
suffix: None,
|
||||
num_levels: None,
|
||||
paths: rest,
|
||||
});
|
||||
operate(input, &action, tag.span, args).await
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![Example {
|
||||
description: "Expand relative directories",
|
||||
example: "echo 'C:\\Users\\joe\\foo\\..\\bar' | path expand",
|
||||
result: None,
|
||||
// fails to canonicalize into Some(vec![Value::from("C:\\Users\\joe\\bar")]) due to non-existing path
|
||||
}]
|
||||
}
|
||||
|
||||
#[cfg(not(windows))]
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![Example {
|
||||
description: "Expand relative directories",
|
||||
example: "echo '/home/joe/foo/../bar' | path expand",
|
||||
result: None,
|
||||
//Some(vec![Value::from("/home/joe/bar")]),
|
||||
// fails to canonicalize into Some(vec![Value::from("/home/joe/bar")]) due to non-existing path
|
||||
}]
|
||||
}
|
||||
}
|
||||
|
||||
fn action(path: &Path) -> UntaggedValue {
|
||||
fn action(path: &Path, _args: Arc<DefaultArguments>) -> UntaggedValue {
|
||||
let ps = path.to_string_lossy();
|
||||
let expanded = shellexpand::tilde(&ps);
|
||||
let path: &Path = expanded.as_ref().as_ref();
|
||||
UntaggedValue::string(match path.canonicalize() {
|
||||
Ok(p) => p.to_string_lossy().to_string(),
|
||||
Err(_) => ps.to_string(),
|
||||
})
|
||||
UntaggedValue::path(dunce::canonicalize(path).unwrap_or_else(|_| PathBuf::from(path)))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
@ -2,11 +2,18 @@ use super::{operate, DefaultArguments};
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{Signature, SyntaxShape, UntaggedValue, Value};
|
||||
use nu_protocol::{ColumnPath, Signature, SyntaxShape, UntaggedValue, Value};
|
||||
use nu_source::Tagged;
|
||||
use std::path::Path;
|
||||
|
||||
pub struct PathExtension;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct PathExtensionArguments {
|
||||
replace: Option<Tagged<String>>,
|
||||
rest: Vec<ColumnPath>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl WholeStreamCommand for PathExtension {
|
||||
fn name(&self) -> &str {
|
||||
@ -15,11 +22,17 @@ impl WholeStreamCommand for PathExtension {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("path extension")
|
||||
.rest(SyntaxShape::ColumnPath, "optionally operate by path")
|
||||
.named(
|
||||
"replace",
|
||||
SyntaxShape::String,
|
||||
"Return original path with extension replaced by this string",
|
||||
Some('r'),
|
||||
)
|
||||
.rest(SyntaxShape::ColumnPath, "Optionally operate by column path")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"gets the extension of a path"
|
||||
"Gets the extension of a path"
|
||||
}
|
||||
|
||||
async fn run(
|
||||
@ -28,8 +41,15 @@ impl WholeStreamCommand for PathExtension {
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let tag = args.call_info.name_tag.clone();
|
||||
let (DefaultArguments { rest }, input) = args.process(®istry).await?;
|
||||
operate(input, rest, &action, tag.span).await
|
||||
let (PathExtensionArguments { replace, rest }, input) = args.process(®istry).await?;
|
||||
let args = Arc::new(DefaultArguments {
|
||||
replace: replace.map(|v| v.item),
|
||||
prefix: None,
|
||||
suffix: None,
|
||||
num_levels: None,
|
||||
paths: rest,
|
||||
});
|
||||
operate(input, &action, tag.span, args).await
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
@ -44,15 +64,28 @@ impl WholeStreamCommand for PathExtension {
|
||||
example: "echo 'test' | path extension",
|
||||
result: Some(vec![Value::from("")]),
|
||||
},
|
||||
Example {
|
||||
description: "Replace an extension with a custom string",
|
||||
example: "echo 'test.txt' | path extension -r md",
|
||||
result: Some(vec![Value::from(UntaggedValue::path("test.md"))]),
|
||||
},
|
||||
Example {
|
||||
description: "To replace more complex extensions:",
|
||||
example: "echo 'test.tar.gz' | path extension -r '' | path extension -r txt",
|
||||
result: Some(vec![Value::from(UntaggedValue::path("test.txt"))]),
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
fn action(path: &Path) -> UntaggedValue {
|
||||
UntaggedValue::string(match path.extension() {
|
||||
Some(ext) => ext.to_string_lossy().to_string(),
|
||||
_ => "".to_string(),
|
||||
})
|
||||
fn action(path: &Path, args: Arc<DefaultArguments>) -> UntaggedValue {
|
||||
match args.replace {
|
||||
Some(ref extension) => UntaggedValue::path(path.with_extension(extension)),
|
||||
None => UntaggedValue::string(match path.extension() {
|
||||
Some(extension) => extension.to_string_lossy(),
|
||||
None => "".into(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
@ -2,11 +2,20 @@ use super::{operate, DefaultArguments};
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{Signature, SyntaxShape, UntaggedValue, Value};
|
||||
use nu_protocol::{ColumnPath, Signature, SyntaxShape, UntaggedValue, Value};
|
||||
use nu_source::Tagged;
|
||||
use std::path::Path;
|
||||
|
||||
pub struct PathFilestem;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct PathFilestemArguments {
|
||||
prefix: Option<Tagged<String>>,
|
||||
suffix: Option<Tagged<String>>,
|
||||
replace: Option<Tagged<String>>,
|
||||
rest: Vec<ColumnPath>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl WholeStreamCommand for PathFilestem {
|
||||
fn name(&self) -> &str {
|
||||
@ -15,11 +24,29 @@ impl WholeStreamCommand for PathFilestem {
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("path filestem")
|
||||
.rest(SyntaxShape::ColumnPath, "optionally operate by path")
|
||||
.named(
|
||||
"replace",
|
||||
SyntaxShape::String,
|
||||
"Return original path with filestem replaced by this string",
|
||||
Some('r'),
|
||||
)
|
||||
.named(
|
||||
"prefix",
|
||||
SyntaxShape::String,
|
||||
"Strip this string from from the beginning of a file name",
|
||||
Some('p'),
|
||||
)
|
||||
.named(
|
||||
"suffix",
|
||||
SyntaxShape::String,
|
||||
"Strip this string from from the end of a file name",
|
||||
Some('s'),
|
||||
)
|
||||
.rest(SyntaxShape::ColumnPath, "Optionally operate by column path")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"gets the filestem of a path"
|
||||
"Gets the file stem of a path"
|
||||
}
|
||||
|
||||
async fn run(
|
||||
@ -28,24 +55,111 @@ impl WholeStreamCommand for PathFilestem {
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let tag = args.call_info.name_tag.clone();
|
||||
let (DefaultArguments { rest }, input) = args.process(®istry).await?;
|
||||
operate(input, rest, &action, tag.span).await
|
||||
let (
|
||||
PathFilestemArguments {
|
||||
replace,
|
||||
prefix,
|
||||
suffix,
|
||||
rest,
|
||||
},
|
||||
input,
|
||||
) = args.process(®istry).await?;
|
||||
let args = Arc::new(DefaultArguments {
|
||||
replace: replace.map(|v| v.item),
|
||||
prefix: prefix.map(|v| v.item),
|
||||
suffix: suffix.map(|v| v.item),
|
||||
num_levels: None,
|
||||
paths: rest,
|
||||
});
|
||||
operate(input, &action, tag.span, args).await
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![Example {
|
||||
description: "Get filestem of a path",
|
||||
example: "echo '/home/joe/test.txt' | path filestem",
|
||||
result: Some(vec![Value::from("test")]),
|
||||
}]
|
||||
vec![
|
||||
Example {
|
||||
description: "Get filestem of a path",
|
||||
example: "echo 'C:\\Users\\joe\\bacon_lettuce.egg' | path filestem",
|
||||
result: Some(vec![Value::from("bacon_lettuce")]),
|
||||
},
|
||||
Example {
|
||||
description: "Get filestem of a path, stripped of prefix and suffix",
|
||||
example: "echo 'C:\\Users\\joe\\bacon_lettuce.egg.gz' | path filestem -p bacon_ -s .egg.gz",
|
||||
result: Some(vec![Value::from("lettuce")]),
|
||||
},
|
||||
Example {
|
||||
description: "Replace the filestem that would be returned",
|
||||
example: "echo 'C:\\Users\\joe\\bacon_lettuce.egg.gz' | path filestem -p bacon_ -s .egg.gz -r spam",
|
||||
result: Some(vec![Value::from(UntaggedValue::path("C:\\Users\\joe\\bacon_spam.egg.gz"))]),
|
||||
},
|
||||
]
|
||||
}
|
||||
|
||||
#[cfg(not(windows))]
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![
|
||||
Example {
|
||||
description: "Get filestem of a path",
|
||||
example: "echo '/home/joe/bacon_lettuce.egg' | path filestem",
|
||||
result: Some(vec![Value::from("bacon_lettuce")]),
|
||||
},
|
||||
Example {
|
||||
description: "Get filestem of a path, stripped of prefix and suffix",
|
||||
example: "echo '/home/joe/bacon_lettuce.egg.gz' | path filestem -p bacon_ -s .egg.gz",
|
||||
result: Some(vec![Value::from("lettuce")]),
|
||||
},
|
||||
Example {
|
||||
description: "Replace the filestem that would be returned",
|
||||
example: "echo '/home/joe/bacon_lettuce.egg.gz' | path filestem -p bacon_ -s .egg.gz -r spam",
|
||||
result: Some(vec![Value::from(UntaggedValue::path("/home/joe/bacon_spam.egg.gz"))]),
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
fn action(path: &Path) -> UntaggedValue {
|
||||
UntaggedValue::string(match path.file_stem() {
|
||||
Some(stem) => stem.to_string_lossy().to_string(),
|
||||
_ => "".to_string(),
|
||||
})
|
||||
fn action(path: &Path, args: Arc<DefaultArguments>) -> UntaggedValue {
|
||||
let basename = match path.file_name() {
|
||||
Some(name) => name.to_string_lossy().to_string(),
|
||||
None => "".to_string(),
|
||||
};
|
||||
|
||||
let suffix = match args.suffix {
|
||||
Some(ref suf) => match basename.rmatch_indices(suf).next() {
|
||||
Some((i, _)) => basename.split_at(i).1.to_string(),
|
||||
None => "".to_string(),
|
||||
},
|
||||
None => match path.extension() {
|
||||
// Prepend '.' since the extension returned comes without it
|
||||
Some(ext) => ".".to_string() + &ext.to_string_lossy().to_string(),
|
||||
None => "".to_string(),
|
||||
},
|
||||
};
|
||||
|
||||
let prefix = match args.prefix {
|
||||
Some(ref pre) => match basename.matches(pre).next() {
|
||||
Some(m) => basename.split_at(m.len()).0.to_string(),
|
||||
None => "".to_string(),
|
||||
},
|
||||
None => "".to_string(),
|
||||
};
|
||||
|
||||
let basename_without_prefix = match basename.matches(&prefix).next() {
|
||||
Some(m) => basename.split_at(m.len()).1.to_string(),
|
||||
None => basename,
|
||||
};
|
||||
|
||||
let stem = match basename_without_prefix.rmatch_indices(&suffix).next() {
|
||||
Some((i, _)) => basename_without_prefix.split_at(i).0.to_string(),
|
||||
None => basename_without_prefix,
|
||||
};
|
||||
|
||||
match args.replace {
|
||||
Some(ref replace) => {
|
||||
let new_name = prefix + replace + &suffix;
|
||||
UntaggedValue::path(path.with_file_name(&new_name))
|
||||
}
|
||||
None => UntaggedValue::string(stem),
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
|
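The `path filestem` hunk above derives the stem by stripping an optional `--prefix` and `--suffix` from the file name, falling back to the extension as the default suffix. A simplified, self-contained sketch of the stripping step using `strip_prefix`/`strip_suffix`; the real command works on match indices so it can also rebuild the name for `--replace`:

```rust
// Simplified illustration: strip an optional prefix and suffix from a file name.
// The actual command also handles --replace and a default suffix taken from the extension.
fn filestem(basename: &str, prefix: Option<&str>, suffix: Option<&str>) -> String {
    let without_prefix = prefix
        .and_then(|p| basename.strip_prefix(p))
        .unwrap_or(basename);
    let stem = suffix
        .and_then(|s| without_prefix.strip_suffix(s))
        .unwrap_or(without_prefix);
    stem.to_string()
}

fn main() {
    // Matches the example in the diff: bacon_lettuce.egg.gz -> lettuce
    println!("{}", filestem("bacon_lettuce.egg.gz", Some("bacon_"), Some(".egg.gz")));
}
```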
@ -12,6 +12,7 @@ use nu_errors::ShellError;
|
||||
use nu_protocol::{ColumnPath, Primitive, ReturnSuccess, ShellTypeName, UntaggedValue, Value};
|
||||
use nu_source::Span;
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
|
||||
pub use basename::PathBasename;
|
||||
pub use command::Path as PathCommand;
|
||||
@ -24,17 +25,32 @@ pub use r#type::PathType;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct DefaultArguments {
|
||||
rest: Vec<ColumnPath>,
|
||||
// used by basename, dirname, extension and filestem
|
||||
replace: Option<String>,
|
||||
// used by filestem
|
||||
prefix: Option<String>,
|
||||
suffix: Option<String>,
|
||||
// used by dirname
|
||||
num_levels: Option<u32>,
|
||||
// used by all
|
||||
paths: Vec<ColumnPath>,
|
||||
}
|
||||
|
||||
fn handle_value<F>(action: &F, v: &Value, span: Span) -> Result<Value, ShellError>
|
||||
fn handle_value<F>(
|
||||
action: &F,
|
||||
v: &Value,
|
||||
span: Span,
|
||||
args: Arc<DefaultArguments>,
|
||||
) -> Result<Value, ShellError>
|
||||
where
|
||||
F: Fn(&Path) -> UntaggedValue + Send + 'static,
|
||||
F: Fn(&Path, Arc<DefaultArguments>) -> UntaggedValue + Send + 'static,
|
||||
{
|
||||
let v = match &v.value {
|
||||
UntaggedValue::Primitive(Primitive::Path(buf)) => action(buf).into_value(v.tag()),
|
||||
UntaggedValue::Primitive(Primitive::Path(buf)) => action(buf, args).into_value(v.tag()),
|
||||
UntaggedValue::Primitive(Primitive::String(s))
|
||||
| UntaggedValue::Primitive(Primitive::Line(s)) => action(s.as_ref()).into_value(v.tag()),
|
||||
| UntaggedValue::Primitive(Primitive::Line(s)) => {
|
||||
action(s.as_ref(), args).into_value(v.tag())
|
||||
}
|
||||
other => {
|
||||
let got = format!("got {}", other.type_name());
|
||||
return Err(ShellError::labeled_error_with_secondary(
|
||||
@ -51,24 +67,25 @@ where
|
||||
|
||||
async fn operate<F>(
|
||||
input: crate::InputStream,
|
||||
paths: Vec<ColumnPath>,
|
||||
action: &'static F,
|
||||
span: Span,
|
||||
args: Arc<DefaultArguments>,
|
||||
) -> Result<OutputStream, ShellError>
|
||||
where
|
||||
F: Fn(&Path) -> UntaggedValue + Send + Sync + 'static,
|
||||
F: Fn(&Path, Arc<DefaultArguments>) -> UntaggedValue + Send + Sync + 'static,
|
||||
{
|
||||
Ok(input
|
||||
.map(move |v| {
|
||||
if paths.is_empty() {
|
||||
ReturnSuccess::value(handle_value(&action, &v, span)?)
|
||||
if args.paths.is_empty() {
|
||||
ReturnSuccess::value(handle_value(&action, &v, span, Arc::clone(&args))?)
|
||||
} else {
|
||||
let mut ret = v;
|
||||
|
||||
for path in &paths {
|
||||
for path in &args.paths {
|
||||
let cloned_args = Arc::clone(&args);
|
||||
ret = ret.swap_data_by_column_path(
|
||||
path,
|
||||
Box::new(move |old| handle_value(&action, &old, span)),
|
||||
Box::new(move |old| handle_value(&action, &old, span, cloned_args)),
|
||||
)?;
|
||||
}
|
||||
|
||||
|
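The `path` module refactor above changes `operate`/`handle_value` so every subcommand's `action` receives an `Arc<DefaultArguments>` instead of a bare list of column paths, and the per-column-path closure clones the `Arc` on each iteration. A minimal sketch of that ownership pattern with plain types (no nu streams or column paths); only the closure signature and the per-item `Arc::clone` are taken from the diff:

```rust
use std::path::Path;
use std::sync::Arc;

// Stand-in for the shared argument bundle threaded through operate().
struct DefaultArguments {
    replace: Option<String>,
}

// Same shape as the new action signature: Fn(&Path, Arc<DefaultArguments>).
fn operate<F>(inputs: Vec<&Path>, action: F, args: Arc<DefaultArguments>) -> Vec<String>
where
    F: Fn(&Path, Arc<DefaultArguments>) -> String,
{
    inputs
        .into_iter()
        // Clone the Arc per item, mirroring Arc::clone(&args) in the diff,
        // so each invocation gets its own cheap handle to the arguments.
        .map(|p| action(p, Arc::clone(&args)))
        .collect()
}

fn main() {
    let args = Arc::new(DefaultArguments { replace: None });
    let out = operate(
        vec![Path::new("/home/joe/test.txt")],
        |p, _args| {
            p.file_name()
                .map(|n| n.to_string_lossy().into_owned())
                .unwrap_or_default()
        },
        args,
    );
    println!("{:?}", out);
}
```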
@ -3,11 +3,16 @@ use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use crate::shell::filesystem_shell::get_file_type;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{Signature, SyntaxShape, UntaggedValue, Value};
|
||||
use nu_protocol::{ColumnPath, Signature, SyntaxShape, UntaggedValue, Value};
|
||||
use std::path::Path;
|
||||
|
||||
pub struct PathType;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct PathTypeArguments {
|
||||
rest: Vec<ColumnPath>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl WholeStreamCommand for PathType {
|
||||
fn name(&self) -> &str {
|
||||
@ -15,11 +20,12 @@ impl WholeStreamCommand for PathType {
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("path type").rest(SyntaxShape::ColumnPath, "optionally operate by path")
|
||||
Signature::build("path type")
|
||||
.rest(SyntaxShape::ColumnPath, "Optionally operate by column path")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"gives the type of the object the path refers to (eg file, dir, symlink)"
|
||||
"Gives the type of the object a path refers to (e.g., file, dir, symlink)"
|
||||
}
|
||||
|
||||
async fn run(
|
||||
@ -28,8 +34,15 @@ impl WholeStreamCommand for PathType {
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let tag = args.call_info.name_tag.clone();
|
||||
let (DefaultArguments { rest }, input) = args.process(®istry).await?;
|
||||
operate(input, rest, &action, tag.span).await
|
||||
let (PathTypeArguments { rest }, input) = args.process(®istry).await?;
|
||||
let args = Arc::new(DefaultArguments {
|
||||
replace: None,
|
||||
prefix: None,
|
||||
suffix: None,
|
||||
num_levels: None,
|
||||
paths: rest,
|
||||
});
|
||||
operate(input, &action, tag.span, args).await
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
@ -41,7 +54,7 @@ impl WholeStreamCommand for PathType {
|
||||
}
|
||||
}
|
||||
|
||||
fn action(path: &Path) -> UntaggedValue {
|
||||
fn action(path: &Path, _args: Arc<DefaultArguments>) -> UntaggedValue {
|
||||
let meta = std::fs::symlink_metadata(path);
|
||||
UntaggedValue::string(match &meta {
|
||||
Ok(md) => get_file_type(md),
|
||||
|
crates/nu-cli/src/commands/random/decimal.rs: 111 lines (new file)
@ -0,0 +1,111 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::deserializer::NumericRange;
|
||||
use crate::prelude::*;
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{ReturnSuccess, Signature, SyntaxShape, UntaggedValue};
|
||||
use nu_source::Tagged;
|
||||
use rand::prelude::{thread_rng, Rng};
|
||||
use std::cmp::Ordering;
|
||||
|
||||
pub struct SubCommand;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct DecimalArgs {
|
||||
range: Option<Tagged<NumericRange>>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl WholeStreamCommand for SubCommand {
|
||||
fn name(&self) -> &str {
|
||||
"random decimal"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("random decimal").optional("range", SyntaxShape::Range, "Range of values")
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Generate a random decimal within a range [min..max]"
|
||||
}
|
||||
|
||||
async fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
decimal(args, registry).await
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![
|
||||
Example {
|
||||
description: "Generate a default decimal value between 0 and 1",
|
||||
example: "random decimal",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: "Generate a random decimal less than or equal to 500",
|
||||
example: "random decimal ..500",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: "Generate a random decimal greater than or equal to 100000",
|
||||
example: "random decimal 100000..",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: "Generate a random decimal between 1 and 10",
|
||||
example: "random decimal 1..10",
|
||||
result: None,
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn decimal(
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let (DecimalArgs { range }, _) = args.process(®istry).await?;
|
||||
|
||||
let (min, max) = if let Some(range) = &range {
|
||||
(range.item.min() as f64, range.item.max() as f64)
|
||||
} else {
|
||||
(0.0, 1.0)
|
||||
};
|
||||
|
||||
match min.partial_cmp(&max) {
|
||||
Some(Ordering::Greater) => Err(ShellError::labeled_error(
|
||||
format!("Invalid range {}..{}", min, max),
|
||||
"expected a valid range",
|
||||
range
|
||||
.expect("Unexpected ordering error in random decimal")
|
||||
.span(),
|
||||
)),
|
||||
Some(Ordering::Equal) => {
|
||||
let untagged_result = UntaggedValue::decimal_from_float(min, Span::new(64, 64));
|
||||
Ok(OutputStream::one(ReturnSuccess::value(untagged_result)))
|
||||
}
|
||||
_ => {
|
||||
let mut thread_rng = thread_rng();
|
||||
let result: f64 = thread_rng.gen_range(min, max);
|
||||
|
||||
let untagged_result = UntaggedValue::decimal_from_float(result, Span::new(64, 64));
|
||||
|
||||
Ok(OutputStream::one(ReturnSuccess::value(untagged_result)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::ShellError;
|
||||
use super::SubCommand;
|
||||
|
||||
#[test]
|
||||
fn examples_work_as_expected() -> Result<(), ShellError> {
|
||||
use crate::examples::test as test_examples;
|
||||
|
||||
Ok(test_examples(SubCommand {})?)
|
||||
}
|
||||
}
|
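Illustration (not part of the change set): `rand`'s two-argument `gen_range(min, max)` samples a half-open range and requires `min < max`, which is why the `Ordering::Equal` case above returns the value directly instead of calling the RNG. A minimal standalone sketch of that guard, assuming the same rand-0.7-style API imported in the file:

use rand::prelude::{thread_rng, Rng};
use std::cmp::Ordering;

fn random_decimal(min: f64, max: f64) -> Option<f64> {
    match min.partial_cmp(&max)? {
        Ordering::Greater => None,                                 // invalid range
        Ordering::Equal => Some(min),                              // avoid gen_range's min < max requirement
        Ordering::Less => Some(thread_rng().gen_range(min, max)),  // half-open [min, max)
    }
}

fn main() {
    println!("{:?}", random_decimal(1.0, 10.0));
    println!("{:?}", random_decimal(55.0, 55.0)); // Some(55.0), no panic
}
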
@ -1,6 +1,7 @@
pub mod command;

pub mod bool;
pub mod decimal;
pub mod dice;
pub mod integer;
#[cfg(feature = "uuid_crate")]
@ -9,6 +10,7 @@ pub mod uuid;
pub use command::Command as Random;

pub use self::bool::SubCommand as RandomBool;
pub use decimal::SubCommand as RandomDecimal;
pub use dice::SubCommand as RandomDice;
pub use integer::SubCommand as RandomInteger;
#[cfg(feature = "uuid_crate")]
@ -264,7 +264,7 @@ fn string_from(input: &[Value]) -> String {
    let mut first = true;
    for i in input.iter() {
        if !first {
            save_data.push_str("\n");
            save_data.push('\n');
        } else {
            first = false;
        }

@ -304,7 +304,7 @@ fn print_seq(
    let before_dec = istr.find('.').unwrap_or(ilen);
    if pad && before_dec < padding {
        for _ in 0..(padding - before_dec) {
            ret_str.push_str("0");
            ret_str.push('0');
        }
    }
    ret_str.push_str(&istr);
379
crates/nu-cli/src/commands/seq_dates.rs
Normal file
@ -0,0 +1,379 @@
|
||||
use crate::commands::WholeStreamCommand;
|
||||
use crate::prelude::*;
|
||||
use chrono::naive::NaiveDate;
|
||||
use chrono::{Duration, Local};
|
||||
use nu_errors::ShellError;
|
||||
use nu_protocol::{value::I64Ext, value::StrExt, value::StringExt, value::U64Ext};
|
||||
use nu_protocol::{ReturnSuccess, Signature, SyntaxShape, UntaggedValue, Value};
|
||||
use nu_source::Tagged;
|
||||
|
||||
pub struct SeqDates;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct SeqDatesArgs {
|
||||
separator: Option<Tagged<String>>,
|
||||
output_format: Option<Tagged<String>>,
|
||||
input_format: Option<Tagged<String>>,
|
||||
begin_date: Option<Tagged<String>>,
|
||||
end_date: Option<Tagged<String>>,
|
||||
increment: Option<Tagged<i64>>,
|
||||
days: Option<Tagged<u64>>,
|
||||
reverse: Tagged<bool>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl WholeStreamCommand for SeqDates {
|
||||
fn name(&self) -> &str {
|
||||
"seq date"
|
||||
}
|
||||
|
||||
fn signature(&self) -> Signature {
|
||||
Signature::build("seq date")
|
||||
.named(
|
||||
"separator",
|
||||
SyntaxShape::String,
|
||||
"separator character (defaults to \\n)",
|
||||
Some('s'),
|
||||
)
|
||||
.named(
|
||||
"output_format",
|
||||
SyntaxShape::String,
|
||||
"prints dates in this format (defaults to %Y-%m-%d)",
|
||||
Some('o'),
|
||||
)
|
||||
.named(
|
||||
"input_format",
|
||||
SyntaxShape::String,
|
||||
"give argument dates in this format (defaults to %Y-%m-%d)",
|
||||
Some('i'),
|
||||
)
|
||||
.named(
|
||||
"begin_date",
|
||||
SyntaxShape::String,
|
||||
"beginning date range",
|
||||
Some('b'),
|
||||
)
|
||||
.named("end_date", SyntaxShape::String, "ending date", Some('e'))
|
||||
.named(
|
||||
"increment",
|
||||
SyntaxShape::Int,
|
||||
"increment dates by this number",
|
||||
Some('n'),
|
||||
)
|
||||
.named(
|
||||
"days",
|
||||
SyntaxShape::Int,
|
||||
"number of days to print",
|
||||
Some('d'),
|
||||
)
|
||||
.switch("reverse", "print dates in reverse", Some('r'))
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"print sequences of dates"
|
||||
}
|
||||
|
||||
async fn run(
|
||||
&self,
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
seq_dates(args, registry).await
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
vec![
|
||||
Example {
|
||||
description: "print the next 10 days in YYYY-MM-DD format with newline separator",
|
||||
example: "seq date --days 10",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: "print the previous 10 days in YYYY-MM-DD format with newline separator",
|
||||
example: "seq date --days 10 -r",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: "print the previous 10 days starting today in MM/DD/YYYY format with newline separator",
|
||||
example: "seq date --days 10 -o '%m/%d/%Y' -r",
|
||||
result: None,
|
||||
},
|
||||
Example {
|
||||
description: "print the first 10 days in January, 2020",
|
||||
example: "seq date -b '2020-01-01' -e '2020-01-10'",
|
||||
result: Some(vec![
|
||||
UntaggedValue::string("2020-01-01").into(),
|
||||
UntaggedValue::string("2020-01-02").into(),
|
||||
UntaggedValue::string("2020-01-03").into(),
|
||||
UntaggedValue::string("2020-01-04").into(),
|
||||
UntaggedValue::string("2020-01-05").into(),
|
||||
UntaggedValue::string("2020-01-06").into(),
|
||||
UntaggedValue::string("2020-01-07").into(),
|
||||
UntaggedValue::string("2020-01-08").into(),
|
||||
UntaggedValue::string("2020-01-09").into(),
|
||||
UntaggedValue::string("2020-01-10").into(),
|
||||
]),
|
||||
},
|
||||
Example {
|
||||
description: "print every fifth day between January 1st 2020 and January 31st 2020",
|
||||
example: "seq date -b '2020-01-01' -e '2020-01-31' -n 5",
|
||||
result: Some(vec![
|
||||
UntaggedValue::string("2020-01-01").into(),
|
||||
UntaggedValue::string("2020-01-06").into(),
|
||||
UntaggedValue::string("2020-01-11").into(),
|
||||
UntaggedValue::string("2020-01-16").into(),
|
||||
UntaggedValue::string("2020-01-21").into(),
|
||||
UntaggedValue::string("2020-01-26").into(),
|
||||
UntaggedValue::string("2020-01-31").into(),
|
||||
]),
|
||||
},
|
||||
Example {
|
||||
description: "starting on May 5th, 2020, print the next 10 days in your locale's date format, colon separated",
|
||||
example: "seq date -o %x -s ':' -d 10 -b '2020-05-01'",
|
||||
result: None,
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
async fn seq_dates(
|
||||
args: CommandArgs,
|
||||
registry: &CommandRegistry,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let registry = registry.clone();
|
||||
let _name = args.call_info.name_tag.clone();
|
||||
|
||||
let (
|
||||
SeqDatesArgs {
|
||||
separator,
|
||||
output_format,
|
||||
input_format,
|
||||
begin_date,
|
||||
end_date,
|
||||
increment,
|
||||
days,
|
||||
reverse,
|
||||
},
|
||||
_,
|
||||
) = args.process(®istry).await?;
|
||||
|
||||
let sep: String = match separator {
|
||||
Some(s) => {
|
||||
if s.item == r"\t" {
|
||||
'\t'.to_string()
|
||||
} else if s.item == r"\n" {
|
||||
'\n'.to_string()
|
||||
} else if s.item == r"\r" {
|
||||
'\r'.to_string()
|
||||
} else {
|
||||
let vec_s: Vec<char> = s.chars().collect();
|
||||
if vec_s.is_empty() {
|
||||
return Err(ShellError::labeled_error(
|
||||
"Expected a single separator char from --separator",
|
||||
"requires a single character string input",
|
||||
&s.tag,
|
||||
));
|
||||
};
|
||||
vec_s.iter().collect()
|
||||
}
|
||||
}
|
||||
_ => '\n'.to_string(),
|
||||
};
|
||||
|
||||
let outformat = match output_format {
|
||||
Some(s) => Some(s.item.to_string_value(s.tag)),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
let informat = match input_format {
|
||||
Some(s) => Some(s.item.to_string_value(s.tag)),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
let begin = match begin_date {
|
||||
Some(s) => Some(s.item),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
let end = match end_date {
|
||||
Some(s) => Some(s.item),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
let inc = match increment {
|
||||
Some(i) => {
|
||||
let clone = i.clone();
|
||||
i.to_value(clone.tag)
|
||||
}
|
||||
_ => (1 as i64).to_value_create_tag(),
|
||||
};
|
||||
|
||||
let day_count: Option<Value> = match days {
|
||||
Some(i) => Some(i.item.to_value(i.tag)),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
let mut rev = false;
|
||||
if *reverse {
|
||||
rev = *reverse;
|
||||
}
|
||||
|
||||
run_seq_dates(sep, outformat, informat, begin, end, inc, day_count, rev)
|
||||
}
|
||||
|
||||
pub fn parse_date_string(s: &str, format: &str) -> Result<NaiveDate, &'static str> {
|
||||
let d = match NaiveDate::parse_from_str(s, format) {
|
||||
Ok(d) => d,
|
||||
Err(_) => return Err("Failed to parse date."),
|
||||
};
|
||||
Ok(d)
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn run_seq_dates(
|
||||
separator: String,
|
||||
output_format: Option<Value>,
|
||||
input_format: Option<Value>,
|
||||
beginning_date: Option<String>,
|
||||
ending_date: Option<String>,
|
||||
increment: Value,
|
||||
day_count: Option<Value>,
|
||||
reverse: bool,
|
||||
) -> Result<OutputStream, ShellError> {
|
||||
let today = Local::today().naive_local();
|
||||
let mut step_size: i64 = increment
|
||||
.as_i64()
|
||||
.expect("unable to change increment to i64");
|
||||
|
||||
if step_size == 0 {
|
||||
return Err(ShellError::labeled_error(
|
||||
"increment cannot be 0",
|
||||
"increment cannot be 0",
|
||||
increment.tag,
|
||||
));
|
||||
}
|
||||
|
||||
let in_format = match input_format {
|
||||
Some(i) => i.as_string().map_err(|e| {
|
||||
ShellError::labeled_error(
|
||||
e.to_string(),
|
||||
"error with input_format as_string",
|
||||
i.tag.span,
|
||||
)
|
||||
})?,
|
||||
None => "%Y-%m-%d".to_string(),
|
||||
};
|
||||
|
||||
let out_format = match output_format {
|
||||
Some(o) => o.as_string().map_err(|e| {
|
||||
ShellError::labeled_error(
|
||||
e.to_string(),
|
||||
"error with output_format as_string",
|
||||
o.tag.span,
|
||||
)
|
||||
})?,
|
||||
None => "%Y-%m-%d".to_string(),
|
||||
};
|
||||
|
||||
let start_date = match beginning_date {
|
||||
Some(d) => match parse_date_string(&d, &in_format) {
|
||||
Ok(nd) => nd,
|
||||
Err(e) => {
|
||||
return Err(ShellError::labeled_error(
|
||||
e,
|
||||
"Failed to parse date",
|
||||
Tag::unknown(),
|
||||
))
|
||||
}
|
||||
},
|
||||
_ => today,
|
||||
};
|
||||
|
||||
let mut end_date = match ending_date {
|
||||
Some(d) => match parse_date_string(&d, &in_format) {
|
||||
Ok(nd) => nd,
|
||||
Err(e) => {
|
||||
return Err(ShellError::labeled_error(
|
||||
e,
|
||||
"Failed to parse date",
|
||||
Tag::unknown(),
|
||||
))
|
||||
}
|
||||
},
|
||||
_ => today,
|
||||
};
|
||||
|
||||
let mut days_to_output = match day_count {
|
||||
Some(d) => d.as_i64()?,
|
||||
None => 0i64,
|
||||
};
|
||||
|
||||
// Make the signs opposite if we're created dates in reverse direction
|
||||
if reverse {
|
||||
step_size *= -1;
|
||||
days_to_output *= -1;
|
||||
}
|
||||
|
||||
if days_to_output != 0 {
|
||||
end_date = match start_date.checked_add_signed(Duration::days(days_to_output)) {
|
||||
Some(date) => date,
|
||||
None => {
|
||||
return Err(ShellError::labeled_error(
|
||||
"integer value too large",
|
||||
"integer value too large",
|
||||
Tag::unknown(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// conceptually counting down with a positive step or counting up with a negative step
|
||||
// makes no sense, attempt to do what one means by inverting the signs in those cases.
|
||||
if (start_date > end_date) && (step_size > 0) || (start_date < end_date) && step_size < 0 {
|
||||
step_size = -step_size;
|
||||
}
|
||||
|
||||
let is_out_of_range =
|
||||
|next| (step_size > 0 && next > end_date) || (step_size < 0 && next < end_date);
|
||||
|
||||
let mut next = start_date;
|
||||
if is_out_of_range(next) {
|
||||
return Err(ShellError::labeled_error(
|
||||
"date is out of range",
|
||||
"date is out of range",
|
||||
Tag::unknown(),
|
||||
));
|
||||
}
|
||||
|
||||
let mut ret_str = String::from("");
|
||||
loop {
|
||||
ret_str.push_str(&format!("{}", next.format(&out_format)));
|
||||
// TODO: check this value is good
|
||||
next += Duration::days(step_size);
|
||||
|
||||
if is_out_of_range(next) {
|
||||
break;
|
||||
}
|
||||
|
||||
ret_str.push_str(&separator);
|
||||
}
|
||||
|
||||
let rows: Vec<Value> = ret_str
|
||||
.lines()
|
||||
.map(|v| v.to_str_value_create_tag())
|
||||
.collect();
|
||||
Ok(futures::stream::iter(rows.into_iter().map(ReturnSuccess::value)).to_output_stream())
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::SeqDates;
|
||||
use super::ShellError;
|
||||
|
||||
#[test]
|
||||
fn examples_work_as_expected() -> Result<(), ShellError> {
|
||||
use crate::examples::test as test_examples;
|
||||
|
||||
Ok(test_examples(SeqDates {})?)
|
||||
}
|
||||
}
|
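Illustration (not part of the change set): the separator handling in `seq date` treats the raw strings \t, \n and \r specially and otherwise accepts a single character. Possible invocations in nu; the dates themselves depend on the current day:

seq date --days 3            # one date per line (default newline separator)
seq date --days 3 -s ','     # comma separated
seq date --days 3 -s '\t'    # the literal \t is translated to a tab
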
@ -159,12 +159,14 @@ fn action(input: &Value, options: &Substring, tag: impl Into<Tag>) -> Result<Val
                "End must be greater than or equal to Start",
                tag.span,
            )),
            Ordering::Less => Ok(UntaggedValue::string(
            Ordering::Less => Ok(UntaggedValue::string(if end == isize::max_value() {
                s.chars().skip(start as usize).collect::<String>()
            } else {
                s.chars()
                    .skip(start as usize)
                    .take((end - start) as usize)
                    .collect::<String>(),
            )
                    .collect::<String>()
            })
            .into_value(tag)),
        }
    } else {

@ -330,6 +332,9 @@ mod tests {
        expectation("and", (0, -3)),
        expectation("andr", (0, -2)),
        expectation("andre", (0, -1)),
        // str substring [ -4 , _ ]
        // str substring -4 ,
        expectation("dres", (-4, isize::max_value())),
        expectation("", (0, -110)),
        expectation("", (6, 0)),
        expectation("", (6, -1)),
@ -70,6 +70,8 @@ async fn to_md(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
            column_widths.push(escaped_header_string.len());
            escaped_headers.push(escaped_header_string);
        }
    } else {
        column_widths = vec![0; headers.len()]
    }

    let mut escaped_rows: Vec<Vec<String>> = Vec::new();
@ -101,7 +103,15 @@ async fn to_md(args: CommandArgs, registry: &CommandRegistry) -> Result<OutputSt
        escaped_rows.push(escaped_row);
    }

    let output_string = get_output_string(&escaped_headers, &escaped_rows, &column_widths, pretty);
    let output_string = if (column_widths.is_empty() || column_widths.iter().all(|x| *x == 0))
        && escaped_rows.is_empty()
    {
        String::from("")
    } else {
        get_output_string(&escaped_headers, &escaped_rows, &column_widths, pretty)
            .trim()
            .to_string()
    };

    Ok(OutputStream::one(ReturnSuccess::value(
        UntaggedValue::string(output_string).into_value(name_tag),
@ -117,22 +127,22 @@ fn get_output_string(
|
||||
let mut output_string = String::new();
|
||||
|
||||
if !headers.is_empty() {
|
||||
output_string.push_str("|");
|
||||
output_string.push('|');
|
||||
|
||||
for i in 0..headers.len() {
|
||||
if pretty {
|
||||
output_string.push_str(" ");
|
||||
output_string.push(' ');
|
||||
output_string.push_str(&get_padded_string(
|
||||
headers[i].clone(),
|
||||
column_widths[i],
|
||||
' ',
|
||||
));
|
||||
output_string.push_str(" ");
|
||||
output_string.push(' ');
|
||||
} else {
|
||||
output_string.push_str(headers[i].as_str());
|
||||
}
|
||||
|
||||
output_string.push_str("|");
|
||||
output_string.push('|');
|
||||
}
|
||||
|
||||
output_string.push_str("\n|");
|
||||
@ -140,55 +150,59 @@ fn get_output_string(
|
||||
#[allow(clippy::needless_range_loop)]
|
||||
for i in 0..headers.len() {
|
||||
if pretty {
|
||||
output_string.push_str(" ");
|
||||
output_string.push(' ');
|
||||
output_string.push_str(&get_padded_string(
|
||||
String::from("-"),
|
||||
column_widths[i],
|
||||
'-',
|
||||
));
|
||||
output_string.push_str(" ");
|
||||
output_string.push(' ');
|
||||
} else {
|
||||
output_string.push_str("-");
|
||||
output_string.push('-');
|
||||
}
|
||||
|
||||
output_string.push_str("|");
|
||||
output_string.push('|');
|
||||
}
|
||||
|
||||
output_string.push_str("\n");
|
||||
output_string.push('\n');
|
||||
}
|
||||
|
||||
for row in rows {
|
||||
if !headers.is_empty() {
|
||||
output_string.push_str("|");
|
||||
output_string.push('|');
|
||||
}
|
||||
|
||||
for i in 0..row.len() {
|
||||
if pretty {
|
||||
output_string.push_str(" ");
|
||||
output_string.push(' ');
|
||||
output_string.push_str(&get_padded_string(row[i].clone(), column_widths[i], ' '));
|
||||
output_string.push_str(" ");
|
||||
output_string.push(' ');
|
||||
} else {
|
||||
output_string.push_str(row[i].as_str());
|
||||
}
|
||||
|
||||
if !headers.is_empty() {
|
||||
output_string.push_str("|");
|
||||
output_string.push('|');
|
||||
}
|
||||
}
|
||||
|
||||
output_string.push_str("\n");
|
||||
output_string.push('\n');
|
||||
}
|
||||
|
||||
output_string
|
||||
}
|
||||
|
||||
fn get_padded_string(text: String, desired_length: usize, padding_character: char) -> String {
|
||||
let repeat_length = if text.len() > desired_length {
|
||||
0
|
||||
} else {
|
||||
desired_length - text.len()
|
||||
};
|
||||
|
||||
format!(
|
||||
"{}{}",
|
||||
text,
|
||||
padding_character
|
||||
.to_string()
|
||||
.repeat(desired_length - text.len())
|
||||
padding_character.to_string().repeat(repeat_length)
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -157,7 +157,7 @@ async fn process_row(
                None => OutputStream::one(Err(ShellError::labeled_error(
                    "update could not find place to insert column",
                    "column name",
                    field.maybe_span().unwrap_or_else(|| tag.span),
                    field.maybe_span().unwrap_or(tag.span),
                ))),
            },
            Value { value: _, ref tag } => {
@ -166,7 +166,7 @@ async fn process_row(
                None => OutputStream::one(Err(ShellError::labeled_error(
                    "update could not find place to insert column",
                    "column name",
                    field.maybe_span().unwrap_or_else(|| tag.span),
                    field.maybe_span().unwrap_or(tag.span),
                ))),
            }
        }
@ -288,10 +288,7 @@ mod tests {
        registry: &dyn SignatureRegistry,
        pos: usize,
    ) -> Vec<LocationType> {
        let lite_block = match lite_parse(line, 0) {
            Ok(v) => v,
            Err(e) => e.partial.expect("lite_parse result"),
        };
        let (lite_block, _) = lite_parse(line, 0);

        let block = classify_block(&lite_block, registry);

@ -129,7 +129,7 @@ pub fn get_documentation(
    let mut long_desc = String::new();

    long_desc.push_str(&cmd.usage());
    long_desc.push_str("\n");
    long_desc.push('\n');

    let mut subcommands = vec![];
    if !config.no_subcommands {
@ -144,7 +144,7 @@ pub fn get_documentation(

    let mut one_liner = String::new();
    one_liner.push_str(&signature.name);
    one_liner.push_str(" ");
    one_liner.push(' ');

    for positional in &signature.positional {
        match &positional.0 {
@ -175,7 +175,7 @@ pub fn get_documentation(
        long_desc.push_str("\nSubcommands:\n");
        subcommands.sort();
        long_desc.push_str(&subcommands.join("\n"));
        long_desc.push_str("\n");
        long_desc.push('\n');
    }

    if !signature.positional.is_empty() || signature.rest_positional.is_some() {
@ -205,7 +205,7 @@ pub fn get_documentation(
        long_desc.push_str("\nExamples:");
    }
    for example in examples {
        long_desc.push_str("\n");
        long_desc.push('\n');
        long_desc.push_str(" ");
        long_desc.push_str(example.description);

@ -218,7 +218,7 @@ pub fn get_documentation(
        }
    }

    long_desc.push_str("\n");
    long_desc.push('\n');

    long_desc
}

2  crates/nu-cli/src/env/environment_syncer.rs  vendored
@ -50,7 +50,7 @@ impl EnvironmentSyncer {

    pub fn did_config_change(&mut self) -> bool {
        let config = self.config.lock();
        config.is_modified().unwrap_or_else(|_| false)
        config.is_modified().unwrap_or(false)
    }

    pub fn reload(&mut self) {

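Illustration (not part of the change set): several hunks in this series swap `unwrap_or_else` for `unwrap_or` where the fallback is a constant. A small standalone Rust example of when each form makes sense:

fn main() {
    // `unwrap_or` takes the default by value and evaluates it up front;
    // `unwrap_or_else` takes a closure and only runs it on the None path,
    // which matters when computing the default is expensive.
    let width: usize = None.unwrap_or(80);                          // constant default: unwrap_or is enough
    let banner: String = None.unwrap_or_else(|| "-".repeat(1_000)); // costly default: stay lazy
    println!("{} {}", width, banner.len());
}
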
2  crates/nu-cli/src/env/host.rs  vendored
@ -126,7 +126,7 @@ impl Host for BasicHost {
    }

    fn width(&self) -> usize {
        let (mut term_width, _) = term_size::dimensions().unwrap_or_else(|| (80, 20));
        let (mut term_width, _) = term_size::dimensions().unwrap_or((80, 20));
        term_width -= 1;
        term_width
    }

@ -191,13 +191,16 @@ pub fn test_anchors(cmd: Command) -> Result<(), ShellError> {

/// Parse and run a nushell pipeline
fn parse_line(line: &str, ctx: &mut EvaluationContext) -> Result<ClassifiedBlock, ShellError> {
    let line = if line.ends_with('\n') {
        &line[..line.len() - 1]
    let line = if let Some(line) = line.strip_suffix('\n') {
        line
    } else {
        line
    };

    let lite_result = nu_parser::lite_parse(&line, 0)?;
    let (lite_result, err) = nu_parser::lite_parse(&line, 0);
    if let Some(err) = err {
        return Err(err.into());
    }

    // TODO ensure the command whose examples we're testing is actually in the pipeline
    let classified_block = nu_parser::classify_block(&lite_result, ctx.registry());

@ -156,7 +156,7 @@ pub fn scan(paths: Vec<std::path::PathBuf>) -> Result<Vec<crate::commands::Comma

        if is_valid_name && is_executable {
            trace!(target: "nu::load", "plugin infrastructure -> Trying {:?}", path.display());
            build_plugin_command(&path).unwrap_or_else(|_| None)
            build_plugin_command(&path).unwrap_or(None)
        } else {
            None
        }

@ -23,15 +23,10 @@ impl NuCompleter {
        use completion::engine::LocationType;

        let nu_context: &EvaluationContext = context.as_ref();
        let lite_block = match nu_parser::lite_parse(line, 0) {
            Ok(block) => Some(block),
            Err(result) => result.partial,
        };
        let (lite_block, _) = nu_parser::lite_parse(line, 0);

        let locations = lite_block
            .map(|block| nu_parser::classify_block(&block, &nu_context.registry))
            .map(|block| completion::engine::completion_location(line, &block.block, pos))
            .unwrap_or_default();
        let classified_block = nu_parser::classify_block(&lite_block, &nu_context.registry);
        let locations = completion::engine::completion_location(line, &classified_block.block, pos);

        let matcher = nu_data::config::config(Tag::unknown())
            .ok()

@ -121,10 +121,10 @@ impl rustyline::validate::Validator for NuValidator {
    ) -> rustyline::Result<rustyline::validate::ValidationResult> {
        let src = ctx.input();

        let lite_result = nu_parser::lite_parse(src, 0);
        let (_, err) = nu_parser::lite_parse(src, 0);

        if let Err(err) = lite_result {
            if let nu_errors::ParseErrorReason::Eof { .. } = err.cause.reason() {
        if let Some(err) = err {
            if let nu_errors::ParseErrorReason::Eof { .. } = err.reason() {
                return Ok(rustyline::validate::ValidationResult::Incomplete);
            }
        }

@ -25,22 +25,21 @@ impl Painter {
        registry: &dyn SignatureRegistry,
        palette: &P,
    ) -> Cow<'l, str> {
        let lite_block = nu_parser::lite_parse(line, 0);
        let (lb, err) = nu_parser::lite_parse(line, 0);

        match lite_block {
            Err(_) => Cow::Borrowed(line),
            Ok(lb) => {
                let classified = nu_parser::classify_block(&lb, registry);
        if err.is_some() {
            Cow::Borrowed(line)
        } else {
            let classified = nu_parser::classify_block(&lb, registry);

                let shapes = nu_parser::shapes(&classified.block);
                let mut painter = Painter::new(line);
            let shapes = nu_parser::shapes(&classified.block);
            let mut painter = Painter::new(line);

                for shape in shapes {
                    painter.paint_shape(&shape, palette);
                }

                Cow::Owned(painter.into_string())
            for shape in shapes {
                painter.paint_shape(&shape, palette);
            }

            Cow::Owned(painter.into_string())
        }
    }

@ -61,7 +61,7 @@ mod tests {
        );

        //If this fails for you, check for any special unicode characters in your ~ path
        assert!(actual.out.chars().filter(|c| c.clone() == '/').count() == 2);
        assert!(actual.out.chars().filter(|c| *c == '/').count() == 2);
        #[cfg(target_os = "linux")]
        assert!(actual.out.contains("home"));
        #[cfg(target_os = "macos")]

@ -34,6 +34,7 @@ mod mkdir;
mod move_;
mod open;
mod parse;
mod path;
mod prepend;
mod random;
mod range;

83
crates/nu-cli/tests/commands/path/basename.rs
Normal file
@ -0,0 +1,83 @@
|
||||
use nu_test_support::{nu, pipeline};
|
||||
|
||||
use super::join_path_sep;
|
||||
|
||||
#[test]
|
||||
fn returns_basename_of_empty_input() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo ""
|
||||
| path basename
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn replaces_basename_of_empty_input() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo ""
|
||||
| path basename -r newname.txt
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "newname.txt");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn returns_basename_of_path_ending_with_dot() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "some/file.txt/."
|
||||
| path basename
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "file.txt");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn replaces_basename_of_path_ending_with_dot() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "some/file.txt/."
|
||||
| path basename -r viking.txt
|
||||
"#
|
||||
));
|
||||
|
||||
let expected = join_path_sep(&["some", "viking.txt"]);
|
||||
assert_eq!(actual.out, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn returns_basename_of_path_ending_with_double_dot() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "some/file.txt/.."
|
||||
| path basename
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn replaces_basename_of_path_ending_with_double_dot() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "some/file.txt/.."
|
||||
| path basename -r eggs
|
||||
"#
|
||||
));
|
||||
|
||||
let expected = join_path_sep(&["some/file.txt/..", "eggs"]);
|
||||
assert_eq!(actual.out, expected);
|
||||
}
|
137
crates/nu-cli/tests/commands/path/dirname.rs
Normal file
@ -0,0 +1,137 @@
|
||||
use nu_test_support::{nu, pipeline};
|
||||
|
||||
use super::join_path_sep;
|
||||
|
||||
#[test]
|
||||
fn returns_dirname_of_empty_input() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo ""
|
||||
| path dirname
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn replaces_dirname_of_empty_input() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo ""
|
||||
| path dirname -r newdir
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "newdir");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn returns_dirname_of_path_ending_with_dot() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "some/dir/."
|
||||
| path dirname
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "some");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn replaces_dirname_of_path_ending_with_dot() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "some/dir/."
|
||||
| path dirname -r eggs
|
||||
"#
|
||||
));
|
||||
|
||||
let expected = join_path_sep(&["eggs", "dir"]);
|
||||
assert_eq!(actual.out, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn returns_dirname_of_path_ending_with_double_dot() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "some/dir/.."
|
||||
| path dirname
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "some/dir");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn replaces_dirname_of_path_with_double_dot() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "some/dir/.."
|
||||
| path dirname -r eggs
|
||||
"#
|
||||
));
|
||||
|
||||
let expected = join_path_sep(&["eggs", ".."]);
|
||||
assert_eq!(actual.out, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn returns_dirname_of_zero_levels() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "some/dir/with/spam.txt"
|
||||
| path dirname -n 0
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "some/dir/with/spam.txt");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn replaces_dirname_of_zero_levels_with_empty_string() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "some/dir/with/spam.txt"
|
||||
| path dirname -n 0 -r ""
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn replaces_dirname_of_more_levels() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "some/dir/with/spam.txt"
|
||||
| path dirname -r eggs -n 2
|
||||
"#
|
||||
));
|
||||
|
||||
let expected = join_path_sep(&["eggs", "with/spam.txt"]);
|
||||
assert_eq!(actual.out, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn replaces_dirname_of_way_too_many_levels() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "some/dir/with/spam.txt"
|
||||
| path dirname -r eggs -n 999
|
||||
"#
|
||||
));
|
||||
|
||||
let expected = join_path_sep(&["eggs", "some/dir/with/spam.txt"]);
|
||||
assert_eq!(actual.out, expected);
|
||||
}
|
53
crates/nu-cli/tests/commands/path/exists.rs
Normal file
@ -0,0 +1,53 @@
|
||||
use nu_test_support::fs::Stub::EmptyFile;
|
||||
use nu_test_support::nu;
|
||||
use nu_test_support::playground::Playground;
|
||||
|
||||
#[test]
|
||||
fn checks_if_existing_file_exists() {
|
||||
Playground::setup("path_exists_1", |dirs, sandbox| {
|
||||
sandbox.with_files(vec![EmptyFile("spam.txt")]);
|
||||
|
||||
let actual = nu!(
|
||||
cwd: dirs.test(),
|
||||
"echo spam.txt | path exists"
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "true");
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn checks_if_missing_file_exists() {
|
||||
Playground::setup("path_exists_2", |dirs, _| {
|
||||
let actual = nu!(
|
||||
cwd: dirs.test(),
|
||||
"echo spam.txt | path exists"
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "false");
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn checks_if_dot_exists() {
|
||||
Playground::setup("path_exists_3", |dirs, _| {
|
||||
let actual = nu!(
|
||||
cwd: dirs.test(),
|
||||
"echo '.' | path exists"
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "true");
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn checks_if_double_dot_exists() {
|
||||
Playground::setup("path_exists_4", |dirs, _| {
|
||||
let actual = nu!(
|
||||
cwd: dirs.test(),
|
||||
"echo '..' | path exists"
|
||||
);
|
||||
|
||||
assert_eq!(actual.out, "true");
|
||||
})
|
||||
}
|
45
crates/nu-cli/tests/commands/path/expand.rs
Normal file
@ -0,0 +1,45 @@
|
||||
use nu_test_support::fs::Stub::EmptyFile;
|
||||
use nu_test_support::playground::Playground;
|
||||
use nu_test_support::{nu, pipeline};
|
||||
|
||||
use std::path::PathBuf;
|
||||
|
||||
#[test]
|
||||
fn expands_path_with_dot() {
|
||||
Playground::setup("path_expand_1", |dirs, sandbox| {
|
||||
sandbox
|
||||
.within("menu")
|
||||
.with_files(vec![EmptyFile("spam.txt")]);
|
||||
|
||||
let actual = nu!(
|
||||
cwd: dirs.test(), pipeline(
|
||||
r#"
|
||||
echo "menu/./spam.txt"
|
||||
| path expand
|
||||
"#
|
||||
));
|
||||
|
||||
let expected = dirs.test.join("menu").join("spam.txt");
|
||||
assert_eq!(PathBuf::from(actual.out), expected);
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn expands_path_with_double_dot() {
|
||||
Playground::setup("path_expand_2", |dirs, sandbox| {
|
||||
sandbox
|
||||
.within("menu")
|
||||
.with_files(vec![EmptyFile("spam.txt")]);
|
||||
|
||||
let actual = nu!(
|
||||
cwd: dirs.test(), pipeline(
|
||||
r#"
|
||||
echo "menu/../menu/spam.txt"
|
||||
| path expand
|
||||
"#
|
||||
));
|
||||
|
||||
let expected = dirs.test.join("menu").join("spam.txt");
|
||||
assert_eq!(PathBuf::from(actual.out), expected);
|
||||
})
|
||||
}
|
37
crates/nu-cli/tests/commands/path/extension.rs
Normal file
@ -0,0 +1,37 @@
|
||||
use nu_test_support::{nu, pipeline};
|
||||
|
||||
#[test]
|
||||
fn returns_extension_of_path_ending_with_dot() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "bacon." | path extension
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn replaces_extension_with_dot_of_path_ending_with_dot() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "bacon." | path extension -r .egg
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "bacon..egg");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn replaces_extension_of_empty_path() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "" | path extension -r egg
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "");
|
||||
}
|
95
crates/nu-cli/tests/commands/path/filestem.rs
Normal file
@ -0,0 +1,95 @@
|
||||
use nu_test_support::{nu, pipeline};
|
||||
|
||||
use super::join_path_sep;
|
||||
|
||||
#[test]
|
||||
fn returns_filestem_of_dot() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "menu/eggs/."
|
||||
| path filestem
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "eggs");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn returns_filestem_of_double_dot() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "menu/eggs/.."
|
||||
| path filestem
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn returns_filestem_of_path_with_empty_prefix() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "menu/spam.txt"
|
||||
| path filestem -p ""
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "spam");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn returns_filestem_of_path_with_empty_suffix() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "menu/spam.txt"
|
||||
| path filestem -s ""
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "spam.txt");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn returns_filestem_of_path_with_empty_prefix_and_suffix() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "menu/spam.txt"
|
||||
| path filestem -p "" -s ""
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "spam.txt");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn returns_filestem_with_wrong_prefix_and_suffix() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "menu/spam.txt"
|
||||
| path filestem -p "bacon" -s "eggs"
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "spam.txt");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn replaces_filestem_stripped_to_dot() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "menu/spam.txt"
|
||||
| path filestem -p "spam" -s "txt" -r ".eggs."
|
||||
"#
|
||||
));
|
||||
|
||||
let expected = join_path_sep(&["menu", "spam.eggs.txt"]);
|
||||
assert_eq!(actual.out, expected);
|
||||
}
|
33
crates/nu-cli/tests/commands/path/mod.rs
Normal file
@ -0,0 +1,33 @@
|
||||
mod basename;
|
||||
mod dirname;
|
||||
mod exists;
|
||||
mod expand;
|
||||
mod extension;
|
||||
mod filestem;
|
||||
mod type_;
|
||||
|
||||
use std::path::MAIN_SEPARATOR;
|
||||
|
||||
/// Helper function that joins string literals with '/' or '\', based on host OS
|
||||
fn join_path_sep(pieces: &[&str]) -> String {
|
||||
let sep_string = String::from(MAIN_SEPARATOR);
|
||||
pieces.join(&sep_string)
|
||||
}
|
||||
|
||||
#[cfg(windows)]
|
||||
#[test]
|
||||
fn joins_path_on_windows() {
|
||||
let pieces = ["sausage", "bacon", "spam"];
|
||||
let actual = join_path_sep(&pieces);
|
||||
|
||||
assert_eq!(&actual, "sausage\\bacon\\spam");
|
||||
}
|
||||
|
||||
#[cfg(not(windows))]
|
||||
#[test]
|
||||
fn joins_path_on_other_than_windows() {
|
||||
let pieces = ["sausage", "bacon", "spam"];
|
||||
let actual = join_path_sep(&pieces);
|
||||
|
||||
assert_eq!(&actual, "sausage/bacon/spam");
|
||||
}
|
54
crates/nu-cli/tests/commands/path/type_.rs
Normal file
@ -0,0 +1,54 @@
|
||||
use nu_test_support::fs::Stub::EmptyFile;
|
||||
use nu_test_support::playground::Playground;
|
||||
use nu_test_support::{nu, pipeline};
|
||||
|
||||
#[test]
|
||||
fn returns_type_of_missing_file() {
|
||||
let actual = nu!(
|
||||
cwd: "tests", pipeline(
|
||||
r#"
|
||||
echo "spam.txt"
|
||||
| path type
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn returns_type_of_existing_file() {
|
||||
Playground::setup("path_expand_1", |dirs, sandbox| {
|
||||
sandbox
|
||||
.within("menu")
|
||||
.with_files(vec![EmptyFile("spam.txt")]);
|
||||
|
||||
let actual = nu!(
|
||||
cwd: dirs.test(), pipeline(
|
||||
r#"
|
||||
echo "menu"
|
||||
| path type
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "Dir");
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn returns_type_of_existing_directory() {
|
||||
Playground::setup("path_expand_1", |dirs, sandbox| {
|
||||
sandbox
|
||||
.within("menu")
|
||||
.with_files(vec![EmptyFile("spam.txt")]);
|
||||
|
||||
let actual = nu!(
|
||||
cwd: dirs.test(), pipeline(
|
||||
r#"
|
||||
echo "menu/spam.txt"
|
||||
| path type
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "File");
|
||||
})
|
||||
}
|
37
crates/nu-cli/tests/commands/random/decimal.rs
Normal file
@ -0,0 +1,37 @@
|
||||
use nu_test_support::{nu, pipeline};
|
||||
|
||||
#[test]
|
||||
fn generates_an_decimal() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
random decimal 42..43
|
||||
"#
|
||||
));
|
||||
|
||||
assert!(actual.out.contains("42") || actual.out.contains("43"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generates_55() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
random decimal 55..55
|
||||
"#
|
||||
));
|
||||
|
||||
assert!(actual.out.contains("55"));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generates_0() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
random decimal ..<1
|
||||
"#
|
||||
));
|
||||
|
||||
assert!(actual.out.contains('0'));
|
||||
}
|
@ -1,4 +1,5 @@
mod bool;
mod decimal;
mod dice;
mod integer;
#[cfg(feature = "uuid_crate")]

@ -33,29 +33,17 @@ fn figures_out_intelligently_where_to_write_out_with_metadata() {
|
||||
#[test]
|
||||
fn writes_out_csv() {
|
||||
Playground::setup("save_test_2", |dirs, sandbox| {
|
||||
sandbox.with_files(vec![FileWithContent(
|
||||
"cargo_sample.json",
|
||||
r#"
|
||||
{
|
||||
"package": {
|
||||
"name": "nu",
|
||||
"version": "0.14",
|
||||
"description": "A new type of shell",
|
||||
"license": "MIT",
|
||||
"edition": "2018"
|
||||
}
|
||||
}
|
||||
"#,
|
||||
)]);
|
||||
sandbox.with_files(vec![]);
|
||||
|
||||
let expected_file = dirs.test().join("cargo_sample.csv");
|
||||
|
||||
nu!(
|
||||
cwd: dirs.root(),
|
||||
"open save_test_2/cargo_sample.json | get package | save save_test_2/cargo_sample.csv",
|
||||
r#"echo [[name, version, description, license, edition]; [nu, "0.14", "A new type of shell", "MIT", "2018"]] | save save_test_2/cargo_sample.csv"#,
|
||||
);
|
||||
|
||||
let actual = file_contents(expected_file);
|
||||
println!("{}", actual);
|
||||
assert!(actual.contains("nu,0.14,A new type of shell,MIT,2018"));
|
||||
})
|
||||
}
|
||||
|
@ -150,18 +150,24 @@ fn uniq_counting() {
|
||||
| from json
|
||||
| wrap item
|
||||
| uniq --count
|
||||
| where item == A
|
||||
| get count
|
||||
"#
|
||||
));
|
||||
let expected = nu!(
|
||||
assert_eq!(actual.out, "2");
|
||||
|
||||
let actual = nu!(
|
||||
cwd: "tests/fixtures/formats", pipeline(
|
||||
r#"
|
||||
echo '[{"item": "A", "count": 2}, {"item": "B", "count": 1}]'
|
||||
| from json
|
||||
echo '["A", "B", "A"]'
|
||||
| from json
|
||||
| wrap item
|
||||
| uniq --count
|
||||
| where item == B
|
||||
| get count
|
||||
"#
|
||||
));
|
||||
print!("{}", actual.out);
|
||||
print!("{}", expected.out);
|
||||
assert_eq!(actual.out, expected.out);
|
||||
assert_eq!(actual.out, "1");
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
@ -1,7 +1,31 @@
|
||||
use nu_test_support::{nu, pipeline};
|
||||
|
||||
#[test]
|
||||
fn out_md_simple() {
|
||||
fn md_empty() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
echo "{}" | from json | to md
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn md_empty_pretty() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
echo "{}" | from json | to md -p
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn md_simple() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
@ -13,7 +37,19 @@ fn out_md_simple() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn out_md_table() {
|
||||
fn md_simple_pretty() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
echo 3 | to md -p
|
||||
"#
|
||||
));
|
||||
|
||||
assert_eq!(actual.out, "3");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn md_table() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
@ -25,7 +61,7 @@ fn out_md_table() {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn out_md_table_pretty() {
|
||||
fn md_table_pretty() {
|
||||
let actual = nu!(
|
||||
cwd: ".", pipeline(
|
||||
r#"
|
||||
|
@ -4,7 +4,7 @@ description = "CLI for nushell"
edition = "2018"
license = "MIT"
name = "nu-data"
version = "0.22.0"
version = "0.23.0"

[lib]
doctest = false
@ -29,12 +29,12 @@ query_interface = "0.3.5"
serde = {version = "1.0.115", features = ["derive"]}
toml = "0.5.6"

nu-errors = {version = "0.22.0", path = "../nu-errors"}
nu-protocol = {version = "0.22.0", path = "../nu-protocol"}
nu-source = {version = "0.22.0", path = "../nu-source"}
nu-table = {version = "0.22.0", path = "../nu-table"}
nu-test-support = {version = "0.22.0", path = "../nu-test-support"}
nu-value-ext = {version = "0.22.0", path = "../nu-value-ext"}
nu-errors = {version = "0.23.0", path = "../nu-errors"}
nu-protocol = {version = "0.23.0", path = "../nu-protocol"}
nu-source = {version = "0.23.0", path = "../nu-source"}
nu-table = {version = "0.23.0", path = "../nu-table"}
nu-test-support = {version = "0.23.0", path = "../nu-test-support"}
nu-value-ext = {version = "0.23.0", path = "../nu-value-ext"}

[target.'cfg(unix)'.dependencies]
users = "0.10.0"

@ -4,13 +4,13 @@ description = "Core error subsystem for Nushell"
edition = "2018"
license = "MIT"
name = "nu-errors"
version = "0.22.0"
version = "0.23.0"

[lib]
doctest = false

[dependencies]
nu-source = {path = "../nu-source", version = "0.22.0"}
nu-source = {path = "../nu-source", version = "0.23.0"}

ansi_term = "0.12.1"
bigdecimal = {version = "0.2.0", features = ["serde"]}

15
crates/nu-json/Cargo.toml
Normal file
@ -0,0 +1,15 @@
[package]
authors = ["The Nu Project Contributors", "Christian Zangl <laktak@cdak.net>"]
description = "Fork of serde-hjson"
edition = "2018"
license = "MIT"
name = "nu-json"
version = "0.23.0"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
serde = "^0.8.0"
num-traits = "~0.1.32"
regex = "^1.0"
lazy_static = "1"

29
crates/nu-json/LICENSE
Normal file
@ -0,0 +1,29 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 The Rust Project Developers
|
||||
Copyright (c) 2016 Christian Zangl
|
||||
Copyright (c) 2020 The Nu Project Contributors
|
||||
|
||||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal in the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions
|
||||
of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
121
crates/nu-json/src/builder.rs
Normal file
@ -0,0 +1,121 @@
|
||||
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use serde::ser;
|
||||
|
||||
use crate::value::{self, Map, Value};
|
||||
|
||||
/// This structure provides a simple interface for constructing a JSON array.
|
||||
pub struct ArrayBuilder {
|
||||
array: Vec<Value>,
|
||||
}
|
||||
|
||||
impl Default for ArrayBuilder {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl ArrayBuilder {
|
||||
/// Construct an `ObjectBuilder`.
|
||||
pub fn new() -> ArrayBuilder {
|
||||
ArrayBuilder { array: Vec::new() }
|
||||
}
|
||||
|
||||
/// Return the constructed `Value`.
|
||||
pub fn unwrap(self) -> Value {
|
||||
Value::Array(self.array)
|
||||
}
|
||||
|
||||
/// Insert a value into the array.
|
||||
pub fn push<T: ser::Serialize>(mut self, v: T) -> ArrayBuilder {
|
||||
self.array.push(value::to_value(&v));
|
||||
self
|
||||
}
|
||||
|
||||
/// Creates and passes an `ArrayBuilder` into a closure, then inserts the resulting array into
|
||||
/// this array.
|
||||
pub fn push_array<F>(mut self, f: F) -> ArrayBuilder
|
||||
where
|
||||
F: FnOnce(ArrayBuilder) -> ArrayBuilder,
|
||||
{
|
||||
let builder = ArrayBuilder::new();
|
||||
self.array.push(f(builder).unwrap());
|
||||
self
|
||||
}
|
||||
|
||||
/// Creates and passes an `ArrayBuilder` into a closure, then inserts the resulting object into
|
||||
/// this array.
|
||||
pub fn push_object<F>(mut self, f: F) -> ArrayBuilder
|
||||
where
|
||||
F: FnOnce(ObjectBuilder) -> ObjectBuilder,
|
||||
{
|
||||
let builder = ObjectBuilder::new();
|
||||
self.array.push(f(builder).unwrap());
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// This structure provides a simple interface for constructing a JSON object.
|
||||
pub struct ObjectBuilder {
|
||||
object: Map<String, Value>,
|
||||
}
|
||||
|
||||
impl Default for ObjectBuilder {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl ObjectBuilder {
|
||||
/// Construct an `ObjectBuilder`.
|
||||
pub fn new() -> ObjectBuilder {
|
||||
ObjectBuilder { object: Map::new() }
|
||||
}
|
||||
|
||||
/// Return the constructed `Value`.
|
||||
pub fn unwrap(self) -> Value {
|
||||
Value::Object(self.object)
|
||||
}
|
||||
|
||||
/// Insert a key-value pair into the object.
|
||||
pub fn insert<S, V>(mut self, key: S, value: V) -> ObjectBuilder
|
||||
where
|
||||
S: Into<String>,
|
||||
V: ser::Serialize,
|
||||
{
|
||||
self.object.insert(key.into(), value::to_value(&value));
|
||||
self
|
||||
}
|
||||
|
||||
/// Creates and passes an `ObjectBuilder` into a closure, then inserts the resulting array into
|
||||
/// this object.
|
||||
pub fn insert_array<S, F>(mut self, key: S, f: F) -> ObjectBuilder
|
||||
where
|
||||
S: Into<String>,
|
||||
F: FnOnce(ArrayBuilder) -> ArrayBuilder,
|
||||
{
|
||||
let builder = ArrayBuilder::new();
|
||||
self.object.insert(key.into(), f(builder).unwrap());
|
||||
self
|
||||
}
|
||||
|
||||
/// Creates and passes an `ObjectBuilder` into a closure, then inserts the resulting object into
|
||||
/// this object.
|
||||
pub fn insert_object<S, F>(mut self, key: S, f: F) -> ObjectBuilder
|
||||
where
|
||||
S: Into<String>,
|
||||
F: FnOnce(ObjectBuilder) -> ObjectBuilder,
|
||||
{
|
||||
let builder = ObjectBuilder::new();
|
||||
self.object.insert(key.into(), f(builder).unwrap());
|
||||
self
|
||||
}
|
||||
}
|
951
crates/nu-json/src/de.rs
Normal file
@ -0,0 +1,951 @@
|
||||
//! Hjson Deserialization
|
||||
//!
|
||||
//! This module provides for Hjson deserialization with the type `Deserializer`.
|
||||
|
||||
use std::char;
|
||||
use std::io;
|
||||
use std::marker::PhantomData;
|
||||
use std::str;
|
||||
|
||||
use serde::de;
|
||||
|
||||
use super::error::{Error, ErrorCode, Result};
|
||||
use super::util::StringReader;
|
||||
use super::util::{Number, ParseNumber};
|
||||
|
||||
enum State {
|
||||
Normal,
|
||||
Root,
|
||||
Keyname,
|
||||
}
|
||||
|
||||
/// A structure that deserializes Hjson into Rust values.
|
||||
pub struct Deserializer<Iter: Iterator<Item = u8>> {
|
||||
rdr: StringReader<Iter>,
|
||||
str_buf: Vec<u8>,
|
||||
state: State,
|
||||
}
|
||||
|
||||
// macro_rules! try_or_invalid {
|
||||
// ($self_:expr, $e:expr) => {
|
||||
// match $e {
|
||||
// Some(v) => v,
|
||||
// None => { return Err($self_.error(ErrorCode::InvalidNumber)); }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
impl<Iter> Deserializer<Iter>
|
||||
where
|
||||
Iter: Iterator<Item = u8>,
|
||||
{
|
||||
/// Creates the Hjson parser from an `std::iter::Iterator`.
|
||||
#[inline]
|
||||
pub fn new(rdr: Iter) -> Deserializer<Iter> {
|
||||
Deserializer {
|
||||
rdr: StringReader::new(rdr),
|
||||
str_buf: Vec::with_capacity(128),
|
||||
state: State::Normal,
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates the Hjson parser from an `std::iter::Iterator`.
|
||||
#[inline]
|
||||
pub fn new_for_root(rdr: Iter) -> Deserializer<Iter> {
|
||||
let mut res = Deserializer::new(rdr);
|
||||
res.state = State::Root;
|
||||
res
|
||||
}
|
||||
|
||||
/// The `Deserializer::end` method should be called after a value has been fully deserialized.
|
||||
/// This allows the `Deserializer` to validate that the input stream is at the end or that it
|
||||
/// only has trailing whitespace.
|
||||
#[inline]
|
||||
pub fn end(&mut self) -> Result<()> {
|
||||
self.rdr.parse_whitespace()?;
|
||||
if self.rdr.eof()? {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(self.rdr.error(ErrorCode::TrailingCharacters))
|
||||
}
|
||||
}
|
||||
|
||||
fn is_punctuator_char(&mut self, ch: u8) -> bool {
|
||||
matches!(ch, b'{' | b'}' | b'[' | b']' | b',' | b':')
|
||||
}
|
||||
|
||||
fn parse_keyname<V>(&mut self, mut visitor: V) -> Result<V::Value>
|
||||
where
|
||||
V: de::Visitor,
|
||||
{
|
||||
// quotes for keys are optional in Hjson
|
||||
// unless they include {}[],: or whitespace.
|
||||
// assume whitespace was already eaten
|
||||
|
||||
self.str_buf.clear();
|
||||
|
||||
let mut space: Option<usize> = None;
|
||||
loop {
|
||||
let ch = self.rdr.next_char_or_null()?;
|
||||
|
||||
if ch == b':' {
|
||||
if self.str_buf.is_empty() {
|
||||
return Err(self.rdr.error(ErrorCode::Custom(
|
||||
"Found ':' but no key name (for an empty key name use quotes)".to_string(),
|
||||
)));
|
||||
} else if space.is_some()
|
||||
&& space.expect("Internal error: json parsing") != self.str_buf.len()
|
||||
{
|
||||
return Err(self.rdr.error(ErrorCode::Custom(
|
||||
"Found whitespace in your key name (use quotes to include)".to_string(),
|
||||
)));
|
||||
}
|
||||
self.rdr.uneat_char(ch);
|
||||
let s = str::from_utf8(&self.str_buf).expect("Internal error: json parsing");
|
||||
return visitor.visit_str(s);
|
||||
} else if ch <= b' ' {
|
||||
if ch == 0 {
|
||||
return Err(self.rdr.error(ErrorCode::EOFWhileParsingObject));
|
||||
} else if space.is_none() {
|
||||
space = Some(self.str_buf.len());
|
||||
}
|
||||
} else if self.is_punctuator_char(ch) {
|
||||
return Err(self.rdr.error(ErrorCode::Custom("Found a punctuator where a key name was expected (check your syntax or use quotes if the key name includes {}[],: or whitespace)".to_string())));
|
||||
} else {
|
||||
self.str_buf.push(ch);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_value<V>(&mut self, mut visitor: V) -> Result<V::Value>
|
||||
where
|
||||
V: de::Visitor,
|
||||
{
|
||||
self.rdr.parse_whitespace()?;
|
||||
|
||||
if self.rdr.eof()? {
|
||||
return Err(self.rdr.error(ErrorCode::EOFWhileParsingValue));
|
||||
}
|
||||
|
||||
match self.state {
|
||||
State::Keyname => {
|
||||
self.state = State::Normal;
|
||||
return self.parse_keyname(visitor);
|
||||
}
|
||||
State::Root => {
|
||||
self.state = State::Normal;
|
||||
return visitor.visit_map(MapVisitor::new(self, true));
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
let value = match self.rdr.peek_or_null()? {
|
||||
/*
|
||||
b'-' => {
|
||||
self.rdr.eat_char();
|
||||
self.parse_integer(false, visitor)
|
||||
}
|
||||
b'0' ..= b'9' => {
|
||||
self.parse_integer(true, visitor)
|
||||
}
|
||||
*/
|
||||
b'"' => {
|
||||
self.rdr.eat_char();
|
||||
self.parse_string()?;
|
||||
let s = str::from_utf8(&self.str_buf).expect("Internal error: json parsing");
|
||||
visitor.visit_str(s)
|
||||
}
|
||||
b'[' => {
|
||||
self.rdr.eat_char();
|
||||
visitor.visit_seq(SeqVisitor::new(self))
|
||||
}
|
||||
b'{' => {
|
||||
self.rdr.eat_char();
|
||||
visitor.visit_map(MapVisitor::new(self, false))
|
||||
}
|
||||
b'\x00' => Err(self.rdr.error(ErrorCode::ExpectedSomeValue)),
|
||||
_ => self.parse_tfnns(visitor),
|
||||
};
|
||||
|
||||
match value {
|
||||
Ok(value) => Ok(value),
|
||||
Err(Error::Syntax(code, _, _)) => Err(self.rdr.error(code)),
|
||||
Err(err) => Err(err),
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_ident(&mut self, ident: &[u8]) -> Result<()> {
|
||||
for c in ident {
|
||||
if Some(*c) != self.rdr.next_char()? {
|
||||
return Err(self.rdr.error(ErrorCode::ExpectedSomeIdent));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn parse_tfnns<V>(&mut self, mut visitor: V) -> Result<V::Value>
|
||||
where
|
||||
V: de::Visitor,
|
||||
{
|
||||
// Hjson strings can be quoteless
|
||||
// returns string, true, false, or null.
|
||||
self.str_buf.clear();
|
||||
|
||||
let first = self.rdr.peek()?.expect("Internal error: json parsing");
|
||||
|
||||
if self.is_punctuator_char(first) {
|
||||
return Err(self.rdr.error(ErrorCode::PunctuatorInQlString));
|
||||
}
|
||||
|
||||
loop {
|
||||
let ch = self.rdr.next_char_or_null()?;
|
||||
|
||||
let is_eol = ch == b'\r' || ch == b'\n' || ch == b'\x00';
|
||||
let is_comment = ch == b'#'
|
||||
|| if ch == b'/' {
|
||||
let next = self.rdr.peek_or_null()?;
|
||||
next == b'/' || next == b'*'
|
||||
} else {
|
||||
false
|
||||
};
|
||||
if is_eol || is_comment || ch == b',' || ch == b'}' || ch == b']' {
|
||||
let chf = self.str_buf[0];
|
||||
match chf {
|
||||
b'f' => {
|
||||
if str::from_utf8(&self.str_buf)
|
||||
.expect("Internal error: json parsing")
|
||||
.trim()
|
||||
== "false"
|
||||
{
|
||||
self.rdr.uneat_char(ch);
|
||||
return visitor.visit_bool(false);
|
||||
}
|
||||
}
|
||||
b'n' => {
|
||||
if str::from_utf8(&self.str_buf)
|
||||
.expect("Internal error: json parsing")
|
||||
.trim()
|
||||
== "null"
|
||||
{
|
||||
self.rdr.uneat_char(ch);
|
||||
return visitor.visit_unit();
|
||||
}
|
||||
}
|
||||
b't' => {
|
||||
if str::from_utf8(&self.str_buf)
|
||||
.expect("Internal error: json parsing")
|
||||
.trim()
|
||||
== "true"
|
||||
{
|
||||
self.rdr.uneat_char(ch);
|
||||
return visitor.visit_bool(true);
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
if chf == b'-' || chf >= b'0' && chf <= b'9' {
|
||||
let mut pn = ParseNumber::new(self.str_buf.iter().cloned());
|
||||
match pn.parse(false) {
|
||||
Ok(Number::F64(v)) => {
|
||||
self.rdr.uneat_char(ch);
|
||||
return visitor.visit_f64(v);
|
||||
}
|
||||
Ok(Number::U64(v)) => {
|
||||
self.rdr.uneat_char(ch);
|
||||
return visitor.visit_u64(v);
|
||||
}
|
||||
Ok(Number::I64(v)) => {
|
||||
self.rdr.uneat_char(ch);
|
||||
return visitor.visit_i64(v);
|
||||
}
|
||||
Err(_) => {} // not a number, continue
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if is_eol {
|
||||
// remove any whitespace at the end (ignored in quoteless strings)
|
||||
return visitor.visit_str(
|
||||
str::from_utf8(&self.str_buf)
|
||||
.expect("Internal error: json parsing")
|
||||
.trim(),
|
||||
);
|
||||
}
|
||||
}
|
||||
self.str_buf.push(ch);
|
||||
|
||||
if self.str_buf == vec![b'\''; 3] {
|
||||
return self.parse_ml_string(visitor);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn decode_hex_escape(&mut self) -> Result<u16> {
|
||||
let mut i = 0;
|
||||
let mut n = 0u16;
|
||||
while i < 4 && !(self.rdr.eof()?) {
|
||||
n = match self.rdr.next_char_or_null()? {
|
||||
c @ b'0'..=b'9' => n * 16_u16 + ((c as u16) - (b'0' as u16)),
|
||||
b'a' | b'A' => n * 16_u16 + 10_u16,
|
||||
b'b' | b'B' => n * 16_u16 + 11_u16,
|
||||
b'c' | b'C' => n * 16_u16 + 12_u16,
|
||||
b'd' | b'D' => n * 16_u16 + 13_u16,
|
||||
b'e' | b'E' => n * 16_u16 + 14_u16,
|
||||
b'f' | b'F' => n * 16_u16 + 15_u16,
|
||||
_ => {
|
||||
return Err(self.rdr.error(ErrorCode::InvalidEscape));
|
||||
}
|
||||
};
|
||||
|
||||
i += 1;
|
||||
}
|
||||
|
||||
// Error out if we didn't parse 4 digits.
|
||||
if i != 4 {
|
||||
return Err(self.rdr.error(ErrorCode::InvalidEscape));
|
||||
}
|
||||
|
||||
Ok(n)
|
||||
}
|
||||
|
||||
fn ml_skip_white(&mut self) -> Result<bool> {
|
||||
match self.rdr.peek_or_null()? {
|
||||
b' ' | b'\t' | b'\r' => {
|
||||
self.rdr.eat_char();
|
||||
Ok(true)
|
||||
}
|
||||
_ => Ok(false),
|
||||
}
|
||||
}
|
||||
|
||||
fn ml_skip_indent(&mut self, indent: usize) -> Result<()> {
|
||||
let mut skip = indent;
|
||||
while self.ml_skip_white()? && skip > 0 {
|
||||
skip -= 1;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn parse_ml_string<V>(&mut self, mut visitor: V) -> Result<V::Value>
|
||||
where
|
||||
V: de::Visitor,
|
||||
{
|
||||
self.str_buf.clear();
|
||||
|
||||
// Parse a multiline string value.
|
||||
let mut triple = 0;
|
||||
|
||||
// we are at ''' +1 - get indent
|
||||
let (_, col) = self.rdr.pos();
|
||||
let indent = col - 4;
|
||||
|
||||
// skip spaces/tabs up to the end of the line (the newline itself is consumed just below)
|
||||
while self.ml_skip_white()? {}
|
||||
if self.rdr.peek_or_null()? == b'\n' {
|
||||
self.rdr.eat_char();
|
||||
self.ml_skip_indent(indent)?;
|
||||
}
|
||||
|
||||
// When parsing multiline string values, we must look for ' characters.
|
||||
loop {
|
||||
if self.rdr.eof()? {
|
||||
return Err(self.rdr.error(ErrorCode::EOFWhileParsingString));
|
||||
} // todo error("Bad multiline string");
|
||||
let ch = self.rdr.next_char_or_null()?;
|
||||
|
||||
if ch == b'\'' {
|
||||
triple += 1;
|
||||
if triple == 3 {
|
||||
if self.str_buf.last() == Some(&b'\n') {
|
||||
self.str_buf.pop();
|
||||
}
|
||||
let res = str::from_utf8(&self.str_buf).expect("Internal error: json parsing");
|
||||
//todo if (self.str_buf.slice(-1) === '\n') self.str_buf=self.str_buf.slice(0, -1); // remove last EOL
|
||||
return visitor.visit_str(res);
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
while triple > 0 {
|
||||
self.str_buf.push(b'\'');
|
||||
triple -= 1;
|
||||
}
|
||||
|
||||
if ch != b'\r' {
|
||||
self.str_buf.push(ch);
|
||||
}
|
||||
if ch == b'\n' {
|
||||
self.ml_skip_indent(indent)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_string(&mut self) -> Result<()> {
|
||||
self.str_buf.clear();
|
||||
|
||||
loop {
|
||||
let ch = match self.rdr.next_char()? {
|
||||
Some(ch) => ch,
|
||||
None => {
|
||||
return Err(self.rdr.error(ErrorCode::EOFWhileParsingString));
|
||||
}
|
||||
};
|
||||
|
||||
match ch {
|
||||
b'"' => {
|
||||
return Ok(());
|
||||
}
|
||||
b'\\' => {
|
||||
let ch = match self.rdr.next_char()? {
|
||||
Some(ch) => ch,
|
||||
None => {
|
||||
return Err(self.rdr.error(ErrorCode::EOFWhileParsingString));
|
||||
}
|
||||
};
|
||||
|
||||
match ch {
|
||||
b'"' => self.str_buf.push(b'"'),
|
||||
b'\\' => self.str_buf.push(b'\\'),
|
||||
b'/' => self.str_buf.push(b'/'),
|
||||
b'b' => self.str_buf.push(b'\x08'),
|
||||
b'f' => self.str_buf.push(b'\x0c'),
|
||||
b'n' => self.str_buf.push(b'\n'),
|
||||
b'r' => self.str_buf.push(b'\r'),
|
||||
b't' => self.str_buf.push(b'\t'),
|
||||
b'u' => {
|
||||
let c = match self.decode_hex_escape()? {
|
||||
0xDC00..=0xDFFF => {
|
||||
return Err(self
|
||||
.rdr
|
||||
.error(ErrorCode::LoneLeadingSurrogateInHexEscape));
|
||||
}
|
||||
|
||||
// Non-BMP characters are encoded as a sequence of
|
||||
// two hex escapes, representing UTF-16 surrogates.
|
||||
n1 @ 0xD800..=0xDBFF => {
|
||||
match (self.rdr.next_char()?, self.rdr.next_char()?) {
|
||||
(Some(b'\\'), Some(b'u')) => (),
|
||||
_ => {
|
||||
return Err(self
|
||||
.rdr
|
||||
.error(ErrorCode::UnexpectedEndOfHexEscape));
|
||||
}
|
||||
}
|
||||
|
||||
let n2 = self.decode_hex_escape()?;
|
||||
|
||||
if n2 < 0xDC00 || n2 > 0xDFFF {
|
||||
return Err(self
|
||||
.rdr
|
||||
.error(ErrorCode::LoneLeadingSurrogateInHexEscape));
|
||||
}
|
||||
|
||||
let n = (((n1 - 0xD800) as u32) << 10 | (n2 - 0xDC00) as u32)
|
||||
+ 0x1_0000;
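// Worked example: the escape pair \uD83D\uDE00 gives n1 = 0xD83D and
// n2 = 0xDE00, so n = ((0x3D << 10) | 0x200) + 0x1_0000 = 0x1F600.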
|
||||
|
||||
match char::from_u32(n as u32) {
|
||||
Some(c) => c,
|
||||
None => {
|
||||
return Err(self
|
||||
.rdr
|
||||
.error(ErrorCode::InvalidUnicodeCodePoint));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
n => match char::from_u32(n as u32) {
|
||||
Some(c) => c,
|
||||
None => {
|
||||
return Err(self
|
||||
.rdr
|
||||
.error(ErrorCode::InvalidUnicodeCodePoint));
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
// FIXME: this allocation is required in order to be compatible with stable
|
||||
// rust, which doesn't support encoding a `char` into a stack buffer.
|
||||
let mut buf = String::new();
|
||||
buf.push(c);
|
||||
self.str_buf.extend(buf.bytes());
|
||||
}
|
||||
_ => {
|
||||
return Err(self.rdr.error(ErrorCode::InvalidEscape));
|
||||
}
|
||||
}
|
||||
}
|
||||
ch => {
|
||||
self.str_buf.push(ch);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_object_colon(&mut self) -> Result<()> {
|
||||
self.rdr.parse_whitespace()?;
|
||||
|
||||
match self.rdr.next_char()? {
|
||||
Some(b':') => Ok(()),
|
||||
Some(_) => Err(self.rdr.error(ErrorCode::ExpectedColon)),
|
||||
None => Err(self.rdr.error(ErrorCode::EOFWhileParsingObject)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<Iter> de::Deserializer for Deserializer<Iter>
|
||||
where
|
||||
Iter: Iterator<Item = u8>,
|
||||
{
|
||||
type Error = Error;
|
||||
|
||||
#[inline]
|
||||
fn deserialize<V>(&mut self, visitor: V) -> Result<V::Value>
|
||||
where
|
||||
V: de::Visitor,
|
||||
{
|
||||
self.parse_value(visitor)
|
||||
}
|
||||
|
||||
/// Parses a `null` as a None, and any other values as a `Some(...)`.
|
||||
#[inline]
|
||||
fn deserialize_option<V>(&mut self, mut visitor: V) -> Result<V::Value>
|
||||
where
|
||||
V: de::Visitor,
|
||||
{
|
||||
self.rdr.parse_whitespace()?;
|
||||
|
||||
match self.rdr.peek_or_null()? {
|
||||
b'n' => {
|
||||
self.rdr.eat_char();
|
||||
self.parse_ident(b"ull")?;
|
||||
visitor.visit_none()
|
||||
}
|
||||
_ => visitor.visit_some(self),
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses a newtype struct as the underlying value.
|
||||
#[inline]
|
||||
fn deserialize_newtype_struct<V>(&mut self, _name: &str, mut visitor: V) -> Result<V::Value>
|
||||
where
|
||||
V: de::Visitor,
|
||||
{
|
||||
visitor.visit_newtype_struct(self)
|
||||
}
|
||||
|
||||
forward_to_deserialize! {
|
||||
deserialize_bool();
|
||||
deserialize_usize();
|
||||
deserialize_u8();
|
||||
deserialize_u16();
|
||||
deserialize_u32();
|
||||
deserialize_u64();
|
||||
deserialize_isize();
|
||||
deserialize_i8();
|
||||
deserialize_i16();
|
||||
deserialize_i32();
|
||||
deserialize_i64();
|
||||
deserialize_f32();
|
||||
deserialize_f64();
|
||||
deserialize_char();
|
||||
deserialize_str();
|
||||
deserialize_string();
|
||||
deserialize_unit();
|
||||
deserialize_seq();
|
||||
deserialize_seq_fixed_size(len: usize);
|
||||
deserialize_bytes();
|
||||
deserialize_map();
|
||||
deserialize_unit_struct(name: &'static str);
|
||||
deserialize_tuple_struct(name: &'static str, len: usize);
|
||||
deserialize_struct(name: &'static str, fields: &'static [&'static str]);
|
||||
deserialize_struct_field();
|
||||
deserialize_tuple(len: usize);
|
||||
deserialize_enum(name: &'static str, variants: &'static [&'static str]);
|
||||
deserialize_ignored_any();
|
||||
}
|
||||
}
|
||||
|
||||
struct SeqVisitor<'a, Iter: 'a + Iterator<Item = u8>> {
|
||||
de: &'a mut Deserializer<Iter>,
|
||||
}
|
||||
|
||||
impl<'a, Iter: Iterator<Item = u8>> SeqVisitor<'a, Iter> {
|
||||
fn new(de: &'a mut Deserializer<Iter>) -> Self {
|
||||
SeqVisitor { de }
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, Iter> de::SeqVisitor for SeqVisitor<'a, Iter>
|
||||
where
|
||||
Iter: Iterator<Item = u8>,
|
||||
{
|
||||
type Error = Error;
|
||||
|
||||
fn visit<T>(&mut self) -> Result<Option<T>>
|
||||
where
|
||||
T: de::Deserialize,
|
||||
{
|
||||
self.de.rdr.parse_whitespace()?;
|
||||
|
||||
match self.de.rdr.peek()? {
|
||||
Some(b']') => {
|
||||
return Ok(None);
|
||||
}
|
||||
Some(_) => {}
|
||||
None => {
|
||||
return Err(self.de.rdr.error(ErrorCode::EOFWhileParsingList));
|
||||
}
|
||||
}
|
||||
|
||||
let value = de::Deserialize::deserialize(self.de)?;
|
||||
|
||||
// in Hjson the comma is optional and trailing commas are allowed
|
||||
self.de.rdr.parse_whitespace()?;
|
||||
if self.de.rdr.peek()? == Some(b',') {
|
||||
self.de.rdr.eat_char();
|
||||
self.de.rdr.parse_whitespace()?;
|
||||
}
|
||||
|
||||
Ok(Some(value))
|
||||
}
|
||||
|
||||
fn end(&mut self) -> Result<()> {
|
||||
self.de.rdr.parse_whitespace()?;
|
||||
|
||||
match self.de.rdr.next_char()? {
|
||||
Some(b']') => Ok(()),
|
||||
Some(_) => Err(self.de.rdr.error(ErrorCode::TrailingCharacters)),
|
||||
None => Err(self.de.rdr.error(ErrorCode::EOFWhileParsingList)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct MapVisitor<'a, Iter: 'a + Iterator<Item = u8>> {
|
||||
de: &'a mut Deserializer<Iter>,
|
||||
first: bool,
|
||||
root: bool,
|
||||
}
|
||||
|
||||
impl<'a, Iter: Iterator<Item = u8>> MapVisitor<'a, Iter> {
|
||||
fn new(de: &'a mut Deserializer<Iter>, root: bool) -> Self {
|
||||
MapVisitor {
|
||||
de,
|
||||
first: true,
|
||||
root,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, Iter> de::MapVisitor for MapVisitor<'a, Iter>
|
||||
where
|
||||
Iter: Iterator<Item = u8>,
|
||||
{
|
||||
type Error = Error;
|
||||
|
||||
fn visit_key<K>(&mut self) -> Result<Option<K>>
|
||||
where
|
||||
K: de::Deserialize,
|
||||
{
|
||||
self.de.rdr.parse_whitespace()?;
|
||||
|
||||
if self.first {
|
||||
self.first = false;
|
||||
} else if self.de.rdr.peek()? == Some(b',') {
|
||||
// in Hjson the comma is optional and trailing commas are allowed
|
||||
self.de.rdr.eat_char();
|
||||
self.de.rdr.parse_whitespace()?;
|
||||
}
|
||||
|
||||
match self.de.rdr.peek()? {
|
||||
Some(b'}') => return Ok(None), // handled later for root
|
||||
Some(_) => {}
|
||||
None => {
|
||||
if self.root {
|
||||
return Ok(None);
|
||||
} else {
|
||||
return Err(self.de.rdr.error(ErrorCode::EOFWhileParsingObject));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
match self.de.rdr.peek()? {
|
||||
Some(ch) => {
|
||||
self.de.state = if ch == b'"' {
|
||||
State::Normal
|
||||
} else {
|
||||
State::Keyname
|
||||
};
|
||||
Ok(Some(de::Deserialize::deserialize(self.de)?))
|
||||
}
|
||||
None => Err(self.de.rdr.error(ErrorCode::EOFWhileParsingValue)),
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_value<V>(&mut self) -> Result<V>
|
||||
where
|
||||
V: de::Deserialize,
|
||||
{
|
||||
self.de.parse_object_colon()?;
|
||||
|
||||
Ok(de::Deserialize::deserialize(self.de)?)
|
||||
}
|
||||
|
||||
fn end(&mut self) -> Result<()> {
|
||||
self.de.rdr.parse_whitespace()?;
|
||||
|
||||
match self.de.rdr.next_char()? {
|
||||
Some(b'}') => {
|
||||
if !self.root {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(self.de.rdr.error(ErrorCode::TrailingCharacters))
|
||||
} // todo
|
||||
}
|
||||
Some(_) => Err(self.de.rdr.error(ErrorCode::TrailingCharacters)),
|
||||
None => {
|
||||
if self.root {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(self.de.rdr.error(ErrorCode::EOFWhileParsingObject))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn missing_field<V>(&mut self, field: &'static str) -> Result<V>
|
||||
where
|
||||
V: de::Deserialize,
|
||||
{
|
||||
struct MissingFieldDeserializer(&'static str);
|
||||
|
||||
impl de::Deserializer for MissingFieldDeserializer {
|
||||
type Error = de::value::Error;
|
||||
|
||||
fn deserialize<V>(&mut self, _visitor: V) -> std::result::Result<V::Value, Self::Error>
|
||||
where
|
||||
V: de::Visitor,
|
||||
{
|
||||
let &mut MissingFieldDeserializer(field) = self;
|
||||
Err(de::value::Error::MissingField(field))
|
||||
}
|
||||
|
||||
fn deserialize_option<V>(
|
||||
&mut self,
|
||||
mut visitor: V,
|
||||
) -> std::result::Result<V::Value, Self::Error>
|
||||
where
|
||||
V: de::Visitor,
|
||||
{
|
||||
visitor.visit_none()
|
||||
}
|
||||
|
||||
forward_to_deserialize! {
|
||||
deserialize_bool();
|
||||
deserialize_usize();
|
||||
deserialize_u8();
|
||||
deserialize_u16();
|
||||
deserialize_u32();
|
||||
deserialize_u64();
|
||||
deserialize_isize();
|
||||
deserialize_i8();
|
||||
deserialize_i16();
|
||||
deserialize_i32();
|
||||
deserialize_i64();
|
||||
deserialize_f32();
|
||||
deserialize_f64();
|
||||
deserialize_char();
|
||||
deserialize_str();
|
||||
deserialize_string();
|
||||
deserialize_unit();
|
||||
deserialize_seq();
|
||||
deserialize_seq_fixed_size(len: usize);
|
||||
deserialize_bytes();
|
||||
deserialize_map();
|
||||
deserialize_unit_struct(name: &'static str);
|
||||
deserialize_newtype_struct(name: &'static str);
|
||||
deserialize_tuple_struct(name: &'static str, len: usize);
|
||||
deserialize_struct(name: &'static str, fields: &'static [&'static str]);
|
||||
deserialize_struct_field();
|
||||
deserialize_tuple(len: usize);
|
||||
deserialize_enum(name: &'static str, variants: &'static [&'static str]);
|
||||
deserialize_ignored_any();
|
||||
}
|
||||
}
|
||||
|
||||
let mut de = MissingFieldDeserializer(field);
|
||||
Ok(de::Deserialize::deserialize(&mut de)?)
|
||||
}
|
||||
}
|
||||
|
||||
impl<Iter> de::VariantVisitor for Deserializer<Iter>
|
||||
where
|
||||
Iter: Iterator<Item = u8>,
|
||||
{
|
||||
type Error = Error;
|
||||
|
||||
fn visit_variant<V>(&mut self) -> Result<V>
|
||||
where
|
||||
V: de::Deserialize,
|
||||
{
|
||||
let val = de::Deserialize::deserialize(self)?;
|
||||
self.parse_object_colon()?;
|
||||
Ok(val)
|
||||
}
|
||||
|
||||
fn visit_unit(&mut self) -> Result<()> {
|
||||
de::Deserialize::deserialize(self)
|
||||
}
|
||||
|
||||
fn visit_newtype<T>(&mut self) -> Result<T>
|
||||
where
|
||||
T: de::Deserialize,
|
||||
{
|
||||
de::Deserialize::deserialize(self)
|
||||
}
|
||||
|
||||
fn visit_tuple<V>(&mut self, _len: usize, visitor: V) -> Result<V::Value>
|
||||
where
|
||||
V: de::Visitor,
|
||||
{
|
||||
de::Deserializer::deserialize(self, visitor)
|
||||
}
|
||||
|
||||
fn visit_struct<V>(&mut self, _fields: &'static [&'static str], visitor: V) -> Result<V::Value>
|
||||
where
|
||||
V: de::Visitor,
|
||||
{
|
||||
de::Deserializer::deserialize(self, visitor)
|
||||
}
|
||||
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
/// Iterator that deserializes a stream into multiple Hjson values.
|
||||
pub struct StreamDeserializer<T, Iter>
|
||||
where
|
||||
Iter: Iterator<Item = u8>,
|
||||
T: de::Deserialize,
|
||||
{
|
||||
deser: Deserializer<Iter>,
|
||||
_marker: PhantomData<T>,
|
||||
}
|
||||
|
||||
impl<T, Iter> StreamDeserializer<T, Iter>
|
||||
where
|
||||
Iter: Iterator<Item = u8>,
|
||||
T: de::Deserialize,
|
||||
{
|
||||
/// Returns an `Iterator` of decoded Hjson values, reading from an
/// underlying `Iterator<Item = u8>` byte stream.
|
||||
pub fn new(iter: Iter) -> StreamDeserializer<T, Iter> {
|
||||
StreamDeserializer {
|
||||
deser: Deserializer::new(iter),
|
||||
_marker: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T, Iter> Iterator for StreamDeserializer<T, Iter>
|
||||
where
|
||||
Iter: Iterator<Item = u8>,
|
||||
T: de::Deserialize,
|
||||
{
|
||||
type Item = Result<T>;
|
||||
|
||||
fn next(&mut self) -> Option<Result<T>> {
|
||||
// skip whitespaces, if any
|
||||
// this helps with trailing whitespaces, since whitespaces between
|
||||
// values are handled for us.
|
||||
if let Err(e) = self.deser.rdr.parse_whitespace() {
|
||||
return Some(Err(e));
|
||||
};
|
||||
|
||||
match self.deser.rdr.eof() {
|
||||
Ok(true) => None,
|
||||
Ok(false) => match de::Deserialize::deserialize(&mut self.deser) {
|
||||
Ok(v) => Some(Ok(v)),
|
||||
Err(e) => Some(Err(e)),
|
||||
},
|
||||
Err(e) => Some(Err(e)),
|
||||
}
|
||||
}
|
||||
}
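A short sketch of driving this iterator from a caller, assuming the crate is referenced as `nu_json` and that each newline-separated root value decodes into the `Value` type re-exported from `lib.rs`:

fn read_stream() -> nu_json::Result<()> {
    let stream: nu_json::StreamDeserializer<nu_json::Value, _> =
        nu_json::StreamDeserializer::new("1\n2\n3".bytes());
    // Collecting propagates the first error, if any.
    let values = stream.collect::<nu_json::Result<Vec<_>>>()?;
    assert_eq!(values.len(), 3);
    Ok(())
}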
|
||||
|
||||
//////////////////////////////////////////////////////////////////////////////
|
||||
|
||||
/// Decodes a Hjson value from an iterator over bytes
/// (`Iterator<Item = io::Result<u8>>`).
|
||||
pub fn from_iter<I, T>(iter: I) -> Result<T>
|
||||
where
|
||||
I: Iterator<Item = io::Result<u8>>,
|
||||
T: de::Deserialize,
|
||||
{
|
||||
let fold: io::Result<Vec<_>> = iter.collect();
|
||||
if let Err(e) = fold {
|
||||
return Err(Error::Io(e));
|
||||
}
|
||||
|
||||
let bytes = fold.expect("Internal error: json parsing");
|
||||
|
||||
// deserialize tries first to decode with legacy support (new_for_root)
|
||||
// and then with the standard method if this fails.
|
||||
// todo: add compile switch
|
||||
|
||||
// deserialize and make sure the whole stream has been consumed
|
||||
let mut de = Deserializer::new_for_root(bytes.iter().cloned());
|
||||
let value = match de::Deserialize::deserialize(&mut de).and_then(|x| {
|
||||
de.end()?;
|
||||
Ok(x)
|
||||
}) {
|
||||
Ok(v) => Ok(v),
|
||||
Err(_) => {
|
||||
let mut de2 = Deserializer::new(bytes.iter().cloned());
|
||||
match de::Deserialize::deserialize(&mut de2).and_then(|x| {
|
||||
de2.end()?;
|
||||
Ok(x)
|
||||
}) {
|
||||
Ok(v) => Ok(v),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/* without legacy support:
|
||||
// deserialize and make sure the whole stream has been consumed
|
||||
let mut de = Deserializer::new(bytes.iter().map(|b| *b));
|
||||
let value = match de::Deserialize::deserialize(&mut de)
|
||||
.and_then(|x| { de.end()); Ok(x) })
|
||||
{
|
||||
Ok(v) => Ok(v),
|
||||
Err(e) => Err(e),
|
||||
};
|
||||
*/
|
||||
|
||||
value
|
||||
}
|
||||
|
||||
/// Decodes a Hjson value from a `std::io::Read`.
|
||||
pub fn from_reader<R, T>(rdr: R) -> Result<T>
|
||||
where
|
||||
R: io::Read,
|
||||
T: de::Deserialize,
|
||||
{
|
||||
from_iter(rdr.bytes())
|
||||
}
|
||||
|
||||
/// Decodes a Hjson value from a byte slice `&[u8]`.
|
||||
pub fn from_slice<T>(v: &[u8]) -> Result<T>
|
||||
where
|
||||
T: de::Deserialize,
|
||||
{
|
||||
from_iter(v.iter().map(|byte| Ok(*byte)))
|
||||
}
|
||||
|
||||
/// Decodes a Hjson value from a `&str`.
|
||||
pub fn from_str<T>(s: &str) -> Result<T>
|
||||
where
|
||||
T: de::Deserialize,
|
||||
{
|
||||
from_slice(s.as_bytes())
|
||||
}
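Taken together, `from_iter`, `from_reader`, `from_slice`, and `from_str` form the decoding entry points of this module. A minimal caller-side sketch, assuming the crate is referenced as `nu_json` and decoding into the `Value` type re-exported from `lib.rs`:

fn read_config() -> nu_json::Result<()> {
    let text = "
        # Hjson allows quoteless keys and values; '#' starts a comment line
        name: nushell
        version: 0.23.0
    ";
    let _value: nu_json::Value = nu_json::from_str(text)?;
    Ok(())
}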
|
crates/nu-json/src/error.rs (new file, 243 lines)
@ -0,0 +1,243 @@
|
||||
//! JSON Errors
|
||||
//!
|
||||
//! This module is centered around the `Error` and `ErrorCode` types, which represents all possible
|
||||
//! `serde_hjson` errors.
|
||||
|
||||
use std::error;
|
||||
use std::fmt;
|
||||
use std::io;
|
||||
use std::result;
|
||||
use std::string::FromUtf8Error;
|
||||
|
||||
use serde::de;
|
||||
use serde::ser;
|
||||
|
||||
/// The errors that can arise while parsing a JSON stream.
|
||||
#[derive(Clone, PartialEq)]
|
||||
pub enum ErrorCode {
|
||||
/// Catchall for syntax error messages
|
||||
Custom(String),
|
||||
|
||||
/// Incorrect type from value
|
||||
InvalidType(de::Type),
|
||||
|
||||
/// Incorrect value
|
||||
InvalidValue(String),
|
||||
|
||||
/// Invalid length
|
||||
InvalidLength(usize),
|
||||
|
||||
/// Unknown variant in an enum.
|
||||
UnknownVariant(String),
|
||||
|
||||
/// Unknown field in struct.
|
||||
UnknownField(String),
|
||||
|
||||
/// Struct is missing a field.
|
||||
MissingField(&'static str),
|
||||
|
||||
/// EOF while parsing a list.
|
||||
EOFWhileParsingList,
|
||||
|
||||
/// EOF while parsing an object.
|
||||
EOFWhileParsingObject,
|
||||
|
||||
/// EOF while parsing a string.
|
||||
EOFWhileParsingString,
|
||||
|
||||
/// EOF while parsing a JSON value.
|
||||
EOFWhileParsingValue,
|
||||
|
||||
/// Expected this character to be a `':'`.
|
||||
ExpectedColon,
|
||||
|
||||
/// Expected this character to be either a `','` or a `]`.
|
||||
ExpectedListCommaOrEnd,
|
||||
|
||||
/// Expected this character to be either a `','` or a `}`.
|
||||
ExpectedObjectCommaOrEnd,
|
||||
|
||||
/// Expected to parse either a `true`, `false`, or a `null`.
|
||||
ExpectedSomeIdent,
|
||||
|
||||
/// Expected this character to start a JSON value.
|
||||
ExpectedSomeValue,
|
||||
|
||||
/// Invalid hex escape code.
|
||||
InvalidEscape,
|
||||
|
||||
/// Invalid number.
|
||||
InvalidNumber,
|
||||
|
||||
/// Invalid unicode code point.
|
||||
InvalidUnicodeCodePoint,
|
||||
|
||||
/// Object key is not a string.
|
||||
KeyMustBeAString,
|
||||
|
||||
/// Lone leading surrogate in hex escape.
|
||||
LoneLeadingSurrogateInHexEscape,
|
||||
|
||||
/// JSON has non-whitespace trailing characters after the value.
|
||||
TrailingCharacters,
|
||||
|
||||
/// Unexpected end of hex escape.
|
||||
UnexpectedEndOfHexEscape,
|
||||
|
||||
/// Found a punctuator character when expecting a quoteless string.
|
||||
PunctuatorInQlString,
|
||||
}
|
||||
|
||||
impl fmt::Debug for ErrorCode {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
//use std::fmt::Debug;
|
||||
|
||||
match *self {
|
||||
ErrorCode::Custom(ref msg) => write!(f, "{}", msg),
|
||||
ErrorCode::InvalidType(ref ty) => write!(f, "invalid type: {:?}", ty),
|
||||
ErrorCode::InvalidValue(ref msg) => write!(f, "invalid value: {}", msg),
|
||||
ErrorCode::InvalidLength(ref len) => write!(f, "invalid value length {}", len),
|
||||
ErrorCode::UnknownVariant(ref variant) => write!(f, "unknown variant \"{}\"", variant),
|
||||
ErrorCode::UnknownField(ref field) => write!(f, "unknown field \"{}\"", field),
|
||||
ErrorCode::MissingField(ref field) => write!(f, "missing field \"{}\"", field),
|
||||
ErrorCode::EOFWhileParsingList => "EOF while parsing a list".fmt(f),
|
||||
ErrorCode::EOFWhileParsingObject => "EOF while parsing an object".fmt(f),
|
||||
ErrorCode::EOFWhileParsingString => "EOF while parsing a string".fmt(f),
|
||||
ErrorCode::EOFWhileParsingValue => "EOF while parsing a value".fmt(f),
|
||||
ErrorCode::ExpectedColon => "expected `:`".fmt(f),
|
||||
ErrorCode::ExpectedListCommaOrEnd => "expected `,` or `]`".fmt(f),
|
||||
ErrorCode::ExpectedObjectCommaOrEnd => "expected `,` or `}`".fmt(f),
|
||||
ErrorCode::ExpectedSomeIdent => "expected ident".fmt(f),
|
||||
ErrorCode::ExpectedSomeValue => "expected value".fmt(f),
|
||||
ErrorCode::InvalidEscape => "invalid escape".fmt(f),
|
||||
ErrorCode::InvalidNumber => "invalid number".fmt(f),
|
||||
ErrorCode::InvalidUnicodeCodePoint => "invalid unicode code point".fmt(f),
|
||||
ErrorCode::KeyMustBeAString => "key must be a string".fmt(f),
|
||||
ErrorCode::LoneLeadingSurrogateInHexEscape => {
|
||||
"lone leading surrogate in hex escape".fmt(f)
|
||||
}
|
||||
ErrorCode::TrailingCharacters => "trailing characters".fmt(f),
|
||||
ErrorCode::UnexpectedEndOfHexEscape => "unexpected end of hex escape".fmt(f),
|
||||
ErrorCode::PunctuatorInQlString => {
|
||||
"found a punctuator character when expecting a quoteless string".fmt(f)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// This type represents all possible errors that can occur when serializing or deserializing a
|
||||
/// value into JSON.
|
||||
#[derive(Debug)]
|
||||
pub enum Error {
|
||||
/// The JSON value had some syntactic error.
|
||||
Syntax(ErrorCode, usize, usize),
|
||||
|
||||
/// Some IO error occurred when serializing or deserializing a value.
|
||||
Io(io::Error),
|
||||
|
||||
/// Some UTF8 error occurred while serializing or deserializing a value.
|
||||
FromUtf8(FromUtf8Error),
|
||||
}
|
||||
|
||||
impl error::Error for Error {
|
||||
fn cause(&self) -> Option<&dyn error::Error> {
|
||||
match *self {
|
||||
Error::Io(ref error) => Some(error),
|
||||
Error::FromUtf8(ref error) => Some(error),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl fmt::Display for Error {
|
||||
fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {
|
||||
match *self {
|
||||
Error::Syntax(ref code, line, col) => {
|
||||
write!(fmt, "{:?} at line {} column {}", code, line, col)
|
||||
}
|
||||
Error::Io(ref error) => fmt::Display::fmt(error, fmt),
|
||||
Error::FromUtf8(ref error) => fmt::Display::fmt(error, fmt),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<io::Error> for Error {
|
||||
fn from(error: io::Error) -> Error {
|
||||
Error::Io(error)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<FromUtf8Error> for Error {
|
||||
fn from(error: FromUtf8Error) -> Error {
|
||||
Error::FromUtf8(error)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<de::value::Error> for Error {
|
||||
fn from(error: de::value::Error) -> Error {
|
||||
match error {
|
||||
de::value::Error::Custom(e) => Error::Syntax(ErrorCode::Custom(e), 0, 0),
|
||||
de::value::Error::EndOfStream => de::Error::end_of_stream(),
|
||||
de::value::Error::InvalidType(ty) => Error::Syntax(ErrorCode::InvalidType(ty), 0, 0),
|
||||
de::value::Error::InvalidValue(msg) => {
|
||||
Error::Syntax(ErrorCode::InvalidValue(msg), 0, 0)
|
||||
}
|
||||
de::value::Error::InvalidLength(len) => {
|
||||
Error::Syntax(ErrorCode::InvalidLength(len), 0, 0)
|
||||
}
|
||||
de::value::Error::UnknownVariant(variant) => {
|
||||
Error::Syntax(ErrorCode::UnknownVariant(variant), 0, 0)
|
||||
}
|
||||
de::value::Error::UnknownField(field) => {
|
||||
Error::Syntax(ErrorCode::UnknownField(field), 0, 0)
|
||||
}
|
||||
de::value::Error::MissingField(field) => {
|
||||
Error::Syntax(ErrorCode::MissingField(field), 0, 0)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl de::Error for Error {
|
||||
fn custom<T: Into<String>>(msg: T) -> Error {
|
||||
Error::Syntax(ErrorCode::Custom(msg.into()), 0, 0)
|
||||
}
|
||||
|
||||
fn end_of_stream() -> Error {
|
||||
Error::Syntax(ErrorCode::EOFWhileParsingValue, 0, 0)
|
||||
}
|
||||
|
||||
fn invalid_type(ty: de::Type) -> Error {
|
||||
Error::Syntax(ErrorCode::InvalidType(ty), 0, 0)
|
||||
}
|
||||
|
||||
fn invalid_value(msg: &str) -> Error {
|
||||
Error::Syntax(ErrorCode::InvalidValue(msg.to_owned()), 0, 0)
|
||||
}
|
||||
|
||||
fn invalid_length(len: usize) -> Error {
|
||||
Error::Syntax(ErrorCode::InvalidLength(len), 0, 0)
|
||||
}
|
||||
|
||||
fn unknown_variant(variant: &str) -> Error {
|
||||
Error::Syntax(ErrorCode::UnknownVariant(String::from(variant)), 0, 0)
|
||||
}
|
||||
|
||||
fn unknown_field(field: &str) -> Error {
|
||||
Error::Syntax(ErrorCode::UnknownField(String::from(field)), 0, 0)
|
||||
}
|
||||
|
||||
fn missing_field(field: &'static str) -> Error {
|
||||
Error::Syntax(ErrorCode::MissingField(field), 0, 0)
|
||||
}
|
||||
}
|
||||
|
||||
impl ser::Error for Error {
|
||||
/// Raised when there is general error when deserializing a type.
|
||||
fn custom<T: Into<String>>(msg: T) -> Error {
|
||||
Error::Syntax(ErrorCode::Custom(msg.into()), 0, 0)
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper alias for `Result` objects that return a JSON `Error`.
|
||||
pub type Result<T> = result::Result<T, Error>;
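A caller-side sketch of handling these errors, assuming the re-exports in `lib.rs`; `Error::Syntax` carries the error code plus the line and column where the reader stopped:

fn report(src: &str) {
    match nu_json::from_str::<nu_json::Value>(src) {
        Ok(_) => println!("parsed"),
        Err(nu_json::Error::Syntax(code, line, col)) => {
            eprintln!("syntax error at {}:{}: {:?}", line, col, code)
        }
        Err(other) => eprintln!("{}", other),
    }
}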
|
crates/nu-json/src/forward.rs (new file, 38 lines)
@ -0,0 +1,38 @@
|
||||
#[macro_export]
|
||||
/// Create a function that forwards a specific deserialize call to the generic `deserialize`
|
||||
macro_rules! forward_to_deserialize {
|
||||
($(
|
||||
$name:ident ( $( $arg:ident : $ty:ty ),* );
|
||||
)*) => {
|
||||
$(
|
||||
forward_to_deserialize!{
|
||||
func: $name ( $( $arg: $ty ),* );
|
||||
}
|
||||
)*
|
||||
};
|
||||
|
||||
(func: deserialize_enum ( $( $arg:ident : $ty:ty ),* );) => {
|
||||
fn deserialize_enum<V>(
|
||||
&mut self,
|
||||
$(_: $ty,)*
|
||||
_visitor: V,
|
||||
) -> ::std::result::Result<V::Value, Self::Error>
|
||||
where V: ::serde::de::EnumVisitor
|
||||
{
|
||||
Err(::serde::de::Error::invalid_type(::serde::de::Type::Enum))
|
||||
}
|
||||
};
|
||||
|
||||
(func: $name:ident ( $( $arg:ident : $ty:ty ),* );) => {
|
||||
#[inline]
|
||||
fn $name<V>(
|
||||
&mut self,
|
||||
$(_: $ty,)*
|
||||
visitor: V,
|
||||
) -> ::std::result::Result<V::Value, Self::Error>
|
||||
where V: ::serde::de::Visitor
|
||||
{
|
||||
self.deserialize(visitor)
|
||||
}
|
||||
};
|
||||
}
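For reference, a sketch of the expansion: a plain entry such as `deserialize_bool();` turns into a method of the surrounding impl that simply defers to the generic `deserialize` (shown here outside its impl block):

#[inline]
fn deserialize_bool<V>(&mut self, visitor: V) -> ::std::result::Result<V::Value, Self::Error>
where
    V: ::serde::de::Visitor,
{
    self.deserialize(visitor)
}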
|
crates/nu-json/src/lib.rs (new file, 16 lines)
@ -0,0 +1,16 @@
|
||||
pub use self::de::{
|
||||
from_iter, from_reader, from_slice, from_str, Deserializer, StreamDeserializer,
|
||||
};
|
||||
pub use self::error::{Error, ErrorCode, Result};
|
||||
pub use self::ser::{to_string, to_vec, to_writer, Serializer};
|
||||
pub use self::value::{from_value, to_value, Map, Value};
|
||||
|
||||
#[macro_use]
|
||||
mod forward;
|
||||
|
||||
pub mod builder;
|
||||
pub mod de;
|
||||
pub mod error;
|
||||
pub mod ser;
|
||||
mod util;
|
||||
pub mod value;
|
crates/nu-json/src/ser.rs (new file, 1058 lines)
File diff suppressed because it is too large

crates/nu-json/src/util.rs (new file, 328 lines)
@ -0,0 +1,328 @@
|
||||
use std::io;
|
||||
use std::str;
|
||||
|
||||
use super::error::{Error, ErrorCode, Result};
|
||||
|
||||
pub struct StringReader<Iter: Iterator<Item = u8>> {
|
||||
iter: Iter,
|
||||
line: usize,
|
||||
col: usize,
|
||||
ch: Vec<u8>,
|
||||
}
|
||||
|
||||
impl<Iter> StringReader<Iter>
|
||||
where
|
||||
Iter: Iterator<Item = u8>,
|
||||
{
|
||||
#[inline]
|
||||
pub fn new(iter: Iter) -> Self {
|
||||
StringReader {
|
||||
iter,
|
||||
line: 1,
|
||||
col: 0,
|
||||
ch: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
fn next(&mut self) -> Option<io::Result<u8>> {
|
||||
match self.iter.next() {
|
||||
None => None,
|
||||
Some(b'\n') => {
|
||||
self.line += 1;
|
||||
self.col = 0;
|
||||
Some(Ok(b'\n'))
|
||||
}
|
||||
Some(c) => {
|
||||
self.col += 1;
|
||||
Some(Ok(c))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn pos(&mut self) -> (usize, usize) {
|
||||
(self.line, self.col)
|
||||
}
|
||||
|
||||
pub fn eof(&mut self) -> Result<bool> {
|
||||
Ok(self.peek()?.is_none())
|
||||
}
|
||||
|
||||
pub fn peek_next(&mut self, idx: usize) -> Result<Option<u8>> {
|
||||
while self.ch.len() <= idx {
|
||||
match self.next() {
|
||||
Some(Err(err)) => return Err(Error::Io(err)),
|
||||
Some(Ok(ch)) => self.ch.push(ch),
|
||||
None => return Ok(None),
|
||||
}
|
||||
}
|
||||
Ok(Some(self.ch[idx]))
|
||||
}
|
||||
|
||||
pub fn peek(&mut self) -> Result<Option<u8>> {
|
||||
self.peek_next(0)
|
||||
}
|
||||
|
||||
pub fn peek_or_null(&mut self) -> Result<u8> {
|
||||
Ok(self.peek()?.unwrap_or(b'\x00'))
|
||||
}
|
||||
|
||||
pub fn eat_char(&mut self) -> u8 {
|
||||
self.ch.remove(0)
|
||||
}
|
||||
|
||||
pub fn uneat_char(&mut self, ch: u8) {
|
||||
self.ch.insert(0, ch);
|
||||
}
|
||||
|
||||
pub fn next_char(&mut self) -> Result<Option<u8>> {
|
||||
match self.ch.first() {
|
||||
Some(&ch) => {
|
||||
self.eat_char();
|
||||
Ok(Some(ch))
|
||||
}
|
||||
None => match self.next() {
|
||||
Some(Err(err)) => Err(Error::Io(err)),
|
||||
Some(Ok(ch)) => Ok(Some(ch)),
|
||||
None => Ok(None),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn next_char_or_null(&mut self) -> Result<u8> {
|
||||
Ok(self.next_char()?.unwrap_or(b'\x00'))
|
||||
}
|
||||
|
||||
fn eat_line(&mut self) -> Result<()> {
|
||||
loop {
|
||||
match self.peek()? {
|
||||
Some(b'\n') | None => return Ok(()),
|
||||
_ => {}
|
||||
}
|
||||
self.eat_char();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_whitespace(&mut self) -> Result<()> {
|
||||
loop {
|
||||
match self.peek_or_null()? {
|
||||
b' ' | b'\n' | b'\t' | b'\r' => {
|
||||
self.eat_char();
|
||||
}
|
||||
b'#' => self.eat_line()?,
|
||||
b'/' => {
|
||||
match self.peek_next(1)? {
|
||||
Some(b'/') => self.eat_line()?,
|
||||
Some(b'*') => {
|
||||
self.eat_char();
|
||||
self.eat_char();
|
||||
while !(self.peek()?.unwrap_or(b'*') == b'*'
|
||||
&& self.peek_next(1)?.unwrap_or(b'/') == b'/')
|
||||
{
|
||||
self.eat_char();
|
||||
}
|
||||
self.eat_char();
|
||||
self.eat_char();
|
||||
}
|
||||
Some(_) => {
|
||||
self.eat_char();
|
||||
}
|
||||
None => return Err(self.error(ErrorCode::TrailingCharacters)), //todo
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn error(&mut self, reason: ErrorCode) -> Error {
|
||||
Error::Syntax(reason, self.line, self.col)
|
||||
}
|
||||
}
|
||||
|
||||
pub enum Number {
|
||||
I64(i64),
|
||||
U64(u64),
|
||||
F64(f64),
|
||||
}
|
||||
|
||||
pub struct ParseNumber<Iter: Iterator<Item = u8>> {
|
||||
rdr: StringReader<Iter>,
|
||||
result: Vec<u8>,
|
||||
}
|
||||
|
||||
// macro_rules! try_or_invalid {
|
||||
// ($e:expr) => {
|
||||
// match $e {
|
||||
// Some(v) => v,
|
||||
// None => { return Err(Error::Syntax(ErrorCode::InvalidNumber, 0, 0)); }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
impl<Iter: Iterator<Item = u8>> ParseNumber<Iter> {
|
||||
#[inline]
|
||||
pub fn new(iter: Iter) -> Self {
|
||||
ParseNumber {
|
||||
rdr: StringReader::new(iter),
|
||||
result: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse(&mut self, stop_at_next: bool) -> Result<Number> {
|
||||
match self.try_parse() {
|
||||
Ok(()) => {
|
||||
self.rdr.parse_whitespace()?;
|
||||
|
||||
let mut ch = self.rdr.next_char_or_null()?;
|
||||
|
||||
if stop_at_next {
|
||||
let ch2 = self.rdr.peek_or_null()?;
|
||||
// end scan if we find a punctuator character like ,}] or a comment
|
||||
if ch == b','
|
||||
|| ch == b'}'
|
||||
|| ch == b']'
|
||||
|| ch == b'#'
|
||||
|| ch == b'/' && (ch2 == b'/' || ch2 == b'*')
|
||||
{
|
||||
ch = b'\x00';
|
||||
}
|
||||
}
|
||||
|
||||
match ch {
|
||||
b'\x00' => {
|
||||
let res =
|
||||
str::from_utf8(&self.result).expect("Internal error: json parsing");
|
||||
|
||||
let mut is_float = false;
|
||||
for ch in res.chars() {
|
||||
if ch == '.' || ch == 'e' || ch == 'E' {
|
||||
is_float = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if is_float {
|
||||
Ok(Number::F64(
|
||||
res.parse::<f64>().expect("Internal error: json parsing"),
|
||||
))
|
||||
} else if res.starts_with('-') {
|
||||
Ok(Number::I64(
|
||||
res.parse::<i64>().expect("Internal error: json parsing"),
|
||||
))
|
||||
} else {
|
||||
Ok(Number::U64(
|
||||
res.parse::<u64>().expect("Internal error: json parsing"),
|
||||
))
|
||||
}
|
||||
}
|
||||
_ => Err(Error::Syntax(ErrorCode::InvalidNumber, 0, 0)),
|
||||
}
|
||||
}
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
|
||||
fn try_parse(&mut self) -> Result<()> {
|
||||
if self.rdr.peek_or_null()? == b'-' {
|
||||
self.result.push(self.rdr.eat_char());
|
||||
}
|
||||
|
||||
let mut has_value = false;
|
||||
|
||||
if self.rdr.peek_or_null()? == b'0' {
|
||||
self.result.push(self.rdr.eat_char());
|
||||
has_value = true;
|
||||
// There can be only one leading '0'.
|
||||
if let b'0'..=b'9' = self.rdr.peek_or_null()? {
|
||||
return Err(Error::Syntax(ErrorCode::InvalidNumber, 0, 0));
|
||||
}
|
||||
}
|
||||
|
||||
loop {
|
||||
match self.rdr.peek_or_null()? {
|
||||
b'0'..=b'9' => {
|
||||
self.result.push(self.rdr.eat_char());
|
||||
has_value = true;
|
||||
}
|
||||
b'.' => {
|
||||
if !has_value {
|
||||
return Err(Error::Syntax(ErrorCode::InvalidNumber, 0, 0));
|
||||
}
|
||||
self.rdr.eat_char();
|
||||
return self.try_decimal();
|
||||
}
|
||||
b'e' | b'E' => {
|
||||
if !has_value {
|
||||
return Err(Error::Syntax(ErrorCode::InvalidNumber, 0, 0));
|
||||
}
|
||||
self.rdr.eat_char();
|
||||
return self.try_exponent();
|
||||
}
|
||||
_ => {
|
||||
if !has_value {
|
||||
return Err(Error::Syntax(ErrorCode::InvalidNumber, 0, 0));
|
||||
}
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn try_decimal(&mut self) -> Result<()> {
|
||||
self.result.push(b'.');
|
||||
|
||||
// Make sure a digit follows the decimal place.
|
||||
match self.rdr.next_char_or_null()? {
|
||||
c @ b'0'..=b'9' => {
|
||||
self.result.push(c);
|
||||
}
|
||||
_ => {
|
||||
return Err(Error::Syntax(ErrorCode::InvalidNumber, 0, 0));
|
||||
}
|
||||
};
|
||||
|
||||
while let b'0'..=b'9' = self.rdr.peek_or_null()? {
|
||||
self.result.push(self.rdr.eat_char());
|
||||
}
|
||||
|
||||
match self.rdr.peek_or_null()? {
|
||||
b'e' | b'E' => {
|
||||
self.rdr.eat_char();
|
||||
self.try_exponent()
|
||||
}
|
||||
_ => Ok(()),
|
||||
}
|
||||
}
|
||||
|
||||
fn try_exponent(&mut self) -> Result<()> {
|
||||
self.result.push(b'e');
|
||||
|
||||
match self.rdr.peek_or_null()? {
|
||||
b'+' => {
|
||||
self.result.push(self.rdr.eat_char());
|
||||
}
|
||||
b'-' => {
|
||||
self.result.push(self.rdr.eat_char());
|
||||
}
|
||||
_ => {}
|
||||
};
|
||||
|
||||
// Make sure a digit follows the exponent place.
|
||||
match self.rdr.next_char_or_null()? {
|
||||
c @ b'0'..=b'9' => {
|
||||
self.result.push(c);
|
||||
}
|
||||
_ => {
|
||||
return Err(Error::Syntax(ErrorCode::InvalidNumber, 0, 0));
|
||||
}
|
||||
};
|
||||
|
||||
while let b'0'..=b'9' = self.rdr.peek_or_null()? {
|
||||
self.result.push(self.rdr.eat_char());
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
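A small sketch of `ParseNumber` on its own, the same path `parse_tfnns` takes when a quoteless value looks numeric; the literals are illustrative:

fn classify(literal: &str) -> Result<Number> {
    let mut pn = ParseNumber::new(literal.bytes());
    pn.parse(false)
}
// classify("42")   evaluates to Ok(Number::U64(42))
// classify("-7")   evaluates to Ok(Number::I64(-7))
// classify("12.5") evaluates to Ok(Number::F64(12.5))
// classify("1x")   evaluates to Err(..) with ErrorCode::InvalidNumber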
|
crates/nu-json/src/value.rs (new file, 1349 lines)
File diff suppressed because it is too large
@ -4,7 +4,7 @@ description = "Nushell parser"
|
||||
edition = "2018"
|
||||
license = "MIT"
|
||||
name = "nu-parser"
|
||||
version = "0.22.0"
|
||||
version = "0.23.0"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
@ -19,9 +19,9 @@ num-traits = "0.2.12"
|
||||
serde = "1.0.115"
|
||||
shellexpand = "2.0.0"
|
||||
|
||||
nu-errors = {version = "0.22.0", path = "../nu-errors"}
|
||||
nu-protocol = {version = "0.22.0", path = "../nu-protocol"}
|
||||
nu-source = {version = "0.22.0", path = "../nu-source"}
|
||||
nu-errors = {version = "0.23.0", path = "../nu-errors"}
|
||||
nu-protocol = {version = "0.23.0", path = "../nu-protocol"}
|
||||
nu-source = {version = "0.23.0", path = "../nu-source"}
|
||||
|
||||
[features]
|
||||
stable = []
|
||||
|
@ -1,19 +1,18 @@
|
||||
use std::fmt::Debug;
|
||||
// use std::fmt::Debug;
|
||||
|
||||
/// A combination of an informative parse error, and what has been successfully parsed so far
|
||||
#[derive(Debug)]
|
||||
pub struct ParseError<T: Debug> {
|
||||
/// An informative cause for this parse error
|
||||
pub cause: nu_errors::ParseError,
|
||||
// A combination of an informative parse error, and what has been successfully parsed so far
|
||||
// #[derive(Debug)]
|
||||
// pub struct ParseError {
|
||||
// /// An informative cause for this parse error
|
||||
// pub cause: nu_errors::ParseError,
|
||||
// // /// What has been successfully parsed, if anything
|
||||
// // pub partial: Option<T>,
|
||||
// }
|
||||
|
||||
/// What has been successfully parsed, if anything
|
||||
pub partial: Option<T>,
|
||||
}
|
||||
// pub type ParseResult<T> = Result<T, ParseError<T>>;
|
||||
|
||||
pub type ParseResult<T> = Result<T, ParseError<T>>;
|
||||
|
||||
impl<T: Debug> From<ParseError<T>> for nu_errors::ShellError {
|
||||
fn from(e: ParseError<T>) -> Self {
|
||||
e.cause.into()
|
||||
}
|
||||
}
|
||||
// impl<T: Debug> From<ParseError<T>> for nu_errors::ShellError {
|
||||
// fn from(e: ParseError<T>) -> Self {
|
||||
// e.cause.into()
|
||||
// }
|
||||
// }
|
||||
|
@ -5,7 +5,6 @@ mod path;
|
||||
mod shapes;
|
||||
mod signature;
|
||||
|
||||
pub use errors::{ParseError, ParseResult};
|
||||
pub use lite_parse::{lite_parse, LiteBlock};
|
||||
pub use parse::{classify_block, garbage, parse_full_column_path};
|
||||
pub use path::expand_ndots;
|
||||
|
@ -3,27 +3,57 @@ use std::str::CharIndices;
|
||||
|
||||
use nu_source::{Span, Spanned, SpannedItem};
|
||||
|
||||
use crate::errors::{ParseError, ParseResult};
|
||||
use nu_errors::ParseError;
|
||||
|
||||
type Input<'t> = Peekable<CharIndices<'t>>;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Token {
|
||||
pub contents: TokenContents,
|
||||
pub span: Span,
|
||||
}
|
||||
impl Token {
|
||||
pub fn new(contents: TokenContents, span: Span) -> Token {
|
||||
Token { contents, span }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum TokenContents {
|
||||
Bare(String),
|
||||
Pipe,
|
||||
Semicolon,
|
||||
EOL,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct LiteCommand {
|
||||
pub name: Spanned<String>,
|
||||
pub args: Vec<Spanned<String>>,
|
||||
pub parts: Vec<Spanned<String>>,
|
||||
}
|
||||
|
||||
impl LiteCommand {
|
||||
fn new(name: Spanned<String>) -> LiteCommand {
|
||||
LiteCommand { name, args: vec![] }
|
||||
fn new() -> LiteCommand {
|
||||
LiteCommand { parts: vec![] }
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.parts.is_empty()
|
||||
}
|
||||
pub fn push(&mut self, item: Spanned<String>) {
|
||||
self.parts.push(item)
|
||||
}
|
||||
|
||||
pub(crate) fn span(&self) -> Span {
|
||||
let start = self.name.span.start();
|
||||
let end = if let Some(x) = self.args.last() {
|
||||
let start = if let Some(x) = self.parts.first() {
|
||||
x.span.start()
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
let end = if let Some(x) = self.parts.last() {
|
||||
x.span.end()
|
||||
} else {
|
||||
self.name.span.end()
|
||||
0
|
||||
};
|
||||
|
||||
Span::new(start, end)
|
||||
@ -35,10 +65,25 @@ pub struct LitePipeline {
|
||||
pub commands: Vec<LiteCommand>,
|
||||
}
|
||||
|
||||
impl Default for LitePipeline {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl LitePipeline {
|
||||
pub fn new() -> Self {
|
||||
Self { commands: vec![] }
|
||||
}
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.commands.is_empty()
|
||||
}
|
||||
pub fn push(&mut self, item: LiteCommand) {
|
||||
self.commands.push(item)
|
||||
}
|
||||
pub(crate) fn span(&self) -> Span {
|
||||
let start = if !self.commands.is_empty() {
|
||||
self.commands[0].name.span.start()
|
||||
self.commands[0].span().start()
|
||||
} else {
|
||||
0
|
||||
};
|
||||
@ -51,12 +96,63 @@ impl LitePipeline {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct LiteGroup {
|
||||
pub pipelines: Vec<LitePipeline>,
|
||||
}
|
||||
|
||||
impl Default for LiteGroup {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl LiteGroup {
|
||||
pub fn new() -> Self {
|
||||
Self { pipelines: vec![] }
|
||||
}
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.pipelines.is_empty()
|
||||
}
|
||||
pub fn push(&mut self, item: LitePipeline) {
|
||||
self.pipelines.push(item)
|
||||
}
|
||||
pub(crate) fn span(&self) -> Span {
|
||||
let start = if !self.pipelines.is_empty() {
|
||||
self.pipelines[0].span().start()
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
if let Some((last, _)) = self.pipelines[..].split_last() {
|
||||
Span::new(start, last.span().end())
|
||||
} else {
|
||||
Span::new(start, 0)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct LiteBlock {
|
||||
pub block: Vec<LitePipeline>,
|
||||
pub block: Vec<LiteGroup>,
|
||||
}
|
||||
|
||||
impl Default for LiteBlock {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl LiteBlock {
|
||||
pub fn new() -> Self {
|
||||
Self { block: vec![] }
|
||||
}
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.block.is_empty()
|
||||
}
|
||||
pub fn push(&mut self, item: LiteGroup) {
|
||||
self.block.push(item)
|
||||
}
|
||||
pub(crate) fn span(&self) -> Span {
|
||||
let start = if !self.block.is_empty() {
|
||||
self.block[0].span().start()
|
||||
@ -72,22 +168,6 @@ impl LiteBlock {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Spanned<String>> for LiteCommand {
|
||||
fn from(v: Spanned<String>) -> LiteCommand {
|
||||
LiteCommand::new(v)
|
||||
}
|
||||
}
|
||||
|
||||
fn skip_whitespace(src: &mut Input) {
|
||||
while let Some((_, x)) = src.peek() {
|
||||
if x.is_whitespace() {
|
||||
let _ = src.next();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
enum BlockKind {
|
||||
Paren,
|
||||
@ -105,9 +185,11 @@ impl From<BlockKind> for char {
|
||||
}
|
||||
}
|
||||
|
||||
fn bare(src: &mut Input, span_offset: usize) -> ParseResult<Spanned<String>> {
|
||||
skip_whitespace(src);
|
||||
|
||||
/// Finds the extents of a bare (un-classified) token, returning the string with its associated span,
|
||||
/// along with any parse error that was discovered along the way.
|
||||
/// Bare tokens are unparsed content separated by spaces or a command separator (like pipe or semicolon).
/// Bare tokens may be surrounded by quotes (single, double, or backtick) or braces (square, paren, curly).
|
||||
pub fn bare(src: &mut Input, span_offset: usize) -> (Spanned<String>, Option<ParseError>) {
|
||||
let mut bare = String::new();
|
||||
let start_offset = if let Some((pos, _)) = src.peek() {
|
||||
*pos
|
||||
@ -158,16 +240,13 @@ fn bare(src: &mut Input, span_offset: usize) -> ParseResult<Spanned<String>> {
|
||||
|
||||
if let Some(block) = block_level.last() {
|
||||
let delim: char = (*block).into();
|
||||
let cause = nu_errors::ParseError::unexpected_eof(delim.to_string(), span);
|
||||
let cause = ParseError::unexpected_eof(delim.to_string(), span);
|
||||
|
||||
while let Some(bk) = block_level.pop() {
|
||||
bare.push(bk.into());
|
||||
}
|
||||
|
||||
return Err(ParseError {
|
||||
cause,
|
||||
partial: Some(bare.spanned(span)),
|
||||
});
|
||||
return (bare.spanned(span), Some(cause));
|
||||
}
|
||||
|
||||
if let Some(delimiter) = inside_quote {
|
||||
@ -176,133 +255,161 @@ fn bare(src: &mut Input, span_offset: usize) -> ParseResult<Spanned<String>> {
|
||||
// correct information from the non-lite parse.
|
||||
bare.push(delimiter);
|
||||
|
||||
return Err(ParseError {
|
||||
cause: nu_errors::ParseError::unexpected_eof(delimiter.to_string(), span),
|
||||
partial: Some(bare.spanned(span)),
|
||||
});
|
||||
return (
|
||||
bare.spanned(span),
|
||||
Some(ParseError::unexpected_eof(delimiter.to_string(), span)),
|
||||
);
|
||||
}
|
||||
|
||||
if bare.is_empty() {
|
||||
return Err(ParseError {
|
||||
cause: nu_errors::ParseError::unexpected_eof("command", span),
|
||||
partial: Some(bare.spanned(span)),
|
||||
});
|
||||
return (
|
||||
bare.spanned(span),
|
||||
Some(ParseError::unexpected_eof("command".to_string(), span)),
|
||||
);
|
||||
}
|
||||
|
||||
Ok(bare.spanned(span))
|
||||
(bare.spanned(span), None)
|
||||
}
|
||||
|
||||
fn command(src: &mut Input, span_offset: usize) -> ParseResult<LiteCommand> {
|
||||
let mut cmd = match bare(src, span_offset) {
|
||||
Ok(v) => LiteCommand::new(v),
|
||||
Err(e) => {
|
||||
return Err(ParseError {
|
||||
cause: e.cause,
|
||||
partial: e.partial.map(LiteCommand::new),
|
||||
});
|
||||
}
|
||||
};
|
||||
/// Breaks the input string into a vector of tokens. This tokenization only tries to classify separators like
|
||||
/// semicolons, pipes, etc. from external bare values (values that haven't been classified further).
/// Takes in a string and an offset, which is used to offset the spans created (for when this function is used to parse inner strings).
|
||||
pub fn lex(input: &str, span_offset: usize) -> (Vec<Token>, Option<ParseError>) {
|
||||
let mut char_indices = input.char_indices().peekable();
|
||||
let mut error = None;
|
||||
|
||||
loop {
|
||||
skip_whitespace(src);
|
||||
let mut output = vec![];
|
||||
|
||||
if let Some((_, c)) = src.peek() {
|
||||
// The first character tells us a lot about each argument
|
||||
match c {
|
||||
';' => {
|
||||
// this is the end of the command and the end of the pipeline
|
||||
break;
|
||||
}
|
||||
'|' => {
|
||||
let _ = src.next();
|
||||
if let Some((pos, next_c)) = src.peek() {
|
||||
if *next_c == '|' {
|
||||
// this isn't actually a pipeline but a comparison
|
||||
let span = Span::new(pos - 1 + span_offset, pos + 1 + span_offset);
|
||||
cmd.args.push("||".to_string().spanned(span));
|
||||
let _ = src.next();
|
||||
} else {
|
||||
// this is the end of this command
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
// this is the end of this command
|
||||
break;
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
// basic argument
|
||||
match bare(src, span_offset) {
|
||||
Ok(v) => {
|
||||
cmd.args.push(v);
|
||||
}
|
||||
|
||||
Err(e) => {
|
||||
if let Some(v) = e.partial {
|
||||
cmd.args.push(v);
|
||||
}
|
||||
|
||||
return Err(ParseError {
|
||||
cause: e.cause,
|
||||
partial: Some(cmd),
|
||||
});
|
||||
}
|
||||
}
|
||||
while let Some((idx, c)) = char_indices.peek() {
|
||||
if *c == '|' {
|
||||
let idx = *idx;
|
||||
let prev_idx = idx;
|
||||
let _ = char_indices.next();
|
||||
if let Some((idx, c)) = char_indices.peek() {
|
||||
if *c == '|' {
|
||||
// we have '||' instead of '|'
|
||||
let idx = *idx;
|
||||
let _ = char_indices.next();
|
||||
output.push(Token::new(
|
||||
TokenContents::Bare("||".into()),
|
||||
Span::new(span_offset + prev_idx, span_offset + idx + 1),
|
||||
));
|
||||
continue;
|
||||
}
|
||||
}
|
||||
output.push(Token::new(
|
||||
TokenContents::Pipe,
|
||||
Span::new(span_offset + idx, span_offset + idx + 1),
|
||||
));
|
||||
} else if *c == ';' {
|
||||
let idx = *idx;
|
||||
let _ = char_indices.next();
|
||||
output.push(Token::new(
|
||||
TokenContents::Semicolon,
|
||||
Span::new(span_offset + idx, span_offset + idx + 1),
|
||||
));
|
||||
} else if *c == '\n' || *c == '\r' {
|
||||
let idx = *idx;
|
||||
let _ = char_indices.next();
|
||||
output.push(Token::new(
|
||||
TokenContents::EOL,
|
||||
Span::new(span_offset + idx, span_offset + idx + 1),
|
||||
));
|
||||
} else if c.is_whitespace() {
|
||||
let _ = char_indices.next();
|
||||
} else {
|
||||
break;
|
||||
let (result, err) = bare(&mut char_indices, span_offset);
|
||||
if error.is_none() {
|
||||
error = err;
|
||||
}
|
||||
let Spanned { item, span } = result;
|
||||
output.push(Token::new(TokenContents::Bare(item), span));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(cmd)
|
||||
(output, error)
|
||||
}
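// Sketch of the expected output for a small pipeline (illustrative input):
//   lex("echo hi | wc; ls", 0)
// yields no error and the tokens
//   Bare("echo"), Bare("hi"), Pipe, Bare("wc"), Semicolon, Bare("ls")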
|
||||
|
||||
fn pipeline(src: &mut Input, span_offset: usize) -> ParseResult<LiteBlock> {
let mut block = vec![];
let mut commands = vec![];
fn group(tokens: Vec<Token>) -> (LiteBlock, Option<ParseError>) {
let mut groups = vec![];
let mut group = LiteGroup::new();
let mut pipeline = LitePipeline::new();
let mut command = LiteCommand::new();

skip_whitespace(src);

while src.peek().is_some() {
// If there is content there, let's parse it
let cmd = match command(src, span_offset) {
Ok(v) => v,
Err(e) => {
if let Some(partial) = e.partial {
commands.push(partial);
block.push(LitePipeline { commands });
for token in tokens {
match token.contents {
TokenContents::EOL => {
if !command.is_empty() {
pipeline.push(command);
command = LiteCommand::new();
}
if !pipeline.is_empty() {
group.push(pipeline);
pipeline = LitePipeline::new();
}
if !group.is_empty() {
groups.push(group);
group = LiteGroup::new();
}

return Err(ParseError {
cause: e.cause,
partial: Some(LiteBlock { block }),
});
}
};
TokenContents::Pipe => {
if !command.is_empty() {
pipeline.push(command);
command = LiteCommand::new();
} else {
let mut block = LiteBlock::new();
block.block = groups;

commands.push(cmd);
skip_whitespace(src);

if let Some((_, ';')) = src.peek() {
let _ = src.next();

if !commands.is_empty() {
block.push(LitePipeline { commands });
commands = vec![];
return (
block,
Some(ParseError::extra_tokens(
"|".to_string().spanned(token.span),
)),
);
}
}
TokenContents::Semicolon => {
if !command.is_empty() {
pipeline.push(command);
command = LiteCommand::new();
}
if !pipeline.is_empty() {
group.push(pipeline);
pipeline = LitePipeline::new();
}
}
TokenContents::Bare(bare) => {
command.push(bare.spanned(token.span));
}
}
}

if !commands.is_empty() {
block.push(LitePipeline { commands });
if !command.is_empty() {
pipeline.push(command);
}
if !pipeline.is_empty() {
group.push(pipeline);
}
if !group.is_empty() {
groups.push(group);
}

Ok(LiteBlock { block })
let mut block = LiteBlock::new();
block.block = groups;
(block, None)
}

pub fn lite_parse(src: &str, span_offset: usize) -> ParseResult<LiteBlock> {
pipeline(&mut src.char_indices().peekable(), span_offset)
pub fn lite_parse(src: &str, span_offset: usize) -> (LiteBlock, Option<ParseError>) {
let mut error = None;
let (output, err) = lex(src, span_offset);
if err.is_some() {
error = err;
}
let (group_output, err) = group(output);
if error.is_none() {
error = err;
}

(group_output, error)
}
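
The change above replaces the old single-pass pipeline/bare parser with a two-stage design: lex flattens the source into tokens (bare words, pipes, semicolons, end-of-line), and group folds those tokens into commands, pipelines, and groups, returning any error alongside whatever partial output was built. A minimal standalone sketch of that shape, with simplified types and no span or error tracking (these are not the actual nushell LiteBlock/LiteCommand structures):

// Sketch only: tokens must be whitespace-separated here; the real lexer
// also handles quoting and records byte spans for error reporting.
type Command = Vec<String>;
type Pipeline = Vec<Command>;
type Group = Vec<Pipeline>;

enum Token {
    Bare(String),
    Pipe,
    Semicolon,
    Eol,
}

fn lex(src: &str) -> Vec<Token> {
    let mut tokens = vec![];
    for line in src.lines() {
        for word in line.split_whitespace() {
            match word {
                "|" => tokens.push(Token::Pipe),
                ";" => tokens.push(Token::Semicolon),
                bare => tokens.push(Token::Bare(bare.to_string())),
            }
        }
        tokens.push(Token::Eol);
    }
    tokens
}

fn group(tokens: Vec<Token>) -> Vec<Group> {
    let mut groups: Vec<Group> = vec![];
    let mut group: Group = vec![];
    let mut pipeline: Pipeline = vec![];
    let mut command: Command = vec![];

    for token in tokens {
        match token {
            // Bare words accumulate into the current command.
            Token::Bare(word) => command.push(word),
            // A pipe ends the current command and starts the next one.
            Token::Pipe => {
                if !command.is_empty() {
                    pipeline.push(std::mem::take(&mut command));
                }
            }
            // A semicolon ends the current pipeline.
            Token::Semicolon => {
                if !command.is_empty() {
                    pipeline.push(std::mem::take(&mut command));
                }
                if !pipeline.is_empty() {
                    group.push(std::mem::take(&mut pipeline));
                }
            }
            // End-of-line closes the whole group.
            Token::Eol => {
                if !command.is_empty() {
                    pipeline.push(std::mem::take(&mut command));
                }
                if !pipeline.is_empty() {
                    group.push(std::mem::take(&mut pipeline));
                }
                if !group.is_empty() {
                    groups.push(std::mem::take(&mut group));
                }
            }
        }
    }
    // Flush anything still pending at end of input.
    if !command.is_empty() {
        pipeline.push(command);
    }
    if !pipeline.is_empty() {
        group.push(pipeline);
    }
    if !group.is_empty() {
        groups.push(group);
    }
    groups
}

fn main() {
    let groups = group(lex("open foo.csv | first 3 ; ls"));
    // One group (one input line) holding two pipelines:
    // `open foo.csv | first 3` and `ls`.
    assert_eq!(groups.len(), 1);
    assert_eq!(groups[0].len(), 2);
    assert_eq!(groups[0][0].len(), 2);
    println!("{:?}", groups);
}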
|
||||
|
||||
#[cfg(test)]
|
||||
@ -320,140 +427,136 @@ mod tests {
|
||||
fn simple_1() {
|
||||
let input = "foo bar baz";
|
||||
|
||||
let input = &mut input.char_indices().peekable();
|
||||
let result = bare(input, 0).unwrap();
|
||||
let (result, err) = lex(input, 0);
|
||||
|
||||
assert_eq!(result.span, span(0, 3));
|
||||
assert!(err.is_none());
|
||||
assert_eq!(result[0].span, span(0, 3));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simple_2() {
|
||||
let input = "'foo bar' baz";
|
||||
|
||||
let input = &mut input.char_indices().peekable();
|
||||
let result = bare(input, 0).unwrap();
|
||||
let (result, err) = lex(input, 0);
|
||||
|
||||
assert_eq!(result.span, span(0, 9));
|
||||
assert!(err.is_none());
|
||||
assert_eq!(result[0].span, span(0, 9));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simple_3() {
|
||||
let input = "'foo\" bar' baz";
|
||||
|
||||
let input = &mut input.char_indices().peekable();
|
||||
let result = bare(input, 0).unwrap();
|
||||
let (result, err) = lex(input, 0);
|
||||
|
||||
assert_eq!(result.span, span(0, 10));
|
||||
assert!(err.is_none());
|
||||
assert_eq!(result[0].span, span(0, 10));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simple_4() {
|
||||
let input = "[foo bar] baz";
|
||||
|
||||
let input = &mut input.char_indices().peekable();
|
||||
let result = bare(input, 0).unwrap();
|
||||
let (result, err) = lex(input, 0);
|
||||
|
||||
assert_eq!(result.span, span(0, 9));
|
||||
assert!(err.is_none());
|
||||
assert_eq!(result[0].span, span(0, 9));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simple_5() {
|
||||
let input = "'foo 'bar baz";
|
||||
|
||||
let input = &mut input.char_indices().peekable();
|
||||
let result = bare(input, 0).unwrap();
|
||||
let (result, err) = lex(input, 0);
|
||||
|
||||
assert_eq!(result.span, span(0, 9));
|
||||
assert!(err.is_none());
|
||||
assert_eq!(result[0].span, span(0, 9));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simple_6() {
|
||||
let input = "''foo baz";
|
||||
|
||||
let input = &mut input.char_indices().peekable();
|
||||
let result = bare(input, 0).unwrap();
|
||||
let (result, err) = lex(input, 0);
|
||||
|
||||
assert_eq!(result.span, span(0, 5));
|
||||
assert!(err.is_none());
|
||||
assert_eq!(result[0].span, span(0, 5));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simple_7() {
|
||||
let input = "'' foo";
|
||||
|
||||
let input = &mut input.char_indices().peekable();
|
||||
let result = bare(input, 0).unwrap();
|
||||
let (result, err) = lex(input, 0);
|
||||
|
||||
assert_eq!(result.span, span(0, 2));
|
||||
assert!(err.is_none());
|
||||
assert_eq!(result[0].span, span(0, 2));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simple_8() {
|
||||
let input = " '' foo";
|
||||
|
||||
let input = &mut input.char_indices().peekable();
|
||||
let result = bare(input, 0).unwrap();
|
||||
let (result, err) = lex(input, 0);
|
||||
|
||||
assert_eq!(result.span, span(1, 3));
|
||||
assert!(err.is_none());
|
||||
assert_eq!(result[0].span, span(1, 3));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simple_9() {
|
||||
let input = " 'foo' foo";
|
||||
|
||||
let input = &mut input.char_indices().peekable();
|
||||
let result = bare(input, 0).unwrap();
|
||||
let (result, err) = lex(input, 0);
|
||||
|
||||
assert_eq!(result.span, span(1, 6));
|
||||
assert!(err.is_none());
|
||||
assert_eq!(result[0].span, span(1, 6));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simple_10() {
|
||||
let input = "[foo, bar]";
|
||||
|
||||
let input = &mut input.char_indices().peekable();
|
||||
let result = bare(input, 0).unwrap();
|
||||
let (result, err) = lex(input, 0);
|
||||
|
||||
assert_eq!(result.span, span(0, 10));
|
||||
assert!(err.is_none());
|
||||
assert_eq!(result[0].span, span(0, 10));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn ignore_future() {
|
||||
let input = "foo 'bar";
|
||||
|
||||
let input = &mut input.char_indices().peekable();
|
||||
let result = bare(input, 0).unwrap();
|
||||
let (result, _) = lex(input, 0);
|
||||
|
||||
assert_eq!(result.span, span(0, 3));
|
||||
assert_eq!(result[0].span, span(0, 3));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_1() {
|
||||
let input = "'foo bar";
|
||||
|
||||
let input = &mut input.char_indices().peekable();
|
||||
let result = bare(input, 0);
|
||||
let (_, err) = lex(input, 0);
|
||||
|
||||
assert_eq!(result.is_ok(), false);
|
||||
assert!(err.is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_2() {
|
||||
let input = "'bar";
|
||||
|
||||
let input = &mut input.char_indices().peekable();
|
||||
let result = bare(input, 0);
|
||||
let (_, err) = lex(input, 0);
|
||||
|
||||
assert_eq!(result.is_ok(), false);
|
||||
assert!(err.is_some());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn invalid_4() {
|
||||
let input = " 'bar";
|
||||
|
||||
let input = &mut input.char_indices().peekable();
|
||||
let result = bare(input, 0);
|
||||
let (_, err) = lex(input, 0);
|
||||
|
||||
assert_eq!(result.is_ok(), false);
|
||||
assert!(err.is_some());
|
||||
}
|
||||
}
|
||||
|
||||
@ -462,39 +565,58 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn pipeline() {
|
||||
let result = lite_parse("cmd1 | cmd2 ; deploy", 0).unwrap();
|
||||
let (result, err) = lite_parse("cmd1 | cmd2 ; deploy", 0);
|
||||
assert!(err.is_none());
|
||||
assert_eq!(result.span(), span(0, 20));
|
||||
assert_eq!(result.block[0].span(), span(0, 11));
|
||||
assert_eq!(result.block[1].span(), span(14, 20));
|
||||
assert_eq!(result.block[0].pipelines[0].span(), span(0, 11));
|
||||
assert_eq!(result.block[0].pipelines[1].span(), span(14, 20));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simple_1() {
|
||||
let result = lite_parse("foo", 0).unwrap();
|
||||
let (result, err) = lite_parse("foo", 0);
|
||||
assert!(err.is_none());
|
||||
assert_eq!(result.block.len(), 1);
|
||||
assert_eq!(result.block[0].commands.len(), 1);
|
||||
assert_eq!(result.block[0].commands[0].name.span, span(0, 3));
|
||||
assert_eq!(result.block[0].pipelines.len(), 1);
|
||||
assert_eq!(result.block[0].pipelines[0].commands.len(), 1);
|
||||
assert_eq!(result.block[0].pipelines[0].commands[0].parts.len(), 1);
|
||||
assert_eq!(
|
||||
result.block[0].pipelines[0].commands[0].parts[0].span,
|
||||
span(0, 3)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simple_offset() {
|
||||
let result = lite_parse("foo", 10).unwrap();
|
||||
assert_eq!(result.block.len(), 1);
|
||||
assert_eq!(result.block[0].commands.len(), 1);
|
||||
assert_eq!(result.block[0].commands[0].name.span, span(10, 13));
|
||||
let (result, err) = lite_parse("foo", 10);
|
||||
assert!(err.is_none());
|
||||
assert_eq!(result.block[0].pipelines.len(), 1);
|
||||
assert_eq!(result.block[0].pipelines[0].commands.len(), 1);
|
||||
assert_eq!(result.block[0].pipelines[0].commands[0].parts.len(), 1);
|
||||
assert_eq!(
|
||||
result.block[0].pipelines[0].commands[0].parts[0].span,
|
||||
span(10, 13)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn incomplete_result() {
|
||||
let result = lite_parse("my_command \"foo' --test", 10).unwrap_err();
|
||||
assert!(matches!(result.cause.reason(), nu_errors::ParseErrorReason::Eof { .. }));
|
||||
let (result, err) = lite_parse("my_command \"foo' --test", 10);
|
||||
assert!(matches!(err.unwrap().reason(), nu_errors::ParseErrorReason::Eof { .. }));
|
||||
|
||||
let result = result.partial.unwrap();
|
||||
assert_eq!(result.block.len(), 1);
|
||||
assert_eq!(result.block[0].commands.len(), 1);
|
||||
assert_eq!(result.block[0].commands[0].name.item, "my_command");
|
||||
assert_eq!(result.block[0].commands[0].args.len(), 1);
|
||||
assert_eq!(result.block[0].commands[0].args[0].item, "\"foo' --test\"");
|
||||
assert_eq!(result.block[0].pipelines.len(), 1);
|
||||
assert_eq!(result.block[0].pipelines[0].commands.len(), 1);
|
||||
assert_eq!(result.block[0].pipelines[0].commands[0].parts.len(), 2);
|
||||
|
||||
assert_eq!(
|
||||
result.block[0].pipelines[0].commands[0].parts[0].item,
|
||||
"my_command"
|
||||
);
|
||||
assert_eq!(
|
||||
result.block[0].pipelines[0].commands[0].parts[1].item,
|
||||
"\"foo' --test\""
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -388,9 +388,9 @@ fn parse_invocation(
|
||||
.collect();
|
||||
|
||||
// We haven't done much with the inner string, so let's go ahead and work with it
|
||||
let lite_block = match lite_parse(&string, lite_arg.span.start() + 2) {
|
||||
Ok(lp) => lp,
|
||||
Err(e) => return (garbage(lite_arg.span), Some(e.cause)),
|
||||
let (lite_block, err) = lite_parse(&string, lite_arg.span.start() + 2);
|
||||
if err.is_some() {
|
||||
return (garbage(lite_arg.span), err);
|
||||
};
|
||||
|
||||
let classified_block = classify_block(&lite_block, registry);
|
||||
@ -641,38 +641,22 @@ fn parse_list(
|
||||
}
|
||||
let lite_pipeline = &lite_block.block[0];
|
||||
let mut output = vec![];
|
||||
for lite_inner in &lite_pipeline.commands {
|
||||
let item = if lite_inner.name.ends_with(',') {
|
||||
let mut str: String = lite_inner.name.item.clone();
|
||||
str.pop();
|
||||
str.spanned(Span::new(
|
||||
lite_inner.name.span.start(),
|
||||
lite_inner.name.span.end() - 1,
|
||||
))
|
||||
} else {
|
||||
lite_inner.name.clone()
|
||||
};
|
||||
for lite_pipeline in &lite_pipeline.pipelines {
|
||||
for lite_inner in &lite_pipeline.commands {
|
||||
for part in &lite_inner.parts {
|
||||
let item = if part.ends_with(',') {
|
||||
let mut str: String = part.item.clone();
|
||||
str.pop();
|
||||
str.spanned(Span::new(part.span.start(), part.span.end() - 1))
|
||||
} else {
|
||||
part.clone()
|
||||
};
|
||||
let (part, err) = parse_arg(SyntaxShape::Any, registry, &item);
|
||||
output.push(part);
|
||||
|
||||
let (arg, err) = parse_arg(SyntaxShape::Any, registry, &item);
|
||||
|
||||
output.push(arg);
|
||||
if error.is_none() {
|
||||
error = err;
|
||||
}
|
||||
|
||||
for arg in &lite_inner.args {
|
||||
let item = if arg.ends_with(',') {
|
||||
let mut str: String = arg.item.clone();
|
||||
str.pop();
|
||||
str.spanned(Span::new(arg.span.start(), arg.span.end() - 1))
|
||||
} else {
|
||||
arg.clone()
|
||||
};
|
||||
let (arg, err) = parse_arg(SyntaxShape::Any, registry, &item);
|
||||
output.push(arg);
|
||||
|
||||
if error.is_none() {
|
||||
error = err;
|
||||
if error.is_none() {
|
||||
error = err;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -711,18 +695,19 @@ fn parse_table(
|
||||
let mut output = vec![];
|
||||
|
||||
// Header
|
||||
let lite_pipeline = &lite_block.block[0];
|
||||
let lite_group = &lite_block.block[0];
|
||||
let lite_pipeline = &lite_group.pipelines[0];
|
||||
let lite_inner = &lite_pipeline.commands[0];
|
||||
|
||||
let (string, err) = verify_and_strip(&lite_inner.name, '[', ']');
|
||||
let (string, err) = verify_and_strip(&lite_inner.parts[0], '[', ']');
|
||||
if error.is_none() {
|
||||
error = err;
|
||||
}
|
||||
|
||||
let lite_header = match lite_parse(&string, lite_inner.name.span.start() + 1) {
|
||||
Ok(lb) => lb,
|
||||
Err(e) => return (garbage(lite_inner.name.span), Some(e.cause)),
|
||||
};
|
||||
let (lite_header, err) = lite_parse(&string, lite_inner.parts[0].span.start() + 1);
|
||||
if err.is_some() {
|
||||
return (garbage(lite_inner.span()), err);
|
||||
}
|
||||
|
||||
let (headers, err) = parse_list(&lite_header, registry);
|
||||
if error.is_none() {
|
||||
@ -730,34 +715,18 @@ fn parse_table(
|
||||
}
|
||||
|
||||
// Cells
|
||||
let lite_rows = &lite_block.block[1];
|
||||
let lite_rows = &lite_group.pipelines[1];
|
||||
let lite_cells = &lite_rows.commands[0];
|
||||
|
||||
let (string, err) = verify_and_strip(&lite_cells.name, '[', ']');
|
||||
if error.is_none() {
|
||||
error = err;
|
||||
}
|
||||
|
||||
let lite_cell = match lite_parse(&string, lite_cells.name.span.start() + 1) {
|
||||
Ok(lb) => lb,
|
||||
Err(e) => return (garbage(lite_cells.name.span), Some(e.cause)),
|
||||
};
|
||||
|
||||
let (inner_cell, err) = parse_list(&lite_cell, registry);
|
||||
if error.is_none() {
|
||||
error = err;
|
||||
}
|
||||
output.push(inner_cell);
|
||||
|
||||
for arg in &lite_cells.args {
|
||||
for arg in &lite_cells.parts {
|
||||
let (string, err) = verify_and_strip(&arg, '[', ']');
|
||||
if error.is_none() {
|
||||
error = err;
|
||||
}
|
||||
let lite_cell = match lite_parse(&string, arg.span.start() + 1) {
|
||||
Ok(lb) => lb,
|
||||
Err(e) => return (garbage(arg.span), Some(e.cause)),
|
||||
};
|
||||
let (lite_cell, err) = lite_parse(&string, arg.span.start() + 1);
|
||||
if err.is_some() {
|
||||
return (garbage(arg.span), err);
|
||||
}
|
||||
let (inner_cell, err) = parse_list(&lite_cell, registry);
|
||||
if error.is_none() {
|
||||
error = err;
|
||||
@ -880,24 +849,25 @@ fn parse_arg(
|
||||
let string: String = chars.collect();
|
||||
|
||||
// We haven't done much with the inner string, so let's go ahead and work with it
|
||||
let lite_block = match lite_parse(&string, lite_arg.span.start() + 1) {
|
||||
Ok(lb) => lb,
|
||||
Err(e) => return (garbage(lite_arg.span), Some(e.cause)),
|
||||
};
|
||||
let (lite_block, err) = lite_parse(&string, lite_arg.span.start() + 1);
|
||||
if err.is_some() {
|
||||
return (garbage(lite_arg.span), err);
|
||||
}
|
||||
let lite_groups = &lite_block.block;
|
||||
|
||||
if lite_block.block.is_empty() {
|
||||
if lite_groups.is_empty() {
|
||||
return (
|
||||
SpannedExpression::new(Expression::List(vec![]), lite_arg.span),
|
||||
None,
|
||||
);
|
||||
}
|
||||
if lite_block.block.len() == 1 {
|
||||
if lite_groups[0].pipelines.len() == 1 {
|
||||
let (items, err) = parse_list(&lite_block, registry);
|
||||
(
|
||||
SpannedExpression::new(Expression::List(items), lite_arg.span),
|
||||
err,
|
||||
)
|
||||
} else if lite_block.block.len() == 2 {
|
||||
} else if lite_groups[0].pipelines.len() == 2 {
|
||||
parse_table(&lite_block, registry, lite_arg.span)
|
||||
} else {
|
||||
(
|
||||
@ -926,10 +896,10 @@ fn parse_arg(
|
||||
let string: String = chars.collect();
|
||||
|
||||
// We haven't done much with the inner string, so let's go ahead and work with it
|
||||
let lite_block = match lite_parse(&string, lite_arg.span.start() + 1) {
|
||||
Ok(lp) => lp,
|
||||
Err(e) => return (garbage(lite_arg.span), Some(e.cause)),
|
||||
};
|
||||
let (lite_block, err) = lite_parse(&string, lite_arg.span.start() + 1);
|
||||
if err.is_some() {
|
||||
return (garbage(lite_arg.span), err);
|
||||
}
|
||||
|
||||
let classified_block = classify_block(&lite_block, registry);
|
||||
let error = classified_block.failed;
|
||||
@ -1147,10 +1117,10 @@ fn parse_parenthesized_expression(
|
||||
let string: String = chars.collect();
|
||||
|
||||
// We haven't done much with the inner string, so let's go ahead and work with it
|
||||
let lite_block = match lite_parse(&string, lite_arg.span.start() + 1) {
|
||||
Ok(lb) => lb,
|
||||
Err(e) => return (garbage(lite_arg.span), Some(e.cause)),
|
||||
};
|
||||
let (lite_block, err) = lite_parse(&string, lite_arg.span.start() + 1);
|
||||
if err.is_some() {
|
||||
return (garbage(lite_arg.span), err);
|
||||
}
|
||||
|
||||
if lite_block.block.len() != 1 {
|
||||
return (
|
||||
@ -1162,9 +1132,10 @@ fn parse_parenthesized_expression(
|
||||
let mut lite_pipeline = lite_block.block[0].clone();
|
||||
|
||||
let mut collection = vec![];
|
||||
for lite_cmd in lite_pipeline.commands.iter_mut() {
|
||||
collection.push(lite_cmd.name.clone());
|
||||
collection.append(&mut lite_cmd.args);
|
||||
for lite_pipeline in lite_pipeline.pipelines.iter_mut() {
|
||||
for lite_cmd in lite_pipeline.commands.iter_mut() {
|
||||
collection.append(&mut lite_cmd.parts);
|
||||
}
|
||||
}
|
||||
let (_, expr, err) =
|
||||
parse_math_expression(0, &collection[..], registry, shorthand_mode);
|
||||
@ -1351,23 +1322,23 @@ fn parse_positional_argument(
|
||||
// A condition can take up multiple arguments, as we build the operation as <arg> <operator> <arg>
|
||||
// We need to do this here because in parse_arg, we have access to only one arg at a time
|
||||
|
||||
if idx < lite_cmd.args.len() {
|
||||
if lite_cmd.args[idx].item.starts_with('{') {
|
||||
if idx < lite_cmd.parts.len() {
|
||||
if lite_cmd.parts[idx].item.starts_with('{') {
|
||||
// It's an explicit math expression, so parse it deeper in
|
||||
let (arg, err) = parse_arg(SyntaxShape::Math, registry, &lite_cmd.args[idx]);
|
||||
let (arg, err) = parse_arg(SyntaxShape::Math, registry, &lite_cmd.parts[idx]);
|
||||
if error.is_none() {
|
||||
error = err;
|
||||
}
|
||||
arg
|
||||
} else {
|
||||
let end_idx = if lite_cmd.args.len() > remaining_positionals {
|
||||
lite_cmd.args.len() - remaining_positionals
|
||||
let end_idx = if (lite_cmd.parts.len() - 1) > remaining_positionals {
|
||||
lite_cmd.parts.len() - remaining_positionals
|
||||
} else {
|
||||
lite_cmd.args.len()
|
||||
lite_cmd.parts.len()
|
||||
};
|
||||
|
||||
let (new_idx, arg, err) =
|
||||
parse_math_expression(idx, &lite_cmd.args[idx..end_idx], registry, true);
|
||||
parse_math_expression(idx, &lite_cmd.parts[idx..end_idx], registry, true);
|
||||
|
||||
let span = arg.span;
|
||||
let mut commands = hir::Commands::new(span);
|
||||
@ -1386,7 +1357,7 @@ fn parse_positional_argument(
|
||||
} else {
|
||||
if error.is_none() {
|
||||
error = Some(ParseError::argument_error(
|
||||
lite_cmd.name.clone(),
|
||||
lite_cmd.parts[0].clone(),
|
||||
ArgumentError::MissingMandatoryPositional("condition".into()),
|
||||
))
|
||||
}
|
||||
@ -1394,7 +1365,7 @@ fn parse_positional_argument(
|
||||
}
|
||||
}
|
||||
PositionalType::Mandatory(_, shape) | PositionalType::Optional(_, shape) => {
|
||||
let (arg, err) = parse_arg(*shape, registry, &lite_cmd.args[idx]);
|
||||
let (arg, err) = parse_arg(*shape, registry, &lite_cmd.parts[idx]);
|
||||
if error.is_none() {
|
||||
error = err;
|
||||
}
|
||||
@ -1416,14 +1387,17 @@ fn parse_internal_command(
|
||||
) -> (InternalCommand, Option<ParseError>) {
|
||||
// This is a known internal command, so we need to work with the arguments and parse them according to the expected types
|
||||
|
||||
let (name, name_span) = if idx == 0 {
|
||||
(lite_cmd.name.item.clone(), lite_cmd.name.span)
|
||||
} else {
|
||||
(
|
||||
format!("{} {}", lite_cmd.name.item, lite_cmd.args[0].item),
|
||||
Span::new(lite_cmd.name.span.start(), lite_cmd.args[0].span.end()),
|
||||
)
|
||||
};
|
||||
let (name, name_span) = (
|
||||
lite_cmd.parts[0..(idx + 1)]
|
||||
.iter()
|
||||
.map(|x| x.item.clone())
|
||||
.collect::<Vec<String>>()
|
||||
.join(" "),
|
||||
Span::new(
|
||||
lite_cmd.parts[0].span.start(),
|
||||
lite_cmd.parts[idx].span.end(),
|
||||
),
|
||||
);
|
||||
|
||||
let mut internal_command = InternalCommand::new(name, name_span, lite_cmd.span());
|
||||
internal_command.args.set_initial_flags(&signature);
|
||||
@ -1432,32 +1406,33 @@ fn parse_internal_command(
|
||||
let mut named = NamedArguments::new();
|
||||
let mut positional = vec![];
|
||||
let mut error = None;
|
||||
idx += 1; // Start where the arguments begin
|
||||
|
||||
while idx < lite_cmd.args.len() {
|
||||
if lite_cmd.args[idx].item.starts_with('-') && lite_cmd.args[idx].item.len() > 1 {
|
||||
while idx < lite_cmd.parts.len() {
|
||||
if lite_cmd.parts[idx].item.starts_with('-') && lite_cmd.parts[idx].item.len() > 1 {
|
||||
let (named_types, err) =
|
||||
get_flags_from_flag(&signature, &lite_cmd.name, &lite_cmd.args[idx]);
|
||||
get_flags_from_flag(&signature, &lite_cmd.parts[0], &lite_cmd.parts[idx]);
|
||||
|
||||
if err.is_none() {
|
||||
for (full_name, named_type) in &named_types {
|
||||
match named_type {
|
||||
NamedType::Mandatory(_, shape) | NamedType::Optional(_, shape) => {
|
||||
if idx == lite_cmd.args.len() {
|
||||
if idx == lite_cmd.parts.len() {
|
||||
// Oops, we're missing the argument to our named argument
|
||||
if error.is_none() {
|
||||
error = Some(ParseError::argument_error(
|
||||
lite_cmd.name.clone(),
|
||||
lite_cmd.parts[0].clone(),
|
||||
ArgumentError::MissingValueForName(format!("{:?}", shape)),
|
||||
));
|
||||
}
|
||||
} else {
|
||||
idx += 1;
|
||||
if lite_cmd.args.len() > idx {
|
||||
if lite_cmd.parts.len() > idx {
|
||||
let (arg, err) =
|
||||
parse_arg(*shape, registry, &lite_cmd.args[idx]);
|
||||
parse_arg(*shape, registry, &lite_cmd.parts[idx]);
|
||||
named.insert_mandatory(
|
||||
full_name.clone(),
|
||||
lite_cmd.args[idx - 1].span,
|
||||
lite_cmd.parts[idx - 1].span,
|
||||
arg,
|
||||
);
|
||||
|
||||
@ -1466,7 +1441,7 @@ fn parse_internal_command(
|
||||
}
|
||||
} else if error.is_none() {
|
||||
error = Some(ParseError::argument_error(
|
||||
lite_cmd.name.clone(),
|
||||
lite_cmd.parts[0].clone(),
|
||||
ArgumentError::MissingValueForName(full_name.to_owned()),
|
||||
));
|
||||
}
|
||||
@ -1475,13 +1450,13 @@ fn parse_internal_command(
|
||||
NamedType::Switch(_) => {
|
||||
named.insert_switch(
|
||||
full_name.clone(),
|
||||
Some(Flag::new(FlagKind::Longhand, lite_cmd.args[idx].span)),
|
||||
Some(Flag::new(FlagKind::Longhand, lite_cmd.parts[idx].span)),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
positional.push(garbage(lite_cmd.args[idx].span));
|
||||
positional.push(garbage(lite_cmd.parts[idx].span));
|
||||
|
||||
if error.is_none() {
|
||||
error = err;
|
||||
@ -1506,7 +1481,7 @@ fn parse_internal_command(
|
||||
positional.push(arg);
|
||||
current_positional += 1;
|
||||
} else if let Some((rest_type, _)) = &signature.rest_positional {
|
||||
let (arg, err) = parse_arg(*rest_type, registry, &lite_cmd.args[idx]);
|
||||
let (arg, err) = parse_arg(*rest_type, registry, &lite_cmd.parts[idx]);
|
||||
if error.is_none() {
|
||||
error = err;
|
||||
}
|
||||
@ -1514,12 +1489,12 @@ fn parse_internal_command(
|
||||
positional.push(arg);
|
||||
current_positional += 1;
|
||||
} else {
|
||||
positional.push(garbage(lite_cmd.args[idx].span));
|
||||
positional.push(garbage(lite_cmd.parts[idx].span));
|
||||
|
||||
if error.is_none() {
|
||||
error = Some(ParseError::argument_error(
|
||||
lite_cmd.name.clone(),
|
||||
ArgumentError::UnexpectedArgument(lite_cmd.args[idx].clone()),
|
||||
lite_cmd.parts[0].clone(),
|
||||
ArgumentError::UnexpectedArgument(lite_cmd.parts[idx].clone()),
|
||||
));
|
||||
}
|
||||
}
|
||||
@ -1539,7 +1514,7 @@ fn parse_internal_command(
|
||||
if !named.named.contains_key("help") {
|
||||
let (_, name) = &signature.positional[positional.len()];
|
||||
error = Some(ParseError::argument_error(
|
||||
lite_cmd.name.clone(),
|
||||
lite_cmd.parts[0].clone(),
|
||||
ArgumentError::MissingMandatoryPositional(name.to_owned()),
|
||||
));
|
||||
}
|
||||
@ -1568,9 +1543,11 @@ fn classify_pipeline(
|
||||
|
||||
let mut iter = lite_pipeline.commands.iter().peekable();
|
||||
while let Some(lite_cmd) = iter.next() {
|
||||
if lite_cmd.name.item.starts_with('^') {
|
||||
let name = lite_cmd
|
||||
.name
|
||||
if lite_cmd.parts.is_empty() {
|
||||
continue;
|
||||
}
|
||||
if lite_cmd.parts[0].item.starts_with('^') {
|
||||
let name = lite_cmd.parts[0]
|
||||
.clone()
|
||||
.map(|v| v.chars().skip(1).collect::<String>());
|
||||
// TODO this is the same as the `else` branch below, only the name differs. Find a way
|
||||
@ -1584,7 +1561,7 @@ fn classify_pipeline(
|
||||
}
|
||||
args.push(name);
|
||||
|
||||
for lite_arg in &lite_cmd.args {
|
||||
for lite_arg in &lite_cmd.parts[1..] {
|
||||
let (expr, err) = parse_external_arg(registry, lite_arg);
|
||||
if error.is_none() {
|
||||
error = err;
|
||||
@ -1610,15 +1587,16 @@ fn classify_pipeline(
|
||||
},
|
||||
},
|
||||
}))
|
||||
} else if lite_cmd.name.item == "=" {
|
||||
let expr = if !lite_cmd.args.is_empty() {
|
||||
let (_, expr, err) = parse_math_expression(0, &lite_cmd.args[0..], registry, false);
|
||||
} else if lite_cmd.parts[0].item == "=" {
|
||||
let expr = if lite_cmd.parts.len() > 1 {
|
||||
let (_, expr, err) =
|
||||
parse_math_expression(0, &lite_cmd.parts[1..], registry, false);
|
||||
error = error.or(err);
|
||||
expr
|
||||
} else {
|
||||
error = error.or_else(|| {
|
||||
Some(ParseError::argument_error(
|
||||
lite_cmd.name.clone(),
|
||||
lite_cmd.parts[0].clone(),
|
||||
ArgumentError::MissingMandatoryPositional("an expression".into()),
|
||||
))
|
||||
});
|
||||
@ -1626,11 +1604,12 @@ fn classify_pipeline(
|
||||
};
|
||||
commands.push(ClassifiedCommand::Expr(Box::new(expr)))
|
||||
} else {
|
||||
if !lite_cmd.args.is_empty() {
|
||||
if lite_cmd.parts.len() > 1 {
|
||||
// Check if it's a sub-command
|
||||
if let Some(signature) =
|
||||
registry.get(&format!("{} {}", lite_cmd.name.item, lite_cmd.args[0].item))
|
||||
{
|
||||
if let Some(signature) = registry.get(&format!(
|
||||
"{} {}",
|
||||
lite_cmd.parts[0].item, lite_cmd.parts[1].item
|
||||
)) {
|
||||
let (mut internal_command, err) =
|
||||
parse_internal_command(&lite_cmd, registry, &signature, 1);
|
||||
|
||||
@ -1646,7 +1625,7 @@ fn classify_pipeline(
|
||||
}
|
||||
|
||||
// Check if it's an internal command
|
||||
if let Some(signature) = registry.get(&lite_cmd.name.item) {
|
||||
if let Some(signature) = registry.get(&lite_cmd.parts[0].item) {
|
||||
let (mut internal_command, err) =
|
||||
parse_internal_command(&lite_cmd, registry, &signature, 0);
|
||||
|
||||
@ -1660,7 +1639,7 @@ fn classify_pipeline(
|
||||
continue;
|
||||
}
|
||||
|
||||
let name = lite_cmd.name.clone().map(|v| {
|
||||
let name = lite_cmd.parts[0].clone().map(|v| {
|
||||
let trimmed = trim_quotes(&v);
|
||||
expand_path(&trimmed).to_string()
|
||||
});
|
||||
@ -1674,7 +1653,7 @@ fn classify_pipeline(
|
||||
}
|
||||
args.push(name);
|
||||
|
||||
for lite_arg in &lite_cmd.args {
|
||||
for lite_arg in &lite_cmd.parts[1..] {
|
||||
let (expr, err) = parse_external_arg(registry, lite_arg);
|
||||
if error.is_none() {
|
||||
error = err;
|
||||
@ -1712,33 +1691,31 @@ fn expand_shorthand_forms(
|
||||
lite_pipeline: &LitePipeline,
|
||||
) -> (LitePipeline, Option<SpannedKeyValue>, Option<ParseError>) {
|
||||
if !lite_pipeline.commands.is_empty() {
|
||||
if lite_pipeline.commands[0].name.item == "=" {
|
||||
if lite_pipeline.commands[0].parts[0].item == "=" {
|
||||
(lite_pipeline.clone(), None, None)
|
||||
} else if lite_pipeline.commands[0].name.contains('=') {
|
||||
let assignment: Vec<_> = lite_pipeline.commands[0].name.split('=').collect();
|
||||
} else if lite_pipeline.commands[0].parts[0].contains('=') {
|
||||
let assignment: Vec<_> = lite_pipeline.commands[0].parts[0].split('=').collect();
|
||||
if assignment.len() != 2 {
|
||||
(
|
||||
lite_pipeline.clone(),
|
||||
None,
|
||||
Some(ParseError::mismatch(
|
||||
"environment variable assignment",
|
||||
lite_pipeline.commands[0].name.clone(),
|
||||
lite_pipeline.commands[0].parts[0].clone(),
|
||||
)),
|
||||
)
|
||||
} else {
|
||||
let original_span = lite_pipeline.commands[0].name.span;
|
||||
let original_span = lite_pipeline.commands[0].parts[0].span;
|
||||
let env_value = trim_quotes(assignment[1]);
|
||||
|
||||
let (variable_name, value) = (assignment[0], env_value);
|
||||
let mut lite_pipeline = lite_pipeline.clone();
|
||||
|
||||
if !lite_pipeline.commands[0].args.is_empty() {
|
||||
let new_lite_command_name = lite_pipeline.commands[0].args[0].clone();
|
||||
let mut new_lite_command_args = lite_pipeline.commands[0].args.clone();
|
||||
new_lite_command_args.remove(0);
|
||||
if !lite_pipeline.commands[0].parts.len() > 1 {
|
||||
let mut new_lite_command_parts = lite_pipeline.commands[0].parts.clone();
|
||||
new_lite_command_parts.remove(0);
|
||||
|
||||
lite_pipeline.commands[0].name = new_lite_command_name;
|
||||
lite_pipeline.commands[0].args = new_lite_command_args;
|
||||
lite_pipeline.commands[0].parts = new_lite_command_parts;
|
||||
|
||||
(
|
||||
lite_pipeline,
|
||||
@ -1754,7 +1731,7 @@ fn expand_shorthand_forms(
|
||||
None,
|
||||
Some(ParseError::mismatch(
|
||||
"a command following variable",
|
||||
lite_pipeline.commands[0].name.clone(),
|
||||
lite_pipeline.commands[0].parts[0].clone(),
|
||||
)),
|
||||
)
|
||||
}
|
||||
@ -1771,61 +1748,63 @@ pub fn classify_block(lite_block: &LiteBlock, registry: &dyn SignatureRegistry)
|
||||
let mut command_list = vec![];
|
||||
|
||||
let mut error = None;
|
||||
for lite_pipeline in &lite_block.block {
|
||||
let (lite_pipeline, vars, err) = expand_shorthand_forms(lite_pipeline);
|
||||
if error.is_none() {
|
||||
error = err;
|
||||
}
|
||||
|
||||
let (pipeline, err) = classify_pipeline(&lite_pipeline, registry);
|
||||
|
||||
let pipeline = if let Some(vars) = vars {
|
||||
let span = pipeline.commands.span;
|
||||
let block = hir::Block::new(vec![], vec![pipeline.commands.clone()], span);
|
||||
let mut call = hir::Call::new(
|
||||
Box::new(SpannedExpression {
|
||||
expr: Expression::string("with-env".to_string()),
|
||||
span,
|
||||
}),
|
||||
span,
|
||||
);
|
||||
call.positional = Some(vec![
|
||||
SpannedExpression {
|
||||
expr: Expression::List(vec![
|
||||
SpannedExpression {
|
||||
expr: Expression::string(vars.0.item),
|
||||
span: vars.0.span,
|
||||
},
|
||||
SpannedExpression {
|
||||
expr: Expression::string(vars.1.item),
|
||||
span: vars.1.span,
|
||||
},
|
||||
]),
|
||||
span: Span::new(vars.0.span.start(), vars.1.span.end()),
|
||||
},
|
||||
SpannedExpression {
|
||||
expr: Expression::Block(block),
|
||||
span,
|
||||
},
|
||||
]);
|
||||
let classified_with_env = ClassifiedCommand::Internal(InternalCommand {
|
||||
name: "with-env".to_string(),
|
||||
name_span: Span::unknown(),
|
||||
args: call,
|
||||
});
|
||||
ClassifiedPipeline {
|
||||
commands: Commands {
|
||||
list: vec![classified_with_env],
|
||||
span,
|
||||
},
|
||||
for lite_group in &lite_block.block {
|
||||
for lite_pipeline in &lite_group.pipelines {
|
||||
let (lite_pipeline, vars, err) = expand_shorthand_forms(lite_pipeline);
|
||||
if error.is_none() {
|
||||
error = err;
|
||||
}
|
||||
} else {
|
||||
pipeline
|
||||
};
|
||||
|
||||
command_list.push(pipeline.commands);
|
||||
if error.is_none() {
|
||||
error = err;
|
||||
let (pipeline, err) = classify_pipeline(&lite_pipeline, registry);
|
||||
|
||||
let pipeline = if let Some(vars) = vars {
|
||||
let span = pipeline.commands.span;
|
||||
let block = hir::Block::new(vec![], vec![pipeline.commands.clone()], span);
|
||||
let mut call = hir::Call::new(
|
||||
Box::new(SpannedExpression {
|
||||
expr: Expression::string("with-env".to_string()),
|
||||
span,
|
||||
}),
|
||||
span,
|
||||
);
|
||||
call.positional = Some(vec![
|
||||
SpannedExpression {
|
||||
expr: Expression::List(vec![
|
||||
SpannedExpression {
|
||||
expr: Expression::string(vars.0.item),
|
||||
span: vars.0.span,
|
||||
},
|
||||
SpannedExpression {
|
||||
expr: Expression::string(vars.1.item),
|
||||
span: vars.1.span,
|
||||
},
|
||||
]),
|
||||
span: Span::new(vars.0.span.start(), vars.1.span.end()),
|
||||
},
|
||||
SpannedExpression {
|
||||
expr: Expression::Block(block),
|
||||
span,
|
||||
},
|
||||
]);
|
||||
let classified_with_env = ClassifiedCommand::Internal(InternalCommand {
|
||||
name: "with-env".to_string(),
|
||||
name_span: Span::unknown(),
|
||||
args: call,
|
||||
});
|
||||
ClassifiedPipeline {
|
||||
commands: Commands {
|
||||
list: vec![classified_with_env],
|
||||
span,
|
||||
},
|
||||
}
|
||||
} else {
|
||||
pipeline
|
||||
};
|
||||
|
||||
command_list.push(pipeline.commands);
|
||||
if error.is_none() {
|
||||
error = err;
|
||||
}
|
||||
}
|
||||
}
|
||||
let block = Block::new(vec![], command_list, lite_block.span());
|
||||
|
@ -4,17 +4,17 @@ description = "Nushell Plugin"
|
||||
edition = "2018"
|
||||
license = "MIT"
|
||||
name = "nu-plugin"
|
||||
version = "0.22.0"
|
||||
version = "0.23.0"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
nu-errors = {path = "../nu-errors", version = "0.22.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.22.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.22.0"}
|
||||
nu-test-support = {path = "../nu-test-support", version = "0.22.0"}
|
||||
nu-value-ext = {path = "../nu-value-ext", version = "0.22.0"}
|
||||
nu-errors = {path = "../nu-errors", version = "0.23.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.23.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.23.0"}
|
||||
nu-test-support = {path = "../nu-test-support", version = "0.23.0"}
|
||||
nu-value-ext = {path = "../nu-value-ext", version = "0.23.0"}
|
||||
|
||||
bigdecimal = {version = "0.2.0", features = ["serde"]}
|
||||
indexmap = {version = "1.6.0", features = ["serde-1"]}
|
||||
|
@ -4,7 +4,7 @@ description = "Core values and protocols for Nushell"
|
||||
edition = "2018"
|
||||
license = "MIT"
|
||||
name = "nu-protocol"
|
||||
version = "0.22.0"
|
||||
version = "0.23.0"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
@ -17,8 +17,8 @@ derive-new = "0.5.8"
|
||||
getset = "0.1.1"
|
||||
indexmap = {version = "1.6.0", features = ["serde-1"]}
|
||||
log = "0.4.11"
|
||||
nu-errors = {path = "../nu-errors", version = "0.22.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.22.0"}
|
||||
nu-errors = {path = "../nu-errors", version = "0.23.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.23.0"}
|
||||
num-bigint = {version = "0.3.0", features = ["serde"]}
|
||||
num-integer = "0.1.43"
|
||||
num-traits = "0.2.12"
|
||||
|
@@ -400,6 +400,14 @@ impl Value {
        }
    }

    /// View the Value as signed 64-bit, if possible
    pub fn as_i64(&self) -> Result<i64, ShellError> {
        match &self.value {
            UntaggedValue::Primitive(primitive) => primitive.as_i64(self.tag.span),
            _ => Err(ShellError::type_error("integer", self.spanned_type_name())),
        }
    }

    /// View the Value as boolean, if possible
    pub fn as_bool(&self) -> Result<bool, ShellError> {
        match &self.value {
|
||||
|
@@ -69,14 +69,37 @@ impl Primitive {
                ShellError::range_error(
                    ExpectedRange::U64,
                    &format!("{}", int).spanned(span),
                    "converting an integer into a 64-bit integer",
                    "converting an integer into an unsigned 64-bit integer",
                )
            }),
            Primitive::Decimal(decimal) => decimal.to_u64().ok_or_else(|| {
                ShellError::range_error(
                    ExpectedRange::U64,
                    &format!("{}", decimal).spanned(span),
                    "converting a decimal into a 64-bit integer",
                    "converting a decimal into an unsigned 64-bit integer",
                )
            }),
            other => Err(ShellError::type_error(
                "number",
                other.type_name().spanned(span),
            )),
        }
    }

    pub fn as_i64(&self, span: Span) -> Result<i64, ShellError> {
        match self {
            Primitive::Int(int) => int.to_i64().ok_or_else(|| {
                ShellError::range_error(
                    ExpectedRange::I64,
                    &format!("{}", int).spanned(span),
                    "converting an integer into a signed 64-bit integer",
                )
            }),
            Primitive::Decimal(decimal) => decimal.to_i64().ok_or_else(|| {
                ShellError::range_error(
                    ExpectedRange::I64,
                    &format!("{}", decimal).spanned(span),
                    "converting a decimal into a signed 64-bit integer",
                )
            }),
            other => Err(ShellError::type_error(
@@ -284,7 +307,7 @@ pub fn format_primitive(primitive: &Primitive, field_name: Option<&String>) -> S
    );

    for member in members {
        f.push_str(".");
        f.push('.');
        f.push_str(&member.display())
    }
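
The new Primitive::as_i64 mirrors the existing as_u64: the conversion goes through num-traits' ToPrimitive, and an out-of-range big integer becomes a range error instead of silently wrapping. A small sketch of that conversion pattern with a simplified error type (not the real ShellError), assuming the num-bigint and num-traits crates already used by nu-protocol:

use num_bigint::BigInt;
use num_traits::ToPrimitive;

// Simplified stand-in for ShellError::range_error (illustration only).
#[derive(Debug)]
struct RangeError {
    reason: &'static str,
    value: String,
}

fn bigint_as_i64(int: &BigInt) -> Result<i64, RangeError> {
    // to_i64 returns None when the value does not fit in a signed 64-bit int.
    int.to_i64().ok_or_else(|| RangeError {
        reason: "converting an integer into a signed 64-bit integer",
        value: int.to_string(),
    })
}

fn main() {
    let fits = BigInt::from(42);
    let too_big = BigInt::from(i64::MAX) + BigInt::from(1);

    assert_eq!(bigint_as_i64(&fits).unwrap(), 42);
    assert!(bigint_as_i64(&too_big).is_err());
    println!("{:?}", bigint_as_i64(&too_big));
}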
|
||||
|
||||
|
@ -4,7 +4,7 @@ description = "A source string characterizer for Nushell"
|
||||
edition = "2018"
|
||||
license = "MIT"
|
||||
name = "nu-source"
|
||||
version = "0.22.0"
|
||||
version = "0.23.0"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
@ -3,7 +3,6 @@ use std::cmp::Ordering;
|
||||
use std::hash::Hash;
|
||||
use std::hash::Hasher;
|
||||
use std::ops::Range;
|
||||
use std::sync::Arc;
|
||||
|
||||
/// A "Text" is like a string except that it can be cheaply cloned.
|
||||
/// You can also "extract" subtexts quite cheaply. You can also deref
|
||||
@ -12,7 +11,7 @@ use std::sync::Arc;
|
||||
/// Used to represent the value of an input file.
|
||||
#[derive(Clone)]
|
||||
pub struct Text {
|
||||
text: Arc<String>,
|
||||
text: String,
|
||||
start: usize,
|
||||
end: usize,
|
||||
}
|
||||
@ -39,11 +38,11 @@ impl Text {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Arc<String>> for Text {
|
||||
fn from(text: Arc<String>) -> Self {
|
||||
impl From<&str> for Text {
|
||||
fn from(text: &str) -> Self {
|
||||
let end = text.len();
|
||||
Self {
|
||||
text,
|
||||
text: text.to_string(),
|
||||
start: 0,
|
||||
end,
|
||||
}
|
||||
@ -58,19 +57,12 @@ impl AsRef<str> for Text {
|
||||
|
||||
impl From<String> for Text {
|
||||
fn from(text: String) -> Self {
|
||||
Text::from(Arc::new(text))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&String> for Text {
|
||||
fn from(text: &String) -> Self {
|
||||
Text::from(text.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for Text {
|
||||
fn from(text: &str) -> Self {
|
||||
Text::from(text.to_string())
|
||||
let end = text.len();
|
||||
Self {
|
||||
text,
|
||||
start: 0,
|
||||
end,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -4,7 +4,7 @@ description = "Nushell table printing"
|
||||
edition = "2018"
|
||||
license = "MIT"
|
||||
name = "nu-table"
|
||||
version = "0.22.0"
|
||||
version = "0.23.0"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
[[bin]]
|
||||
|
@@ -57,7 +57,15 @@ pub fn split_sublines(input: &str) -> Vec<Vec<Subline>> {
        line.split_terminator(' ')
            .map(|x| Subline {
                subline: x,
                width: UnicodeWidthStr::width(x),
                width: {
                    // We've tried UnicodeWidthStr::width(x), UnicodeSegmentation::graphemes(x, true).count()
                    // and x.chars().count() with all types of combinations. Currently, it appears that
                    // getting the max of char count and unicode width seems to produce the best layout.
                    // However, it's not perfect.
                    let c = x.chars().count();
                    let u = UnicodeWidthStr::width(x);
                    std::cmp::max(c, u)
                },
            })
            .collect::<Vec<_>>()
    })
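
The comment above documents the heuristic: a cell's width is taken as the larger of its character count and its Unicode display width. A tiny illustration of why the two measures differ, assuming the unicode-width crate that nu-table already depends on:

use unicode_width::UnicodeWidthStr;

fn cell_width(s: &str) -> usize {
    std::cmp::max(s.chars().count(), UnicodeWidthStr::width(s))
}

fn main() {
    assert_eq!(cell_width("nushell"), 7); // ASCII: both measures agree
    assert_eq!(cell_width("日本"), 4);    // 2 chars, but 4 terminal columns
    println!("ok");
}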
|
||||
|
@ -4,16 +4,16 @@ description = "Support for writing Nushell tests"
|
||||
edition = "2018"
|
||||
license = "MIT"
|
||||
name = "nu-test-support"
|
||||
version = "0.22.0"
|
||||
version = "0.23.0"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
nu-errors = {version = "0.22.0", path = "../nu-errors"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.22.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.22.0"}
|
||||
nu-value-ext = {version = "0.22.0", path = "../nu-value-ext"}
|
||||
nu-errors = {version = "0.23.0", path = "../nu-errors"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.23.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.23.0"}
|
||||
nu-value-ext = {version = "0.23.0", path = "../nu-value-ext"}
|
||||
|
||||
bigdecimal = {version = "0.2.0", features = ["serde"]}
|
||||
chrono = "0.4.15"
|
||||
|
@ -4,15 +4,15 @@ description = "Extension traits for values in Nushell"
|
||||
edition = "2018"
|
||||
license = "MIT"
|
||||
name = "nu-value-ext"
|
||||
version = "0.22.0"
|
||||
version = "0.23.0"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
nu-errors = {path = "../nu-errors", version = "0.22.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.22.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.22.0"}
|
||||
nu-errors = {path = "../nu-errors", version = "0.23.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.23.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.23.0"}
|
||||
|
||||
indexmap = {version = "1.6.0", features = ["serde-1"]}
|
||||
itertools = "0.9.0"
|
||||
|
@ -508,7 +508,7 @@ pub fn forgiving_insert_data_at_column_path(
|
||||
}
|
||||
UnspannedPathMember::Int(int) => {
|
||||
let mut rows = vec![];
|
||||
let size = int.to_usize().unwrap_or_else(|| 0);
|
||||
let size = int.to_usize().unwrap_or(0);
|
||||
|
||||
for _ in 0..=size {
|
||||
rows.push(
|
||||
|
@ -4,7 +4,7 @@ description = "A binary viewer plugin for Nushell"
|
||||
edition = "2018"
|
||||
license = "MIT"
|
||||
name = "nu_plugin_binaryview"
|
||||
version = "0.22.0"
|
||||
version = "0.23.0"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
@ -14,10 +14,10 @@ ansi_term = "0.12.1"
|
||||
crossterm = "0.18"
|
||||
image = {version = "0.22.4", default_features = false, features = ["png_codec", "jpeg"]}
|
||||
neso = "0.5.0"
|
||||
nu-errors = {path = "../nu-errors", version = "0.22.0"}
|
||||
nu-plugin = {path = "../nu-plugin", version = "0.22.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.22.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.22.0"}
|
||||
nu-errors = {path = "../nu-errors", version = "0.23.0"}
|
||||
nu-plugin = {path = "../nu-plugin", version = "0.23.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.23.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.23.0"}
|
||||
pretty-hex = "0.2.0"
|
||||
rawkey = "0.1.3"
|
||||
|
||||
|
@ -149,7 +149,7 @@ impl RenderContext {
|
||||
}
|
||||
}
|
||||
pub fn update(&mut self) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let terminal_size = crossterm::terminal::size().unwrap_or_else(|_| (80, 24));
|
||||
let terminal_size = crossterm::terminal::size().unwrap_or((80, 24));
|
||||
|
||||
if (self.width != terminal_size.0 as usize) || (self.height != terminal_size.1 as usize) {
|
||||
let _ = std::io::stdout().execute(crossterm::cursor::Hide);
|
||||
|
@ -4,19 +4,19 @@ description = "A plugin to display charts"
|
||||
edition = "2018"
|
||||
license = "MIT"
|
||||
name = "nu_plugin_chart"
|
||||
version = "0.22.0"
|
||||
version = "0.23.0"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
nu-cli = {path = "../nu-cli", version = "0.22.0"}
|
||||
nu-data = {path = "../nu-data", version = "0.22.0"}
|
||||
nu-errors = {path = "../nu-errors", version = "0.22.0"}
|
||||
nu-plugin = {path = "../nu-plugin", version = "0.22.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.22.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.22.0"}
|
||||
nu-value-ext = {path = "../nu-value-ext", version = "0.22.0"}
|
||||
nu-cli = {path = "../nu-cli", version = "0.23.0"}
|
||||
nu-data = {path = "../nu-data", version = "0.23.0"}
|
||||
nu-errors = {path = "../nu-errors", version = "0.23.0"}
|
||||
nu-plugin = {path = "../nu-plugin", version = "0.23.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.23.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.23.0"}
|
||||
nu-value-ext = {path = "../nu-value-ext", version = "0.23.0"}
|
||||
|
||||
crossterm = "0.18"
|
||||
tui = {version = "0.12.0", default-features = false, features = ["crossterm"]}
|
||||
|
@ -119,9 +119,8 @@ impl<'a> Line<'a> {
|
||||
.marker(marker)
|
||||
.graph_type(GraphType::Line)
|
||||
.style(
|
||||
Style::default().fg(*DEFAULT_LINE_COLORS
|
||||
.get(idx)
|
||||
.unwrap_or_else(|| &DEFAULT_COLOR)),
|
||||
Style::default()
|
||||
.fg(*DEFAULT_LINE_COLORS.get(idx).unwrap_or(&DEFAULT_COLOR)),
|
||||
)
|
||||
.data(data_series)
|
||||
})
|
||||
|
@ -284,7 +284,7 @@ impl SubCommand {
|
||||
let formatter = if self.format.is_some() {
|
||||
let default = String::from("%b-%Y");
|
||||
|
||||
let string_fmt = self.format.as_ref().unwrap_or_else(|| &default);
|
||||
let string_fmt = self.format.as_ref().unwrap_or(&default);
|
||||
|
||||
Some(nu_data::utils::helpers::date_formatter(
|
||||
string_fmt.to_string(),
|
||||
@ -331,7 +331,7 @@ impl SubCommand {
|
||||
let formatter = if self.format.is_some() {
|
||||
let default = String::from("%b-%Y");
|
||||
|
||||
let string_fmt = self.format.as_ref().unwrap_or_else(|| &default);
|
||||
let string_fmt = self.format.as_ref().unwrap_or(&default);
|
||||
|
||||
Some(nu_data::utils::helpers::date_formatter(
|
||||
string_fmt.to_string(),
|
||||
|
@ -282,7 +282,7 @@ impl SubCommand {
|
||||
let formatter = if self.format.is_some() {
|
||||
let default = String::from("%b-%Y");
|
||||
|
||||
let string_fmt = self.format.as_ref().unwrap_or_else(|| &default);
|
||||
let string_fmt = self.format.as_ref().unwrap_or(&default);
|
||||
|
||||
Some(nu_data::utils::helpers::date_formatter(
|
||||
string_fmt.to_string(),
|
||||
@ -329,7 +329,7 @@ impl SubCommand {
|
||||
let formatter = if self.format.is_some() {
|
||||
let default = String::from("%b-%Y");
|
||||
|
||||
let string_fmt = self.format.as_ref().unwrap_or_else(|| &default);
|
||||
let string_fmt = self.format.as_ref().unwrap_or(&default);
|
||||
|
||||
Some(nu_data::utils::helpers::date_formatter(
|
||||
string_fmt.to_string(),
|
||||
|
@ -4,7 +4,7 @@ description = "A URL fetch plugin for Nushell"
|
||||
edition = "2018"
|
||||
license = "MIT"
|
||||
name = "nu_plugin_fetch"
|
||||
version = "0.22.0"
|
||||
version = "0.23.0"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
@ -12,10 +12,10 @@ doctest = false
|
||||
[dependencies]
|
||||
base64 = "0.12.3"
|
||||
futures = {version = "0.3.5", features = ["compat", "io-compat"]}
|
||||
nu-errors = {path = "../nu-errors", version = "0.22.0"}
|
||||
nu-plugin = {path = "../nu-plugin", version = "0.22.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.22.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.22.0"}
|
||||
nu-errors = {path = "../nu-errors", version = "0.23.0"}
|
||||
nu-plugin = {path = "../nu-plugin", version = "0.23.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.23.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.23.0"}
|
||||
surf = "1.0.3"
|
||||
url = "2.1.1"
|
||||
|
||||
|
@ -4,7 +4,7 @@ description = "A converter plugin to the bson format for Nushell"
|
||||
edition = "2018"
|
||||
license = "MIT"
|
||||
name = "nu_plugin_from_bson"
|
||||
version = "0.22.0"
|
||||
version = "0.23.0"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
@ -12,11 +12,11 @@ doctest = false
|
||||
[dependencies]
|
||||
bigdecimal = "0.2.0"
|
||||
bson = {version = "0.14.1", features = ["decimal128"]}
|
||||
nu-errors = {path = "../nu-errors", version = "0.22.0"}
|
||||
nu-plugin = {path = "../nu-plugin", version = "0.22.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.22.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.22.0"}
|
||||
nu-value-ext = {path = "../nu-value-ext", version = "0.22.0"}
|
||||
nu-errors = {path = "../nu-errors", version = "0.23.0"}
|
||||
nu-plugin = {path = "../nu-plugin", version = "0.23.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.23.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.23.0"}
|
||||
nu-value-ext = {path = "../nu-value-ext", version = "0.23.0"}
|
||||
num-traits = "0.2.12"
|
||||
|
||||
[build-dependencies]
|
||||
|
@ -4,18 +4,18 @@ description = "A converter plugin to the bson format for Nushell"
|
||||
edition = "2018"
|
||||
license = "MIT"
|
||||
name = "nu_plugin_from_sqlite"
|
||||
version = "0.22.0"
|
||||
version = "0.23.0"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
bigdecimal = "0.2.0"
|
||||
nu-errors = {path = "../nu-errors", version = "0.22.0"}
|
||||
nu-plugin = {path = "../nu-plugin", version = "0.22.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.22.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.22.0"}
|
||||
nu-value-ext = {path = "../nu-value-ext", version = "0.22.0"}
|
||||
nu-errors = {path = "../nu-errors", version = "0.23.0"}
|
||||
nu-plugin = {path = "../nu-plugin", version = "0.23.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.23.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.23.0"}
|
||||
nu-value-ext = {path = "../nu-value-ext", version = "0.23.0"}
|
||||
num-traits = "0.2.12"
|
||||
tempfile = "3.1.0"
|
||||
|
||||
|
@ -4,18 +4,18 @@ description = "A version incrementer plugin for Nushell"
|
||||
edition = "2018"
|
||||
license = "MIT"
|
||||
name = "nu_plugin_inc"
|
||||
version = "0.22.0"
|
||||
version = "0.23.0"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
nu-errors = {path = "../nu-errors", version = "0.22.0"}
|
||||
nu-plugin = {path = "../nu-plugin", version = "0.22.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.22.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.22.0"}
|
||||
nu-test-support = {path = "../nu-test-support", version = "0.22.0"}
|
||||
nu-value-ext = {path = "../nu-value-ext", version = "0.22.0"}
|
||||
nu-errors = {path = "../nu-errors", version = "0.23.0"}
|
||||
nu-plugin = {path = "../nu-plugin", version = "0.23.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.23.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.23.0"}
|
||||
nu-test-support = {path = "../nu-test-support", version = "0.23.0"}
|
||||
nu-value-ext = {path = "../nu-value-ext", version = "0.23.0"}
|
||||
|
||||
semver = "0.10.0"
|
||||
|
||||
|
@ -4,16 +4,16 @@ description = "A regex match plugin for Nushell"
|
||||
edition = "2018"
|
||||
license = "MIT"
|
||||
name = "nu_plugin_match"
|
||||
version = "0.22.0"
|
||||
version = "0.23.0"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
nu-errors = {path = "../nu-errors", version = "0.22.0"}
|
||||
nu-plugin = {path = "../nu-plugin", version = "0.22.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.22.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.22.0"}
|
||||
nu-errors = {path = "../nu-errors", version = "0.23.0"}
|
||||
nu-plugin = {path = "../nu-plugin", version = "0.23.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.23.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.23.0"}
|
||||
regex = "1.3.9"
|
||||
|
||||
[build-dependencies]
|
||||
|
@ -4,7 +4,7 @@ description = "An HTTP post plugin for Nushell"
|
||||
edition = "2018"
|
||||
license = "MIT"
|
||||
name = "nu_plugin_post"
|
||||
version = "0.22.0"
|
||||
version = "0.23.0"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
@ -12,10 +12,10 @@ doctest = false
|
||||
[dependencies]
|
||||
base64 = "0.12.3"
|
||||
futures = {version = "0.3.5", features = ["compat", "io-compat"]}
|
||||
nu-errors = {path = "../nu-errors", version = "0.22.0"}
|
||||
nu-plugin = {path = "../nu-plugin", version = "0.22.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.22.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.22.0"}
|
||||
nu-errors = {path = "../nu-errors", version = "0.23.0"}
|
||||
nu-plugin = {path = "../nu-plugin", version = "0.23.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.23.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.23.0"}
|
||||
num-traits = "0.2.12"
|
||||
serde_json = "1.0.57"
|
||||
surf = "1.0.3"
|
||||
|
@ -4,16 +4,16 @@ description = "A process list plugin for Nushell"
|
||||
edition = "2018"
|
||||
license = "MIT"
|
||||
name = "nu_plugin_ps"
|
||||
version = "0.22.0"
|
||||
version = "0.23.0"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
nu-errors = {path = "../nu-errors", version = "0.22.0"}
|
||||
nu-plugin = {path = "../nu-plugin", version = "0.22.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.22.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.22.0"}
|
||||
nu-errors = {path = "../nu-errors", version = "0.23.0"}
|
||||
nu-plugin = {path = "../nu-plugin", version = "0.23.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.23.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.23.0"}
|
||||
|
||||
num-bigint = "0.3.0"
|
||||
|
||||
|
@ -4,17 +4,17 @@ description = "An S3 plugin for Nushell"
|
||||
edition = "2018"
|
||||
license = "MIT"
|
||||
name = "nu_plugin_s3"
|
||||
version = "0.22.0"
|
||||
version = "0.23.0"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
futures = {version = "0.3.5", features = ["compat", "io-compat"]}
|
||||
nu-errors = {path = "../nu-errors", version = "0.22.0"}
|
||||
nu-plugin = {path = "../nu-plugin", version = "0.22.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.22.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.22.0"}
|
||||
nu-errors = {path = "../nu-errors", version = "0.23.0"}
|
||||
nu-plugin = {path = "../nu-plugin", version = "0.23.0"}
|
||||
nu-protocol = {path = "../nu-protocol", version = "0.23.0"}
|
||||
nu-source = {path = "../nu-source", version = "0.23.0"}
|
||||
s3handler = "0.5.0"
|
||||
|
||||
[build-dependencies]
|
||||
|
crates/nu_plugin_selector/Cargo.toml (new file, 20 lines)
@@ -0,0 +1,20 @@
[package]
authors = ["The Nu Project Contributors"]
description = "web scraping using css selector"
edition = "2018"
license = "MIT"
name = "nu_plugin_selector"
version = "0.23.0"

[lib]
doctest = false

[dependencies]
nu-errors = {version = "0.23.0", path = "../nu-errors"}
nu-plugin = {version = "0.23.0", path = "../nu-plugin"}
nu-protocol = {version = "0.23.0", path = "../nu-protocol"}
nu-source = {version = "0.23.0", path = "../nu-source"}
nipper = "0.1.8"

[dev-dependencies]
nu-test-support = {path = "../nu-test-support", version = "0.23.0"}
|
crates/nu_plugin_selector/src/lib.rs (new file, 4 lines)
@@ -0,0 +1,4 @@
mod nu;
mod selector;

pub use selector::Selector;
|
crates/nu_plugin_selector/src/main.rs (new file, 6 lines)
@@ -0,0 +1,6 @@
use nu_plugin::serve_plugin;
use nu_plugin_selector::Selector;

fn main() {
    serve_plugin(&mut Selector::new());
}
|
crates/nu_plugin_selector/src/nu/mod.rs (new file, 56 lines)
@@ -0,0 +1,56 @@
use nu_errors::ShellError;
use nu_plugin::Plugin;
use nu_protocol::{
    CallInfo, Primitive, ReturnSuccess, ReturnValue, Signature, SyntaxShape, UntaggedValue, Value,
};
use nu_source::TaggedItem;

use crate::{selector::begin_selector_query, Selector};

impl Plugin for Selector {
    fn config(&mut self) -> Result<Signature, ShellError> {
        Ok(Signature::build("selector")
            .desc("execute selector query on html/web")
            .required("query", SyntaxShape::String, "selector query")
            .switch("as_html", "return the query output as html", Some('a'))
            .filter())
    }

    fn begin_filter(&mut self, call_info: CallInfo) -> Result<Vec<ReturnValue>, ShellError> {
        let tag = call_info.name_tag;
        let query = call_info.args.nth(0).ok_or_else(|| {
            ShellError::labeled_error(
                "selector query not passed",
                "selector query not passed",
                &tag,
            )
        })?;

        self.query = query.as_string()?;
        self.tag = tag;
        self.as_html = call_info.args.has("as_html");

        Ok(vec![])
    }

    fn filter(&mut self, input: Value) -> Result<Vec<ReturnValue>, ShellError> {
        match input {
            Value {
                value: UntaggedValue::Primitive(Primitive::String(s)),
                ..
            } => Ok(
                begin_selector_query(s, (*self.query).tagged(&self.tag), self.as_html)?
                    .into_iter()
                    .map(ReturnSuccess::value)
                    .collect(),
            ),
            Value { tag, .. } => Err(ShellError::labeled_error_with_secondary(
                "Expected text from pipeline",
                "requires text input",
                &self.tag,
                "value originates from here",
                tag,
            )),
        }
    }
}
|
crates/nu_plugin_selector/src/selector.rs (new file, 112 lines)
@@ -0,0 +1,112 @@
use nipper::Document;
use nu_errors::ShellError;
use nu_protocol::{value::StringExt, Value};
use nu_source::{Tag, Tagged};

pub struct Selector {
    pub query: String,
    pub tag: Tag,
    pub as_html: bool,
}

impl Selector {
    pub fn new() -> Selector {
        Selector {
            query: String::new(),
            tag: Tag::unknown(),
            as_html: false,
        }
    }
}

impl Default for Selector {
    fn default() -> Self {
        Self::new()
    }
}

pub fn begin_selector_query(
    input: String,
    query: Tagged<&str>,
    as_html: bool,
) -> Result<Vec<Value>, ShellError> {
    execute_selector_query(input, query.item.to_string(), query.tag(), as_html)
}

fn execute_selector_query(
    input_string: String,
    query_string: String,
    tag: impl Into<Tag>,
    as_html: bool,
) -> Result<Vec<Value>, ShellError> {
    let _tag = tag.into();
    let mut ret = vec![];
    let doc = Document::from(&input_string);

    // How to internally iterate
    // doc.nip("tr.athing").iter().for_each(|athing| {
    //     let title = format!("{}", athing.select(".title a").text().to_string());
    //     let href = athing
    //         .select(".storylink")
    //         .attr("href")
    //         .unwrap()
    //         .to_string();
    //     let title_url = format!("{} - {}\n", title, href);
    //     ret.push(title_url.to_string_value_create_tag());
    // });

    if as_html {
        doc.nip(&query_string).iter().for_each(|athing| {
            ret.push(athing.html().to_string().to_string_value_create_tag());
        });
    } else {
        doc.nip(&query_string).iter().for_each(|athing| {
            ret.push(athing.text().to_string().to_string_value_create_tag());
        });
    }
    Ok(ret)
}

#[cfg(test)]
mod tests {
    use nipper::Document;
    use nu_errors::ShellError;

    #[test]
    fn create_document_from_string() -> Result<(), ShellError> {
        let html = r#"<div name="foo" value="bar"></div>"#;
        let document = Document::from(html);
        let shouldbe =
            r#"<html><head></head><body><div name="foo" value="bar"></div></body></html>"#;

        assert_eq!(shouldbe.to_string(), document.html().to_string());

        Ok(())
    }

    #[test]
    fn modify_html_document() -> Result<(), ShellError> {
        let html = r#"<div name="foo" value="bar"></div>"#;
        let document = Document::from(html);
        let mut input = document.select(r#"div[name="foo"]"#);
        input.set_attr("id", "input");
        input.remove_attr("name");

        let shouldbe = "bar".to_string();
        let actual = input.attr("value").unwrap().to_string();

        assert_eq!(shouldbe, actual);

        Ok(())
    }

    // #[test]
    // fn test_hacker_news() -> Result<(), ShellError> {
    //     let html = reqwest::blocking::get("https://news.ycombinator.com")?.text()?;
    //     let document = Document::from(&html);
    //     let result = query(html, ".hnname a".to_string(), Tag::unknown());
    //     let shouldbe = Ok(vec!["Hacker News".to_str_value_create_tag()]);
    //     assert_eq!(shouldbe, result);
    //     Ok(())
    // }
}
|
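The core of execute_selector_query above is the nipper query pattern: parse the input HTML into a Document, run a CSS selector with nip(), and collect either the text or the HTML of each match. A minimal standalone sketch of that pattern, assuming the same nipper API the diff uses; the HTML snippet and the "li.item" selector are made-up illustrations, not part of the plugin:

use nipper::Document;

fn main() {
    // Hypothetical input; the plugin would receive this string from the pipeline.
    let html = r#"<ul><li class="item">one</li><li class="item">two</li></ul>"#;
    let doc = Document::from(html);

    // nip() runs the CSS selector query, mirroring execute_selector_query.
    let texts: Vec<String> = doc
        .nip("li.item")
        .iter()
        .map(|node| node.text().to_string())
        .collect();

    assert_eq!(texts, vec!["one".to_string(), "two".to_string()]);
}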
@ -4,20 +4,20 @@ description = "A plugin to open files/URLs directly from Nushell"
edition = "2018"
license = "MIT"
name = "nu_plugin_start"
version = "0.22.0"
version = "0.23.0"

[lib]
doctest = false

[dependencies]
glob = "0.3.0"
nu-errors = {path = "../nu-errors", version = "0.22.0"}
nu-plugin = {path = "../nu-plugin", version = "0.22.0"}
nu-protocol = {path = "../nu-protocol", version = "0.22.0"}
nu-source = {path = "../nu-source", version = "0.22.0"}
nu-errors = {path = "../nu-errors", version = "0.23.0"}
nu-plugin = {path = "../nu-plugin", version = "0.23.0"}
nu-protocol = {path = "../nu-protocol", version = "0.23.0"}
nu-source = {path = "../nu-source", version = "0.23.0"}
open = "1.4.0"
url = "2.1.1"

[build-dependencies]
nu-errors = {version = "0.22.0", path = "../nu-errors"}
nu-source = {version = "0.22.0", path = "../nu-source"}
nu-errors = {version = "0.23.0", path = "../nu-errors"}
nu-source = {version = "0.23.0", path = "../nu-source"}

@ -4,16 +4,16 @@ description = "A system info plugin for Nushell"
edition = "2018"
license = "MIT"
name = "nu_plugin_sys"
version = "0.22.0"
version = "0.23.0"

[lib]
doctest = false

[dependencies]
nu-errors = {path = "../nu-errors", version = "0.22.0"}
nu-plugin = {path = "../nu-plugin", version = "0.22.0"}
nu-protocol = {path = "../nu-protocol", version = "0.22.0"}
nu-source = {path = "../nu-source", version = "0.22.0"}
nu-errors = {path = "../nu-errors", version = "0.23.0"}
nu-plugin = {path = "../nu-plugin", version = "0.23.0"}
nu-protocol = {path = "../nu-protocol", version = "0.23.0"}
nu-source = {path = "../nu-source", version = "0.23.0"}

battery = "0.7.6"
futures = {version = "0.3.5", features = ["compat", "io-compat"]}
Some files were not shown because too many files have changed in this diff.