nushell/src/commands/save.rs

243 lines
7.8 KiB
Rust
Raw Normal View History

use crate::commands::{UnevaluatedCallInfo, WholeStreamCommand};
use crate::data::Value;
use crate::errors::ShellError;
2019-08-02 21:15:07 +02:00
use crate::prelude::*;
2019-08-09 07:36:43 +02:00
use std::path::{Path, PathBuf};
2019-06-07 19:13:38 +02:00
2019-08-02 21:15:07 +02:00
pub struct Save;
2019-09-04 04:21:37 +02:00
/// Collects a stream of string-valued `Tagged<Value>` items into the raw
/// bytes to be written to disk.
///
/// On encountering any non-string item, breaks out of the enclosing labeled
/// loop `$scope` with a labeled `ShellError` (the labeled loop emulates the
/// unstable `label_break_value` feature — see the TODO at the call site).
///
/// Evaluates to `Ok(Vec<u8>)` on success.
macro_rules! process_string {
    ($scope:tt, $input:ident, $name_tag:ident) => {{
        let mut result_string = String::new();
        for res in $input {
            match res {
                Tagged {
                    item: Value::Primitive(Primitive::String(s)),
                    ..
                } => {
                    result_string.push_str(&s);
                }
                _ => {
                    // Non-string data cannot be saved as text; abort the
                    // whole collection with an error.
                    break $scope Err(ShellError::labeled_error(
                        "Save could not successfully save",
                        "unexpected data during save",
                        $name_tag,
                    ));
                }
            }
        }
        Ok(result_string.into_bytes())
    }};
}
/// Collects converter output (`Vec<Result<ReturnSuccess, ShellError>>`) whose
/// values are strings into the raw bytes to be written to disk.
///
/// On any item that is not a successful string value, breaks out of the
/// enclosing labeled loop `$scope` with a labeled `ShellError`.
///
/// Evaluates to `Ok(Vec<u8>)` on success.
macro_rules! process_string_return_success {
    ($scope:tt, $result_vec:ident, $name_tag:ident) => {{
        let mut result_string = String::new();
        for res in $result_vec {
            match res {
                Ok(ReturnSuccess::Value(Tagged {
                    item: Value::Primitive(Primitive::String(s)),
                    ..
                })) => {
                    result_string.push_str(&s);
                }
                _ => {
                    // Converter produced something other than a string value
                    // (or an error); abort with a labeled error.
                    break $scope Err(ShellError::labeled_error(
                        "Save could not successfully save",
                        "unexpected data during text save",
                        $name_tag,
                    ));
                }
            }
        }
        Ok(result_string.into_bytes())
    }};
}
/// Collects converter output (`Vec<Result<ReturnSuccess, ShellError>>`) whose
/// values are binary blobs into the raw bytes to be written to disk.
///
/// On any item that is not a successful binary value, breaks out of the
/// enclosing labeled loop `$scope` with a labeled `ShellError`.
///
/// Evaluates to `Ok(Vec<u8>)` on success.
macro_rules! process_binary_return_success {
    ($scope:tt, $result_vec:ident, $name_tag:ident) => {{
        let mut result_binary: Vec<u8> = Vec::new();
        for res in $result_vec {
            match res {
                Ok(ReturnSuccess::Value(Tagged {
                    item: Value::Primitive(Primitive::Binary(b)),
                    ..
                })) => {
                    // `extend` reserves once from the iterator's size hint
                    // instead of pushing byte-by-byte.
                    result_binary.extend(b);
                }
                _ => {
                    // Converter produced something other than a binary value
                    // (or an error); abort with a labeled error.
                    break $scope Err(ShellError::labeled_error(
                        "Save could not successfully save",
                        "unexpected data during binary save",
                        $name_tag,
                    ));
                }
            }
        }
        Ok(result_binary)
    }};
}
2019-08-02 21:15:07 +02:00
#[derive(Deserialize)]
2019-08-03 04:17:28 +02:00
pub struct SaveArgs {
2019-08-09 07:36:43 +02:00
path: Option<Tagged<PathBuf>>,
2019-08-02 21:15:07 +02:00
raw: bool,
}
2019-08-15 07:02:02 +02:00
impl WholeStreamCommand for Save {
2019-08-02 21:15:07 +02:00
fn name(&self) -> &str {
"save"
2019-06-07 19:13:38 +02:00
}
2019-08-02 21:15:07 +02:00
fn signature(&self) -> Signature {
Signature::build("save")
.optional("path", SyntaxShape::Path)
2019-08-02 21:15:07 +02:00
.switch("raw")
}
2019-06-22 05:43:37 +02:00
fn usage(&self) -> &str {
"Save the contents of the pipeline to a file."
}
2019-08-02 21:15:07 +02:00
fn run(
&self,
args: CommandArgs,
registry: &CommandRegistry,
) -> Result<OutputStream, ShellError> {
2019-08-31 02:59:21 +02:00
Ok(args.process_raw(registry, save)?.run())
2019-06-07 19:13:38 +02:00
}
2019-08-02 21:15:07 +02:00
}
2019-06-07 19:13:38 +02:00
2019-08-21 14:08:23 +02:00
fn save(
2019-08-02 21:15:07 +02:00
SaveArgs {
path,
raw: save_raw,
}: SaveArgs,
2019-08-21 14:08:23 +02:00
RunnableContext {
input,
name,
shell_manager,
2019-08-31 02:59:21 +02:00
host,
ctrl_c,
2019-08-31 02:59:21 +02:00
commands: registry,
2019-08-21 14:08:23 +02:00
..
}: RunnableContext,
2019-08-31 02:59:21 +02:00
raw_args: RawCommandArgs,
2019-08-02 21:15:07 +02:00
) -> Result<OutputStream, ShellError> {
2019-08-21 14:08:23 +02:00
let mut full_path = PathBuf::from(shell_manager.path());
let name_tag = name.clone();
2019-08-02 21:15:07 +02:00
2019-09-28 02:05:18 +02:00
let stream = async_stream! {
2019-08-31 02:59:21 +02:00
let input: Vec<Tagged<Value>> = input.values.collect().await;
if path.is_none() {
2019-09-29 07:13:56 +02:00
// If there is no filename, check the metadata for the anchor filename
2019-08-09 07:36:43 +02:00
if input.len() > 0 {
2019-09-29 07:13:56 +02:00
let anchor = input[0].anchor();
match anchor {
2019-08-09 07:36:43 +02:00
Some(path) => match path {
2019-09-29 07:18:59 +02:00
AnchorLocation::File(file) => {
full_path.push(Path::new(&file));
2019-08-09 07:36:43 +02:00
}
_ => {
2019-08-09 22:49:43 +02:00
yield Err(ShellError::labeled_error(
Overhaul the expansion system The main thrust of this (very large) commit is an overhaul of the expansion system. The parsing pipeline is: - Lightly parse the source file for atoms, basic delimiters and pipeline structure into a token tree - Expand the token tree into a HIR (high-level intermediate representation) based upon the baseline syntax rules for expressions and the syntactic shape of commands. Somewhat non-traditionally, nu doesn't have an AST at all. It goes directly from the token tree, which doesn't represent many important distinctions (like the difference between `hello` and `5KB`) directly into a high-level representation that doesn't have a direct correspondence to the source code. At a high level, nu commands work like macros, in the sense that the syntactic shape of the invocation of a command depends on the definition of a command. However, commands do not have the ability to perform unrestricted expansions of the token tree. Instead, they describe their arguments in terms of syntactic shapes, and the expander expands the token tree into HIR based upon that definition. For example, the `where` command says that it takes a block as its first required argument, and the description of the block syntactic shape expands the syntax `cpu > 10` into HIR that represents `{ $it.cpu > 10 }`. This commit overhauls that system so that the syntactic shapes are described in terms of a few new traits (`ExpandSyntax` and `ExpandExpression` are the primary ones) that are more composable than the previous system. The first big win of this new system is the addition of the `ColumnPath` shape, which looks like `cpu."max ghz"` or `package.version`. Previously, while a variable path could look like `$it.cpu."max ghz"`, the tail of a variable path could not be easily reused in other contexts. Now, that tail is its own syntactic shape, and it can be used as part of a command's signature. 
This cleans up commands like `inc`, `add` and `edit` as well as shorthand blocks, which can now look like `| where cpu."max ghz" > 10`
2019-09-18 00:26:27 +02:00
"Save requires a filepath (1)",
2019-08-09 22:49:43 +02:00
"needs path",
name_tag.clone(),
2019-08-09 22:49:43 +02:00
));
2019-08-09 07:36:43 +02:00
}
},
None => {
2019-08-09 22:49:43 +02:00
yield Err(ShellError::labeled_error(
Overhaul the expansion system The main thrust of this (very large) commit is an overhaul of the expansion system. The parsing pipeline is: - Lightly parse the source file for atoms, basic delimiters and pipeline structure into a token tree - Expand the token tree into a HIR (high-level intermediate representation) based upon the baseline syntax rules for expressions and the syntactic shape of commands. Somewhat non-traditionally, nu doesn't have an AST at all. It goes directly from the token tree, which doesn't represent many important distinctions (like the difference between `hello` and `5KB`) directly into a high-level representation that doesn't have a direct correspondence to the source code. At a high level, nu commands work like macros, in the sense that the syntactic shape of the invocation of a command depends on the definition of a command. However, commands do not have the ability to perform unrestricted expansions of the token tree. Instead, they describe their arguments in terms of syntactic shapes, and the expander expands the token tree into HIR based upon that definition. For example, the `where` command says that it takes a block as its first required argument, and the description of the block syntactic shape expands the syntax `cpu > 10` into HIR that represents `{ $it.cpu > 10 }`. This commit overhauls that system so that the syntactic shapes are described in terms of a few new traits (`ExpandSyntax` and `ExpandExpression` are the primary ones) that are more composable than the previous system. The first big win of this new system is the addition of the `ColumnPath` shape, which looks like `cpu."max ghz"` or `package.version`. Previously, while a variable path could look like `$it.cpu."max ghz"`, the tail of a variable path could not be easily reused in other contexts. Now, that tail is its own syntactic shape, and it can be used as part of a command's signature. 
This cleans up commands like `inc`, `add` and `edit` as well as shorthand blocks, which can now look like `| where cpu."max ghz" > 10`
2019-09-18 00:26:27 +02:00
"Save requires a filepath (2)",
2019-08-09 22:49:43 +02:00
"needs path",
name_tag.clone(),
2019-08-09 22:49:43 +02:00
));
2019-08-09 07:36:43 +02:00
}
2019-08-02 21:15:07 +02:00
}
2019-08-09 07:36:43 +02:00
} else {
2019-08-09 22:49:43 +02:00
yield Err(ShellError::labeled_error(
Overhaul the expansion system The main thrust of this (very large) commit is an overhaul of the expansion system. The parsing pipeline is: - Lightly parse the source file for atoms, basic delimiters and pipeline structure into a token tree - Expand the token tree into a HIR (high-level intermediate representation) based upon the baseline syntax rules for expressions and the syntactic shape of commands. Somewhat non-traditionally, nu doesn't have an AST at all. It goes directly from the token tree, which doesn't represent many important distinctions (like the difference between `hello` and `5KB`) directly into a high-level representation that doesn't have a direct correspondence to the source code. At a high level, nu commands work like macros, in the sense that the syntactic shape of the invocation of a command depends on the definition of a command. However, commands do not have the ability to perform unrestricted expansions of the token tree. Instead, they describe their arguments in terms of syntactic shapes, and the expander expands the token tree into HIR based upon that definition. For example, the `where` command says that it takes a block as its first required argument, and the description of the block syntactic shape expands the syntax `cpu > 10` into HIR that represents `{ $it.cpu > 10 }`. This commit overhauls that system so that the syntactic shapes are described in terms of a few new traits (`ExpandSyntax` and `ExpandExpression` are the primary ones) that are more composable than the previous system. The first big win of this new system is the addition of the `ColumnPath` shape, which looks like `cpu."max ghz"` or `package.version`. Previously, while a variable path could look like `$it.cpu."max ghz"`, the tail of a variable path could not be easily reused in other contexts. Now, that tail is its own syntactic shape, and it can be used as part of a command's signature. 
This cleans up commands like `inc`, `add` and `edit` as well as shorthand blocks, which can now look like `| where cpu."max ghz" > 10`
2019-09-18 00:26:27 +02:00
"Save requires a filepath (3)",
2019-08-09 22:49:43 +02:00
"needs path",
name_tag.clone(),
2019-08-09 22:49:43 +02:00
));
2019-07-21 09:08:05 +02:00
}
2019-08-31 02:59:21 +02:00
} else {
if let Some(file) = path {
full_path.push(file.item());
2019-08-21 14:08:23 +02:00
}
}
2019-08-09 07:36:43 +02:00
2019-09-28 02:05:18 +02:00
// TODO use label_break_value once it is stable:
// https://github.com/rust-lang/rust/issues/48594
let content : Result<Vec<u8>, ShellError> = 'scope: loop {
break if !save_raw {
if let Some(extension) = full_path.extension() {
let command_name = format!("to-{}", extension.to_str().unwrap());
if let Some(converter) = registry.get_command(&command_name) {
let new_args = RawCommandArgs {
host,
ctrl_c,
2019-09-28 02:05:18 +02:00
shell_manager,
call_info: UnevaluatedCallInfo {
args: crate::parser::hir::Call {
head: raw_args.call_info.args.head,
positional: None,
named: None
},
source: raw_args.call_info.source,
name_tag: raw_args.call_info.name_tag,
}
};
let mut result = converter.run(new_args.with_input(input), &registry, false);
let result_vec: Vec<Result<ReturnSuccess, ShellError>> = result.drain_vec().await;
if converter.is_binary() {
process_binary_return_success!('scope, result_vec, name_tag)
} else {
process_string_return_success!('scope, result_vec, name_tag)
2019-08-31 02:59:21 +02:00
}
2019-09-04 03:50:23 +02:00
} else {
2019-09-28 02:05:18 +02:00
process_string!('scope, input, name_tag)
2019-08-31 02:59:21 +02:00
}
} else {
2019-09-28 02:05:18 +02:00
process_string!('scope, input, name_tag)
2019-08-31 02:59:21 +02:00
}
2019-08-21 14:08:23 +02:00
} else {
2019-09-28 02:05:18 +02:00
Ok(string_from(&input).into_bytes())
};
2019-08-31 02:59:21 +02:00
};
2019-08-09 07:36:43 +02:00
2019-08-31 02:59:21 +02:00
match content {
Ok(save_data) => match std::fs::write(full_path, save_data) {
Ok(o) => o,
Overhaul the coloring system This commit replaces the previous naive coloring system with a coloring system that is more aligned with the parser. The main benefit of this change is that it allows us to use parsing rules to decide how to color tokens. For example, consider the following syntax: ``` $ ps | where cpu > 10 ``` Ideally, we could color `cpu` like a column name and not a string, because `cpu > 10` is a shorthand block syntax that expands to `{ $it.cpu > 10 }`. The way that we know that it's a shorthand block is that the `where` command declares that its first parameter is a `SyntaxShape::Block`, which allows the shorthand block form. In order to accomplish this, we need to color the tokens in a way that corresponds to their expanded semantics, which means that high-fidelity coloring requires expansion. This commit adds a `ColorSyntax` trait that corresponds to the `ExpandExpression` trait. The semantics are fairly similar, with a few differences. First `ExpandExpression` consumes N tokens and returns a single `hir::Expression`. `ColorSyntax` consumes N tokens and writes M `FlatShape` tokens to the output. Concretely, for syntax like `[1 2 3]` - `ExpandExpression` takes a single token node and produces a single `hir::Expression` - `ColorSyntax` takes the same token node and emits 7 `FlatShape`s (open delimiter, int, whitespace, int, whitespace, int, close delimiter) Second, `ColorSyntax` is more willing to plow through failures than `ExpandExpression`. In particular, consider syntax like ``` $ ps | where cpu > ``` In this case - `ExpandExpression` will see that the `where` command is expecting a block, see that it's not a literal block and try to parse it as a shorthand block. It will successfully find a member followed by an infix operator, but not a following expression. That means that the entire pipeline part fails to parse and is a syntax error. 
- `ColorSyntax` will also try to parse it as a shorthand block and ultimately fail, but it will fall back to "backoff coloring mode", which parsing any unidentified tokens in an unfallible, simple way. In this case, `cpu` will color as a string and `>` will color as an operator. Finally, it's very important that coloring a pipeline infallibly colors the entire string, doesn't fail, and doesn't get stuck in an infinite loop. In order to accomplish this, this PR separates `ColorSyntax`, which is infallible from `FallibleColorSyntax`, which might fail. This allows the type system to let us know if our coloring rules bottom out at at an infallible rule. It's not perfect: it's still possible for the coloring process to get stuck or consume tokens non-atomically. I intend to reduce the opportunity for those problems in a future commit. In the meantime, the current system catches a number of mistakes (like trying to use a fallible coloring rule in a loop without thinking about the possibility that it will never terminate).
2019-10-06 22:22:50 +02:00
Err(e) => yield Err(ShellError::labeled_error(e.to_string(), "for command", name)),
2019-08-31 02:59:21 +02:00
},
Overhaul the coloring system This commit replaces the previous naive coloring system with a coloring system that is more aligned with the parser. The main benefit of this change is that it allows us to use parsing rules to decide how to color tokens. For example, consider the following syntax: ``` $ ps | where cpu > 10 ``` Ideally, we could color `cpu` like a column name and not a string, because `cpu > 10` is a shorthand block syntax that expands to `{ $it.cpu > 10 }`. The way that we know that it's a shorthand block is that the `where` command declares that its first parameter is a `SyntaxShape::Block`, which allows the shorthand block form. In order to accomplish this, we need to color the tokens in a way that corresponds to their expanded semantics, which means that high-fidelity coloring requires expansion. This commit adds a `ColorSyntax` trait that corresponds to the `ExpandExpression` trait. The semantics are fairly similar, with a few differences. First `ExpandExpression` consumes N tokens and returns a single `hir::Expression`. `ColorSyntax` consumes N tokens and writes M `FlatShape` tokens to the output. Concretely, for syntax like `[1 2 3]` - `ExpandExpression` takes a single token node and produces a single `hir::Expression` - `ColorSyntax` takes the same token node and emits 7 `FlatShape`s (open delimiter, int, whitespace, int, whitespace, int, close delimiter) Second, `ColorSyntax` is more willing to plow through failures than `ExpandExpression`. In particular, consider syntax like ``` $ ps | where cpu > ``` In this case - `ExpandExpression` will see that the `where` command is expecting a block, see that it's not a literal block and try to parse it as a shorthand block. It will successfully find a member followed by an infix operator, but not a following expression. That means that the entire pipeline part fails to parse and is a syntax error. 
- `ColorSyntax` will also try to parse it as a shorthand block and ultimately fail, but it will fall back to "backoff coloring mode", which parsing any unidentified tokens in an unfallible, simple way. In this case, `cpu` will color as a string and `>` will color as an operator. Finally, it's very important that coloring a pipeline infallibly colors the entire string, doesn't fail, and doesn't get stuck in an infinite loop. In order to accomplish this, this PR separates `ColorSyntax`, which is infallible from `FallibleColorSyntax`, which might fail. This allows the type system to let us know if our coloring rules bottom out at at an infallible rule. It's not perfect: it's still possible for the coloring process to get stuck or consume tokens non-atomically. I intend to reduce the opportunity for those problems in a future commit. In the meantime, the current system catches a number of mistakes (like trying to use a fallible coloring rule in a loop without thinking about the possibility that it will never terminate).
2019-10-06 22:22:50 +02:00
Err(e) => yield Err(ShellError::labeled_error(e.to_string(), "for command", name)),
2019-08-31 02:59:21 +02:00
}
2019-08-21 14:08:23 +02:00
2019-08-31 02:59:21 +02:00
};
2019-08-09 07:36:43 +02:00
2019-08-31 02:59:21 +02:00
Ok(OutputStream::new(stream))
2019-06-07 19:13:38 +02:00
}
2019-08-21 14:08:23 +02:00
2019-09-04 03:50:23 +02:00
/// Joins the string representations of the input values with newlines.
///
/// Values whose `as_string()` conversion fails are skipped, but they still
/// count toward separator placement (matching the original behavior: only
/// the very first value omits a leading newline).
///
/// Takes a slice rather than `&Vec` — callers passing `&vec` still work via
/// deref coercion.
fn string_from(input: &[Tagged<Value>]) -> String {
    let mut save_data = String::new();
    for (index, value) in input.iter().enumerate() {
        if index > 0 {
            save_data.push_str("\n");
        }
        if let Ok(data) = &value.as_string() {
            save_data.push_str(data);
        }
    }
    save_data
}