2019-08-22 06:13:40 +02:00
|
|
|
use crate::commands::command::EvaluatedWholeStreamCommandArgs;
|
2019-08-21 19:03:59 +02:00
|
|
|
use crate::commands::cp::CopyArgs;
|
2020-01-19 03:25:07 +01:00
|
|
|
use crate::commands::ls::LsArgs;
|
2019-08-21 19:03:59 +02:00
|
|
|
use crate::commands::mkdir::MkdirArgs;
|
|
|
|
use crate::commands::mv::MoveArgs;
|
|
|
|
use crate::commands::rm::RemoveArgs;
|
2019-09-05 18:23:42 +02:00
|
|
|
use crate::data::dir_entry_dict;
|
2019-08-07 19:49:11 +02:00
|
|
|
use crate::prelude::*;
|
2019-08-09 07:36:43 +02:00
|
|
|
use crate::shell::completer::NuCompleter;
|
2019-08-07 19:49:11 +02:00
|
|
|
use crate::shell::shell::Shell;
|
2019-08-21 19:03:59 +02:00
|
|
|
use crate::utils::FileStructure;
|
Extract core stuff into own crates
This commit extracts five new crates:
- nu-source, which contains the core source-code handling logic in Nu,
including Text, Span, and also the pretty.rs-based debug logic
- nu-parser, which is the parser and expander logic
- nu-protocol, which is the bulk of the types and basic conveniences
used by plugins
- nu-errors, which contains ShellError, ParseError and error handling
conveniences
- nu-textview, which is the textview plugin extracted into a crate
One of the major consequences of this refactor is that it's no longer
possible to `impl X for Spanned<Y>` outside of the `nu-source` crate, so
a lot of types became more concrete (Value became a concrete type
instead of Spanned<Value>, for example).
This also turned a number of inherent methods in the main nu crate into
plain functions (impl Value {} became a bunch of functions in the
`value` namespace in `crate::data::value`).
2019-11-26 03:30:48 +01:00
|
|
|
use nu_errors::ShellError;
|
Restructure and streamline token expansion (#1123)
Restructure and streamline token expansion
The purpose of this commit is to streamline the token expansion code, by
removing aspects of the code that are no longer relevant, removing
pointless duplication, and eliminating the need to pass the same
arguments to `expand_syntax`.
The first big-picture change in this commit is that instead of a handful
of `expand_` functions, which take a TokensIterator and ExpandContext, a
smaller number of methods on the `TokensIterator` do the same job.
The second big-picture change in this commit is fully eliminating the
coloring traits, making coloring a responsibility of the base expansion
implementations. This also means that the coloring tracer is merged into
the expansion tracer, so you can follow a single expansion and see how
the expansion process produced colored tokens.
One side effect of this change is that the expander itself is marginally
more error-correcting. The error correction works by switching from
structured expansion to `BackoffColoringMode` when an unexpected token
is found, which guarantees that all spans of the source are colored, but
may not be the most optimal error recovery strategy.
That said, because `BackoffColoringMode` only extends as far as a
closing delimiter (`)`, `]`, `}`) or pipe (`|`), it does result in
fairly granular correction strategy.
The current code still produces an `Err` (plus a complete list of
colored shapes) from the parsing process if any errors are encountered,
but this could easily be addressed now that the underlying expansion is
error-correcting.
This commit also colors any spans that are syntax errors in red, and
causes the parser to include some additional information about what
tokens were expected at any given point where an error was encountered,
so that completions and hinting could be more robust in the future.
Co-authored-by: Jonathan Turner <jonathandturner@users.noreply.github.com>
Co-authored-by: Andrés N. Robalino <andres@androbtech.com>
2020-01-21 23:45:03 +01:00
|
|
|
use nu_parser::ExpandContext;
|
Extract core stuff into own crates
This commit extracts five new crates:
- nu-source, which contains the core source-code handling logic in Nu,
including Text, Span, and also the pretty.rs-based debug logic
- nu-parser, which is the parser and expander logic
- nu-protocol, which is the bulk of the types and basic conveniences
used by plugins
- nu-errors, which contains ShellError, ParseError and error handling
conveniences
- nu-textview, which is the textview plugin extracted into a crate
One of the major consequences of this refactor is that it's no longer
possible to `impl X for Spanned<Y>` outside of the `nu-source` crate, so
a lot of types became more concrete (Value became a concrete type
instead of Spanned<Value>, for example).
This also turned a number of inherent methods in the main nu crate into
plain functions (impl Value {} became a bunch of functions in the
`value` namespace in `crate::data::value`).
2019-11-26 03:30:48 +01:00
|
|
|
use nu_protocol::{Primitive, ReturnSuccess, UntaggedValue};
|
2019-08-09 07:36:43 +02:00
|
|
|
use rustyline::completion::FilenameCompleter;
|
2019-08-07 19:49:11 +02:00
|
|
|
use rustyline::hint::{Hinter, HistoryHinter};
|
2020-03-25 21:19:01 +01:00
|
|
|
use std::collections::HashMap;
|
2020-03-16 07:28:18 +01:00
|
|
|
use std::path::{Component, Path, PathBuf};
|
2019-10-19 22:52:39 +02:00
|
|
|
use trash as SendToTrash;
|
2019-08-22 07:15:14 +02:00
|
|
|
|
2020-03-06 08:13:47 +01:00
|
|
|
#[cfg(unix)]
|
|
|
|
use std::os::unix::fs::PermissionsExt;
|
|
|
|
|
2019-08-07 19:49:11 +02:00
|
|
|
/// A `Shell` implementation backed by the local filesystem: tracks a
/// current directory and supplies filename/command completion.
pub struct FilesystemShell {
    // Current working directory of this shell instance, stored as a string.
    pub(crate) path: String,
    // Previously visited directory; read by `cd -` (see `cd` below, which
    // substitutes `self.last_path` when the target is `-`).
    pub(crate) last_path: String,
    // Completion engine combining a rustyline file completer with the
    // registered command names and the home directory.
    completer: NuCompleter,
    // rustyline history-based hinter.
    hinter: HistoryHinter,
}
|
|
|
|
|
2019-08-17 05:53:39 +02:00
|
|
|
impl std::fmt::Debug for FilesystemShell {
|
|
|
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
|
|
|
write!(f, "FilesystemShell @ {}", self.path)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-08-07 19:49:11 +02:00
|
|
|
impl Clone for FilesystemShell {
|
|
|
|
fn clone(&self) -> Self {
|
|
|
|
FilesystemShell {
|
|
|
|
path: self.path.clone(),
|
2019-09-08 07:10:08 +02:00
|
|
|
last_path: self.path.clone(),
|
2019-08-07 19:49:11 +02:00
|
|
|
completer: NuCompleter {
|
|
|
|
file_completer: FilenameCompleter::new(),
|
2019-08-10 07:02:15 +02:00
|
|
|
commands: self.completer.commands.clone(),
|
Restructure and streamline token expansion (#1123)
Restructure and streamline token expansion
The purpose of this commit is to streamline the token expansion code, by
removing aspects of the code that are no longer relevant, removing
pointless duplication, and eliminating the need to pass the same
arguments to `expand_syntax`.
The first big-picture change in this commit is that instead of a handful
of `expand_` functions, which take a TokensIterator and ExpandContext, a
smaller number of methods on the `TokensIterator` do the same job.
The second big-picture change in this commit is fully eliminating the
coloring traits, making coloring a responsibility of the base expansion
implementations. This also means that the coloring tracer is merged into
the expansion tracer, so you can follow a single expansion and see how
the expansion process produced colored tokens.
One side effect of this change is that the expander itself is marginally
more error-correcting. The error correction works by switching from
structured expansion to `BackoffColoringMode` when an unexpected token
is found, which guarantees that all spans of the source are colored, but
may not be the most optimal error recovery strategy.
That said, because `BackoffColoringMode` only extends as far as a
closing delimiter (`)`, `]`, `}`) or pipe (`|`), it does result in
fairly granular correction strategy.
The current code still produces an `Err` (plus a complete list of
colored shapes) from the parsing process if any errors are encountered,
but this could easily be addressed now that the underlying expansion is
error-correcting.
This commit also colors any spans that are syntax errors in red, and
causes the parser to include some additional information about what
tokens were expected at any given point where an error was encountered,
so that completions and hinting could be more robust in the future.
Co-authored-by: Jonathan Turner <jonathandturner@users.noreply.github.com>
Co-authored-by: Andrés N. Robalino <andres@androbtech.com>
2020-01-21 23:45:03 +01:00
|
|
|
homedir: self.homedir(),
|
2019-08-07 19:49:11 +02:00
|
|
|
},
|
|
|
|
hinter: HistoryHinter {},
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl FilesystemShell {
|
2019-08-10 07:02:15 +02:00
|
|
|
pub fn basic(commands: CommandRegistry) -> Result<FilesystemShell, std::io::Error> {
|
2019-08-07 19:49:11 +02:00
|
|
|
let path = std::env::current_dir()?;
|
|
|
|
|
|
|
|
Ok(FilesystemShell {
|
|
|
|
path: path.to_string_lossy().to_string(),
|
2019-09-08 07:10:08 +02:00
|
|
|
last_path: path.to_string_lossy().to_string(),
|
2019-08-07 19:49:11 +02:00
|
|
|
completer: NuCompleter {
|
|
|
|
file_completer: FilenameCompleter::new(),
|
2019-08-10 07:02:15 +02:00
|
|
|
commands,
|
Restructure and streamline token expansion (#1123)
Restructure and streamline token expansion
The purpose of this commit is to streamline the token expansion code, by
removing aspects of the code that are no longer relevant, removing
pointless duplication, and eliminating the need to pass the same
arguments to `expand_syntax`.
The first big-picture change in this commit is that instead of a handful
of `expand_` functions, which take a TokensIterator and ExpandContext, a
smaller number of methods on the `TokensIterator` do the same job.
The second big-picture change in this commit is fully eliminating the
coloring traits, making coloring a responsibility of the base expansion
implementations. This also means that the coloring tracer is merged into
the expansion tracer, so you can follow a single expansion and see how
the expansion process produced colored tokens.
One side effect of this change is that the expander itself is marginally
more error-correcting. The error correction works by switching from
structured expansion to `BackoffColoringMode` when an unexpected token
is found, which guarantees that all spans of the source are colored, but
may not be the most optimal error recovery strategy.
That said, because `BackoffColoringMode` only extends as far as a
closing delimiter (`)`, `]`, `}`) or pipe (`|`), it does result in
fairly granular correction strategy.
The current code still produces an `Err` (plus a complete list of
colored shapes) from the parsing process if any errors are encountered,
but this could easily be addressed now that the underlying expansion is
error-correcting.
This commit also colors any spans that are syntax errors in red, and
causes the parser to include some additional information about what
tokens were expected at any given point where an error was encountered,
so that completions and hinting could be more robust in the future.
Co-authored-by: Jonathan Turner <jonathandturner@users.noreply.github.com>
Co-authored-by: Andrés N. Robalino <andres@androbtech.com>
2020-01-21 23:45:03 +01:00
|
|
|
homedir: dirs::home_dir(),
|
2019-08-07 19:49:11 +02:00
|
|
|
},
|
|
|
|
hinter: HistoryHinter {},
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2020-01-04 07:44:17 +01:00
|
|
|
pub fn with_location(path: String, commands: CommandRegistry) -> FilesystemShell {
|
2019-09-08 07:10:08 +02:00
|
|
|
let last_path = path.clone();
|
2020-01-04 07:44:17 +01:00
|
|
|
FilesystemShell {
|
2019-08-07 19:49:11 +02:00
|
|
|
path,
|
2019-09-08 07:10:08 +02:00
|
|
|
last_path,
|
2019-08-07 19:49:11 +02:00
|
|
|
completer: NuCompleter {
|
|
|
|
file_completer: FilenameCompleter::new(),
|
2019-08-10 07:02:15 +02:00
|
|
|
commands,
|
Restructure and streamline token expansion (#1123)
Restructure and streamline token expansion
The purpose of this commit is to streamline the token expansion code, by
removing aspects of the code that are no longer relevant, removing
pointless duplication, and eliminating the need to pass the same
arguments to `expand_syntax`.
The first big-picture change in this commit is that instead of a handful
of `expand_` functions, which take a TokensIterator and ExpandContext, a
smaller number of methods on the `TokensIterator` do the same job.
The second big-picture change in this commit is fully eliminating the
coloring traits, making coloring a responsibility of the base expansion
implementations. This also means that the coloring tracer is merged into
the expansion tracer, so you can follow a single expansion and see how
the expansion process produced colored tokens.
One side effect of this change is that the expander itself is marginally
more error-correcting. The error correction works by switching from
structured expansion to `BackoffColoringMode` when an unexpected token
is found, which guarantees that all spans of the source are colored, but
may not be the most optimal error recovery strategy.
That said, because `BackoffColoringMode` only extends as far as a
closing delimiter (`)`, `]`, `}`) or pipe (`|`), it does result in
fairly granular correction strategy.
The current code still produces an `Err` (plus a complete list of
colored shapes) from the parsing process if any errors are encountered,
but this could easily be addressed now that the underlying expansion is
error-correcting.
This commit also colors any spans that are syntax errors in red, and
causes the parser to include some additional information about what
tokens were expected at any given point where an error was encountered,
so that completions and hinting could be more robust in the future.
Co-authored-by: Jonathan Turner <jonathandturner@users.noreply.github.com>
Co-authored-by: Andrés N. Robalino <andres@androbtech.com>
2020-01-21 23:45:03 +01:00
|
|
|
homedir: dirs::home_dir(),
|
2019-08-07 19:49:11 +02:00
|
|
|
},
|
|
|
|
hinter: HistoryHinter {},
|
2020-01-04 07:44:17 +01:00
|
|
|
}
|
2019-08-07 19:49:11 +02:00
|
|
|
}
|
2020-03-16 07:28:18 +01:00
|
|
|
|
|
|
|
fn canonicalize(&self, path: impl AsRef<Path>) -> std::io::Result<PathBuf> {
|
|
|
|
let path = if path.as_ref().is_relative() {
|
|
|
|
let components = path.as_ref().components();
|
|
|
|
let mut result = PathBuf::from(self.path());
|
|
|
|
for component in components {
|
|
|
|
match component {
|
|
|
|
Component::CurDir => { /* ignore current dir */ }
|
|
|
|
Component::ParentDir => {
|
|
|
|
result.pop();
|
|
|
|
}
|
|
|
|
Component::Normal(normal) => result.push(normal),
|
|
|
|
_ => {}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
result
|
|
|
|
} else {
|
|
|
|
path.as_ref().into()
|
|
|
|
};
|
|
|
|
|
|
|
|
dunce::canonicalize(path)
|
|
|
|
}
|
2019-08-07 19:49:11 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
impl Shell for FilesystemShell {
|
2019-10-13 06:12:43 +02:00
|
|
|
fn name(&self) -> String {
|
2019-08-07 19:49:11 +02:00
|
|
|
"filesystem".to_string()
|
|
|
|
}
|
|
|
|
|
Add support for ~ expansion
This ended up being a bit of a yak shave. The basic idea in this commit is to
expand `~` in paths, but only in paths.
The way this is accomplished is by doing the expansion inside of the code that
parses literal syntax for `SyntaxType::Path`.
As a quick refresher: every command is entitled to expand its arguments in a
custom way. While this could in theory be used for general-purpose macros,
today the expansion facility is limited to syntactic hints.
For example, the syntax `where cpu > 0` expands under the hood to
`where { $it.cpu > 0 }`. This happens because the first argument to `where`
is defined as a `SyntaxType::Block`, and the parser coerces binary expressions
whose left-hand-side looks like a member into a block when the command is
expecting one.
This is mildly more magical than what most programming languages would do,
but we believe that it makes sense to allow commands to fine-tune the syntax
because of the domain nushell is in (command-line shells).
The syntactic expansions supported by this facility are relatively limited.
For example, we don't allow `$it` to become a bare word, simply because the
command asks for a string in the relevant position. That would quickly
become more confusing than it's worth.
This PR adds a new `SyntaxType` rule: `SyntaxType::Path`. When a command
declares a parameter as a `SyntaxType::Path`, string literals and bare
words passed as an argument to that parameter are processed using the
path expansion rules. Right now, that only means that `~` is expanded into
the home directory, but additional rules are possible in the future.
By restricting this expansion to a syntactic expansion when passed as an
argument to a command expecting a path, we avoid making `~` a generally
reserved character. This will also allow us to give good tab completion
for paths with `~` characters in them when a command is expecting a path.
In order to accomplish the above, this commit changes the parsing functions
to take a `Context` instead of just a `CommandRegistry`. From the perspective
of macro expansion, you can think of the `CommandRegistry` as a dictionary
of in-scope macros, and the `Context` as the compile-time state used in
expansion. This could gain additional functionality over time as we find
more uses for the expansion system.
2019-08-26 21:21:03 +02:00
|
|
|
fn homedir(&self) -> Option<PathBuf> {
|
|
|
|
dirs::home_dir()
|
|
|
|
}
|
|
|
|
|
2019-09-14 18:30:24 +02:00
|
|
|
fn ls(
|
|
|
|
&self,
|
2020-01-25 17:20:33 +01:00
|
|
|
LsArgs {
|
|
|
|
path,
|
2020-03-13 18:27:04 +01:00
|
|
|
all,
|
2020-01-25 17:20:33 +01:00
|
|
|
full,
|
|
|
|
short_names,
|
2020-01-28 07:48:41 +01:00
|
|
|
with_symlink_targets,
|
2020-01-25 17:20:33 +01:00
|
|
|
}: LsArgs,
|
2020-01-19 03:25:07 +01:00
|
|
|
context: &RunnablePerItemContext,
|
2019-09-14 18:30:24 +02:00
|
|
|
) -> Result<OutputStream, ShellError> {
|
2019-10-13 06:12:43 +02:00
|
|
|
let ctrl_c = context.ctrl_c.clone();
|
|
|
|
let name_tag = context.name.clone();
|
2019-09-15 03:51:19 +02:00
|
|
|
|
2020-01-28 17:58:31 +01:00
|
|
|
let (path, p_tag) = match path {
|
|
|
|
Some(p) => {
|
|
|
|
let p_tag = p.tag;
|
|
|
|
let mut p = p.item;
|
|
|
|
if p.is_dir() {
|
2020-03-13 18:27:04 +01:00
|
|
|
if is_empty_dir(&p) {
|
2020-01-28 17:58:31 +01:00
|
|
|
return Ok(OutputStream::empty());
|
2019-10-13 06:12:43 +02:00
|
|
|
}
|
2020-01-28 17:58:31 +01:00
|
|
|
p.push("*");
|
|
|
|
}
|
|
|
|
(p, p_tag)
|
2019-09-15 03:51:19 +02:00
|
|
|
}
|
2020-01-28 17:58:31 +01:00
|
|
|
None => {
|
2020-03-13 18:27:04 +01:00
|
|
|
if is_empty_dir(&self.path()) {
|
2020-01-28 17:58:31 +01:00
|
|
|
return Ok(OutputStream::empty());
|
2019-08-23 06:51:43 +02:00
|
|
|
} else {
|
2020-01-28 17:58:31 +01:00
|
|
|
(PathBuf::from("./*"), context.name.clone())
|
2019-08-23 06:51:43 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
};
|
2019-08-07 19:49:11 +02:00
|
|
|
|
2020-03-13 18:27:04 +01:00
|
|
|
let mut paths = glob::glob(&path.to_string_lossy())
|
|
|
|
.map_err(|e| ShellError::labeled_error("Glob error", e.to_string(), &p_tag))?
|
|
|
|
.peekable();
|
2020-01-28 17:58:31 +01:00
|
|
|
|
|
|
|
if paths.peek().is_none() {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
"Invalid File or Pattern",
|
2020-02-01 09:34:34 +01:00
|
|
|
"invalid file or pattern",
|
2020-01-28 17:58:31 +01:00
|
|
|
&p_tag,
|
|
|
|
));
|
|
|
|
}
|
|
|
|
|
2020-03-13 18:27:04 +01:00
|
|
|
// Generated stream: impl Stream<Item = Result<ReturnSuccess, ShellError>
|
|
|
|
let stream = async_stream::try_stream! {
|
2020-01-28 17:58:31 +01:00
|
|
|
for path in paths {
|
2020-03-13 18:27:04 +01:00
|
|
|
let path = path.map_err(|e| ShellError::from(e.into_error()))?;
|
|
|
|
|
|
|
|
if !all && is_hidden_dir(&path) {
|
|
|
|
continue;
|
2019-10-13 06:12:43 +02:00
|
|
|
}
|
2020-03-13 18:27:04 +01:00
|
|
|
|
|
|
|
let metadata = match std::fs::symlink_metadata(&path) {
|
|
|
|
Ok(metadata) => Ok(Some(metadata)),
|
|
|
|
Err(e) => if let PermissionDenied = e.kind() {
|
|
|
|
Ok(None)
|
|
|
|
} else {
|
|
|
|
Err(e)
|
|
|
|
},
|
|
|
|
}?;
|
|
|
|
|
|
|
|
let entry = dir_entry_dict(
|
|
|
|
&path,
|
|
|
|
metadata.as_ref(),
|
|
|
|
name_tag.clone(),
|
|
|
|
full,
|
|
|
|
short_names,
|
|
|
|
with_symlink_targets
|
|
|
|
)
|
|
|
|
.map(|entry| ReturnSuccess::Value(entry.into()))?;
|
|
|
|
|
|
|
|
yield entry;
|
2019-08-07 19:49:11 +02:00
|
|
|
}
|
2019-10-13 06:12:43 +02:00
|
|
|
};
|
2020-03-13 18:27:04 +01:00
|
|
|
|
2020-03-29 18:30:36 +02:00
|
|
|
Ok(stream.interruptible(ctrl_c).to_output_stream())
|
2019-08-07 19:49:11 +02:00
|
|
|
}
|
|
|
|
|
2019-08-15 07:02:02 +02:00
|
|
|
/// Changes the shell's working directory for the `cd` command.
///
/// With no argument, targets the home directory; `-` targets the previous
/// directory (`self.last_path`); anything else is canonicalized relative to
/// the current directory. Emits a single `change_cwd` action rather than
/// mutating `self` directly.
///
/// # Errors
/// Labeled errors when no home directory exists, the target does not
/// resolve, is not a directory, or (Unix only) is not accessible.
fn cd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result<OutputStream, ShellError> {
    let path = match args.nth(0) {
        // `cd` with no argument: go home, if a home directory is known.
        None => match dirs::home_dir() {
            Some(o) => o,
            _ => {
                return Err(ShellError::labeled_error(
                    "Cannot change to home directory",
                    "cannot go to home",
                    &args.call_info.name_tag,
                ))
            }
        },
        Some(v) => {
            let target = v.as_path()?;
            // `cd -` jumps back to the previously visited directory.
            if target == Path::new("-") {
                PathBuf::from(&self.last_path)
            } else {
                // Resolve relative components against the current dir;
                // any failure is reported as "directory not found".
                let path = self.canonicalize(target).map_err(|_| {
                    ShellError::labeled_error(
                        "Cannot change to directory",
                        "directory not found",
                        &v.tag,
                    )
                })?;

                if !path.is_dir() {
                    return Err(ShellError::labeled_error(
                        "Cannot change to directory",
                        "is not a directory",
                        &v.tag,
                    ));
                }

                // Unix-only permission probe before committing to the cd.
                #[cfg(unix)]
                {
                    // NOTE(review): the variable is named `has_exec` but the
                    // mode test uses `umask::USER_READ`, not USER_EXECUTE —
                    // directory traversal normally needs the execute bit.
                    // Verify which bit was intended.
                    let has_exec = path
                        .metadata()
                        .map(|m| {
                            umask::Mode::from(m.permissions().mode()).has(umask::USER_READ)
                        })
                        .map_err(|e| {
                            // stat itself failed (e.g. racing delete).
                            ShellError::labeled_error(
                                "Cannot change to directory",
                                format!("cannot stat ({})", e),
                                &v.tag,
                            )
                        })?;

                    if !has_exec {
                        return Err(ShellError::labeled_error(
                            "Cannot change to directory",
                            "permission denied",
                            &v.tag,
                        ));
                    }
                }

                path
            }
        }
    };

    // The cd is performed by the caller via this change_cwd action.
    let mut stream = VecDeque::new();
    stream.push_back(ReturnSuccess::change_cwd(
        path.to_string_lossy().to_string(),
    ));
    Ok(stream.into())
}
|
|
|
|
|
2019-08-21 19:03:59 +02:00
|
|
|
/// Copies files/directories for the `cp` command.
///
/// `src` and `dst` are joined onto the shell's current `path`, then `src`
/// is glob-expanded. Three shapes are handled:
/// 1. exactly one match — file copy, or recursive directory copy when
///    `--recursive` is set;
/// 2. multiple matches into an existing destination directory — files only
///    (directories in a pattern are rejected);
/// 3. multiple matches with a missing destination — error.
///
/// # Errors
/// Labeled errors for invalid patterns, directory sources without
/// `--recursive`, and any underlying filesystem failure.
fn cp(
    &self,
    CopyArgs {
        src,
        dst,
        recursive,
    }: CopyArgs,
    name: Tag,
    path: &str,
) -> Result<OutputStream, ShellError> {
    let name_tag = name;

    // Anchor both operands at the shell's current directory.
    let mut source = PathBuf::from(path);
    let mut destination = PathBuf::from(path);

    source.push(&src.item);
    destination.push(&dst.item);

    let sources: Vec<_> = match glob::glob(&source.to_string_lossy()) {
        Ok(files) => files.collect(),
        Err(_) => {
            return Err(ShellError::labeled_error(
                "Invalid pattern",
                "invalid pattern",
                src.tag,
            ))
        }
    };

    if sources.len() == 1 {
        // --- Case 1: single source (file or directory) -----------------
        if let Ok(entry) = &sources[0] {
            if entry.is_dir() && !recursive.item {
                return Err(ShellError::labeled_error(
                    "is a directory (not copied). Try using \"--recursive\".",
                    "is a directory (not copied). Try using \"--recursive\".",
                    src.tag,
                ));
            }

            // Walk the source tree, recording each path with its depth.
            let mut sources: FileStructure = FileStructure::new();

            sources.walk_decorate(&entry)?;

            if entry.is_file() {
                // Single file: copy into the destination dir (keeping the
                // file name) or onto the destination path itself.
                let strategy = |(source_file, _depth_level)| {
                    if destination.is_dir() {
                        let mut new_dst = dunce::canonicalize(destination.clone())?;
                        if let Some(name) = entry.file_name() {
                            new_dst.push(name);
                        }
                        Ok((source_file, new_dst))
                    } else {
                        Ok((source_file, destination.clone()))
                    }
                };

                let sources = sources.paths_applying_with(strategy)?;

                for (ref src, ref dst) in sources {
                    if src.is_file() {
                        match std::fs::copy(src, dst) {
                            Err(e) => {
                                return Err(ShellError::labeled_error(
                                    e.to_string(),
                                    e.to_string(),
                                    name_tag,
                                ));
                            }
                            Ok(o) => o,
                        };
                    }
                }
            }

            if entry.is_dir() {
                if !destination.exists() {
                    // Destination doesn't exist yet: create it and mirror
                    // the source tree directly underneath it.
                    match std::fs::create_dir_all(&destination) {
                        Err(e) => {
                            return Err(ShellError::labeled_error(
                                e.to_string(),
                                e.to_string(),
                                dst.tag,
                            ));
                        }
                        Ok(o) => o,
                    };

                    // Map each walked path to its mirror under the new
                    // destination by keeping its trailing `depth` segments.
                    let strategy = |(source_file, depth_level)| {
                        let mut new_dst = destination.clone();
                        let path = dunce::canonicalize(&source_file)?;

                        let mut comps: Vec<_> = path
                            .components()
                            .map(|fragment| fragment.as_os_str())
                            .rev()
                            .take(1 + depth_level)
                            .collect();

                        comps.reverse();

                        for fragment in comps.iter() {
                            new_dst.push(fragment);
                        }

                        Ok((PathBuf::from(&source_file), new_dst))
                    };

                    let sources = sources.paths_applying_with(strategy)?;

                    let dst_tag = dst.tag;
                    for (ref src, ref dst) in sources {
                        // Recreate directories before copying files into them.
                        if src.is_dir() && !dst.exists() {
                            match std::fs::create_dir_all(dst) {
                                Err(e) => {
                                    return Err(ShellError::labeled_error(
                                        e.to_string(),
                                        e.to_string(),
                                        dst_tag,
                                    ));
                                }
                                Ok(o) => o,
                            };
                        }

                        if src.is_file() {
                            match std::fs::copy(src, dst) {
                                Err(e) => {
                                    return Err(ShellError::labeled_error(
                                        e.to_string(),
                                        e.to_string(),
                                        name_tag,
                                    ));
                                }
                                Ok(o) => o,
                            };
                        }
                    }
                } else {
                    // Destination exists: copy the source dir *into* it
                    // (as `<destination>/<source name>`).
                    match entry.file_name() {
                        Some(name) => destination.push(name),
                        None => {
                            return Err(ShellError::labeled_error(
                                "Copy aborted. Not a valid path",
                                "not a valid path",
                                dst.tag,
                            ))
                        }
                    }

                    match std::fs::create_dir_all(&destination) {
                        Err(e) => {
                            return Err(ShellError::labeled_error(
                                e.to_string(),
                                e.to_string(),
                                dst.tag,
                            ));
                        }
                        Ok(o) => o,
                    };

                    // Same depth-based mirroring as above, but rooted at the
                    // canonicalized, name-appended destination.
                    let strategy = |(source_file, depth_level)| {
                        let mut new_dst = dunce::canonicalize(&destination)?;
                        let path = dunce::canonicalize(&source_file)?;

                        let mut comps: Vec<_> = path
                            .components()
                            .map(|fragment| fragment.as_os_str())
                            .rev()
                            .take(1 + depth_level)
                            .collect();

                        comps.reverse();

                        for fragment in comps.iter() {
                            new_dst.push(fragment);
                        }

                        Ok((PathBuf::from(&source_file), new_dst))
                    };

                    let sources = sources.paths_applying_with(strategy)?;

                    let dst_tag = dst.tag;
                    for (ref src, ref dst) in sources {
                        if src.is_dir() && !dst.exists() {
                            match std::fs::create_dir_all(dst) {
                                Err(e) => {
                                    return Err(ShellError::labeled_error(
                                        e.to_string(),
                                        e.to_string(),
                                        dst_tag,
                                    ));
                                }
                                Ok(o) => o,
                            };
                        }

                        if src.is_file() {
                            match std::fs::copy(src, dst) {
                                Err(e) => {
                                    return Err(ShellError::labeled_error(
                                        e.to_string(),
                                        e.to_string(),
                                        name_tag,
                                    ));
                                }
                                Ok(o) => o,
                            };
                        }
                    }
                }
            }
        }
    } else if destination.exists() {
        // --- Case 2: pattern matched several sources -------------------
        // Only plain files may be copied via a pattern; directories are
        // rejected with a hint to copy them directly.
        if !sources.iter().all(|x| match x {
            Ok(f) => f.is_file(),
            Err(_) => false,
        }) {
            return Err(ShellError::labeled_error(
                "Copy aborted (directories found). Recursive copying in patterns not supported yet (try copying the directory directly)",
                "recursive copying in patterns not supported",
                src.tag,
            ));
        }

        for entry in sources {
            if let Ok(entry) = entry {
                let mut to = PathBuf::from(&destination);

                match entry.file_name() {
                    Some(name) => to.push(name),
                    None => {
                        return Err(ShellError::labeled_error(
                            "Copy aborted. Not a valid path",
                            "not a valid path",
                            dst.tag,
                        ))
                    }
                }

                if entry.is_file() {
                    match std::fs::copy(&entry, &to) {
                        Err(e) => {
                            return Err(ShellError::labeled_error(
                                e.to_string(),
                                e.to_string(),
                                src.tag,
                            ));
                        }
                        Ok(o) => o,
                    };
                }
            }
        }
    } else {
        // --- Case 3: several sources but destination does not exist ----
        let destination_file_name = {
            match destination.file_name() {
                Some(name) => PathBuf::from(name),
                None => {
                    return Err(ShellError::labeled_error(
                        "Copy aborted. Not a valid destination",
                        "not a valid destination",
                        dst.tag,
                    ))
                }
            }
        };

        return Err(ShellError::labeled_error(
            format!("Copy aborted. (Does {:?} exist?)", destination_file_name),
            format!("copy aborted (does {:?} exist?)", destination_file_name),
            dst.tag,
        ));
    }

    Ok(OutputStream::empty())
}
|
|
|
|
|
|
|
|
fn mkdir(
|
|
|
|
&self,
|
2019-08-22 06:13:40 +02:00
|
|
|
MkdirArgs { rest: directories }: MkdirArgs,
|
2019-09-14 18:30:24 +02:00
|
|
|
name: Tag,
|
2019-08-22 06:13:40 +02:00
|
|
|
path: &str,
|
2019-08-24 21:36:19 +02:00
|
|
|
) -> Result<OutputStream, ShellError> {
|
2019-08-22 06:13:40 +02:00
|
|
|
let full_path = PathBuf::from(path);
|
2019-08-21 19:03:59 +02:00
|
|
|
|
2019-12-06 16:28:26 +01:00
|
|
|
if directories.is_empty() {
|
2019-08-21 19:03:59 +02:00
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
"mkdir requires directory paths",
|
|
|
|
"needs parameter",
|
|
|
|
name,
|
|
|
|
));
|
|
|
|
}
|
|
|
|
|
|
|
|
for dir in directories.iter() {
|
|
|
|
let create_at = {
|
|
|
|
let mut loc = full_path.clone();
|
|
|
|
loc.push(&dir.item);
|
|
|
|
loc
|
|
|
|
};
|
|
|
|
|
2019-12-07 10:34:32 +01:00
|
|
|
let dir_res = std::fs::create_dir_all(create_at);
|
|
|
|
if let Err(reason) = dir_res {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
reason.to_string(),
|
|
|
|
reason.to_string(),
|
|
|
|
dir.tag(),
|
|
|
|
));
|
2019-08-21 19:03:59 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-08-24 21:36:19 +02:00
|
|
|
Ok(OutputStream::empty())
|
2019-08-21 19:03:59 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
fn mv(
|
|
|
|
&self,
|
|
|
|
MoveArgs { src, dst }: MoveArgs,
|
2019-09-14 18:30:24 +02:00
|
|
|
name: Tag,
|
2019-08-22 06:13:40 +02:00
|
|
|
path: &str,
|
2019-08-24 21:36:19 +02:00
|
|
|
) -> Result<OutputStream, ShellError> {
|
2019-09-14 18:30:24 +02:00
|
|
|
let name_tag = name;
|
2019-08-21 19:09:23 +02:00
|
|
|
|
2019-08-22 06:13:40 +02:00
|
|
|
let mut source = PathBuf::from(path);
|
|
|
|
let mut destination = PathBuf::from(path);
|
2019-08-21 19:09:23 +02:00
|
|
|
|
|
|
|
source.push(&src.item);
|
|
|
|
destination.push(&dst.item);
|
2019-08-21 19:03:59 +02:00
|
|
|
|
|
|
|
let sources: Vec<_> = match glob::glob(&source.to_string_lossy()) {
|
|
|
|
Ok(files) => files.collect(),
|
|
|
|
Err(_) => {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
"Invalid pattern.",
|
2020-02-01 09:34:34 +01:00
|
|
|
"invalid pattern",
|
2019-08-21 19:03:59 +02:00
|
|
|
src.tag,
|
|
|
|
))
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2020-02-07 18:40:48 +01:00
|
|
|
if sources.is_empty() {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
"Invalid File or Pattern.",
|
|
|
|
"Invalid File or Pattern",
|
|
|
|
src.tag,
|
|
|
|
));
|
|
|
|
}
|
2019-08-21 19:03:59 +02:00
|
|
|
let destination_file_name = {
|
|
|
|
match destination.file_name() {
|
|
|
|
Some(name) => PathBuf::from(name),
|
|
|
|
None => {
|
2020-03-24 02:00:48 +01:00
|
|
|
let name_maybe =
|
|
|
|
destination.components().next_back().and_then(
|
|
|
|
|component| match component {
|
|
|
|
Component::RootDir => Some(PathBuf::from("/")),
|
|
|
|
Component::ParentDir => destination
|
|
|
|
.parent()
|
|
|
|
.and_then(|parent| parent.file_name())
|
|
|
|
.map(PathBuf::from),
|
|
|
|
_ => None,
|
|
|
|
},
|
|
|
|
);
|
|
|
|
|
|
|
|
if let Some(name) = name_maybe {
|
|
|
|
name
|
|
|
|
} else {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
"Rename aborted. Not a valid destination",
|
|
|
|
"not a valid destination",
|
|
|
|
dst.tag,
|
|
|
|
));
|
|
|
|
}
|
2019-08-21 19:03:59 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
2020-02-07 18:40:48 +01:00
|
|
|
if sources.is_empty() {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
"Move aborted. Not a valid destination",
|
|
|
|
"not a valid destination",
|
|
|
|
src.tag,
|
|
|
|
));
|
|
|
|
}
|
|
|
|
|
2019-08-21 19:03:59 +02:00
|
|
|
if sources.len() == 1 {
|
|
|
|
if let Ok(entry) = &sources[0] {
|
|
|
|
let entry_file_name = match entry.file_name() {
|
|
|
|
Some(name) => name,
|
|
|
|
None => {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
"Rename aborted. Not a valid entry name",
|
2020-02-01 09:34:34 +01:00
|
|
|
"not a valid entry name",
|
|
|
|
src.tag,
|
2019-08-21 19:03:59 +02:00
|
|
|
))
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
if destination.exists() && destination.is_dir() {
|
|
|
|
destination = match dunce::canonicalize(&destination) {
|
|
|
|
Ok(path) => path,
|
|
|
|
Err(e) => {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
format!("Rename aborted. {:}", e.to_string()),
|
2020-02-01 09:34:34 +01:00
|
|
|
e.to_string(),
|
|
|
|
dst.tag,
|
2019-08-21 19:03:59 +02:00
|
|
|
))
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
destination.push(entry_file_name);
|
|
|
|
}
|
|
|
|
|
|
|
|
if entry.is_file() {
|
2019-11-15 03:52:51 +01:00
|
|
|
#[cfg(not(windows))]
|
|
|
|
{
|
|
|
|
match std::fs::rename(&entry, &destination) {
|
|
|
|
Err(e) => {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
format!(
|
|
|
|
"Rename {:?} to {:?} aborted. {:}",
|
|
|
|
entry_file_name,
|
|
|
|
destination_file_name,
|
|
|
|
e.to_string(),
|
|
|
|
),
|
2020-02-01 09:34:34 +01:00
|
|
|
e.to_string(),
|
2019-11-15 03:52:51 +01:00
|
|
|
name_tag,
|
|
|
|
));
|
|
|
|
}
|
|
|
|
Ok(o) => o,
|
|
|
|
};
|
|
|
|
}
|
|
|
|
#[cfg(windows)]
|
|
|
|
{
|
|
|
|
match std::fs::copy(&entry, &destination) {
|
|
|
|
Err(e) => {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
format!(
|
|
|
|
"Rename {:?} to {:?} aborted. {:}",
|
|
|
|
entry_file_name,
|
|
|
|
destination_file_name,
|
|
|
|
e.to_string(),
|
|
|
|
),
|
2020-02-01 09:34:34 +01:00
|
|
|
e.to_string(),
|
2019-11-15 03:52:51 +01:00
|
|
|
name_tag,
|
|
|
|
));
|
|
|
|
}
|
|
|
|
Ok(_) => match std::fs::remove_file(&entry) {
|
|
|
|
Err(e) => {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
format!(
|
|
|
|
"Rename {:?} to {:?} aborted. {:}",
|
|
|
|
entry_file_name,
|
|
|
|
destination_file_name,
|
|
|
|
e.to_string(),
|
|
|
|
),
|
2020-02-01 09:34:34 +01:00
|
|
|
e.to_string(),
|
2019-11-15 03:52:51 +01:00
|
|
|
name_tag,
|
|
|
|
));
|
|
|
|
}
|
|
|
|
Ok(o) => o,
|
|
|
|
},
|
|
|
|
};
|
|
|
|
}
|
2019-08-21 19:03:59 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
if entry.is_dir() {
|
|
|
|
match std::fs::create_dir_all(&destination) {
|
|
|
|
Err(e) => {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
format!(
|
|
|
|
"Rename {:?} to {:?} aborted. {:}",
|
|
|
|
entry_file_name,
|
|
|
|
destination_file_name,
|
|
|
|
e.to_string(),
|
|
|
|
),
|
2020-02-01 09:34:34 +01:00
|
|
|
e.to_string(),
|
2019-09-14 18:30:24 +02:00
|
|
|
name_tag,
|
2019-08-21 19:03:59 +02:00
|
|
|
));
|
|
|
|
}
|
|
|
|
Ok(o) => o,
|
|
|
|
};
|
|
|
|
#[cfg(not(windows))]
|
|
|
|
{
|
|
|
|
match std::fs::rename(&entry, &destination) {
|
|
|
|
Err(e) => {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
format!(
|
|
|
|
"Rename {:?} to {:?} aborted. {:}",
|
|
|
|
entry_file_name,
|
|
|
|
destination_file_name,
|
|
|
|
e.to_string(),
|
|
|
|
),
|
2020-02-01 09:34:34 +01:00
|
|
|
e.to_string(),
|
2019-09-14 18:30:24 +02:00
|
|
|
name_tag,
|
2019-08-21 19:03:59 +02:00
|
|
|
));
|
|
|
|
}
|
|
|
|
Ok(o) => o,
|
|
|
|
};
|
|
|
|
}
|
|
|
|
#[cfg(windows)]
|
|
|
|
{
|
|
|
|
let mut sources: FileStructure = FileStructure::new();
|
|
|
|
|
|
|
|
sources.walk_decorate(&entry)?;
|
|
|
|
|
|
|
|
let strategy = |(source_file, depth_level)| {
|
|
|
|
let mut new_dst = destination.clone();
|
|
|
|
|
|
|
|
let path = dunce::canonicalize(&source_file)?;
|
|
|
|
|
|
|
|
let mut comps: Vec<_> = path
|
|
|
|
.components()
|
|
|
|
.map(|fragment| fragment.as_os_str())
|
|
|
|
.rev()
|
|
|
|
.take(1 + depth_level)
|
|
|
|
.collect();
|
|
|
|
|
|
|
|
comps.reverse();
|
|
|
|
|
|
|
|
for fragment in comps.iter() {
|
|
|
|
new_dst.push(fragment);
|
|
|
|
}
|
|
|
|
|
2019-12-31 08:36:08 +01:00
|
|
|
Ok((PathBuf::from(&source_file), new_dst))
|
2019-08-21 19:03:59 +02:00
|
|
|
};
|
|
|
|
|
|
|
|
let sources = sources.paths_applying_with(strategy)?;
|
|
|
|
|
|
|
|
for (ref src, ref dst) in sources {
|
2019-12-31 08:36:08 +01:00
|
|
|
if src.is_dir() && !dst.exists() {
|
|
|
|
match std::fs::create_dir_all(dst) {
|
|
|
|
Err(e) => {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
format!(
|
|
|
|
"Rename {:?} to {:?} aborted. {:}",
|
|
|
|
entry_file_name,
|
|
|
|
destination_file_name,
|
|
|
|
e.to_string(),
|
|
|
|
),
|
2020-02-01 09:34:34 +01:00
|
|
|
e.to_string(),
|
2019-12-31 08:36:08 +01:00
|
|
|
name_tag,
|
|
|
|
));
|
2019-08-21 19:03:59 +02:00
|
|
|
}
|
2019-12-31 08:36:08 +01:00
|
|
|
Ok(o) => o,
|
2019-08-21 19:03:59 +02:00
|
|
|
}
|
2020-02-07 17:24:01 +01:00
|
|
|
} else if src.is_file() {
|
|
|
|
match std::fs::copy(src, dst) {
|
|
|
|
Err(e) => {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
format!(
|
|
|
|
"Moving file {:?} to {:?} aborted. {:}",
|
|
|
|
src,
|
|
|
|
dst,
|
|
|
|
e.to_string(),
|
|
|
|
),
|
|
|
|
e.to_string(),
|
|
|
|
name_tag,
|
|
|
|
));
|
|
|
|
}
|
|
|
|
Ok(_o) => (),
|
|
|
|
}
|
2019-08-21 19:03:59 +02:00
|
|
|
}
|
2019-12-31 08:36:08 +01:00
|
|
|
}
|
2019-08-21 19:03:59 +02:00
|
|
|
|
2019-12-31 08:36:08 +01:00
|
|
|
if src.is_file() {
|
|
|
|
match std::fs::copy(&src, &dst) {
|
|
|
|
Err(e) => {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
format!(
|
|
|
|
"Rename {:?} to {:?} aborted. {:}",
|
|
|
|
src,
|
|
|
|
destination_file_name,
|
|
|
|
e.to_string(),
|
|
|
|
),
|
2020-02-01 09:34:34 +01:00
|
|
|
e.to_string(),
|
2019-12-31 08:36:08 +01:00
|
|
|
name_tag,
|
|
|
|
));
|
|
|
|
}
|
|
|
|
Ok(_) => match std::fs::remove_file(&src) {
|
2019-08-21 19:03:59 +02:00
|
|
|
Err(e) => {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
format!(
|
|
|
|
"Rename {:?} to {:?} aborted. {:}",
|
2019-12-31 08:36:08 +01:00
|
|
|
entry_file_name,
|
2019-08-21 19:03:59 +02:00
|
|
|
destination_file_name,
|
|
|
|
e.to_string(),
|
|
|
|
),
|
2020-02-01 09:34:34 +01:00
|
|
|
e.to_string(),
|
2019-09-14 18:30:24 +02:00
|
|
|
name_tag,
|
2019-08-21 19:03:59 +02:00
|
|
|
));
|
|
|
|
}
|
2019-12-31 08:36:08 +01:00
|
|
|
Ok(o) => o,
|
|
|
|
},
|
|
|
|
};
|
2019-08-21 19:03:59 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
match std::fs::remove_dir_all(entry) {
|
|
|
|
Err(e) => {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
format!(
|
|
|
|
"Rename {:?} to {:?} aborted. {:}",
|
|
|
|
entry_file_name,
|
|
|
|
destination_file_name,
|
|
|
|
e.to_string(),
|
|
|
|
),
|
2020-02-01 09:34:34 +01:00
|
|
|
e.to_string(),
|
2019-09-14 18:30:24 +02:00
|
|
|
name_tag,
|
2019-08-21 19:03:59 +02:00
|
|
|
));
|
|
|
|
}
|
|
|
|
Ok(o) => o,
|
|
|
|
};
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2019-12-07 10:34:32 +01:00
|
|
|
} else if destination.exists() {
|
|
|
|
let is_file = |x: &Result<PathBuf, _>| {
|
|
|
|
x.as_ref().map(|entry| entry.is_file()).unwrap_or_default()
|
|
|
|
};
|
2019-12-06 16:28:26 +01:00
|
|
|
|
2019-12-07 10:34:32 +01:00
|
|
|
if !sources.iter().all(is_file) {
|
|
|
|
return Err(ShellError::labeled_error(
|
2019-08-21 19:03:59 +02:00
|
|
|
"Rename aborted (directories found). Renaming in patterns not supported yet (try moving the directory directly)",
|
2020-02-01 09:34:34 +01:00
|
|
|
"renaming in patterns not supported yet (try moving the directory directly)",
|
2019-08-21 19:03:59 +02:00
|
|
|
src.tag,
|
|
|
|
));
|
2019-12-07 10:34:32 +01:00
|
|
|
}
|
2019-08-21 19:03:59 +02:00
|
|
|
|
2019-12-07 10:34:32 +01:00
|
|
|
for entry in sources {
|
|
|
|
if let Ok(entry) = entry {
|
|
|
|
let entry_file_name = match entry.file_name() {
|
|
|
|
Some(name) => name,
|
|
|
|
None => {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
"Rename aborted. Not a valid entry name",
|
2020-02-01 09:34:34 +01:00
|
|
|
"not a valid entry name",
|
|
|
|
src.tag,
|
2019-12-07 10:34:32 +01:00
|
|
|
))
|
|
|
|
}
|
|
|
|
};
|
2019-08-21 19:03:59 +02:00
|
|
|
|
2019-12-07 10:34:32 +01:00
|
|
|
let mut to = PathBuf::from(&destination);
|
|
|
|
to.push(entry_file_name);
|
2019-08-21 19:03:59 +02:00
|
|
|
|
2019-12-07 10:34:32 +01:00
|
|
|
if entry.is_file() {
|
|
|
|
#[cfg(not(windows))]
|
|
|
|
{
|
|
|
|
match std::fs::rename(&entry, &to) {
|
|
|
|
Err(e) => {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
format!(
|
|
|
|
"Rename {:?} to {:?} aborted. {:}",
|
|
|
|
entry_file_name,
|
|
|
|
destination_file_name,
|
|
|
|
e.to_string(),
|
|
|
|
),
|
2020-02-01 09:34:34 +01:00
|
|
|
e.to_string(),
|
2019-12-07 10:34:32 +01:00
|
|
|
name_tag,
|
|
|
|
));
|
|
|
|
}
|
|
|
|
Ok(o) => o,
|
|
|
|
};
|
|
|
|
}
|
|
|
|
#[cfg(windows)]
|
|
|
|
{
|
|
|
|
match std::fs::copy(&entry, &to) {
|
|
|
|
Err(e) => {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
format!(
|
|
|
|
"Rename {:?} to {:?} aborted. {:}",
|
|
|
|
entry_file_name,
|
|
|
|
destination_file_name,
|
|
|
|
e.to_string(),
|
|
|
|
),
|
2020-02-01 09:34:34 +01:00
|
|
|
e.to_string(),
|
2019-12-07 10:34:32 +01:00
|
|
|
name_tag,
|
|
|
|
));
|
|
|
|
}
|
|
|
|
Ok(_) => match std::fs::remove_file(&entry) {
|
2019-11-15 03:52:51 +01:00
|
|
|
Err(e) => {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
format!(
|
2019-12-07 10:34:32 +01:00
|
|
|
"Remove {:?} to {:?} aborted. {:}",
|
2019-11-15 03:52:51 +01:00
|
|
|
entry_file_name,
|
|
|
|
destination_file_name,
|
|
|
|
e.to_string(),
|
|
|
|
),
|
2020-02-01 09:34:34 +01:00
|
|
|
e.to_string(),
|
2019-11-15 03:52:51 +01:00
|
|
|
name_tag,
|
|
|
|
));
|
|
|
|
}
|
|
|
|
Ok(o) => o,
|
2019-12-07 10:34:32 +01:00
|
|
|
},
|
|
|
|
};
|
2019-08-21 19:03:59 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2019-12-07 10:34:32 +01:00
|
|
|
} else {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
format!("Rename aborted. (Does {:?} exist?)", destination_file_name),
|
2020-02-01 09:34:34 +01:00
|
|
|
format!("rename aborted (does {:?} exist?)", destination_file_name),
|
|
|
|
dst.tag,
|
2019-12-07 10:34:32 +01:00
|
|
|
));
|
2019-08-21 19:03:59 +02:00
|
|
|
}
|
|
|
|
|
2019-08-24 21:36:19 +02:00
|
|
|
Ok(OutputStream::empty())
|
2019-08-21 19:03:59 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
fn rm(
|
|
|
|
&self,
|
2019-10-19 22:52:39 +02:00
|
|
|
RemoveArgs {
|
2020-03-25 21:19:01 +01:00
|
|
|
rest: targets,
|
2019-10-19 22:52:39 +02:00
|
|
|
recursive,
|
|
|
|
trash,
|
|
|
|
}: RemoveArgs,
|
2019-09-14 18:30:24 +02:00
|
|
|
name: Tag,
|
2019-08-22 06:13:40 +02:00
|
|
|
path: &str,
|
2019-08-24 21:36:19 +02:00
|
|
|
) -> Result<OutputStream, ShellError> {
|
2019-09-14 18:30:24 +02:00
|
|
|
let name_tag = name;
|
2019-08-21 19:09:23 +02:00
|
|
|
|
2020-03-25 21:19:01 +01:00
|
|
|
if targets.is_empty() {
|
2019-08-21 19:03:59 +02:00
|
|
|
return Err(ShellError::labeled_error(
|
2020-03-25 21:19:01 +01:00
|
|
|
"rm requires target paths",
|
|
|
|
"needs parameter",
|
|
|
|
name_tag,
|
2019-08-21 19:03:59 +02:00
|
|
|
));
|
|
|
|
}
|
|
|
|
|
2020-03-25 21:19:01 +01:00
|
|
|
let mut all_targets: HashMap<PathBuf, Tag> = HashMap::new();
|
|
|
|
for target in targets {
|
|
|
|
if target.item.to_str() == Some(".") || target.item.to_str() == Some("..") {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
"Remove aborted. \".\" or \"..\" may not be removed.",
|
|
|
|
"\".\" or \"..\" may not be removed",
|
|
|
|
target.tag,
|
|
|
|
));
|
|
|
|
}
|
2019-08-22 06:23:57 +02:00
|
|
|
|
2020-03-25 21:19:01 +01:00
|
|
|
let mut path = PathBuf::from(path);
|
|
|
|
path.push(&target.item);
|
|
|
|
match glob::glob(&path.to_string_lossy()) {
|
|
|
|
Ok(files) => {
|
|
|
|
for file in files {
|
|
|
|
match file {
|
|
|
|
Ok(ref f) => {
|
|
|
|
all_targets
|
|
|
|
.entry(f.clone())
|
|
|
|
.or_insert_with(|| target.tag.clone());
|
|
|
|
}
|
|
|
|
Err(e) => {
|
|
|
|
let msg = format!("Could not remove {:}", path.to_string_lossy());
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
msg,
|
|
|
|
e.to_string(),
|
|
|
|
&target.tag,
|
|
|
|
));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
Err(e) => {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
format!("Remove aborted. {:}", e.to_string()),
|
|
|
|
e.to_string(),
|
|
|
|
&name_tag,
|
2020-01-24 20:16:41 +01:00
|
|
|
))
|
2020-03-25 21:19:01 +01:00
|
|
|
}
|
|
|
|
};
|
|
|
|
}
|
|
|
|
|
|
|
|
if all_targets.is_empty() {
|
|
|
|
Err(ShellError::labeled_error(
|
|
|
|
"Remove aborted. No valid paths",
|
|
|
|
"no valid paths",
|
|
|
|
name_tag,
|
|
|
|
))
|
|
|
|
} else {
|
|
|
|
let stream = async_stream! {
|
|
|
|
for (f, tag) in all_targets.iter() {
|
|
|
|
let is_empty = match f.read_dir() {
|
|
|
|
Ok(mut p) => p.next().is_none(),
|
|
|
|
Err(_) => false
|
|
|
|
};
|
|
|
|
|
|
|
|
let valid_target =
|
|
|
|
f.exists() && (!f.is_dir() || (is_empty || recursive.item));
|
|
|
|
if valid_target {
|
|
|
|
if trash.item {
|
|
|
|
match SendToTrash::remove(f) {
|
2020-01-24 20:16:41 +01:00
|
|
|
Err(e) => {
|
2020-03-25 21:19:01 +01:00
|
|
|
let msg = format!(
|
|
|
|
"Could not delete {:}",
|
|
|
|
f.to_string_lossy()
|
|
|
|
);
|
|
|
|
let label = format!("{:?}", e);
|
2020-01-24 20:16:41 +01:00
|
|
|
yield Err(ShellError::labeled_error(
|
|
|
|
msg,
|
2020-03-25 21:19:01 +01:00
|
|
|
label,
|
|
|
|
tag,
|
2020-01-24 20:16:41 +01:00
|
|
|
))
|
|
|
|
},
|
2020-03-25 21:19:01 +01:00
|
|
|
Ok(()) => {
|
|
|
|
let val = format!("deleted {:}", f.to_string_lossy()).into();
|
|
|
|
yield Ok(ReturnSuccess::Value(val))
|
|
|
|
},
|
2020-01-24 20:16:41 +01:00
|
|
|
}
|
2020-03-25 21:19:01 +01:00
|
|
|
} else {
|
|
|
|
let success = if f.is_dir() {
|
|
|
|
std::fs::remove_dir_all(f)
|
|
|
|
} else {
|
|
|
|
std::fs::remove_file(f)
|
|
|
|
};
|
|
|
|
match success {
|
|
|
|
Err(e) => {
|
|
|
|
let msg = format!(
|
|
|
|
"Could not delete {:}",
|
|
|
|
f.to_string_lossy()
|
|
|
|
);
|
|
|
|
yield Err(ShellError::labeled_error(
|
|
|
|
msg,
|
|
|
|
e.to_string(),
|
|
|
|
tag,
|
|
|
|
))
|
|
|
|
},
|
|
|
|
Ok(()) => {
|
|
|
|
let val = format!("deleted {:}", f.to_string_lossy()).into();
|
|
|
|
yield Ok(ReturnSuccess::Value(
|
|
|
|
val,
|
|
|
|
))
|
|
|
|
},
|
2019-08-21 19:03:59 +02:00
|
|
|
}
|
2020-03-25 21:19:01 +01:00
|
|
|
}
|
|
|
|
} else {
|
|
|
|
if f.is_dir() {
|
|
|
|
let msg = format!(
|
|
|
|
"Cannot remove {:}. try --recursive",
|
|
|
|
f.to_string_lossy()
|
|
|
|
);
|
|
|
|
yield Err(ShellError::labeled_error(
|
|
|
|
msg,
|
|
|
|
"cannot remove non-empty directory",
|
|
|
|
tag,
|
|
|
|
))
|
|
|
|
} else {
|
|
|
|
let msg = format!("Invalid file: {:}", f.to_string_lossy());
|
|
|
|
yield Err(ShellError::labeled_error(
|
|
|
|
msg,
|
|
|
|
"invalid file",
|
|
|
|
tag,
|
|
|
|
))
|
|
|
|
}
|
|
|
|
}
|
2019-08-21 19:03:59 +02:00
|
|
|
}
|
2020-03-25 21:19:01 +01:00
|
|
|
};
|
|
|
|
Ok(stream.to_output_stream())
|
2019-08-21 19:03:59 +02:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-08-07 19:49:11 +02:00
|
|
|
fn path(&self) -> String {
|
|
|
|
self.path.clone()
|
|
|
|
}
|
|
|
|
|
2019-09-08 00:31:16 +02:00
|
|
|
fn pwd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result<OutputStream, ShellError> {
|
|
|
|
let path = PathBuf::from(self.path());
|
|
|
|
let p = match dunce::canonicalize(path.as_path()) {
|
|
|
|
Ok(p) => p,
|
|
|
|
Err(_) => {
|
|
|
|
return Err(ShellError::labeled_error(
|
|
|
|
"unable to show current directory",
|
|
|
|
"pwd command failed",
|
2019-10-13 06:12:43 +02:00
|
|
|
&args.call_info.name_tag,
|
2019-09-08 00:31:16 +02:00
|
|
|
));
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
let mut stream = VecDeque::new();
|
|
|
|
stream.push_back(ReturnSuccess::value(
|
2019-11-21 15:33:14 +01:00
|
|
|
UntaggedValue::Primitive(Primitive::String(p.to_string_lossy().to_string()))
|
|
|
|
.into_value(&args.call_info.name_tag),
|
2019-09-08 00:31:16 +02:00
|
|
|
));
|
|
|
|
|
|
|
|
Ok(stream.into())
|
|
|
|
}
|
|
|
|
|
2019-08-07 19:49:11 +02:00
|
|
|
fn set_path(&mut self, path: String) {
|
2019-08-08 02:52:29 +02:00
|
|
|
let pathbuf = PathBuf::from(&path);
|
|
|
|
let path = match dunce::canonicalize(pathbuf.as_path()) {
|
|
|
|
Ok(path) => {
|
|
|
|
let _ = std::env::set_current_dir(&path);
|
|
|
|
path
|
|
|
|
}
|
|
|
|
_ => {
|
|
|
|
// TODO: handle the case where the path cannot be canonicalized
|
|
|
|
pathbuf
|
|
|
|
}
|
|
|
|
};
|
2019-09-08 07:10:08 +02:00
|
|
|
self.last_path = self.path.clone();
|
2019-08-08 02:52:29 +02:00
|
|
|
self.path = path.to_string_lossy().to_string();
|
2019-08-07 19:49:11 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
    /// Tab-completion entry point: delegates directly to the shell's
    /// completer.
    ///
    /// Returns the position at which the completion begins together with the
    /// candidate pairs, in the shape rustyline expects.
    fn complete(
        &self,
        line: &str,
        pos: usize,
        ctx: &rustyline::Context<'_>,
    ) -> Result<(usize, Vec<rustyline::completion::Pair>), rustyline::error::ReadlineError> {
        self.completer.complete(line, pos, ctx)
    }
|
|
|
|
|
Restructure and streamline token expansion (#1123)
Restructure and streamline token expansion
The purpose of this commit is to streamline the token expansion code, by
removing aspects of the code that are no longer relevant, removing
pointless duplication, and eliminating the need to pass the same
arguments to `expand_syntax`.
The first big-picture change in this commit is that instead of a handful
of `expand_` functions, which take a TokensIterator and ExpandContext, a
smaller number of methods on the `TokensIterator` do the same job.
The second big-picture change in this commit is fully eliminating the
coloring traits, making coloring a responsibility of the base expansion
implementations. This also means that the coloring tracer is merged into
the expansion tracer, so you can follow a single expansion and see how
the expansion process produced colored tokens.
One side effect of this change is that the expander itself is marginally
more error-correcting. The error correction works by switching from
structured expansion to `BackoffColoringMode` when an unexpected token
is found, which guarantees that all spans of the source are colored, but
may not be the most optimal error recovery strategy.
That said, because `BackoffColoringMode` only extends as far as a
closing delimiter (`)`, `]`, `}`) or pipe (`|`), it does result in
fairly granular correction strategy.
The current code still produces an `Err` (plus a complete list of
colored shapes) from the parsing process if any errors are encountered,
but this could easily be addressed now that the underlying expansion is
error-correcting.
This commit also colors any spans that are syntax errors in red, and
causes the parser to include some additional information about what
tokens were expected at any given point where an error was encountered,
so that completions and hinting could be more robust in the future.
Co-authored-by: Jonathan Turner <jonathandturner@users.noreply.github.com>
Co-authored-by: Andrés N. Robalino <andres@androbtech.com>
2020-01-21 23:45:03 +01:00
|
|
|
    /// Inline-hint entry point: delegates directly to the shell's hinter.
    ///
    /// `_expand_context` is accepted by the signature but not used by this
    /// implementation.
    fn hint(
        &self,
        line: &str,
        pos: usize,
        ctx: &rustyline::Context<'_>,
        _expand_context: ExpandContext,
    ) -> Option<String> {
        self.hinter.hint(line, pos, ctx)
    }
|
|
|
|
}
|
2020-01-28 17:58:31 +01:00
|
|
|
|
2020-03-13 18:27:04 +01:00
|
|
|
/// Returns `true` when `dir` has no entries.
///
/// A path that cannot be read (missing, not a directory, or permission
/// denied) also counts as empty, mirroring the original behavior.
fn is_empty_dir(dir: impl AsRef<Path>) -> bool {
    dir.as_ref()
        .read_dir()
        .map(|mut entries| entries.next().is_none())
        .unwrap_or(true)
}
|
2020-03-13 18:27:04 +01:00
|
|
|
|
|
|
|
/// Returns `true` when `dir` is hidden by platform convention.
///
/// On Windows this reads the file's attribute bits and tests
/// `FILE_ATTRIBUTE_HIDDEN` (0x2); a path whose metadata cannot be read is
/// treated as not hidden. On other platforms a path is hidden when its final
/// component starts with `.` (no filesystem access needed).
///
/// Rewritten with plain `#[cfg]` blocks instead of the `cfg_if!` macro —
/// same behavior, one less macro dependency in this function.
fn is_hidden_dir(dir: impl AsRef<Path>) -> bool {
    #[cfg(windows)]
    {
        use std::os::windows::fs::MetadataExt;

        // https://docs.microsoft.com/en-us/windows/win32/fileio/file-attribute-constants
        if let Ok(metadata) = dir.as_ref().metadata() {
            (metadata.file_attributes() & 0x2) != 0
        } else {
            false
        }
    }
    #[cfg(not(windows))]
    {
        dir.as_ref()
            .file_name()
            .map(|name| name.to_string_lossy().starts_with('.'))
            .unwrap_or(false)
    }
}
|