// nushell/crates/nu-cli/src/shell/filesystem_shell.rs
use std::collections::HashMap;
#[cfg(unix)]
use std::os::unix::fs::PermissionsExt;
use std::path::{Path, PathBuf};

use rustyline::completion::FilenameCompleter;
use rustyline::hint::{Hinter, HistoryHinter};

use nu_errors::ShellError;
use nu_parser::expand_ndots;
use nu_protocol::{Primitive, ReturnSuccess, UntaggedValue};
use nu_source::Tagged;

use crate::commands::cd::CdArgs;
use crate::commands::command::EvaluatedWholeStreamCommandArgs;
use crate::commands::cp::CopyArgs;
use crate::commands::ls::LsArgs;
use crate::commands::mkdir::MkdirArgs;
use crate::commands::mv::MoveArgs;
use crate::commands::rm::RemoveArgs;
use crate::data::dir_entry_dict;
use crate::path::canonicalize;
use crate::prelude::*;
use crate::shell::completer::NuCompleter;
use crate::shell::shell::Shell;
use crate::utils::FileStructure;
2019-08-07 19:49:11 +02:00
pub struct FilesystemShell {
pub(crate) path: String,
pub(crate) last_path: String,
2019-08-07 19:49:11 +02:00
completer: NuCompleter,
hinter: HistoryHinter,
}
impl std::fmt::Debug for FilesystemShell {
    /// Render only the current working directory — the one piece of state
    /// that is useful when inspecting a shell in logs.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str("FilesystemShell @ ")?;
        f.write_str(&self.path)
    }
}
2019-08-07 19:49:11 +02:00
impl Clone for FilesystemShell {
fn clone(&self) -> Self {
FilesystemShell {
path: self.path.clone(),
last_path: self.path.clone(),
2019-08-07 19:49:11 +02:00
completer: NuCompleter {
file_completer: FilenameCompleter::new(),
2019-08-10 07:02:15 +02:00
commands: self.completer.commands.clone(),
Restructure and streamline token expansion (#1123) Restructure and streamline token expansion The purpose of this commit is to streamline the token expansion code, by removing aspects of the code that are no longer relevant, removing pointless duplication, and eliminating the need to pass the same arguments to `expand_syntax`. The first big-picture change in this commit is that instead of a handful of `expand_` functions, which take a TokensIterator and ExpandContext, a smaller number of methods on the `TokensIterator` do the same job. The second big-picture change in this commit is fully eliminating the coloring traits, making coloring a responsibility of the base expansion implementations. This also means that the coloring tracer is merged into the expansion tracer, so you can follow a single expansion and see how the expansion process produced colored tokens. One side effect of this change is that the expander itself is marginally more error-correcting. The error correction works by switching from structured expansion to `BackoffColoringMode` when an unexpected token is found, which guarantees that all spans of the source are colored, but may not be the most optimal error recovery strategy. That said, because `BackoffColoringMode` only extends as far as a closing delimiter (`)`, `]`, `}`) or pipe (`|`), it does result in fairly granular correction strategy. The current code still produces an `Err` (plus a complete list of colored shapes) from the parsing process if any errors are encountered, but this could easily be addressed now that the underlying expansion is error-correcting. This commit also colors any spans that are syntax errors in red, and causes the parser to include some additional information about what tokens were expected at any given point where an error was encountered, so that completions and hinting could be more robust in the future. Co-authored-by: Jonathan Turner <jonathandturner@users.noreply.github.com> Co-authored-by: Andrés N. 
Robalino <andres@androbtech.com>
2020-01-21 23:45:03 +01:00
homedir: self.homedir(),
2019-08-07 19:49:11 +02:00
},
hinter: HistoryHinter {},
}
}
}
impl FilesystemShell {
2019-08-10 07:02:15 +02:00
pub fn basic(commands: CommandRegistry) -> Result<FilesystemShell, std::io::Error> {
2019-08-07 19:49:11 +02:00
let path = std::env::current_dir()?;
Ok(FilesystemShell {
path: path.to_string_lossy().to_string(),
last_path: path.to_string_lossy().to_string(),
2019-08-07 19:49:11 +02:00
completer: NuCompleter {
file_completer: FilenameCompleter::new(),
2019-08-10 07:02:15 +02:00
commands,
Restructure and streamline token expansion (#1123) Restructure and streamline token expansion The purpose of this commit is to streamline the token expansion code, by removing aspects of the code that are no longer relevant, removing pointless duplication, and eliminating the need to pass the same arguments to `expand_syntax`. The first big-picture change in this commit is that instead of a handful of `expand_` functions, which take a TokensIterator and ExpandContext, a smaller number of methods on the `TokensIterator` do the same job. The second big-picture change in this commit is fully eliminating the coloring traits, making coloring a responsibility of the base expansion implementations. This also means that the coloring tracer is merged into the expansion tracer, so you can follow a single expansion and see how the expansion process produced colored tokens. One side effect of this change is that the expander itself is marginally more error-correcting. The error correction works by switching from structured expansion to `BackoffColoringMode` when an unexpected token is found, which guarantees that all spans of the source are colored, but may not be the most optimal error recovery strategy. That said, because `BackoffColoringMode` only extends as far as a closing delimiter (`)`, `]`, `}`) or pipe (`|`), it does result in fairly granular correction strategy. The current code still produces an `Err` (plus a complete list of colored shapes) from the parsing process if any errors are encountered, but this could easily be addressed now that the underlying expansion is error-correcting. This commit also colors any spans that are syntax errors in red, and causes the parser to include some additional information about what tokens were expected at any given point where an error was encountered, so that completions and hinting could be more robust in the future. Co-authored-by: Jonathan Turner <jonathandturner@users.noreply.github.com> Co-authored-by: Andrés N. 
Robalino <andres@androbtech.com>
2020-01-21 23:45:03 +01:00
homedir: dirs::home_dir(),
2019-08-07 19:49:11 +02:00
},
hinter: HistoryHinter {},
})
}
pub fn with_location(
path: String,
commands: CommandRegistry,
) -> Result<FilesystemShell, std::io::Error> {
let path = canonicalize(std::env::current_dir()?, &path)?;
let path = path.display().to_string();
let last_path = path.clone();
Ok(FilesystemShell {
2019-08-07 19:49:11 +02:00
path,
last_path,
2019-08-07 19:49:11 +02:00
completer: NuCompleter {
file_completer: FilenameCompleter::new(),
2019-08-10 07:02:15 +02:00
commands,
Restructure and streamline token expansion (#1123) Restructure and streamline token expansion The purpose of this commit is to streamline the token expansion code, by removing aspects of the code that are no longer relevant, removing pointless duplication, and eliminating the need to pass the same arguments to `expand_syntax`. The first big-picture change in this commit is that instead of a handful of `expand_` functions, which take a TokensIterator and ExpandContext, a smaller number of methods on the `TokensIterator` do the same job. The second big-picture change in this commit is fully eliminating the coloring traits, making coloring a responsibility of the base expansion implementations. This also means that the coloring tracer is merged into the expansion tracer, so you can follow a single expansion and see how the expansion process produced colored tokens. One side effect of this change is that the expander itself is marginally more error-correcting. The error correction works by switching from structured expansion to `BackoffColoringMode` when an unexpected token is found, which guarantees that all spans of the source are colored, but may not be the most optimal error recovery strategy. That said, because `BackoffColoringMode` only extends as far as a closing delimiter (`)`, `]`, `}`) or pipe (`|`), it does result in fairly granular correction strategy. The current code still produces an `Err` (plus a complete list of colored shapes) from the parsing process if any errors are encountered, but this could easily be addressed now that the underlying expansion is error-correcting. This commit also colors any spans that are syntax errors in red, and causes the parser to include some additional information about what tokens were expected at any given point where an error was encountered, so that completions and hinting could be more robust in the future. Co-authored-by: Jonathan Turner <jonathandturner@users.noreply.github.com> Co-authored-by: Andrés N. 
Robalino <andres@androbtech.com>
2020-01-21 23:45:03 +01:00
homedir: dirs::home_dir(),
2019-08-07 19:49:11 +02:00
},
hinter: HistoryHinter {},
})
2019-08-07 19:49:11 +02:00
}
}
impl Shell for FilesystemShell {
fn name(&self) -> String {
2019-08-07 19:49:11 +02:00
"filesystem".to_string()
}
Add support for ~ expansion This ended up being a bit of a yak shave. The basic idea in this commit is to expand `~` in paths, but only in paths. The way this is accomplished is by doing the expansion inside of the code that parses literal syntax for `SyntaxType::Path`. As a quick refresher: every command is entitled to expand its arguments in a custom way. While this could in theory be used for general-purpose macros, today the expansion facility is limited to syntactic hints. For example, the syntax `where cpu > 0` expands under the hood to `where { $it.cpu > 0 }`. This happens because the first argument to `where` is defined as a `SyntaxType::Block`, and the parser coerces binary expressions whose left-hand-side looks like a member into a block when the command is expecting one. This is mildly more magical than what most programming languages would do, but we believe that it makes sense to allow commands to fine-tune the syntax because of the domain nushell is in (command-line shells). The syntactic expansions supported by this facility are relatively limited. For example, we don't allow `$it` to become a bare word, simply because the command asks for a string in the relevant position. That would quickly become more confusing than it's worth. This PR adds a new `SyntaxType` rule: `SyntaxType::Path`. When a command declares a parameter as a `SyntaxType::Path`, string literals and bare words passed as an argument to that parameter are processed using the path expansion rules. Right now, that only means that `~` is expanded into the home directory, but additional rules are possible in the future. By restricting this expansion to a syntactic expansion when passed as an argument to a command expecting a path, we avoid making `~` a generally reserved character. This will also allow us to give good tab completion for paths with `~` characters in them when a command is expecting a path. 
In order to accomplish the above, this commit changes the parsing functions to take a `Context` instead of just a `CommandRegistry`. From the perspective of macro expansion, you can think of the `CommandRegistry` as a dictionary of in-scope macros, and the `Context` as the compile-time state used in expansion. This could gain additional functionality over time as we find more uses for the expansion system.
2019-08-26 21:21:03 +02:00
fn homedir(&self) -> Option<PathBuf> {
dirs::home_dir()
}
fn ls(
&self,
LsArgs {
path,
all,
full,
short_names,
with_symlink_targets,
du,
}: LsArgs,
name_tag: Tag,
ctrl_c: Arc<AtomicBool>,
) -> Result<OutputStream, ShellError> {
let ctrl_c_copy = ctrl_c.clone();
let (path, p_tag) = match path {
Some(p) => {
let p_tag = p.tag;
let mut p = p.item;
if p.is_dir() {
if is_empty_dir(&p) {
return Ok(OutputStream::empty());
}
p.push("*");
}
(p, p_tag)
}
None => {
if is_empty_dir(&self.path()) {
return Ok(OutputStream::empty());
2019-08-23 06:51:43 +02:00
} else {
(PathBuf::from("./*"), name_tag.clone())
2019-08-23 06:51:43 +02:00
}
}
};
2019-08-07 19:49:11 +02:00
let mut paths = glob::glob(&path.to_string_lossy())
.map_err(|e| ShellError::labeled_error(e.to_string(), "invalid pattern", &p_tag))?
.peekable();
if paths.peek().is_none() {
return Err(ShellError::labeled_error(
"No matches found",
"no matches found",
&p_tag,
));
}
// Generated stream: impl Stream<Item = Result<ReturnSuccess, ShellError>
Ok(futures::stream::iter(paths.filter_map(move |path| {
let path = match path.map_err(|e| ShellError::from(e.into_error())) {
Ok(path) => path,
Err(err) => return Some(Err(err)),
};
if !all && is_hidden_dir(&path) {
return None;
2019-08-07 19:49:11 +02:00
}
let metadata = match std::fs::symlink_metadata(&path) {
Ok(metadata) => Some(metadata),
Err(e) => {
if e.kind() == std::io::ErrorKind::PermissionDenied {
None
} else {
return Some(Err(e.into()));
}
}
};
let entry = dir_entry_dict(
&path,
metadata.as_ref(),
name_tag.clone(),
full,
short_names,
with_symlink_targets,
du,
ctrl_c.clone(),
)
.map(ReturnSuccess::Value);
Some(entry)
}))
.interruptible(ctrl_c_copy)
.to_output_stream())
2019-08-07 19:49:11 +02:00
}
fn cd(&self, args: CdArgs, name: Tag) -> Result<OutputStream, ShellError> {
let path = match args.path {
2019-08-07 19:49:11 +02:00
None => match dirs::home_dir() {
Some(o) => o,
_ => {
return Err(ShellError::labeled_error(
"Cannot change to home directory",
"cannot go to home",
&name,
2019-08-07 19:49:11 +02:00
))
}
},
Some(v) => {
let Tagged { item: target, tag } = v;
if target == Path::new("-") {
2019-09-08 11:55:49 +02:00
PathBuf::from(&self.last_path)
} else {
let path = canonicalize(self.path(), target).map_err(|_| {
ShellError::labeled_error(
"Cannot change to directory",
"directory not found",
&tag,
)
})?;
2019-09-08 11:55:49 +02:00
if !path.is_dir() {
return Err(ShellError::labeled_error(
"Cannot change to directory",
"is not a directory",
&tag,
));
}
#[cfg(unix)]
{
let has_exec = path
.metadata()
.map(|m| {
umask::Mode::from(m.permissions().mode()).has(umask::USER_READ)
})
.map_err(|e| {
ShellError::labeled_error(
"Cannot change to directory",
format!("cannot stat ({})", e),
&tag,
)
})?;
if !has_exec {
2019-09-08 11:55:49 +02:00
return Err(ShellError::labeled_error(
"Cannot change to directory",
"permission denied",
&tag,
));
}
2019-08-07 19:49:11 +02:00
}
path
2019-08-07 19:49:11 +02:00
}
}
};
let mut stream = VecDeque::new();
2019-09-08 11:55:49 +02:00
2019-09-11 16:36:50 +02:00
stream.push_back(ReturnSuccess::change_cwd(
2019-08-07 19:49:11 +02:00
path.to_string_lossy().to_string(),
));
2019-08-07 19:49:11 +02:00
Ok(stream.into())
}
2019-08-21 19:03:59 +02:00
fn cp(
&self,
CopyArgs {
src,
dst,
recursive,
}: CopyArgs,
name: Tag,
path: &str,
2019-08-24 21:36:19 +02:00
) -> Result<OutputStream, ShellError> {
let name_tag = name;
2019-08-21 19:09:23 +02:00
let path = Path::new(path);
let source = path.join(&src.item);
let destination = path.join(&dst.item);
2019-08-21 19:03:59 +02:00
let sources: Vec<_> = match glob::glob(&source.to_string_lossy()) {
Ok(files) => files.collect(),
Err(e) => {
2019-08-21 19:03:59 +02:00
return Err(ShellError::labeled_error(
e.to_string(),
"invalid pattern",
2019-08-21 19:03:59 +02:00
src.tag,
))
}
};
if sources.is_empty() {
return Err(ShellError::labeled_error(
"No matches found",
"no matches found",
src.tag,
));
}
2019-08-21 19:03:59 +02:00
if sources.len() > 1 && !destination.is_dir() {
return Err(ShellError::labeled_error(
"Destination must be a directory when copying multiple files",
"is not a directory",
dst.tag,
));
}
2019-08-21 19:03:59 +02:00
let any_source_is_dir = sources.iter().any(|f| match f {
Ok(f) => f.is_dir(),
Err(_) => false,
});
if any_source_is_dir && !recursive.item {
return Err(ShellError::labeled_error(
"Directories must be copied using \"--recursive\"",
"resolves to a directory (not copied)",
src.tag,
));
}
for entry in sources {
if let Ok(entry) = entry {
let mut sources = FileStructure::new();
2019-08-21 19:03:59 +02:00
sources.walk_decorate(&entry)?;
if entry.is_file() {
let sources = sources.paths_applying_with(|(source_file, _depth_level)| {
if destination.is_dir() {
let mut dest = canonicalize(&path, &dst.item)?;
2019-08-21 19:03:59 +02:00
if let Some(name) = entry.file_name() {
dest.push(name);
2019-08-21 19:03:59 +02:00
}
Ok((source_file, dest))
2019-08-21 19:03:59 +02:00
} else {
Ok((source_file, destination.clone()))
}
})?;
2019-08-21 19:03:59 +02:00
for (src, dst) in sources {
2019-08-21 19:03:59 +02:00
if src.is_file() {
std::fs::copy(src, dst).map_err(|e| {
ShellError::labeled_error(e.to_string(), e.to_string(), &name_tag)
})?;
2019-08-21 19:03:59 +02:00
}
}
} else if entry.is_dir() {
let destination = if !destination.exists() {
destination.clone()
2019-08-21 19:03:59 +02:00
} else {
match entry.file_name() {
Some(name) => destination.join(name),
2019-08-21 19:03:59 +02:00
None => {
return Err(ShellError::labeled_error(
"Copy aborted. Not a valid path",
"not a valid path",
dst.tag,
2019-08-21 19:03:59 +02:00
))
}
}
};
2019-08-21 19:03:59 +02:00
std::fs::create_dir_all(&destination).map_err(|e| {
ShellError::labeled_error(e.to_string(), e.to_string(), &dst.tag)
})?;
2019-08-21 19:03:59 +02:00
let sources = sources.paths_applying_with(|(source_file, depth_level)| {
let mut dest = destination.clone();
let path = canonicalize(&path, &source_file)?;
2019-08-21 19:03:59 +02:00
let comps: Vec<_> = path
.components()
.map(|fragment| fragment.as_os_str())
.rev()
.take(1 + depth_level)
.collect();
2019-08-21 19:03:59 +02:00
for fragment in comps.into_iter().rev() {
dest.push(fragment);
2019-08-21 19:03:59 +02:00
}
Ok((PathBuf::from(&source_file), dest))
})?;
2019-08-21 19:03:59 +02:00
let dst_tag = &dst.tag;
for (src, dst) in sources {
if src.is_dir() && !dst.exists() {
std::fs::create_dir_all(&dst).map_err(|e| {
ShellError::labeled_error(e.to_string(), e.to_string(), dst_tag)
})?;
2019-08-21 19:03:59 +02:00
}
if src.is_file() {
std::fs::copy(&src, &dst).map_err(|e| {
ShellError::labeled_error(e.to_string(), e.to_string(), &name_tag)
})?;
}
}
}
2019-08-21 19:03:59 +02:00
}
}
2019-08-24 21:36:19 +02:00
Ok(OutputStream::empty())
2019-08-21 19:03:59 +02:00
}
fn mkdir(
&self,
MkdirArgs {
rest: directories,
show_created_paths,
}: MkdirArgs,
name: Tag,
path: &str,
2019-08-24 21:36:19 +02:00
) -> Result<OutputStream, ShellError> {
let path = Path::new(path);
let mut stream = VecDeque::new();
2019-08-21 19:03:59 +02:00
if directories.is_empty() {
2019-08-21 19:03:59 +02:00
return Err(ShellError::labeled_error(
"mkdir requires directory paths",
"needs parameter",
name,
));
}
for dir in directories.iter() {
let create_at = path.join(&dir.item);
2019-08-21 19:03:59 +02:00
let dir_res = std::fs::create_dir_all(&create_at);
if let Err(reason) = dir_res {
return Err(ShellError::labeled_error(
reason.to_string(),
reason.to_string(),
dir.tag(),
));
2019-08-21 19:03:59 +02:00
}
if show_created_paths {
let val = format!("{:}", create_at.to_string_lossy()).into();
stream.push_back(Ok(ReturnSuccess::Value(val)));
}
2019-08-21 19:03:59 +02:00
}
Ok(stream.into())
2019-08-21 19:03:59 +02:00
}
fn mv(
&self,
MoveArgs { src, dst }: MoveArgs,
_name: Tag,
path: &str,
2019-08-24 21:36:19 +02:00
) -> Result<OutputStream, ShellError> {
let path = Path::new(path);
let source = path.join(&src.item);
let destination = path.join(&dst.item);
2019-08-21 19:03:59 +02:00
let sources =
glob::glob(&source.to_string_lossy()).map_or_else(|_| Vec::new(), Iterator::collect);
2019-08-21 19:03:59 +02:00
if sources.is_empty() {
return Err(ShellError::labeled_error(
"Invalid file or pattern",
"invalid file or pattern",
src.tag,
));
}
// We have two possibilities.
//
// First, the destination exists.
// - If a directory, move everything into that directory, otherwise
// - if only a single source, overwrite the file, otherwise
// - error.
//
// Second, the destination doesn't exist, so we can only rename a single source. Otherwise
// it's an error.
if (destination.exists() && !destination.is_dir() && sources.len() > 1)
|| (!destination.exists() && sources.len() > 1)
{
return Err(ShellError::labeled_error(
"Can only move multiple sources if destination is a directory",
"destination must be a directory when multiple sources",
dst.tag,
));
}
for entry in sources {
if let Ok(entry) = entry {
move_file(
TaggedPathBuf(&entry, &src.tag),
TaggedPathBuf(&destination, &dst.tag),
)?
2019-08-21 19:03:59 +02:00
}
}
2019-08-24 21:36:19 +02:00
Ok(OutputStream::empty())
2019-08-21 19:03:59 +02:00
}
fn rm(
&self,
2019-10-19 22:52:39 +02:00
RemoveArgs {
rest: targets,
2019-10-19 22:52:39 +02:00
recursive,
2020-04-11 08:53:53 +02:00
trash: _trash,
permanent: _permanent,
2019-10-19 22:52:39 +02:00
}: RemoveArgs,
name: Tag,
path: &str,
2019-08-24 21:36:19 +02:00
) -> Result<OutputStream, ShellError> {
let name_tag = name;
2019-08-21 19:09:23 +02:00
if targets.is_empty() {
2019-08-21 19:03:59 +02:00
return Err(ShellError::labeled_error(
"rm requires target paths",
"needs parameter",
name_tag,
2019-08-21 19:03:59 +02:00
));
}
let path = Path::new(path);
let mut all_targets: HashMap<PathBuf, Tag> = HashMap::new();
for target in targets {
let all_dots = target
.item
.to_str()
.map_or(false, |v| v.chars().all(|c| c == '.'));
if all_dots {
return Err(ShellError::labeled_error(
"Cannot remove any parent directory",
"cannot remove any parent directory",
target.tag,
));
}
2019-08-22 06:23:57 +02:00
let path = path.join(&target.item);
match glob::glob(&path.to_string_lossy()) {
Ok(files) => {
for file in files {
match file {
Ok(ref f) => {
all_targets
.entry(f.clone())
.or_insert_with(|| target.tag.clone());
}
Err(e) => {
return Err(ShellError::labeled_error(
format!("Could not remove {:}", path.to_string_lossy()),
e.to_string(),
&target.tag,
));
}
}
}
}
Err(e) => {
return Err(ShellError::labeled_error(
e.to_string(),
e.to_string(),
&name_tag,
))
}
};
}
if all_targets.is_empty() {
return Err(ShellError::labeled_error(
"No valid paths",
"no valid paths",
name_tag,
));
}
Ok(
futures::stream::iter(all_targets.into_iter().map(move |(f, tag)| {
let is_empty = || match f.read_dir() {
Ok(mut p) => p.next().is_none(),
Err(_) => false,
};
if let Ok(metadata) = f.symlink_metadata() {
if metadata.is_file()
|| metadata.file_type().is_symlink()
|| recursive.item
|| is_empty()
{
2020-04-11 08:53:53 +02:00
let result;
#[cfg(feature = "trash-support")]
{
let rm_always_trash = config::config(Tag::unknown())?
.get("rm_always_trash")
.map(|val| val.is_true())
.unwrap_or(false);
result = if _trash.item || (rm_always_trash && !_permanent.item) {
trash::remove(&f).map_err(|_| f.to_string_lossy())
2020-04-11 08:53:53 +02:00
} else if metadata.is_file() {
std::fs::remove_file(&f).map_err(|_| f.to_string_lossy())
2020-04-11 08:53:53 +02:00
} else {
std::fs::remove_dir_all(&f).map_err(|_| f.to_string_lossy())
2020-04-11 08:53:53 +02:00
};
}
#[cfg(not(feature = "trash-support"))]
{
result = if metadata.is_file() {
std::fs::remove_file(&f).map_err(|_| f.to_string_lossy())
2020-04-11 08:53:53 +02:00
} else {
std::fs::remove_dir_all(&f).map_err(|_| f.to_string_lossy())
2020-04-11 08:53:53 +02:00
};
}
if let Err(e) = result {
let msg = format!("Could not delete {:}", e);
Err(ShellError::labeled_error(msg, e, tag))
} else {
let val = format!("deleted {:}", f.to_string_lossy()).into();
Ok(ReturnSuccess::Value(val))
}
} else {
let msg =
format!("Cannot remove {:}. try --recursive", f.to_string_lossy());
Err(ShellError::labeled_error(
msg,
"cannot remove non-empty directory",
tag,
))
}
} else {
let msg = format!("no such file or directory: {:}", f.to_string_lossy());
Err(ShellError::labeled_error(
msg,
"no such file or directory",
tag,
))
2019-08-21 19:03:59 +02:00
}
}))
.to_output_stream(),
)
2019-08-21 19:03:59 +02:00
}
2019-08-07 19:49:11 +02:00
fn path(&self) -> String {
self.path.clone()
}
fn pwd(&self, args: EvaluatedWholeStreamCommandArgs) -> Result<OutputStream, ShellError> {
let path = PathBuf::from(self.path());
let p = match dunce::canonicalize(path.as_path()) {
Ok(p) => p,
Err(_) => {
return Err(ShellError::labeled_error(
"unable to show current directory",
"pwd command failed",
&args.call_info.name_tag,
));
}
};
let mut stream = VecDeque::new();
stream.push_back(ReturnSuccess::value(
UntaggedValue::Primitive(Primitive::String(p.to_string_lossy().to_string()))
.into_value(&args.call_info.name_tag),
));
Ok(stream.into())
}
2019-08-07 19:49:11 +02:00
fn set_path(&mut self, path: String) {
2019-08-08 02:52:29 +02:00
let pathbuf = PathBuf::from(&path);
let path = match canonicalize(self.path(), pathbuf.as_path()) {
2019-08-08 02:52:29 +02:00
Ok(path) => {
let _ = std::env::set_current_dir(&path);
std::env::set_var("PWD", &path);
2019-08-08 02:52:29 +02:00
path
}
_ => {
// TODO: handle the case where the path cannot be canonicalized
pathbuf
}
};
self.last_path = self.path.clone();
2019-08-08 02:52:29 +02:00
self.path = path.to_string_lossy().to_string();
2019-08-07 19:49:11 +02:00
}
fn complete(
&self,
line: &str,
pos: usize,
ctx: &rustyline::Context<'_>,
2019-08-09 21:42:23 +02:00
) -> Result<(usize, Vec<rustyline::completion::Pair>), rustyline::error::ReadlineError> {
let expanded = expand_ndots(&line);
// Find the first not-matching char position, if there is one
let differ_pos = line
.chars()
.zip(expanded.chars())
.enumerate()
.find(|(_index, (a, b))| a != b)
.map(|(differ_pos, _)| differ_pos);
let pos = if let Some(differ_pos) = differ_pos {
if differ_pos < pos {
pos + (expanded.len() - line.len())
} else {
pos
}
} else {
pos
};
self.completer.complete(&expanded, pos, ctx)
2019-08-07 19:49:11 +02:00
}
fn hint(&self, line: &str, pos: usize, ctx: &rustyline::Context<'_>) -> Option<String> {
2019-08-07 19:49:11 +02:00
self.hinter.hint(line, pos, ctx)
}
}
/// A borrowed path bundled with the source-span tag of the argument that
/// produced it, so move errors can point back at the offending argument.
struct TaggedPathBuf<'a>(&'a PathBuf, &'a Tag);
/// Move `from` to `to`, trying a cheap in-place rename first and falling back
/// to copy-then-delete (necessary when crossing filesystem boundaries).
///
/// # Errors
/// Fails when a directory would replace a file, when the destination's parent
/// directory does not exist, or when both the rename and the copy fail.
fn move_file(from: TaggedPathBuf, to: TaggedPathBuf) -> Result<(), ShellError> {
    let TaggedPathBuf(source, from_tag) = from;
    let TaggedPathBuf(target, to_tag) = to;

    // A directory can never replace an existing regular file.
    if target.exists() && source.is_dir() && target.is_file() {
        return Err(ShellError::labeled_error(
            "Cannot rename a directory to a file",
            "invalid destination",
            to_tag,
        ));
    }

    // Either the target itself is a directory, or its parent must exist
    // (a bare file name with no parent component is treated as OK).
    let destination_dir_exists =
        target.is_dir() || target.parent().map(Path::exists).unwrap_or(true);

    if !destination_dir_exists {
        return Err(ShellError::labeled_error(
            "Destination directory does not exist",
            "destination does not exist",
            to_tag,
        ));
    }

    // Moving into a directory keeps the source's file name.
    let mut to = target.clone();
    if to.is_dir() {
        match source.file_name() {
            Some(name) => to.push(name),
            None => {
                return Err(ShellError::labeled_error(
                    "Not a valid entry name",
                    "not a valid entry name",
                    from_tag,
                ))
            }
        }
    }

    // We first try a rename, which is a quick operation. If that doesn't work, we'll try a copy
    // and remove the old file. This is necessary if we're moving across filesystems.
    std::fs::rename(&source, &to)
        .or_else(|_| std::fs::copy(&source, &to).and_then(|_| std::fs::remove_file(&source)))
        .map_err(|e| {
            ShellError::labeled_error(
                format!("Could not move {:?} to {:?}. {:}", source, to, e.to_string()),
                "could not move",
                from_tag,
            )
        })
}
/// True when `dir` contains no entries — or cannot be read at all, in which
/// case it is treated as empty.
fn is_empty_dir(dir: impl AsRef<Path>) -> bool {
    dir.as_ref()
        .read_dir()
        .map(|mut entries| entries.next().is_none())
        .unwrap_or(true)
}
/// Whether `dir` should be treated as hidden: on Windows this consults the
/// FILE_ATTRIBUTE_HIDDEN flag, elsewhere it is a leading-dot name check
/// (the path does not need to exist on non-Windows platforms).
fn is_hidden_dir(dir: impl AsRef<Path>) -> bool {
    #[cfg(windows)]
    {
        use std::os::windows::fs::MetadataExt;

        match dir.as_ref().metadata() {
            // https://docs.microsoft.com/en-us/windows/win32/fileio/file-attribute-constants
            Ok(metadata) => (metadata.file_attributes() & 0x2) != 0,
            Err(_) => false,
        }
    }
    #[cfg(not(windows))]
    {
        dir.as_ref()
            .file_name()
            .map(|name| name.to_string_lossy().starts_with('.'))
            .unwrap_or(false)
    }
}