Mirror of https://github.com/nushell/nushell.git (synced 2025-05-16 16:04:30 +02:00)
Merge branch 'main' into ecow-record
commit 0f463b18ac
.github/pull_request_template.md (vendored, 2 changed lines)
@@ -26,7 +26,7 @@ Make sure you've run and fixed any issues with these commands:
 - `cargo fmt --all -- --check` to check standard code formatting (`cargo fmt --all` applies these changes)
 - `cargo clippy --workspace -- -D warnings -D clippy::unwrap_used` to check that you're using the standard code style
 - `cargo test --workspace` to check that all tests pass (on Windows make sure to [enable developer mode](https://learn.microsoft.com/en-us/windows/apps/get-started/developer-mode-features-and-debugging))
-- `cargo run -- -c "use std testing; testing run-tests --path crates/nu-std"` to run the tests for the standard library
+- `cargo run -- -c "use toolkit.nu; toolkit test stdlib"` to run the tests for the standard library

 > **Note**
 > from `nushell` you can also use the `toolkit` as follows
.github/workflows/audit.yml (vendored, 2 changed lines)
@@ -19,7 +19,7 @@ jobs:
 # Prevent sudden announcement of a new advisory from failing ci:
 continue-on-error: true
 steps:
-- uses: actions/checkout@v4.1.4
+- uses: actions/checkout@v4.1.5
 - uses: rustsec/audit-check@v1.4.1
 with:
 token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/ci.yml (vendored, 8 changed lines)
@@ -44,7 +44,7 @@ jobs:
 runs-on: ${{ matrix.platform }}

 steps:
-- uses: actions/checkout@v4.1.4
+- uses: actions/checkout@v4.1.5

 - name: Setup Rust toolchain and cache
 uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
@@ -89,7 +89,7 @@ jobs:
 runs-on: ${{ matrix.platform }}

 steps:
-- uses: actions/checkout@v4.1.4
+- uses: actions/checkout@v4.1.5

 - name: Setup Rust toolchain and cache
 uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
@@ -121,7 +121,7 @@ jobs:
 runs-on: ${{ matrix.platform }}

 steps:
-- uses: actions/checkout@v4.1.4
+- uses: actions/checkout@v4.1.5

 - name: Setup Rust toolchain and cache
 uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
@@ -174,7 +174,7 @@ jobs:
 runs-on: ${{ matrix.platform }}

 steps:
-- uses: actions/checkout@v4.1.4
+- uses: actions/checkout@v4.1.5

 - name: Setup Rust toolchain and cache
 uses: actions-rust-lang/setup-rust-toolchain@v1.8.0
.github/workflows/nightly-build.yml (vendored, 12 changed lines)
@@ -27,7 +27,7 @@ jobs:
 # if: github.repository == 'nushell/nightly'
 steps:
 - name: Checkout
-uses: actions/checkout@v4.1.4
+uses: actions/checkout@v4.1.5
 if: github.repository == 'nushell/nightly'
 with:
 ref: main
@@ -123,7 +123,7 @@ jobs:
 runs-on: ${{matrix.os}}

 steps:
-- uses: actions/checkout@v4.1.4
+- uses: actions/checkout@v4.1.5
 with:
 ref: main
 fetch-depth: 0
@@ -174,7 +174,7 @@ jobs:
 # REF: https://github.com/marketplace/actions/gh-release
 # Create a release only in nushell/nightly repo
 - name: Publish Archive
-uses: softprops/action-gh-release@v2.0.4
+uses: softprops/action-gh-release@v2.0.5
 if: ${{ startsWith(github.repository, 'nushell/nightly') }}
 with:
 prerelease: true
@@ -235,7 +235,7 @@ jobs:
 runs-on: ${{matrix.os}}

 steps:
-- uses: actions/checkout@v4.1.4
+- uses: actions/checkout@v4.1.5
 with:
 ref: main
 fetch-depth: 0
@@ -286,7 +286,7 @@ jobs:
 # REF: https://github.com/marketplace/actions/gh-release
 # Create a release only in nushell/nightly repo
 - name: Publish Archive
-uses: softprops/action-gh-release@v2.0.4
+uses: softprops/action-gh-release@v2.0.5
 if: ${{ startsWith(github.repository, 'nushell/nightly') }}
 with:
 draft: false
@@ -310,7 +310,7 @@ jobs:
 - name: Waiting for Release
 run: sleep 1800

-- uses: actions/checkout@v4.1.4
+- uses: actions/checkout@v4.1.5
 with:
 ref: main

.github/workflows/release.yml (vendored, 8 changed lines)
@@ -73,7 +73,7 @@ jobs:
 runs-on: ${{matrix.os}}

 steps:
-- uses: actions/checkout@v4.1.4
+- uses: actions/checkout@v4.1.5

 - name: Update Rust Toolchain Target
 run: |
@@ -104,7 +104,7 @@ jobs:

 # REF: https://github.com/marketplace/actions/gh-release
 - name: Publish Archive
-uses: softprops/action-gh-release@v2.0.4
+uses: softprops/action-gh-release@v2.0.5
 if: ${{ startsWith(github.ref, 'refs/tags/') }}
 with:
 draft: true
@@ -163,7 +163,7 @@ jobs:
 runs-on: ${{matrix.os}}

 steps:
-- uses: actions/checkout@v4.1.4
+- uses: actions/checkout@v4.1.5

 - name: Update Rust Toolchain Target
 run: |
@@ -194,7 +194,7 @@ jobs:

 # REF: https://github.com/marketplace/actions/gh-release
 - name: Publish Archive
-uses: softprops/action-gh-release@v2.0.4
+uses: softprops/action-gh-release@v2.0.5
 if: ${{ startsWith(github.ref, 'refs/tags/') }}
 with:
 draft: true
.github/workflows/typos.yml (vendored, 2 changed lines)
@@ -7,7 +7,7 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - name: Checkout Actions Repository
-uses: actions/checkout@v4.1.4
+uses: actions/checkout@v4.1.5

 - name: Check spelling
 uses: crate-ci/typos@v1.21.0
@@ -55,7 +55,6 @@ It is good practice to cover your changes with a test. Also, try to think about

 Tests can be found in different places:
 * `/tests`
 * `src/tests`
 * command examples
 * crate-specific tests

Cargo.lock (generated, 24 changed lines)
@@ -377,7 +377,7 @@ dependencies = [
 "bitflags 2.5.0",
 "cexpr",
 "clang-sys",
-"itertools 0.12.1",
+"itertools 0.11.0",
 "lazy_static",
 "lazycell",
 "proc-macro2",
@@ -2043,9 +2043,9 @@ dependencies = [

 [[package]]
 name = "interprocess"
-version = "2.0.1"
+version = "2.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7c7fb8583fab9503654385e2bafda123376445a77027a1b106dd7e44cf51122f"
+checksum = "7b4d0250d41da118226e55b3d50ca3f0d9e0a0f6829b92f543ac0054aeea1572"
 dependencies = [
 "libc",
 "recvmsg",
@@ -2861,6 +2861,7 @@ dependencies = [
 "reedline",
+"rstest",
 "sysinfo",
 "tempfile",
 "unicode-segmentation",
 "uuid",
 "which",
@@ -3263,11 +3264,13 @@ dependencies = [
 "indexmap",
 "lru",
 "miette",
 "nix",
 "nu-path",
 "nu-system",
 "nu-test-support",
 "nu-utils",
 "num-format",
 "os_pipe",
 "pretty_assertions",
 "rmp-serde",
 "rstest",
@@ -4850,8 +4853,7 @@ dependencies = [
 [[package]]
 name = "reedline"
 version = "0.32.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "abf59e4c97b5049ba96b052cdb652368305a2eddcbce9bf1c16f9d003139eeea"
+source = "git+https://github.com/nushell/reedline?branch=main#a580ea56d4e5a889468b2969d2a1534379504ab6"
 dependencies = [
 "arboard",
 "chrono",
@@ -5065,9 +5067,9 @@ dependencies = [

 [[package]]
 name = "rust-embed"
-version = "8.3.0"
+version = "8.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fb78f46d0066053d16d4ca7b898e9343bc3530f71c61d5ad84cd404ada068745"
+checksum = "19549741604902eb99a7ed0ee177a0663ee1eda51a29f71401f166e47e77806a"
 dependencies = [
 "rust-embed-impl",
 "rust-embed-utils",
@@ -5076,9 +5078,9 @@ dependencies = [

 [[package]]
 name = "rust-embed-impl"
-version = "8.3.0"
+version = "8.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b91ac2a3c6c0520a3fb3dd89321177c3c692937c4eb21893378219da10c44fc8"
+checksum = "cb9f96e283ec64401f30d3df8ee2aaeb2561f34c824381efa24a35f79bf40ee4"
 dependencies = [
 "proc-macro2",
 "quote",
@@ -5089,9 +5091,9 @@ dependencies = [

 [[package]]
 name = "rust-embed-utils"
-version = "8.3.0"
+version = "8.4.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "86f69089032567ffff4eada41c573fc43ff466c7db7c5688b2e7969584345581"
+checksum = "38c74a686185620830701348de757fd36bef4aa9680fd23c49fc539ddcc1af32"
 dependencies = [
 "sha2",
 "walkdir",
Cargo.toml
@@ -94,7 +94,7 @@ heck = "0.5.0"
 human-date-parser = "0.1.1"
 indexmap = "2.2"
 indicatif = "0.17"
-interprocess = "2.0.1"
+interprocess = "2.1.0"
 is_executable = "1.0"
 itertools = "0.12"
 libc = "0.2"
@@ -119,7 +119,7 @@ num-traits = "0.2"
 omnipath = "0.1"
 once_cell = "1.18"
 open = "5.1"
-os_pipe = "1.1"
+os_pipe = { version = "1.1", features = ["io_safety"] }
 pathdiff = "0.2"
 percent-encoding = "2"
 pretty_assertions = "1.4"
@@ -140,7 +140,7 @@ ropey = "1.6.1"
 roxmltree = "0.19"
 rstest = { version = "0.18", default-features = false }
 rusqlite = "0.31"
-rust-embed = "8.3.0"
+rust-embed = "8.4.0"
 same-file = "1.0"
 serde = { version = "1.0", default-features = false }
 serde_json = "1.0"
@@ -305,7 +305,7 @@ bench = false
 # To use a development version of a dependency please use a global override here
 # changing versions in each sub-crate of the workspace is tedious
 [patch.crates-io]
-# reedline = { git = "https://github.com/nushell/reedline", branch = "main" }
+reedline = { git = "https://github.com/nushell/reedline", branch = "main" }
 # nu-ansi-term = {git = "https://github.com/nushell/nu-ansi-term.git", branch = "main"}

 # Run all benchmarks with `cargo bench`
@ -4,15 +4,11 @@ use nu_plugin_protocol::{PluginCallResponse, PluginOutput};
|
||||
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, Stack},
|
||||
eval_const::create_nu_constant,
|
||||
PipelineData, Span, Spanned, Value, NU_VARIABLE_ID,
|
||||
PipelineData, Span, Spanned, Value,
|
||||
};
|
||||
use nu_std::load_standard_library;
|
||||
use nu_utils::{get_default_config, get_default_env};
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
rc::Rc,
|
||||
};
|
||||
use std::rc::Rc;
|
||||
|
||||
use std::hint::black_box;
|
||||
|
||||
@ -22,38 +18,18 @@ fn load_bench_commands() -> EngineState {
|
||||
nu_command::add_shell_command_context(nu_cmd_lang::create_default_context())
|
||||
}
|
||||
|
||||
fn canonicalize_path(engine_state: &EngineState, path: &Path) -> PathBuf {
|
||||
let cwd = engine_state.cwd_as_string(None).unwrap();
|
||||
|
||||
if path.exists() {
|
||||
match nu_path::canonicalize_with(path, cwd) {
|
||||
Ok(canon_path) => canon_path,
|
||||
Err(_) => path.to_owned(),
|
||||
}
|
||||
} else {
|
||||
path.to_owned()
|
||||
}
|
||||
}
|
||||
|
||||
fn get_home_path(engine_state: &EngineState) -> PathBuf {
|
||||
nu_path::home_dir()
|
||||
.map(|path| canonicalize_path(engine_state, &path))
|
||||
.unwrap_or_default()
|
||||
}
|
||||
|
||||
fn setup_engine() -> EngineState {
|
||||
let mut engine_state = load_bench_commands();
|
||||
let home_path = get_home_path(&engine_state);
|
||||
let cwd = std::env::current_dir()
|
||||
.unwrap()
|
||||
.into_os_string()
|
||||
.into_string()
|
||||
.unwrap();
|
||||
|
||||
// parsing config.nu breaks without PWD set, so set a valid path
|
||||
engine_state.add_env_var(
|
||||
"PWD".into(),
|
||||
Value::string(home_path.to_string_lossy(), Span::test_data()),
|
||||
);
|
||||
engine_state.add_env_var("PWD".into(), Value::string(cwd, Span::test_data()));
|
||||
|
||||
let nu_const = create_nu_constant(&engine_state, Span::unknown())
|
||||
.expect("Failed to create nushell constant.");
|
||||
engine_state.set_variable_const_val(NU_VARIABLE_ID, nu_const);
|
||||
engine_state.generate_nu_constant();
|
||||
|
||||
engine_state
|
||||
}
|
||||
@ -107,6 +83,7 @@ fn bench_command(
|
||||
b.iter(move || {
|
||||
let mut stack = stack.clone();
|
||||
let mut engine = engine.clone();
|
||||
#[allow(clippy::unit_arg)]
|
||||
black_box(
|
||||
evaluate_commands(
|
||||
&commands,
|
||||
|
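Taken together, the benchmark changes above simplify engine setup: the home-directory canonicalization helpers are gone, `PWD` is taken from `std::env::current_dir()`, and the `$nu` constant is rebuilt with `engine_state.generate_nu_constant()`. A condensed sketch of what the new `setup_engine()` amounts to, assembled from the added lines in this hunk (panic messages are illustrative):

```rust
use nu_protocol::{engine::EngineState, Span, Value};

fn setup_engine() -> EngineState {
    // Core language commands plus the shell command set, as in load_bench_commands().
    let mut engine_state =
        nu_command::add_shell_command_context(nu_cmd_lang::create_default_context());

    // Parsing config.nu breaks without PWD set, so point it at the real current directory.
    let cwd = std::env::current_dir()
        .expect("current directory should be readable")
        .into_os_string()
        .into_string()
        .expect("current directory should be valid UTF-8");
    engine_state.add_env_var("PWD".into(), Value::string(cwd, Span::test_data()));

    // Rebuild the $nu constant from the engine state itself instead of
    // constructing it manually with create_nu_constant().
    engine_state.generate_nu_constant();

    engine_state
}
```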
@@ -15,6 +15,7 @@ nu-cmd-lang = { path = "../nu-cmd-lang", version = "0.93.1" }
 nu-command = { path = "../nu-command", version = "0.93.1" }
 nu-test-support = { path = "../nu-test-support", version = "0.93.1" }
+rstest = { workspace = true, default-features = false }
 tempfile = { workspace = true }

 [dependencies]
 nu-cmd-base = { path = "../nu-cmd-base", version = "0.93.1" }
@@ -1,13 +1,18 @@
 use crate::completions::{CompletionOptions, SortBy};
-use nu_protocol::{engine::StateWorkingSet, levenshtein_distance, Span};
+use nu_protocol::{
+    engine::{Stack, StateWorkingSet},
+    levenshtein_distance, Span,
+};
 use reedline::Suggestion;

 // Completer trait represents the three stages of the completion
 // fetch, filter and sort
 pub trait Completer {
+    #[allow(clippy::too_many_arguments)]
     fn fetch(
         &mut self,
         working_set: &StateWorkingSet,
+        stack: &Stack,
         prefix: Vec<u8>,
         span: Span,
         offset: usize,
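For orientation, here is the full `fetch` signature after this hunk. The trailing `pos` and `options` parameters and the `SemanticSuggestion` return type are copied from the completer implementations later in this diff, so read this as a signature sketch that reuses the crate-local completion types rather than a standalone module:

```rust
use crate::completions::{CompletionOptions, SemanticSuggestion};
use nu_protocol::{
    engine::{Stack, StateWorkingSet},
    Span,
};

pub trait Completer {
    // Completers no longer own an Arc<EngineState> or a Stack; both views of
    // the engine are borrowed on every call, which is what the added `stack`
    // argument is for.
    #[allow(clippy::too_many_arguments)]
    fn fetch(
        &mut self,
        working_set: &StateWorkingSet,
        stack: &Stack,
        prefix: Vec<u8>,
        span: Span,
        offset: usize,
        pos: usize,
        options: &CompletionOptions,
    ) -> Vec<SemanticSuggestion>;
}
```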
@ -4,16 +4,14 @@ use crate::{
|
||||
};
|
||||
use nu_parser::FlatShape;
|
||||
use nu_protocol::{
|
||||
engine::{CachedFile, EngineState, StateWorkingSet},
|
||||
engine::{CachedFile, Stack, StateWorkingSet},
|
||||
Span,
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
use std::sync::Arc;
|
||||
|
||||
use super::SemanticSuggestion;
|
||||
|
||||
pub struct CommandCompletion {
|
||||
engine_state: Arc<EngineState>,
|
||||
flattened: Vec<(Span, FlatShape)>,
|
||||
flat_shape: FlatShape,
|
||||
force_completion_after_space: bool,
|
||||
@ -21,14 +19,11 @@ pub struct CommandCompletion {
|
||||
|
||||
impl CommandCompletion {
|
||||
pub fn new(
|
||||
engine_state: Arc<EngineState>,
|
||||
_: &StateWorkingSet,
|
||||
flattened: Vec<(Span, FlatShape)>,
|
||||
flat_shape: FlatShape,
|
||||
force_completion_after_space: bool,
|
||||
) -> Self {
|
||||
Self {
|
||||
engine_state,
|
||||
flattened,
|
||||
flat_shape,
|
||||
force_completion_after_space,
|
||||
@ -37,13 +32,14 @@ impl CommandCompletion {
|
||||
|
||||
fn external_command_completion(
|
||||
&self,
|
||||
working_set: &StateWorkingSet,
|
||||
prefix: &str,
|
||||
match_algorithm: MatchAlgorithm,
|
||||
) -> Vec<String> {
|
||||
let mut executables = vec![];
|
||||
|
||||
// os agnostic way to get the PATH env var
|
||||
let paths = self.engine_state.get_path_env_var();
|
||||
let paths = working_set.permanent_state.get_path_env_var();
|
||||
|
||||
if let Some(paths) = paths {
|
||||
if let Ok(paths) = paths.as_list() {
|
||||
@ -52,7 +48,10 @@ impl CommandCompletion {
|
||||
|
||||
if let Ok(mut contents) = std::fs::read_dir(path.as_ref()) {
|
||||
while let Some(Ok(item)) = contents.next() {
|
||||
if self.engine_state.config.max_external_completion_results
|
||||
if working_set
|
||||
.permanent_state
|
||||
.config
|
||||
.max_external_completion_results
|
||||
> executables.len() as i64
|
||||
&& !executables.contains(
|
||||
&item
|
||||
@ -114,7 +113,7 @@ impl CommandCompletion {
|
||||
|
||||
if find_externals {
|
||||
let results_external = self
|
||||
.external_command_completion(&partial, match_algorithm)
|
||||
.external_command_completion(working_set, &partial, match_algorithm)
|
||||
.into_iter()
|
||||
.map(move |x| SemanticSuggestion {
|
||||
suggestion: Suggestion {
|
||||
@ -161,6 +160,7 @@ impl Completer for CommandCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
_stack: &Stack,
|
||||
_prefix: Vec<u8>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
@ -266,6 +266,8 @@ pub fn is_passthrough_command(working_set_file_contents: &[CachedFile]) -> bool
|
||||
#[cfg(test)]
|
||||
mod command_completions_tests {
|
||||
use super::*;
|
||||
use nu_protocol::engine::EngineState;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[test]
|
||||
fn test_find_non_whitespace_index() {
|
||||
|
@ -22,10 +22,10 @@ pub struct NuCompleter {
|
||||
}
|
||||
|
||||
impl NuCompleter {
|
||||
pub fn new(engine_state: Arc<EngineState>, stack: Stack) -> Self {
|
||||
pub fn new(engine_state: Arc<EngineState>, stack: Arc<Stack>) -> Self {
|
||||
Self {
|
||||
engine_state,
|
||||
stack: stack.reset_out_dest().capture(),
|
||||
stack: Stack::with_parent(stack).reset_out_dest().capture(),
|
||||
}
|
||||
}
|
||||
|
||||
@ -52,8 +52,15 @@ impl NuCompleter {
|
||||
};
|
||||
|
||||
// Fetch
|
||||
let mut suggestions =
|
||||
completer.fetch(working_set, prefix.clone(), new_span, offset, pos, &options);
|
||||
let mut suggestions = completer.fetch(
|
||||
working_set,
|
||||
&self.stack,
|
||||
prefix.clone(),
|
||||
new_span,
|
||||
offset,
|
||||
pos,
|
||||
&options,
|
||||
);
|
||||
|
||||
// Sort
|
||||
suggestions = completer.sort(suggestions, prefix);
|
||||
@ -96,9 +103,8 @@ impl NuCompleter {
|
||||
PipelineData::empty(),
|
||||
);
|
||||
|
||||
match result {
|
||||
Ok(pd) => {
|
||||
let value = pd.into_value(span);
|
||||
match result.and_then(|data| data.into_value(span)) {
|
||||
Ok(value) => {
|
||||
if let Value::List { vals, .. } = value {
|
||||
let result =
|
||||
map_value_completions(vals.iter(), Span::new(span.start, span.end), offset);
|
||||
@ -175,11 +181,8 @@ impl NuCompleter {
|
||||
|
||||
// Variables completion
|
||||
if prefix.starts_with(b"$") || most_left_var.is_some() {
|
||||
let mut completer = VariableCompletion::new(
|
||||
self.engine_state.clone(),
|
||||
self.stack.clone(),
|
||||
most_left_var.unwrap_or((vec![], vec![])),
|
||||
);
|
||||
let mut completer =
|
||||
VariableCompletion::new(most_left_var.unwrap_or((vec![], vec![])));
|
||||
|
||||
return self.process_completion(
|
||||
&mut completer,
|
||||
@ -224,8 +227,6 @@ impl NuCompleter {
|
||||
|| (flat_idx == 0 && working_set.get_span_contents(new_span).is_empty())
|
||||
{
|
||||
let mut completer = CommandCompletion::new(
|
||||
self.engine_state.clone(),
|
||||
&working_set,
|
||||
flattened.clone(),
|
||||
// flat_idx,
|
||||
FlatShape::String,
|
||||
@ -253,10 +254,7 @@ impl NuCompleter {
|
||||
|| prev_expr_str == b"overlay use"
|
||||
|| prev_expr_str == b"source-env"
|
||||
{
|
||||
let mut completer = DotNuCompletion::new(
|
||||
self.engine_state.clone(),
|
||||
self.stack.clone(),
|
||||
);
|
||||
let mut completer = DotNuCompletion::new();
|
||||
|
||||
return self.process_completion(
|
||||
&mut completer,
|
||||
@ -267,10 +265,7 @@ impl NuCompleter {
|
||||
pos,
|
||||
);
|
||||
} else if prev_expr_str == b"ls" {
|
||||
let mut completer = FileCompletion::new(
|
||||
self.engine_state.clone(),
|
||||
self.stack.clone(),
|
||||
);
|
||||
let mut completer = FileCompletion::new();
|
||||
|
||||
return self.process_completion(
|
||||
&mut completer,
|
||||
@ -288,7 +283,6 @@ impl NuCompleter {
|
||||
match &flat.1 {
|
||||
FlatShape::Custom(decl_id) => {
|
||||
let mut completer = CustomCompletion::new(
|
||||
self.engine_state.clone(),
|
||||
self.stack.clone(),
|
||||
*decl_id,
|
||||
initial_line,
|
||||
@ -304,10 +298,7 @@ impl NuCompleter {
|
||||
);
|
||||
}
|
||||
FlatShape::Directory => {
|
||||
let mut completer = DirectoryCompletion::new(
|
||||
self.engine_state.clone(),
|
||||
self.stack.clone(),
|
||||
);
|
||||
let mut completer = DirectoryCompletion::new();
|
||||
|
||||
return self.process_completion(
|
||||
&mut completer,
|
||||
@ -319,10 +310,7 @@ impl NuCompleter {
|
||||
);
|
||||
}
|
||||
FlatShape::Filepath | FlatShape::GlobPattern => {
|
||||
let mut completer = FileCompletion::new(
|
||||
self.engine_state.clone(),
|
||||
self.stack.clone(),
|
||||
);
|
||||
let mut completer = FileCompletion::new();
|
||||
|
||||
return self.process_completion(
|
||||
&mut completer,
|
||||
@ -335,8 +323,6 @@ impl NuCompleter {
|
||||
}
|
||||
flat_shape => {
|
||||
let mut completer = CommandCompletion::new(
|
||||
self.engine_state.clone(),
|
||||
&working_set,
|
||||
flattened.clone(),
|
||||
// flat_idx,
|
||||
flat_shape.clone(),
|
||||
@ -369,10 +355,7 @@ impl NuCompleter {
|
||||
}
|
||||
|
||||
// Check for file completion
|
||||
let mut completer = FileCompletion::new(
|
||||
self.engine_state.clone(),
|
||||
self.stack.clone(),
|
||||
);
|
||||
let mut completer = FileCompletion::new();
|
||||
out = self.process_completion(
|
||||
&mut completer,
|
||||
&working_set,
|
||||
@ -557,7 +540,7 @@ mod completer_tests {
|
||||
result.err().unwrap()
|
||||
);
|
||||
|
||||
let mut completer = NuCompleter::new(engine_state.into(), Stack::new());
|
||||
let mut completer = NuCompleter::new(engine_state.into(), Arc::new(Stack::new()));
|
||||
let dataset = [
|
||||
("sudo", false, "", Vec::new()),
|
||||
("sudo l", true, "l", vec!["ls", "let", "lines", "loop"]),
|
||||
|
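The constructor change above means callers hand `NuCompleter` a shared `Arc<Stack>`, and the completer builds its own child stack from it with `Stack::with_parent` instead of taking an owned clone. A minimal sketch of the calling side, mirroring the REPL and test updates in this diff (it assumes the usual `nu_cli::NuCompleter` re-export):

```rust
use std::sync::Arc;

use nu_cli::NuCompleter;
use nu_protocol::engine::{EngineState, Stack};

// REPL-style construction: the completer shares the live stack via Arc and
// derives a child stack internally (Stack::with_parent), so nothing is cloned up front.
fn repl_completer(engine_state: Arc<EngineState>, stack_arc: Arc<Stack>) -> NuCompleter {
    NuCompleter::new(engine_state, stack_arc)
}

// Test-style construction, as in the updated completer_tests: wrap a fresh Stack in an Arc.
fn test_completer(engine_state: EngineState) -> NuCompleter {
    NuCompleter::new(engine_state.into(), Arc::new(Stack::new()))
}
```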
@ -6,14 +6,13 @@ use nu_engine::eval_call;
|
||||
use nu_protocol::{
|
||||
ast::{Argument, Call, Expr, Expression},
|
||||
debugger::WithoutDebug,
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
engine::{Stack, StateWorkingSet},
|
||||
PipelineData, Span, Type, Value,
|
||||
};
|
||||
use nu_utils::IgnoreCaseExt;
|
||||
use std::{collections::HashMap, sync::Arc};
|
||||
use std::collections::HashMap;
|
||||
|
||||
pub struct CustomCompletion {
|
||||
engine_state: Arc<EngineState>,
|
||||
stack: Stack,
|
||||
decl_id: usize,
|
||||
line: String,
|
||||
@ -21,10 +20,9 @@ pub struct CustomCompletion {
|
||||
}
|
||||
|
||||
impl CustomCompletion {
|
||||
pub fn new(engine_state: Arc<EngineState>, stack: Stack, decl_id: usize, line: String) -> Self {
|
||||
pub fn new(stack: Stack, decl_id: usize, line: String) -> Self {
|
||||
Self {
|
||||
engine_state,
|
||||
stack: stack.reset_out_dest().capture(),
|
||||
stack,
|
||||
decl_id,
|
||||
line,
|
||||
sort_by: SortBy::None,
|
||||
@ -35,7 +33,8 @@ impl CustomCompletion {
|
||||
impl Completer for CustomCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
_: &StateWorkingSet,
|
||||
working_set: &StateWorkingSet,
|
||||
_stack: &Stack,
|
||||
prefix: Vec<u8>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
@ -47,7 +46,7 @@ impl Completer for CustomCompletion {
|
||||
|
||||
// Call custom declaration
|
||||
let result = eval_call::<WithoutDebug>(
|
||||
&self.engine_state,
|
||||
working_set.permanent_state,
|
||||
&mut self.stack,
|
||||
&Call {
|
||||
decl_id: self.decl_id,
|
||||
@ -75,55 +74,53 @@ impl Completer for CustomCompletion {
|
||||
|
||||
// Parse result
|
||||
let suggestions = result
|
||||
.map(|pd| {
|
||||
let value = pd.into_value(span);
|
||||
match &value {
|
||||
Value::Record { val, .. } => {
|
||||
let completions = val
|
||||
.get("completions")
|
||||
.and_then(|val| {
|
||||
val.as_list()
|
||||
.ok()
|
||||
.map(|it| map_value_completions(it.iter(), span, offset))
|
||||
})
|
||||
.unwrap_or_default();
|
||||
let options = val.get("options");
|
||||
.and_then(|data| data.into_value(span))
|
||||
.map(|value| match &value {
|
||||
Value::Record { val, .. } => {
|
||||
let completions = val
|
||||
.get("completions")
|
||||
.and_then(|val| {
|
||||
val.as_list()
|
||||
.ok()
|
||||
.map(|it| map_value_completions(it.iter(), span, offset))
|
||||
})
|
||||
.unwrap_or_default();
|
||||
let options = val.get("options");
|
||||
|
||||
if let Some(Value::Record { val: options, .. }) = &options {
|
||||
let should_sort = options
|
||||
.get("sort")
|
||||
.and_then(|val| val.as_bool().ok())
|
||||
.unwrap_or(false);
|
||||
if let Some(Value::Record { val: options, .. }) = &options {
|
||||
let should_sort = options
|
||||
.get("sort")
|
||||
.and_then(|val| val.as_bool().ok())
|
||||
.unwrap_or(false);
|
||||
|
||||
if should_sort {
|
||||
self.sort_by = SortBy::Ascending;
|
||||
}
|
||||
|
||||
custom_completion_options = Some(CompletionOptions {
|
||||
case_sensitive: options
|
||||
.get("case_sensitive")
|
||||
.and_then(|val| val.as_bool().ok())
|
||||
.unwrap_or(true),
|
||||
positional: options
|
||||
.get("positional")
|
||||
.and_then(|val| val.as_bool().ok())
|
||||
.unwrap_or(true),
|
||||
match_algorithm: match options.get("completion_algorithm") {
|
||||
Some(option) => option
|
||||
.coerce_string()
|
||||
.ok()
|
||||
.and_then(|option| option.try_into().ok())
|
||||
.unwrap_or(MatchAlgorithm::Prefix),
|
||||
None => completion_options.match_algorithm,
|
||||
},
|
||||
});
|
||||
if should_sort {
|
||||
self.sort_by = SortBy::Ascending;
|
||||
}
|
||||
|
||||
completions
|
||||
custom_completion_options = Some(CompletionOptions {
|
||||
case_sensitive: options
|
||||
.get("case_sensitive")
|
||||
.and_then(|val| val.as_bool().ok())
|
||||
.unwrap_or(true),
|
||||
positional: options
|
||||
.get("positional")
|
||||
.and_then(|val| val.as_bool().ok())
|
||||
.unwrap_or(true),
|
||||
match_algorithm: match options.get("completion_algorithm") {
|
||||
Some(option) => option
|
||||
.coerce_string()
|
||||
.ok()
|
||||
.and_then(|option| option.try_into().ok())
|
||||
.unwrap_or(MatchAlgorithm::Prefix),
|
||||
None => completion_options.match_algorithm,
|
||||
},
|
||||
});
|
||||
}
|
||||
Value::List { vals, .. } => map_value_completions(vals.iter(), span, offset),
|
||||
_ => vec![],
|
||||
|
||||
completions
|
||||
}
|
||||
Value::List { vals, .. } => map_value_completions(vals.iter(), span, offset),
|
||||
_ => vec![],
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
|
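The `result.and_then(|data| data.into_value(span))` chaining above reflects that `PipelineData::into_value` now returns a `Result`, so collecting a pipeline into a single `Value` is fallible and composes directly with the evaluation result. A small illustrative helper (the function name is made up for illustration):

```rust
use nu_protocol::{PipelineData, ShellError, Span, Value};

// Evaluation and collection chained in one step: any error from either stage
// surfaces as the same ShellError, which is what the completer code now relies on.
fn collect_output(
    result: Result<PipelineData, ShellError>,
    span: Span,
) -> Result<Value, ShellError> {
    result.and_then(|data| data.into_value(span))
}
```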
@ -8,25 +8,16 @@ use nu_protocol::{
|
||||
levenshtein_distance, Span,
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
use std::{
|
||||
path::{Path, MAIN_SEPARATOR as SEP},
|
||||
sync::Arc,
|
||||
};
|
||||
use std::path::{Path, MAIN_SEPARATOR as SEP};
|
||||
|
||||
use super::SemanticSuggestion;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct DirectoryCompletion {
|
||||
engine_state: Arc<EngineState>,
|
||||
stack: Stack,
|
||||
}
|
||||
#[derive(Clone, Default)]
|
||||
pub struct DirectoryCompletion {}
|
||||
|
||||
impl DirectoryCompletion {
|
||||
pub fn new(engine_state: Arc<EngineState>, stack: Stack) -> Self {
|
||||
Self {
|
||||
engine_state,
|
||||
stack,
|
||||
}
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
}
|
||||
|
||||
@ -34,10 +25,11 @@ impl Completer for DirectoryCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
stack: &Stack,
|
||||
prefix: Vec<u8>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
_: usize,
|
||||
_pos: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let AdjustView { prefix, span, .. } = adjust_if_intermediate(&prefix, working_set, span);
|
||||
@ -47,10 +39,10 @@ impl Completer for DirectoryCompletion {
|
||||
let output: Vec<_> = directory_completion(
|
||||
span,
|
||||
&prefix,
|
||||
&self.engine_state.current_work_dir(),
|
||||
&working_set.permanent_state.current_work_dir(),
|
||||
options,
|
||||
self.engine_state.as_ref(),
|
||||
&self.stack,
|
||||
working_set.permanent_state,
|
||||
stack,
|
||||
)
|
||||
.into_iter()
|
||||
.map(move |x| SemanticSuggestion {
|
||||
|
@ -1,39 +1,31 @@
|
||||
use crate::completions::{file_path_completion, Completer, CompletionOptions, SortBy};
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
engine::{Stack, StateWorkingSet},
|
||||
Span,
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
use std::{
|
||||
path::{is_separator, Path, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR},
|
||||
sync::Arc,
|
||||
};
|
||||
use std::path::{is_separator, Path, MAIN_SEPARATOR as SEP, MAIN_SEPARATOR_STR};
|
||||
|
||||
use super::SemanticSuggestion;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct DotNuCompletion {
|
||||
engine_state: Arc<EngineState>,
|
||||
stack: Stack,
|
||||
}
|
||||
#[derive(Clone, Default)]
|
||||
pub struct DotNuCompletion {}
|
||||
|
||||
impl DotNuCompletion {
|
||||
pub fn new(engine_state: Arc<EngineState>, stack: Stack) -> Self {
|
||||
Self {
|
||||
engine_state,
|
||||
stack,
|
||||
}
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
}
|
||||
|
||||
impl Completer for DotNuCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
_: &StateWorkingSet,
|
||||
working_set: &StateWorkingSet,
|
||||
stack: &Stack,
|
||||
prefix: Vec<u8>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
_: usize,
|
||||
_pos: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let prefix_str = String::from_utf8_lossy(&prefix).replace('`', "");
|
||||
@ -49,26 +41,25 @@ impl Completer for DotNuCompletion {
|
||||
let mut is_current_folder = false;
|
||||
|
||||
// Fetch the lib dirs
|
||||
let lib_dirs: Vec<String> =
|
||||
if let Some(lib_dirs) = self.engine_state.get_env_var("NU_LIB_DIRS") {
|
||||
lib_dirs
|
||||
.as_list()
|
||||
.into_iter()
|
||||
.flat_map(|it| {
|
||||
it.iter().map(|x| {
|
||||
x.to_path()
|
||||
.expect("internal error: failed to convert lib path")
|
||||
})
|
||||
let lib_dirs: Vec<String> = if let Some(lib_dirs) = working_set.get_env_var("NU_LIB_DIRS") {
|
||||
lib_dirs
|
||||
.as_list()
|
||||
.into_iter()
|
||||
.flat_map(|it| {
|
||||
it.iter().map(|x| {
|
||||
x.to_path()
|
||||
.expect("internal error: failed to convert lib path")
|
||||
})
|
||||
.map(|it| {
|
||||
it.into_os_string()
|
||||
.into_string()
|
||||
.expect("internal error: failed to convert OS path")
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
})
|
||||
.map(|it| {
|
||||
it.into_os_string()
|
||||
.into_string()
|
||||
.expect("internal error: failed to convert OS path")
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
|
||||
// Check if the base_dir is a folder
|
||||
// rsplit_once removes the separator
|
||||
@ -85,7 +76,7 @@ impl Completer for DotNuCompletion {
|
||||
} else {
|
||||
// Fetch the current folder
|
||||
#[allow(deprecated)]
|
||||
let current_folder = self.engine_state.current_work_dir();
|
||||
let current_folder = working_set.permanent_state.current_work_dir();
|
||||
is_current_folder = true;
|
||||
|
||||
// Add the current folder and the lib dirs into the
|
||||
@ -104,8 +95,8 @@ impl Completer for DotNuCompletion {
|
||||
&partial,
|
||||
&search_dir,
|
||||
options,
|
||||
self.engine_state.as_ref(),
|
||||
&self.stack,
|
||||
working_set.permanent_state,
|
||||
stack,
|
||||
);
|
||||
completions
|
||||
.into_iter()
|
||||
|
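Since `DotNuCompletion` no longer stores an engine state, it resolves `NU_LIB_DIRS` through the working set, as shown above. A compact sketch of that lookup (error handling is relaxed to `filter_map` here, whereas the completer itself uses `expect` on the conversions):

```rust
use nu_protocol::engine::StateWorkingSet;

// Resolve NU_LIB_DIRS into plain directory strings via the working set,
// following the pattern used by DotNuCompletion::fetch.
fn lib_dir_strings(working_set: &StateWorkingSet) -> Vec<String> {
    working_set
        .get_env_var("NU_LIB_DIRS")
        .and_then(|dirs| dirs.as_list().ok())
        .map(|dirs| {
            dirs.iter()
                .filter_map(|dir| dir.to_path().ok())
                .filter_map(|path| path.into_os_string().into_string().ok())
                .collect()
        })
        .unwrap_or_default()
}
```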
@ -9,25 +9,16 @@ use nu_protocol::{
|
||||
};
|
||||
use nu_utils::IgnoreCaseExt;
|
||||
use reedline::Suggestion;
|
||||
use std::{
|
||||
path::{Path, MAIN_SEPARATOR as SEP},
|
||||
sync::Arc,
|
||||
};
|
||||
use std::path::{Path, MAIN_SEPARATOR as SEP};
|
||||
|
||||
use super::SemanticSuggestion;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct FileCompletion {
|
||||
engine_state: Arc<EngineState>,
|
||||
stack: Stack,
|
||||
}
|
||||
#[derive(Clone, Default)]
|
||||
pub struct FileCompletion {}
|
||||
|
||||
impl FileCompletion {
|
||||
pub fn new(engine_state: Arc<EngineState>, stack: Stack) -> Self {
|
||||
Self {
|
||||
engine_state,
|
||||
stack,
|
||||
}
|
||||
pub fn new() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
}
|
||||
|
||||
@ -35,10 +26,11 @@ impl Completer for FileCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
stack: &Stack,
|
||||
prefix: Vec<u8>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
_: usize,
|
||||
_pos: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let AdjustView {
|
||||
@ -52,10 +44,10 @@ impl Completer for FileCompletion {
|
||||
readjusted,
|
||||
span,
|
||||
&prefix,
|
||||
&self.engine_state.current_work_dir(),
|
||||
&working_set.permanent_state.current_work_dir(),
|
||||
options,
|
||||
self.engine_state.as_ref(),
|
||||
&self.stack,
|
||||
working_set.permanent_state,
|
||||
stack,
|
||||
)
|
||||
.into_iter()
|
||||
.map(move |x| SemanticSuggestion {
|
||||
|
@@ -1,7 +1,7 @@
 use crate::completions::{Completer, CompletionOptions};
 use nu_protocol::{
     ast::{Expr, Expression},
-    engine::StateWorkingSet,
+    engine::{Stack, StateWorkingSet},
     Span,
 };
 use reedline::Suggestion;
@@ -23,10 +23,11 @@ impl Completer for FlagCompletion {
     fn fetch(
         &mut self,
         working_set: &StateWorkingSet,
+        _stack: &Stack,
         prefix: Vec<u8>,
         span: Span,
         offset: usize,
-        _: usize,
+        _pos: usize,
         options: &CompletionOptions,
     ) -> Vec<SemanticSuggestion> {
         // Check if it's a flag
@ -3,30 +3,20 @@ use crate::completions::{
|
||||
};
|
||||
use nu_engine::{column::get_columns, eval_variable};
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
engine::{Stack, StateWorkingSet},
|
||||
Span, Value,
|
||||
};
|
||||
use reedline::Suggestion;
|
||||
use std::{str, sync::Arc};
|
||||
use std::str;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct VariableCompletion {
|
||||
engine_state: Arc<EngineState>, // TODO: Is engine state necessary? It's already a part of working set in fetch()
|
||||
stack: Stack,
|
||||
var_context: (Vec<u8>, Vec<Vec<u8>>), // tuple with $var and the sublevels (.b.c.d)
|
||||
}
|
||||
|
||||
impl VariableCompletion {
|
||||
pub fn new(
|
||||
engine_state: Arc<EngineState>,
|
||||
stack: Stack,
|
||||
var_context: (Vec<u8>, Vec<Vec<u8>>),
|
||||
) -> Self {
|
||||
Self {
|
||||
engine_state,
|
||||
stack,
|
||||
var_context,
|
||||
}
|
||||
pub fn new(var_context: (Vec<u8>, Vec<Vec<u8>>)) -> Self {
|
||||
Self { var_context }
|
||||
}
|
||||
}
|
||||
|
||||
@ -34,10 +24,11 @@ impl Completer for VariableCompletion {
|
||||
fn fetch(
|
||||
&mut self,
|
||||
working_set: &StateWorkingSet,
|
||||
stack: &Stack,
|
||||
prefix: Vec<u8>,
|
||||
span: Span,
|
||||
offset: usize,
|
||||
_: usize,
|
||||
_pos: usize,
|
||||
options: &CompletionOptions,
|
||||
) -> Vec<SemanticSuggestion> {
|
||||
let mut output = vec![];
|
||||
@ -54,7 +45,7 @@ impl Completer for VariableCompletion {
|
||||
if !var_str.is_empty() {
|
||||
// Completion for $env.<tab>
|
||||
if var_str == "$env" {
|
||||
let env_vars = self.stack.get_env_vars(&self.engine_state);
|
||||
let env_vars = stack.get_env_vars(working_set.permanent_state);
|
||||
|
||||
// Return nested values
|
||||
if sublevels_count > 0 {
|
||||
@ -110,8 +101,8 @@ impl Completer for VariableCompletion {
|
||||
if var_str == "$nu" {
|
||||
// Eval nu var
|
||||
if let Ok(nuval) = eval_variable(
|
||||
&self.engine_state,
|
||||
&self.stack,
|
||||
working_set.permanent_state,
|
||||
stack,
|
||||
nu_protocol::NU_VARIABLE_ID,
|
||||
nu_protocol::Span::new(current_span.start, current_span.end),
|
||||
) {
|
||||
@ -133,7 +124,7 @@ impl Completer for VariableCompletion {
|
||||
// Completion other variable types
|
||||
if let Some(var_id) = var_id {
|
||||
// Extract the variable value from the stack
|
||||
let var = self.stack.get_var(var_id, Span::new(span.start, span.end));
|
||||
let var = stack.get_var(var_id, Span::new(span.start, span.end));
|
||||
|
||||
// If the value exists and it's of type Record
|
||||
if let Ok(value) = var {
|
||||
@ -207,7 +198,11 @@ impl Completer for VariableCompletion {
|
||||
|
||||
// Permanent state vars
|
||||
// for scope in &self.engine_state.scope {
|
||||
for overlay_frame in self.engine_state.active_overlays(&removed_overlays).rev() {
|
||||
for overlay_frame in working_set
|
||||
.permanent_state
|
||||
.active_overlays(&removed_overlays)
|
||||
.rev()
|
||||
{
|
||||
for v in &overlay_frame.vars {
|
||||
if options.match_algorithm.matches_u8_insensitive(
|
||||
options.case_sensitive,
|
||||
|
@ -1,12 +1,12 @@
|
||||
use crate::util::eval_source;
|
||||
#[cfg(feature = "plugin")]
|
||||
use nu_path::canonicalize_with;
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
report_error, HistoryFileFormat, PipelineData,
|
||||
};
|
||||
#[cfg(feature = "plugin")]
|
||||
use nu_protocol::{ParseError, PluginRegistryFile, Spanned};
|
||||
use nu_protocol::{engine::StateWorkingSet, report_error, ParseError, PluginRegistryFile, Spanned};
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, Stack},
|
||||
report_error_new, HistoryFileFormat, PipelineData,
|
||||
};
|
||||
#[cfg(feature = "plugin")]
|
||||
use nu_utils::utils::perf;
|
||||
use std::path::PathBuf;
|
||||
@ -25,10 +25,9 @@ pub fn read_plugin_file(
|
||||
plugin_file: Option<Spanned<String>>,
|
||||
storage_path: &str,
|
||||
) {
|
||||
use nu_protocol::ShellError;
|
||||
use std::path::Path;
|
||||
|
||||
use nu_protocol::{report_error_new, ShellError};
|
||||
|
||||
let span = plugin_file.as_ref().map(|s| s.span);
|
||||
|
||||
// Check and warn + abort if this is a .nu plugin file
|
||||
@ -239,13 +238,11 @@ pub fn eval_config_contents(
|
||||
match engine_state.cwd(Some(stack)) {
|
||||
Ok(cwd) => {
|
||||
if let Err(e) = engine_state.merge_env(stack, cwd) {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(&working_set, &e);
|
||||
report_error_new(engine_state, &e);
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(&working_set, &e);
|
||||
report_error_new(engine_state, &e);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -266,8 +263,8 @@ pub(crate) fn get_history_path(storage_path: &str, mode: HistoryFileFormat) -> O
|
||||
#[cfg(feature = "plugin")]
|
||||
pub fn migrate_old_plugin_file(engine_state: &EngineState, storage_path: &str) -> bool {
|
||||
use nu_protocol::{
|
||||
report_error_new, PluginExample, PluginIdentity, PluginRegistryItem,
|
||||
PluginRegistryItemData, PluginSignature, ShellError,
|
||||
PluginExample, PluginIdentity, PluginRegistryItem, PluginRegistryItemData, PluginSignature,
|
||||
ShellError,
|
||||
};
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
@ -309,14 +306,15 @@ pub fn migrate_old_plugin_file(engine_state: &EngineState, storage_path: &str) -
|
||||
let mut engine_state = engine_state.clone();
|
||||
let mut stack = Stack::new();
|
||||
|
||||
if !eval_source(
|
||||
if eval_source(
|
||||
&mut engine_state,
|
||||
&mut stack,
|
||||
&old_contents,
|
||||
&old_plugin_file_path.to_string_lossy(),
|
||||
PipelineData::Empty,
|
||||
false,
|
||||
) {
|
||||
) != 0
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
|
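The error-handling changes above replace the `report_error` plus throwaway `StateWorkingSet` pairing with `report_error_new`, which takes the engine state directly. The resulting pattern, as a small hedged sketch:

```rust
use nu_protocol::{engine::EngineState, report_error_new, ShellError};

// Report a non-fatal error without building a StateWorkingSet first,
// matching the calls introduced in config_files.rs.
fn warn_on_error(engine_state: &EngineState, result: Result<(), ShellError>) {
    if let Err(err) = result {
        report_error_new(engine_state, &err);
    }
}
```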
@ -1,12 +1,12 @@
|
||||
use log::info;
|
||||
use miette::Result;
|
||||
use nu_engine::{convert_env_values, eval_block};
|
||||
use nu_parser::parse;
|
||||
use nu_protocol::{
|
||||
debugger::WithoutDebug,
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
report_error, PipelineData, Spanned, Value,
|
||||
report_error, PipelineData, ShellError, Spanned, Value,
|
||||
};
|
||||
use std::sync::Arc;
|
||||
|
||||
/// Run a command (or commands) given to us by the user
|
||||
pub fn evaluate_commands(
|
||||
@ -16,13 +16,9 @@ pub fn evaluate_commands(
|
||||
input: PipelineData,
|
||||
table_mode: Option<Value>,
|
||||
no_newline: bool,
|
||||
) -> Result<Option<i64>> {
|
||||
) -> Result<(), ShellError> {
|
||||
// Translate environment variables from Strings to Values
|
||||
if let Some(e) = convert_env_values(engine_state, stack) {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(&working_set, &e);
|
||||
std::process::exit(1);
|
||||
}
|
||||
convert_env_values(engine_state, stack)?;
|
||||
|
||||
// Parse the source code
|
||||
let (block, delta) = {
|
||||
@ -41,7 +37,6 @@ pub fn evaluate_commands(
|
||||
|
||||
if let Some(err) = working_set.parse_errors.first() {
|
||||
report_error(&working_set, err);
|
||||
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
@ -49,35 +44,27 @@ pub fn evaluate_commands(
|
||||
};
|
||||
|
||||
// Update permanent state
|
||||
if let Err(err) = engine_state.merge_delta(delta) {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(&working_set, &err);
|
||||
}
|
||||
engine_state.merge_delta(delta)?;
|
||||
|
||||
// Run the block
|
||||
let exit_code = match eval_block::<WithoutDebug>(engine_state, stack, &block, input) {
|
||||
Ok(pipeline_data) => {
|
||||
let mut config = engine_state.get_config().clone();
|
||||
if let Some(t_mode) = table_mode {
|
||||
config.table_mode = t_mode.coerce_str()?.parse().unwrap_or_default();
|
||||
}
|
||||
crate::eval_file::print_table_or_error(
|
||||
engine_state,
|
||||
stack,
|
||||
pipeline_data,
|
||||
&mut config,
|
||||
no_newline,
|
||||
)
|
||||
}
|
||||
Err(err) => {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
let pipeline = eval_block::<WithoutDebug>(engine_state, stack, &block, input)?;
|
||||
|
||||
report_error(&working_set, &err);
|
||||
std::process::exit(1);
|
||||
if let PipelineData::Value(Value::Error { error, .. }, ..) = pipeline {
|
||||
return Err(*error);
|
||||
}
|
||||
|
||||
if let Some(t_mode) = table_mode {
|
||||
Arc::make_mut(&mut engine_state.config).table_mode =
|
||||
t_mode.coerce_str()?.parse().unwrap_or_default();
|
||||
}
|
||||
|
||||
if let Some(status) = pipeline.print(engine_state, stack, no_newline, false)? {
|
||||
if status.code() != 0 {
|
||||
std::process::exit(status.code())
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
info!("evaluate {}:{}:{}", file!(), line!(), column!());
|
||||
|
||||
Ok(exit_code)
|
||||
Ok(())
|
||||
}
|
||||
|
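Two details of the rewritten `evaluate_commands` are worth pulling out: it now returns `Result<(), ShellError>` and simply propagates failures with `?`, and the `--table-mode` override is applied in place through `Arc::make_mut` on the shared config. A sketch of that second piece (the standalone function is for illustration):

```rust
use std::sync::Arc;

use nu_protocol::{engine::EngineState, ShellError, Value};

// Apply a table-mode override the way the new evaluate_commands does:
// Arc::make_mut clones the shared Config only if other references still exist.
fn apply_table_mode(engine_state: &mut EngineState, t_mode: Value) -> Result<(), ShellError> {
    Arc::make_mut(&mut engine_state.config).table_mode =
        t_mode.coerce_str()?.parse().unwrap_or_default();
    Ok(())
}
```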
@ -1,15 +1,14 @@
|
||||
use crate::util::eval_source;
|
||||
use log::{info, trace};
|
||||
use miette::{IntoDiagnostic, Result};
|
||||
use nu_engine::{convert_env_values, eval_block};
|
||||
use nu_parser::parse;
|
||||
use nu_path::canonicalize_with;
|
||||
use nu_protocol::{
|
||||
debugger::WithoutDebug,
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
report_error, Config, PipelineData, ShellError, Span, Value,
|
||||
report_error, PipelineData, ShellError, Span, Value,
|
||||
};
|
||||
use std::{io::Write, sync::Arc};
|
||||
use std::sync::Arc;
|
||||
|
||||
/// Entry point for evaluating a file.
|
||||
///
|
||||
@ -21,73 +20,40 @@ pub fn evaluate_file(
|
||||
engine_state: &mut EngineState,
|
||||
stack: &mut Stack,
|
||||
input: PipelineData,
|
||||
) -> Result<()> {
|
||||
) -> Result<(), ShellError> {
|
||||
// Convert environment variables from Strings to Values and store them in the engine state.
|
||||
if let Some(e) = convert_env_values(engine_state, stack) {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(&working_set, &e);
|
||||
std::process::exit(1);
|
||||
}
|
||||
convert_env_values(engine_state, stack)?;
|
||||
|
||||
let cwd = engine_state.cwd_as_string(Some(stack))?;
|
||||
|
||||
let file_path = canonicalize_with(&path, cwd).unwrap_or_else(|e| {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(
|
||||
&working_set,
|
||||
&ShellError::FileNotFoundCustom {
|
||||
msg: format!("Could not access file '{}': {:?}", path, e.to_string()),
|
||||
span: Span::unknown(),
|
||||
},
|
||||
);
|
||||
std::process::exit(1);
|
||||
});
|
||||
let file_path =
|
||||
canonicalize_with(&path, cwd).map_err(|err| ShellError::FileNotFoundCustom {
|
||||
msg: format!("Could not access file '{path}': {err}"),
|
||||
span: Span::unknown(),
|
||||
})?;
|
||||
|
||||
let file_path_str = file_path.to_str().unwrap_or_else(|| {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(
|
||||
&working_set,
|
||||
&ShellError::NonUtf8Custom {
|
||||
msg: format!(
|
||||
"Input file name '{}' is not valid UTF8",
|
||||
file_path.to_string_lossy()
|
||||
),
|
||||
span: Span::unknown(),
|
||||
},
|
||||
);
|
||||
std::process::exit(1);
|
||||
});
|
||||
let file_path_str = file_path
|
||||
.to_str()
|
||||
.ok_or_else(|| ShellError::NonUtf8Custom {
|
||||
msg: format!(
|
||||
"Input file name '{}' is not valid UTF8",
|
||||
file_path.to_string_lossy()
|
||||
),
|
||||
span: Span::unknown(),
|
||||
})?;
|
||||
|
||||
let file = std::fs::read(&file_path)
|
||||
.into_diagnostic()
|
||||
.unwrap_or_else(|e| {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(
|
||||
&working_set,
|
||||
&ShellError::FileNotFoundCustom {
|
||||
msg: format!(
|
||||
"Could not read file '{}': {:?}",
|
||||
file_path_str,
|
||||
e.to_string()
|
||||
),
|
||||
span: Span::unknown(),
|
||||
},
|
||||
);
|
||||
std::process::exit(1);
|
||||
});
|
||||
let file = std::fs::read(&file_path).map_err(|err| ShellError::FileNotFoundCustom {
|
||||
msg: format!("Could not read file '{file_path_str}': {err}"),
|
||||
span: Span::unknown(),
|
||||
})?;
|
||||
engine_state.file = Some(file_path.clone());
|
||||
|
||||
let parent = file_path.parent().unwrap_or_else(|| {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(
|
||||
&working_set,
|
||||
&ShellError::FileNotFoundCustom {
|
||||
msg: format!("The file path '{file_path_str}' does not have a parent"),
|
||||
span: Span::unknown(),
|
||||
},
|
||||
);
|
||||
std::process::exit(1);
|
||||
});
|
||||
let parent = file_path
|
||||
.parent()
|
||||
.ok_or_else(|| ShellError::FileNotFoundCustom {
|
||||
msg: format!("The file path '{file_path_str}' does not have a parent"),
|
||||
span: Span::unknown(),
|
||||
})?;
|
||||
|
||||
stack.add_env_var(
|
||||
"FILE_PWD".to_string(),
|
||||
@ -127,119 +93,48 @@ pub fn evaluate_file(
|
||||
}
|
||||
|
||||
// Merge the changes into the engine state.
|
||||
engine_state
|
||||
.merge_delta(working_set.delta)
|
||||
.expect("merging delta into engine_state should succeed");
|
||||
engine_state.merge_delta(working_set.delta)?;
|
||||
|
||||
// Check if the file contains a main command.
|
||||
if engine_state.find_decl(b"main", &[]).is_some() {
|
||||
let exit_code = if engine_state.find_decl(b"main", &[]).is_some() {
|
||||
// Evaluate the file, but don't run main yet.
|
||||
let pipeline_data =
|
||||
eval_block::<WithoutDebug>(engine_state, stack, &block, PipelineData::empty());
|
||||
let pipeline_data = match pipeline_data {
|
||||
Err(ShellError::Return { .. }) => {
|
||||
// Allow early return before main is run.
|
||||
return Ok(());
|
||||
}
|
||||
x => x,
|
||||
}
|
||||
.unwrap_or_else(|e| {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(&working_set, &e);
|
||||
std::process::exit(1);
|
||||
});
|
||||
|
||||
// Print the pipeline output of the file.
|
||||
// The pipeline output of a file is the pipeline output of its last command.
|
||||
let result = pipeline_data.print(engine_state, stack, true, false);
|
||||
match result {
|
||||
Err(err) => {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(&working_set, &err);
|
||||
std::process::exit(1);
|
||||
}
|
||||
Ok(exit_code) => {
|
||||
if exit_code != 0 {
|
||||
std::process::exit(exit_code as i32);
|
||||
let pipeline =
|
||||
match eval_block::<WithoutDebug>(engine_state, stack, &block, PipelineData::empty()) {
|
||||
Ok(data) => data,
|
||||
Err(ShellError::Return { .. }) => {
|
||||
// Allow early return before main is run.
|
||||
return Ok(());
|
||||
}
|
||||
Err(err) => return Err(err),
|
||||
};
|
||||
|
||||
// Print the pipeline output of the last command of the file.
|
||||
if let Some(status) = pipeline.print(engine_state, stack, true, false)? {
|
||||
if status.code() != 0 {
|
||||
std::process::exit(status.code())
|
||||
}
|
||||
}
|
||||
|
||||
// Invoke the main command with arguments.
|
||||
// Arguments with whitespaces are quoted, thus can be safely concatenated by whitespace.
|
||||
let args = format!("main {}", args.join(" "));
|
||||
if !eval_source(
|
||||
eval_source(
|
||||
engine_state,
|
||||
stack,
|
||||
args.as_bytes(),
|
||||
"<commandline>",
|
||||
input,
|
||||
true,
|
||||
) {
|
||||
std::process::exit(1);
|
||||
}
|
||||
} else if !eval_source(engine_state, stack, &file, file_path_str, input, true) {
|
||||
std::process::exit(1);
|
||||
)
|
||||
} else {
|
||||
eval_source(engine_state, stack, &file, file_path_str, input, true)
|
||||
};
|
||||
|
||||
if exit_code != 0 {
|
||||
std::process::exit(exit_code)
|
||||
}
|
||||
|
||||
info!("evaluate {}:{}:{}", file!(), line!(), column!());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn print_table_or_error(
|
||||
engine_state: &mut EngineState,
|
||||
stack: &mut Stack,
|
||||
mut pipeline_data: PipelineData,
|
||||
config: &mut Config,
|
||||
no_newline: bool,
|
||||
) -> Option<i64> {
|
||||
let exit_code = match &mut pipeline_data {
|
||||
PipelineData::ExternalStream { exit_code, .. } => exit_code.take(),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
// Change the engine_state config to use the passed in configuration
|
||||
engine_state.set_config(config.clone());
|
||||
|
||||
if let PipelineData::Value(Value::Error { error, .. }, ..) = &pipeline_data {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(&working_set, &**error);
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
// We don't need to do anything special to print a table because print() handles it
|
||||
print_or_exit(pipeline_data, engine_state, stack, no_newline);
|
||||
|
||||
// Make sure everything has finished
|
||||
if let Some(exit_code) = exit_code {
|
||||
let mut exit_code: Vec<_> = exit_code.into_iter().collect();
|
||||
exit_code
|
||||
.pop()
|
||||
.and_then(|last_exit_code| match last_exit_code {
|
||||
Value::Int { val: code, .. } => Some(code),
|
||||
_ => None,
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn print_or_exit(
|
||||
pipeline_data: PipelineData,
|
||||
engine_state: &EngineState,
|
||||
stack: &mut Stack,
|
||||
no_newline: bool,
|
||||
) {
|
||||
let result = pipeline_data.print(engine_state, stack, no_newline, false);
|
||||
|
||||
let _ = std::io::stdout().flush();
|
||||
let _ = std::io::stderr().flush();
|
||||
|
||||
if let Err(error) = result {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(&working_set, &error);
|
||||
let _ = std::io::stderr().flush();
|
||||
std::process::exit(1);
|
||||
}
|
||||
}
|
||||
|
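The `evaluate_file` rewrite above swaps every report-and-`exit(1)` closure for a plain conversion into `ShellError` followed by `?`. The file-read step shows the shape; here it is as a small standalone sketch (messages and error variant follow the diff):

```rust
use std::path::Path;

use nu_protocol::{ShellError, Span};

// Read the script file, turning an io::Error into the ShellError the caller
// now receives through `?` instead of an immediate std::process::exit(1).
fn read_script(file_path: &Path, file_path_str: &str) -> Result<Vec<u8>, ShellError> {
    std::fs::read(file_path).map_err(|err| ShellError::FileNotFoundCustom {
        msg: format!("Could not read file '{file_path_str}': {err}"),
        span: Span::unknown(),
    })
}
```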
@@ -59,8 +59,7 @@ impl Completer for NuMenuCompleter {

         let res = eval_block::<WithoutDebug>(&self.engine_state, &mut self.stack, block, input);

-        if let Ok(values) = res {
-            let values = values.into_value(self.span);
+        if let Ok(values) = res.and_then(|data| data.into_value(self.span)) {
             convert_to_suggestions(values, line, pos, self.only_buffer_difference)
         } else {
             Vec::new()
@@ -129,9 +129,11 @@ impl Prompt for NushellPrompt {
         {
             // We're in vscode and we have osc633 enabled
             format!("{VSCODE_PRE_PROMPT_MARKER}{prompt}{VSCODE_POST_PROMPT_MARKER}").into()
-        } else {
-            // If we're in VSCode but we don't find the env var, just return the regular markers
+        } else if self.shell_integration_osc133 {
+            // If we're in VSCode but we don't find the env var, but we have osc133 set, then use it
             format!("{PRE_PROMPT_MARKER}{prompt}{POST_PROMPT_MARKER}").into()
+        } else {
+            prompt.into()
+        }
         } else if self.shell_integration_osc133 {
             format!("{PRE_PROMPT_MARKER}{prompt}{POST_PROMPT_MARKER}").into()
@ -2,8 +2,8 @@ use crate::NushellPrompt;
|
||||
use log::trace;
|
||||
use nu_engine::ClosureEvalOnce;
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
report_error, Config, PipelineData, Value,
|
||||
engine::{EngineState, Stack},
|
||||
report_error_new, Config, PipelineData, Value,
|
||||
};
|
||||
use reedline::Prompt;
|
||||
|
||||
@ -65,7 +65,7 @@ fn get_prompt_string(
|
||||
.get_env_var(engine_state, prompt)
|
||||
.and_then(|v| match v {
|
||||
Value::Closure { val, .. } => {
|
||||
let result = ClosureEvalOnce::new(engine_state, stack, val)
|
||||
let result = ClosureEvalOnce::new(engine_state, stack, *val)
|
||||
.run_with_input(PipelineData::Empty);
|
||||
|
||||
trace!(
|
||||
@ -77,8 +77,7 @@ fn get_prompt_string(
|
||||
|
||||
result
|
||||
.map_err(|err| {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(&working_set, &err);
|
||||
report_error_new(engine_state, &err);
|
||||
})
|
||||
.ok()
|
||||
}
|
||||
@ -108,50 +107,34 @@ pub(crate) fn update_prompt(
|
||||
stack: &mut Stack,
|
||||
nu_prompt: &mut NushellPrompt,
|
||||
) {
|
||||
let left_prompt_string = get_prompt_string(PROMPT_COMMAND, config, engine_state, stack);
|
||||
let configured_left_prompt_string =
|
||||
match get_prompt_string(PROMPT_COMMAND, config, engine_state, stack) {
|
||||
Some(s) => s,
|
||||
None => "".to_string(),
|
||||
};
|
||||
|
||||
// Now that we have the prompt string lets ansify it.
|
||||
// <133 A><prompt><133 B><command><133 C><command output>
|
||||
let left_prompt_string_133 = if config.shell_integration_osc133 {
|
||||
if let Some(prompt_string) = left_prompt_string.clone() {
|
||||
let left_prompt_string = if config.shell_integration_osc633 {
|
||||
if stack.get_env_var(engine_state, "TERM_PROGRAM") == Some(Value::test_string("vscode")) {
|
||||
// We're in vscode and we have osc633 enabled
|
||||
Some(format!(
|
||||
"{PRE_PROMPT_MARKER}{prompt_string}{POST_PROMPT_MARKER}"
|
||||
"{VSCODE_PRE_PROMPT_MARKER}{configured_left_prompt_string}{VSCODE_POST_PROMPT_MARKER}"
|
||||
))
|
||||
} else if config.shell_integration_osc133 {
|
||||
// If we're in VSCode but we don't find the env var, but we have osc133 set, then use it
|
||||
Some(format!(
|
||||
"{PRE_PROMPT_MARKER}{configured_left_prompt_string}{POST_PROMPT_MARKER}"
|
||||
))
|
||||
} else {
|
||||
left_prompt_string.clone()
|
||||
configured_left_prompt_string.into()
|
||||
}
|
||||
} else if config.shell_integration_osc133 {
|
||||
Some(format!(
|
||||
"{PRE_PROMPT_MARKER}{configured_left_prompt_string}{POST_PROMPT_MARKER}"
|
||||
))
|
||||
} else {
|
||||
left_prompt_string.clone()
|
||||
};
|
||||
|
||||
let left_prompt_string_633 = if config.shell_integration_osc633 {
|
||||
if let Some(prompt_string) = left_prompt_string.clone() {
|
||||
if stack.get_env_var(engine_state, "TERM_PROGRAM") == Some(Value::test_string("vscode"))
|
||||
{
|
||||
// If the user enabled osc633 and we're in vscode, use the vscode markers
|
||||
Some(format!(
|
||||
"{VSCODE_PRE_PROMPT_MARKER}{prompt_string}{VSCODE_POST_PROMPT_MARKER}"
|
||||
))
|
||||
} else {
|
||||
// otherwise, use the regular osc133 markers
|
||||
Some(format!(
|
||||
"{PRE_PROMPT_MARKER}{prompt_string}{POST_PROMPT_MARKER}"
|
||||
))
|
||||
}
|
||||
} else {
|
||||
left_prompt_string.clone()
|
||||
}
|
||||
} else {
|
||||
left_prompt_string.clone()
|
||||
};
|
||||
|
||||
let left_prompt_string = match (left_prompt_string_133, left_prompt_string_633) {
|
||||
(None, None) => left_prompt_string,
|
||||
(None, Some(l633)) => Some(l633),
|
||||
(Some(l133), None) => Some(l133),
|
||||
// If both are set, it means we're in vscode, so use the vscode markers
|
||||
// and even if we're not actually in vscode atm, the regular 133 markers are used
|
||||
(Some(_l133), Some(l633)) => Some(l633),
|
||||
configured_left_prompt_string.into()
|
||||
};
|
||||
|
||||
let right_prompt_string = get_prompt_string(PROMPT_COMMAND_RIGHT, config, engine_state, stack);
|
||||
|
@ -26,9 +26,8 @@ use nu_parser::{lex, parse, trim_quotes_str};
use nu_protocol::{
config::NuCursorShape,
engine::{EngineState, Stack, StateWorkingSet},
eval_const::create_nu_constant,
report_error_new, HistoryConfig, HistoryFileFormat, PipelineData, ShellError, Span, Spanned,
Value, NU_VARIABLE_ID,
Value,
};
use nu_utils::{
filesystem::{have_permission, PermissionResult},
@ -87,7 +86,7 @@ pub fn evaluate_repl(

let start_time = std::time::Instant::now();
// Translate environment variables from Strings to Values
if let Some(e) = convert_env_values(engine_state, &unique_stack) {
if let Err(e) = convert_env_values(engine_state, &unique_stack) {
report_error_new(engine_state, &e);
}
perf(
@ -145,8 +144,7 @@ pub fn evaluate_repl(
engine_state.set_startup_time(entire_start_time.elapsed().as_nanos() as i64);

// Regenerate the $nu constant to contain the startup time and any other potential updates
let nu_const = create_nu_constant(engine_state, Span::unknown())?;
engine_state.set_variable_const_val(NU_VARIABLE_ID, nu_const);
engine_state.generate_nu_constant();

if load_std_lib.is_none() && engine_state.get_config().show_banner {
eval_source(
@ -389,7 +387,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
.with_completer(Box::new(NuCompleter::new(
engine_reference.clone(),
// STACK-REFERENCE 2
Stack::with_parent(stack_arc.clone()),
stack_arc.clone(),
)))
.with_quick_completions(config.quick_completions)
.with_partial_completions(config.partial_completions)
@ -544,7 +542,9 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
let shell_integration_osc633 = config.shell_integration_osc633;
let shell_integration_reset_application_mode = config.shell_integration_reset_application_mode;

let mut stack = Stack::unwrap_unique(stack_arc);
// TODO: we may clone the stack, this can lead to major performance issues
// so we should avoid it or making stack cheaper to clone.
let mut stack = Arc::unwrap_or_clone(stack_arc);

perf(
"line_editor setup",
@ -620,7 +620,7 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
column!(),
use_color,
);
} else {
} else if shell_integration_osc133 {
start_time = Instant::now();

run_ansi_sequence(PRE_EXECUTION_MARKER);
@ -660,9 +660,9 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
run_finaliziation_ansi_sequence(
&stack,
engine_state,
use_color,
shell_integration_osc633,
shell_integration_osc133,
use_color,
);
}
ReplOperation::RunCommand(cmd) => {
@ -679,9 +679,9 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
run_finaliziation_ansi_sequence(
&stack,
engine_state,
use_color,
shell_integration_osc633,
shell_integration_osc133,
use_color,
);
}
// as the name implies, we do nothing in this case
@ -731,9 +731,9 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
run_finaliziation_ansi_sequence(
&stack,
engine_state,
use_color,
shell_integration_osc633,
shell_integration_osc133,
use_color,
);
}
Ok(Signal::CtrlD) => {
@ -742,9 +742,9 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
run_finaliziation_ansi_sequence(
&stack,
engine_state,
use_color,
shell_integration_osc633,
shell_integration_osc133,
use_color,
);

println!();
@ -763,9 +763,9 @@ fn loop_iteration(ctx: LoopContext) -> (bool, Stack, Reedline) {
run_finaliziation_ansi_sequence(
&stack,
engine_state,
use_color,
shell_integration_osc633,
shell_integration_osc133,
use_color,
);
}
}
@ -872,7 +872,7 @@ fn parse_operation(
let tokens = lex(s.as_bytes(), 0, &[], &[], false);
// Check if this is a single call to a directory, if so auto-cd
#[allow(deprecated)]
let cwd = nu_engine::env::current_dir_str(engine_state, stack)?;
let cwd = nu_engine::env::current_dir_str(engine_state, stack).unwrap_or_default();
let mut orig = s.clone();
if orig.starts_with('`') {
orig = trim_quotes_str(&orig).to_string()
@ -929,7 +929,10 @@ fn do_auto_cd(

//FIXME: this only changes the current scope, but instead this environment variable
//should probably be a block that loads the information from the state in the overlay
stack.add_env_var("PWD".into(), Value::string(path.clone(), Span::unknown()));
if let Err(err) = stack.set_cwd(&path) {
report_error_new(engine_state, &err);
return;
};
let cwd = Value::string(cwd, span);

let shells = stack.get_env_var(engine_state, "NUSHELL_SHELLS");
@ -1298,27 +1301,46 @@ fn map_nucursorshape_to_cursorshape(shape: NuCursorShape) -> Option<SetCursorSty
}
}

fn get_command_finished_marker(stack: &Stack, engine_state: &EngineState, vscode: bool) -> String {
fn get_command_finished_marker(
stack: &Stack,
engine_state: &EngineState,
shell_integration_osc633: bool,
shell_integration_osc133: bool,
) -> String {
let exit_code = stack
.get_env_var(engine_state, "LAST_EXIT_CODE")
.and_then(|e| e.as_i64().ok());

if vscode {
// format!("\x1b]633;D;{}\x1b\\", exit_code.unwrap_or(0))
format!(
"{}{}{}",
VSCODE_POST_EXECUTION_MARKER_PREFIX,
exit_code.unwrap_or(0),
VSCODE_POST_EXECUTION_MARKER_SUFFIX
)
} else {
// format!("\x1b]133;D;{}\x1b\\", exit_code.unwrap_or(0))
if shell_integration_osc633 {
if stack.get_env_var(engine_state, "TERM_PROGRAM") == Some(Value::test_string("vscode")) {
// We're in vscode and we have osc633 enabled
format!(
"{}{}{}",
VSCODE_POST_EXECUTION_MARKER_PREFIX,
exit_code.unwrap_or(0),
VSCODE_POST_EXECUTION_MARKER_SUFFIX
)
} else if shell_integration_osc133 {
// If we're in VSCode but we don't find the env var, just return the regular markers
format!(
"{}{}{}",
POST_EXECUTION_MARKER_PREFIX,
exit_code.unwrap_or(0),
POST_EXECUTION_MARKER_SUFFIX
)
} else {
// We're not in vscode, so we don't need to do anything special
"\x1b[0m".to_string()
}
} else if shell_integration_osc133 {
format!(
"{}{}{}",
POST_EXECUTION_MARKER_PREFIX,
exit_code.unwrap_or(0),
POST_EXECUTION_MARKER_SUFFIX
)
} else {
"\x1b[0m".to_string()
}
}

@ -1342,7 +1364,12 @@ fn run_finaliziation_ansi_sequence(
if stack.get_env_var(engine_state, "TERM_PROGRAM") == Some(Value::test_string("vscode")) {
let start_time = Instant::now();

run_ansi_sequence(&get_command_finished_marker(stack, engine_state, true));
run_ansi_sequence(&get_command_finished_marker(
stack,
engine_state,
shell_integration_osc633,
shell_integration_osc133,
));

perf(
"post_execute_marker (633;D) ansi escape sequences",
@ -1352,10 +1379,15 @@ fn run_finaliziation_ansi_sequence(
column!(),
use_color,
);
} else {
} else if shell_integration_osc133 {
let start_time = Instant::now();

run_ansi_sequence(&get_command_finished_marker(stack, engine_state, false));
run_ansi_sequence(&get_command_finished_marker(
stack,
engine_state,
shell_integration_osc633,
shell_integration_osc133,
));

perf(
"post_execute_marker (133;D) ansi escape sequences",
@ -1369,7 +1401,12 @@ fn run_finaliziation_ansi_sequence(
} else if shell_integration_osc133 {
let start_time = Instant::now();

run_ansi_sequence(&get_command_finished_marker(stack, engine_state, false));
run_ansi_sequence(&get_command_finished_marker(
stack,
engine_state,
shell_integration_osc633,
shell_integration_osc133,
));

perf(
"post_execute_marker (133;D) ansi escape sequences",
@ -1447,3 +1484,136 @@ fn are_session_ids_in_sync() {
|
||||
engine_state.history_session_id
|
||||
);
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test_auto_cd {
|
||||
use super::{do_auto_cd, parse_operation, ReplOperation};
|
||||
use nu_protocol::engine::{EngineState, Stack};
|
||||
use std::path::Path;
|
||||
use tempfile::tempdir;
|
||||
|
||||
/// Create a symlink. Works on both Unix and Windows.
|
||||
#[cfg(any(unix, windows))]
|
||||
fn symlink(original: impl AsRef<Path>, link: impl AsRef<Path>) -> std::io::Result<()> {
|
||||
#[cfg(unix)]
|
||||
{
|
||||
std::os::unix::fs::symlink(original, link)
|
||||
}
|
||||
#[cfg(windows)]
|
||||
{
|
||||
if original.as_ref().is_dir() {
|
||||
std::os::windows::fs::symlink_dir(original, link)
|
||||
} else {
|
||||
std::os::windows::fs::symlink_file(original, link)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Run one test case on the auto-cd feature. PWD is initially set to
|
||||
/// `before`, and after `input` is parsed and evaluated, PWD should be
|
||||
/// changed to `after`.
|
||||
#[track_caller]
|
||||
fn check(before: impl AsRef<Path>, input: &str, after: impl AsRef<Path>) {
|
||||
// Setup EngineState and Stack.
|
||||
let mut engine_state = EngineState::new();
|
||||
let mut stack = Stack::new();
|
||||
stack.set_cwd(before).unwrap();
|
||||
|
||||
// Parse the input. It must be an auto-cd operation.
|
||||
let op = parse_operation(input.to_string(), &engine_state, &stack).unwrap();
|
||||
let ReplOperation::AutoCd { cwd, target, span } = op else {
|
||||
panic!("'{}' was not parsed into an auto-cd operation", input)
|
||||
};
|
||||
|
||||
// Perform the auto-cd operation.
|
||||
do_auto_cd(target, cwd, &mut stack, &mut engine_state, span);
|
||||
let updated_cwd = engine_state.cwd(Some(&stack)).unwrap();
|
||||
|
||||
// Check that `updated_cwd` and `after` point to the same place. They
|
||||
// don't have to be byte-wise equal (on Windows, the 8.3 filename
|
||||
// conversion messes things up),
|
||||
let updated_cwd = std::fs::canonicalize(updated_cwd).unwrap();
|
||||
let after = std::fs::canonicalize(after).unwrap();
|
||||
assert_eq!(updated_cwd, after);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn auto_cd_root() {
|
||||
let tempdir = tempdir().unwrap();
|
||||
let root = if cfg!(windows) { r"C:\" } else { "/" };
|
||||
check(&tempdir, root, root);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn auto_cd_tilde() {
|
||||
let tempdir = tempdir().unwrap();
|
||||
let home = nu_path::home_dir().unwrap();
|
||||
check(&tempdir, "~", home);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn auto_cd_dot() {
|
||||
let tempdir = tempdir().unwrap();
|
||||
check(&tempdir, ".", &tempdir);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn auto_cd_double_dot() {
|
||||
let tempdir = tempdir().unwrap();
|
||||
let dir = tempdir.path().join("foo");
|
||||
std::fs::create_dir_all(&dir).unwrap();
|
||||
check(dir, "..", &tempdir);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn auto_cd_triple_dot() {
|
||||
let tempdir = tempdir().unwrap();
|
||||
let dir = tempdir.path().join("foo").join("bar");
|
||||
std::fs::create_dir_all(&dir).unwrap();
|
||||
check(dir, "...", &tempdir);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn auto_cd_relative() {
|
||||
let tempdir = tempdir().unwrap();
|
||||
let foo = tempdir.path().join("foo");
|
||||
let bar = tempdir.path().join("bar");
|
||||
std::fs::create_dir_all(&foo).unwrap();
|
||||
std::fs::create_dir_all(&bar).unwrap();
|
||||
|
||||
let input = if cfg!(windows) { r"..\bar" } else { "../bar" };
|
||||
check(foo, input, bar);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn auto_cd_trailing_slash() {
|
||||
let tempdir = tempdir().unwrap();
|
||||
let dir = tempdir.path().join("foo");
|
||||
std::fs::create_dir_all(&dir).unwrap();
|
||||
|
||||
let input = if cfg!(windows) { r"foo\" } else { "foo/" };
|
||||
check(&tempdir, input, dir);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn auto_cd_symlink() {
|
||||
let tempdir = tempdir().unwrap();
|
||||
let dir = tempdir.path().join("foo");
|
||||
std::fs::create_dir_all(&dir).unwrap();
|
||||
let link = tempdir.path().join("link");
|
||||
symlink(&dir, &link).unwrap();
|
||||
|
||||
let input = if cfg!(windows) { r".\link" } else { "./link" };
|
||||
check(&tempdir, input, link);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic(expected = "was not parsed into an auto-cd operation")]
|
||||
fn auto_cd_nonexistent_directory() {
|
||||
let tempdir = tempdir().unwrap();
|
||||
let dir = tempdir.path().join("foo");
|
||||
|
||||
let input = if cfg!(windows) { r"foo\" } else { "foo/" };
|
||||
check(&tempdir, input, dir);
|
||||
}
|
||||
}
|
||||
@ -4,7 +4,7 @@ use nu_parser::{escape_quote_string, lex, parse, unescape_unquote_string, Token,
use nu_protocol::{
debugger::WithoutDebug,
engine::{EngineState, Stack, StateWorkingSet},
print_if_stream, report_error, report_error_new, PipelineData, ShellError, Span, Value,
report_error, report_error_new, PipelineData, ShellError, Span, Value,
};
#[cfg(windows)]
use nu_utils::enable_vt_processing;
@ -39,9 +39,8 @@ fn gather_env_vars(
init_cwd: &Path,
) {
fn report_capture_error(engine_state: &EngineState, env_str: &str, msg: &str) {
let working_set = StateWorkingSet::new(engine_state);
report_error(
&working_set,
report_error_new(
engine_state,
&ShellError::GenericError {
error: format!("Environment variable was not captured: {env_str}"),
msg: "".into(),
@ -71,9 +70,8 @@ fn gather_env_vars(
}
None => {
// Could not capture current working directory
let working_set = StateWorkingSet::new(engine_state);
report_error(
&working_set,
report_error_new(
engine_state,
&ShellError::GenericError {
error: "Current directory is not a valid utf-8 path".into(),
msg: "".into(),
@ -208,9 +206,48 @@ pub fn eval_source(
fname: &str,
input: PipelineData,
allow_return: bool,
) -> bool {
) -> i32 {
let start_time = std::time::Instant::now();

let exit_code = match evaluate_source(engine_state, stack, source, fname, input, allow_return) {
Ok(code) => code.unwrap_or(0),
Err(err) => {
report_error_new(engine_state, &err);
1
}
};

stack.add_env_var(
"LAST_EXIT_CODE".to_string(),
Value::int(exit_code.into(), Span::unknown()),
);

// reset vt processing, aka ansi because illbehaved externals can break it
#[cfg(windows)]
{
let _ = enable_vt_processing();
}

perf(
&format!("eval_source {}", &fname),
start_time,
file!(),
line!(),
column!(),
engine_state.get_config().use_ansi_coloring,
);

exit_code
}

fn evaluate_source(
engine_state: &mut EngineState,
stack: &mut Stack,
source: &[u8],
fname: &str,
input: PipelineData,
allow_return: bool,
) -> Result<Option<i32>, ShellError> {
let (block, delta) = {
let mut working_set = StateWorkingSet::new(engine_state);
let output = parse(
|
||||
@ -224,104 +261,40 @@ pub fn eval_source(
|
||||
}
|
||||
|
||||
if let Some(err) = working_set.parse_errors.first() {
|
||||
set_last_exit_code(stack, 1);
|
||||
report_error(&working_set, err);
|
||||
return false;
|
||||
return Ok(Some(1));
|
||||
}
|
||||
|
||||
(output, working_set.render())
|
||||
};
|
||||
|
||||
if let Err(err) = engine_state.merge_delta(delta) {
|
||||
set_last_exit_code(stack, 1);
|
||||
report_error_new(engine_state, &err);
|
||||
return false;
|
||||
}
|
||||
engine_state.merge_delta(delta)?;
|
||||
|
||||
let b = if allow_return {
|
||||
let pipeline = if allow_return {
|
||||
eval_block_with_early_return::<WithoutDebug>(engine_state, stack, &block, input)
|
||||
} else {
|
||||
eval_block::<WithoutDebug>(engine_state, stack, &block, input)
|
||||
}?;
|
||||
|
||||
let status = if let PipelineData::ByteStream(stream, ..) = pipeline {
|
||||
stream.print(false)?
|
||||
} else {
|
||||
if let Some(hook) = engine_state.get_config().hooks.display_output.clone() {
|
||||
let pipeline = eval_hook(
|
||||
engine_state,
|
||||
stack,
|
||||
Some(pipeline),
|
||||
vec![],
|
||||
&hook,
|
||||
"display_output",
|
||||
)?;
|
||||
pipeline.print(engine_state, stack, false, false)
|
||||
} else {
|
||||
pipeline.print(engine_state, stack, true, false)
|
||||
}?
|
||||
};
|
||||
|
||||
match b {
|
||||
Ok(pipeline_data) => {
|
||||
let config = engine_state.get_config();
|
||||
let result;
|
||||
if let PipelineData::ExternalStream {
|
||||
stdout: stream,
|
||||
stderr: stderr_stream,
|
||||
exit_code,
|
||||
..
|
||||
} = pipeline_data
|
||||
{
|
||||
result = print_if_stream(stream, stderr_stream, false, exit_code);
|
||||
} else if let Some(hook) = config.hooks.display_output.clone() {
|
||||
match eval_hook(
|
||||
engine_state,
|
||||
stack,
|
||||
Some(pipeline_data),
|
||||
vec![],
|
||||
&hook,
|
||||
"display_output",
|
||||
) {
|
||||
Err(err) => {
|
||||
result = Err(err);
|
||||
}
|
||||
Ok(val) => {
|
||||
result = val.print(engine_state, stack, false, false);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
result = pipeline_data.print(engine_state, stack, true, false);
|
||||
}
|
||||
|
||||
match result {
|
||||
Err(err) => {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
|
||||
report_error(&working_set, &err);
|
||||
|
||||
return false;
|
||||
}
|
||||
Ok(exit_code) => {
|
||||
set_last_exit_code(stack, exit_code);
|
||||
}
|
||||
}
|
||||
|
||||
// reset vt processing, aka ansi because illbehaved externals can break it
|
||||
#[cfg(windows)]
|
||||
{
|
||||
let _ = enable_vt_processing();
|
||||
}
|
||||
}
|
||||
Err(err) => {
|
||||
set_last_exit_code(stack, 1);
|
||||
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
|
||||
report_error(&working_set, &err);
|
||||
|
||||
return false;
|
||||
}
|
||||
}
|
||||
perf(
|
||||
&format!("eval_source {}", &fname),
|
||||
start_time,
|
||||
file!(),
|
||||
line!(),
|
||||
column!(),
|
||||
engine_state.get_config().use_ansi_coloring,
|
||||
);
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
fn set_last_exit_code(stack: &mut Stack, exit_code: i64) {
|
||||
stack.add_env_var(
|
||||
"LAST_EXIT_CODE".to_string(),
|
||||
Value::int(exit_code, Span::unknown()),
|
||||
);
|
||||
Ok(status.map(|status| status.code()))
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
1
crates/nu-cli/tests/commands/mod.rs
Normal file
@ -0,0 +1 @@
mod nu_highlight;
7
crates/nu-cli/tests/commands/nu_highlight.rs
Normal file
@ -0,0 +1,7 @@
use nu_test_support::nu;

#[test]
fn nu_highlight_not_expr() {
let actual = nu!("'not false' | nu-highlight | ansi strip");
assert_eq!(actual.out, "not false");
}
@ -6,7 +6,10 @@ use nu_parser::parse;
|
||||
use nu_protocol::{debugger::WithoutDebug, engine::StateWorkingSet, PipelineData};
|
||||
use reedline::{Completer, Suggestion};
|
||||
use rstest::{fixture, rstest};
|
||||
use std::path::{PathBuf, MAIN_SEPARATOR};
|
||||
use std::{
|
||||
path::{PathBuf, MAIN_SEPARATOR},
|
||||
sync::Arc,
|
||||
};
|
||||
use support::{
|
||||
completions_helpers::{new_partial_engine, new_quote_engine},
|
||||
file, folder, match_suggestions, new_engine,
|
||||
@ -22,7 +25,7 @@ fn completer() -> NuCompleter {
|
||||
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||
|
||||
// Instantiate a new completer
|
||||
NuCompleter::new(std::sync::Arc::new(engine), stack)
|
||||
NuCompleter::new(Arc::new(engine), Arc::new(stack))
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
@ -36,7 +39,7 @@ fn completer_strings() -> NuCompleter {
|
||||
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||
|
||||
// Instantiate a new completer
|
||||
NuCompleter::new(std::sync::Arc::new(engine), stack)
|
||||
NuCompleter::new(Arc::new(engine), Arc::new(stack))
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
@ -56,7 +59,7 @@ fn extern_completer() -> NuCompleter {
|
||||
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||
|
||||
// Instantiate a new completer
|
||||
NuCompleter::new(std::sync::Arc::new(engine), stack)
|
||||
NuCompleter::new(Arc::new(engine), Arc::new(stack))
|
||||
}
|
||||
|
||||
#[fixture]
|
||||
@ -79,14 +82,14 @@ fn custom_completer() -> NuCompleter {
|
||||
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||
|
||||
// Instantiate a new completer
|
||||
NuCompleter::new(std::sync::Arc::new(engine), stack)
|
||||
NuCompleter::new(Arc::new(engine), Arc::new(stack))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn variables_dollar_sign_with_varialblecompletion() {
|
||||
let (_, _, engine, stack) = new_engine();
|
||||
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
let target_dir = "$ ";
|
||||
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||
@ -138,7 +141,7 @@ fn dotnu_completions() {
|
||||
let (_, _, engine, stack) = new_engine();
|
||||
|
||||
// Instantiate a new completer
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
// Test source completion
|
||||
let completion_str = "source-env ".to_string();
|
||||
@ -217,7 +220,7 @@ fn file_completions() {
|
||||
let (dir, dir_str, engine, stack) = new_engine();
|
||||
|
||||
// Instantiate a new completer
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
// Test completions for the current folder
|
||||
let target_dir = format!("cp {dir_str}{MAIN_SEPARATOR}");
|
||||
@ -265,7 +268,7 @@ fn partial_completions() {
|
||||
let (dir, _, engine, stack) = new_partial_engine();
|
||||
|
||||
// Instantiate a new completer
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
// Test completions for a folder's name
|
||||
let target_dir = format!("cd {}", file(dir.join("pa")));
|
||||
@ -363,7 +366,7 @@ fn partial_completions() {
|
||||
fn command_ls_with_filecompletion() {
|
||||
let (_, _, engine, stack) = new_engine();
|
||||
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
let target_dir = "ls ";
|
||||
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||
@ -397,7 +400,7 @@ fn command_ls_with_filecompletion() {
|
||||
fn command_open_with_filecompletion() {
|
||||
let (_, _, engine, stack) = new_engine();
|
||||
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
let target_dir = "open ";
|
||||
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||
@ -432,7 +435,7 @@ fn command_open_with_filecompletion() {
|
||||
fn command_rm_with_globcompletion() {
|
||||
let (_, _, engine, stack) = new_engine();
|
||||
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
let target_dir = "rm ";
|
||||
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||
@ -467,7 +470,7 @@ fn command_rm_with_globcompletion() {
|
||||
fn command_cp_with_globcompletion() {
|
||||
let (_, _, engine, stack) = new_engine();
|
||||
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
let target_dir = "cp ";
|
||||
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||
@ -502,7 +505,7 @@ fn command_cp_with_globcompletion() {
|
||||
fn command_save_with_filecompletion() {
|
||||
let (_, _, engine, stack) = new_engine();
|
||||
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
let target_dir = "save ";
|
||||
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||
@ -537,7 +540,7 @@ fn command_save_with_filecompletion() {
|
||||
fn command_touch_with_filecompletion() {
|
||||
let (_, _, engine, stack) = new_engine();
|
||||
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
let target_dir = "touch ";
|
||||
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||
@ -572,7 +575,7 @@ fn command_touch_with_filecompletion() {
|
||||
fn command_watch_with_filecompletion() {
|
||||
let (_, _, engine, stack) = new_engine();
|
||||
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
let target_dir = "watch ";
|
||||
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||
@ -607,7 +610,7 @@ fn command_watch_with_filecompletion() {
|
||||
fn file_completion_quoted() {
|
||||
let (_, _, engine, stack) = new_quote_engine();
|
||||
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
let target_dir = "open ";
|
||||
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||
@ -645,7 +648,7 @@ fn flag_completions() {
|
||||
let (_, _, engine, stack) = new_engine();
|
||||
|
||||
// Instantiate a new completer
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
// Test completions for the 'ls' flags
|
||||
let suggestions = completer.complete("ls -", 4);
|
||||
|
||||
@ -680,7 +683,7 @@ fn folder_with_directorycompletions() {
|
||||
let (dir, dir_str, engine, stack) = new_engine();
|
||||
|
||||
// Instantiate a new completer
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
// Test completions for the current folder
|
||||
let target_dir = format!("cd {dir_str}{MAIN_SEPARATOR}");
|
||||
@ -709,7 +712,7 @@ fn variables_completions() {
|
||||
assert!(support::merge_input(record.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||
|
||||
// Instantiate a new completer
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
// Test completions for $nu
|
||||
let suggestions = completer.complete("$nu.", 4);
|
||||
@ -815,7 +818,7 @@ fn alias_of_command_and_flags() {
|
||||
let alias = r#"alias ll = ls -l"#;
|
||||
assert!(support::merge_input(alias.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
let suggestions = completer.complete("ll t", 4);
|
||||
#[cfg(windows)]
|
||||
@ -834,7 +837,7 @@ fn alias_of_basic_command() {
|
||||
let alias = r#"alias ll = ls "#;
|
||||
assert!(support::merge_input(alias.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
let suggestions = completer.complete("ll t", 4);
|
||||
#[cfg(windows)]
|
||||
@ -856,7 +859,7 @@ fn alias_of_another_alias() {
|
||||
let alias = r#"alias lf = ll -f"#;
|
||||
assert!(support::merge_input(alias.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
let suggestions = completer.complete("lf t", 4);
|
||||
#[cfg(windows)]
|
||||
@ -890,7 +893,7 @@ fn run_external_completion(completer: &str, input: &str) -> Vec<Suggestion> {
|
||||
assert!(engine_state.merge_env(&mut stack, &dir).is_ok());
|
||||
|
||||
// Instantiate a new completer
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine_state), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine_state), Arc::new(stack));
|
||||
|
||||
completer.complete(input, input.len())
|
||||
}
|
||||
@ -899,7 +902,7 @@ fn run_external_completion(completer: &str, input: &str) -> Vec<Suggestion> {
|
||||
fn unknown_command_completion() {
|
||||
let (_, _, engine, stack) = new_engine();
|
||||
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
let target_dir = "thiscommanddoesnotexist ";
|
||||
let suggestions = completer.complete(target_dir, target_dir.len());
|
||||
@ -962,7 +965,7 @@ fn flagcompletion_triggers_after_cursor_piped(mut completer: NuCompleter) {
|
||||
fn filecompletions_triggers_after_cursor() {
|
||||
let (_, _, engine, stack) = new_engine();
|
||||
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
let suggestions = completer.complete("cp test_c", 3);
|
||||
|
||||
@ -1071,7 +1074,7 @@ fn alias_offset_bug_7648() {
|
||||
let alias = r#"alias ea = ^$env.EDITOR /tmp/test.s"#;
|
||||
assert!(support::merge_input(alias.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
// Issue #7648
|
||||
// Nushell crashes when an alias name is shorter than the alias command
|
||||
@ -1090,7 +1093,7 @@ fn alias_offset_bug_7754() {
|
||||
let alias = r#"alias ll = ls -l"#;
|
||||
assert!(support::merge_input(alias.as_bytes(), &mut engine, &mut stack, dir).is_ok());
|
||||
|
||||
let mut completer = NuCompleter::new(std::sync::Arc::new(engine), stack);
|
||||
let mut completer = NuCompleter::new(Arc::new(engine), Arc::new(stack));
|
||||
|
||||
// Issue #7754
|
||||
// Nushell crashes when an alias name is shorter than the alias command
|
@ -3,8 +3,7 @@ use nu_parser::parse;
|
||||
use nu_protocol::{
|
||||
debugger::WithoutDebug,
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
eval_const::create_nu_constant,
|
||||
PipelineData, ShellError, Span, Value, NU_VARIABLE_ID,
|
||||
PipelineData, ShellError, Span, Value,
|
||||
};
|
||||
use nu_test_support::fs;
|
||||
use reedline::Suggestion;
|
||||
@ -28,9 +27,7 @@ pub fn new_engine() -> (PathBuf, String, EngineState, Stack) {
|
||||
let mut engine_state = create_default_context();
|
||||
|
||||
// Add $nu
|
||||
let nu_const =
|
||||
create_nu_constant(&engine_state, Span::test_data()).expect("Failed creating $nu");
|
||||
engine_state.set_variable_const_val(NU_VARIABLE_ID, nu_const);
|
||||
engine_state.generate_nu_constant();
|
||||
|
||||
// New stack
|
||||
let mut stack = Stack::new();
|
2
crates/nu-cli/tests/main.rs
Normal file
2
crates/nu-cli/tests/main.rs
Normal file
@ -0,0 +1,2 @@
|
||||
mod commands;
|
||||
mod completions;
|
@ -1,6 +1,6 @@
|
||||
use nu_protocol::{
|
||||
engine::{EngineState, Stack, StateWorkingSet},
|
||||
report_error, Range, ShellError, Span, Value,
|
||||
engine::{EngineState, Stack},
|
||||
Range, ShellError, Span, Value,
|
||||
};
|
||||
use std::{ops::Bound, path::PathBuf};
|
||||
|
||||
@ -13,11 +13,9 @@ pub fn get_init_cwd() -> PathBuf {
|
||||
}
|
||||
|
||||
pub fn get_guaranteed_cwd(engine_state: &EngineState, stack: &Stack) -> PathBuf {
|
||||
engine_state.cwd(Some(stack)).unwrap_or_else(|e| {
|
||||
let working_set = StateWorkingSet::new(engine_state);
|
||||
report_error(&working_set, &e);
|
||||
crate::util::get_init_cwd()
|
||||
})
|
||||
engine_state
|
||||
.cwd(Some(stack))
|
||||
.unwrap_or(crate::util::get_init_cwd())
|
||||
}
|
||||
|
||||
type MakeRangeError = fn(&str, Span) -> ShellError;
|
||||
|
@ -79,7 +79,7 @@ impl Command for CastDF {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuLazyFrame::can_downcast(&value) {
|
||||
let (dtype, column_nm) = df_args(engine_state, stack, call)?;
|
||||
let df = NuLazyFrame::try_from_value(value)?;
|
||||
|
@ -72,8 +72,7 @@ impl Command for FilterWith {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuLazyFrame::can_downcast(&value) {
|
||||
let df = NuLazyFrame::try_from_value(value)?;
|
||||
command_lazy(engine_state, stack, call, df)
|
||||
|
@ -86,7 +86,7 @@ impl Command for FirstDF {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuDataFrame::can_downcast(&value) {
|
||||
let df = NuDataFrame::try_from_value(value)?;
|
||||
command(engine_state, stack, call, df)
|
||||
|
@ -61,7 +61,7 @@ impl Command for LastDF {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuDataFrame::can_downcast(&value) {
|
||||
let df = NuDataFrame::try_from_value(value)?;
|
||||
command(engine_state, stack, call, df)
|
||||
|
@ -109,8 +109,7 @@ impl Command for RenameDF {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuLazyFrame::can_downcast(&value) {
|
||||
let df = NuLazyFrame::try_from_value(value)?;
|
||||
command_lazy(engine_state, stack, call, df)
|
||||
|
@ -76,7 +76,7 @@ impl Command for ToNu {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuDataFrame::can_downcast(&value) {
|
||||
dataframe_command(engine_state, stack, call, value)
|
||||
} else {
|
||||
|
@ -102,8 +102,7 @@ impl Command for WithColumn {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuLazyFrame::can_downcast(&value) {
|
||||
let df = NuLazyFrame::try_from_value(value)?;
|
||||
command_lazy(engine_state, stack, call, df)
|
||||
|
@ -172,7 +172,7 @@ macro_rules! lazy_expr_command {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuDataFrame::can_downcast(&value) {
|
||||
let lazy = NuLazyFrame::try_from_value(value)?;
|
||||
let lazy = NuLazyFrame::new(
|
||||
@ -271,7 +271,7 @@ macro_rules! lazy_expr_command {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuDataFrame::can_downcast(&value) {
|
||||
let lazy = NuLazyFrame::try_from_value(value)?;
|
||||
let lazy = NuLazyFrame::new(
|
||||
|
@ -91,7 +91,7 @@ impl Command for ExprOtherwise {
|
||||
let otherwise_predicate: Value = call.req(engine_state, stack, 0)?;
|
||||
let otherwise_predicate = NuExpression::try_from_value(otherwise_predicate)?;
|
||||
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
let complete: NuExpression = match NuWhen::try_from_value(value)? {
|
||||
NuWhen::Then(then) => then.otherwise(otherwise_predicate.into_polars()).into(),
|
||||
NuWhen::ChainedThen(chained_when) => chained_when
|
||||
|
@ -67,7 +67,7 @@ impl Command for ExprQuantile {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
let quantile: f64 = call.req(engine_state, stack, 0)?;
|
||||
|
||||
let expr = NuExpression::try_from_value(value)?;
|
||||
|
@ -103,7 +103,7 @@ impl Command for ExprWhen {
|
||||
let then_predicate: Value = call.req(engine_state, stack, 1)?;
|
||||
let then_predicate = NuExpression::try_from_value(then_predicate)?;
|
||||
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
let when_then: NuWhen = match value {
|
||||
Value::Nothing { .. } => when(when_predicate.into_polars())
|
||||
.then(then_predicate.into_polars())
|
||||
|
@ -100,7 +100,7 @@ impl Command for LazyExplode {
|
||||
}
|
||||
|
||||
pub(crate) fn explode(call: &Call, input: PipelineData) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuDataFrame::can_downcast(&value) {
|
||||
let df = NuLazyFrame::try_from_value(value)?;
|
||||
let columns: Vec<String> = call
|
||||
|
@ -82,7 +82,7 @@ impl Command for LazyFillNA {
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let fill: Value = call.req(engine_state, stack, 0)?;
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
|
||||
if NuExpression::can_downcast(&value) {
|
||||
let expr = NuExpression::try_from_value(value)?;
|
||||
|
@ -59,7 +59,7 @@ impl Command for LazyFillNull {
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let fill: Value = call.req(engine_state, stack, 0)?;
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
|
||||
if NuExpression::can_downcast(&value) {
|
||||
let expr = NuExpression::try_from_value(value)?;
|
||||
|
@ -219,7 +219,7 @@ impl Command for LazyJoin {
|
||||
let suffix: Option<String> = call.get_flag(engine_state, stack, "suffix")?;
|
||||
let suffix = suffix.unwrap_or_else(|| "_x".into());
|
||||
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
let lazy = NuLazyFrame::try_from_value(value)?;
|
||||
let from_eager = lazy.from_eager;
|
||||
let lazy = lazy.into_polars();
|
||||
|
@ -54,7 +54,7 @@ impl Command for LazyQuantile {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
let quantile: f64 = call.req(engine_state, stack, 0)?;
|
||||
|
||||
let lazy = NuLazyFrame::try_from_value(value)?;
|
||||
|
@ -68,7 +68,7 @@ impl Command for IsNotNull {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuDataFrame::can_downcast(&value) {
|
||||
let df = NuDataFrame::try_from_value(value)?;
|
||||
command(engine_state, stack, call, df)
|
||||
|
@ -68,7 +68,7 @@ impl Command for IsNull {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuDataFrame::can_downcast(&value) {
|
||||
let df = NuDataFrame::try_from_value(value)?;
|
||||
command(engine_state, stack, call, df)
|
||||
|
@ -60,7 +60,7 @@ impl Command for NUnique {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuDataFrame::can_downcast(&value) {
|
||||
let df = NuDataFrame::try_from_value(value)?;
|
||||
command(engine_state, stack, call, df)
|
||||
|
@ -56,8 +56,7 @@ impl Command for Shift {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuLazyFrame::can_downcast(&value) {
|
||||
let df = NuLazyFrame::try_from_value(value)?;
|
||||
command_lazy(engine_state, stack, call, df)
|
||||
|
@ -72,8 +72,7 @@ impl Command for Unique {
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let value = input.into_value(call.head);
|
||||
|
||||
let value = input.into_value(call.head)?;
|
||||
if NuLazyFrame::can_downcast(&value) {
|
||||
let df = NuLazyFrame::try_from_value(value)?;
|
||||
command_lazy(engine_state, stack, call, df)
|
||||
|
@ -80,7 +80,8 @@ pub fn test_dataframe_example(engine_state: &mut Box<EngineState>, example: &Exa
|
||||
let result =
|
||||
eval_block::<WithoutDebug>(engine_state, &mut stack, &block, PipelineData::empty())
|
||||
.unwrap_or_else(|err| panic!("test eval error in `{}`: {:?}", example.example, err))
|
||||
.into_value(Span::test_data());
|
||||
.into_value(Span::test_data())
|
||||
.expect("ok value");
|
||||
|
||||
println!("input: {}", example.example);
|
||||
println!("result: {result:?}");
|
||||
|
@ -1,7 +1,7 @@
|
||||
use super::{operations::Axis, NuDataFrame};
|
||||
use nu_protocol::{
|
||||
ast::{Boolean, Comparison, Math, Operator},
|
||||
span, ShellError, Span, Spanned, Value,
|
||||
ShellError, Span, Spanned, Value,
|
||||
};
|
||||
use num::Zero;
|
||||
use polars::prelude::{
|
||||
@ -17,7 +17,7 @@ pub(super) fn between_dataframes(
|
||||
right: &Value,
|
||||
rhs: &NuDataFrame,
|
||||
) -> Result<Value, ShellError> {
|
||||
let operation_span = span(&[left.span(), right.span()]);
|
||||
let operation_span = Span::merge(left.span(), right.span());
|
||||
match operator.item {
|
||||
Operator::Math(Math::Plus) => match lhs.append_df(rhs, Axis::Row, operation_span) {
|
||||
Ok(df) => Ok(df.into_value(operation_span)),
|
||||
@ -40,7 +40,7 @@ pub(super) fn compute_between_series(
|
||||
right: &Value,
|
||||
rhs: &Series,
|
||||
) -> Result<Value, ShellError> {
|
||||
let operation_span = span(&[left.span(), right.span()]);
|
||||
let operation_span = Span::merge(left.span(), right.span());
|
||||
match operator.item {
|
||||
Operator::Math(Math::Plus) => {
|
||||
let mut res = lhs + rhs;
|
||||
|
@ -295,7 +295,7 @@ impl NuDataFrame {
|
||||
}
|
||||
|
||||
pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
|
||||
let value = input.into_value(span);
|
||||
let value = input.into_value(span)?;
|
||||
Self::try_from_value(value)
|
||||
}
|
||||
|
||||
|
@ -84,7 +84,7 @@ impl NuExpression {
|
||||
}
|
||||
|
||||
pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
|
||||
let value = input.into_value(span);
|
||||
let value = input.into_value(span)?;
|
||||
Self::try_from_value(value)
|
||||
}
|
||||
|
||||
|
@ -134,7 +134,7 @@ impl NuLazyFrame {
|
||||
}
|
||||
|
||||
pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
|
||||
let value = input.into_value(span);
|
||||
let value = input.into_value(span)?;
|
||||
Self::try_from_value(value)
|
||||
}
|
||||
|
||||
|
@ -107,7 +107,7 @@ impl NuLazyGroupBy {
|
||||
}
|
||||
|
||||
pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
|
||||
let value = input.into_value(span);
|
||||
let value = input.into_value(span)?;
|
||||
Self::try_from_value(value)
|
||||
}
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
use nu_protocol::{span as span_join, ShellError, Span, Spanned, Value};
|
||||
use nu_protocol::{ShellError, Span, Spanned, Value};
|
||||
|
||||
// Default value used when selecting rows from dataframe
|
||||
pub const DEFAULT_ROWS: usize = 5;
|
||||
@ -27,7 +27,7 @@ pub(crate) fn convert_columns(
|
||||
let span = value.span();
|
||||
match value {
|
||||
Value::String { val, .. } => {
|
||||
col_span = span_join(&[col_span, span]);
|
||||
col_span = col_span.merge(span);
|
||||
Ok(Spanned { item: val, span })
|
||||
}
|
||||
_ => Err(ShellError::GenericError {
|
||||
@ -68,7 +68,7 @@ pub(crate) fn convert_columns_string(
|
||||
let span = value.span();
|
||||
match value {
|
||||
Value::String { val, .. } => {
|
||||
col_span = span_join(&[col_span, span]);
|
||||
col_span = col_span.merge(span);
|
||||
Ok(val)
|
||||
}
|
||||
_ => Err(ShellError::GenericError {
|
||||
|
@ -118,22 +118,12 @@ fn into_bits(
|
||||
let cell_paths = call.rest(engine_state, stack, 0)?;
|
||||
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
|
||||
|
||||
match input {
|
||||
PipelineData::ExternalStream { stdout: None, .. } => {
|
||||
Ok(Value::binary(vec![], head).into_pipeline_data())
|
||||
}
|
||||
PipelineData::ExternalStream {
|
||||
stdout: Some(stream),
|
||||
..
|
||||
} => {
|
||||
// TODO: in the future, we may want this to stream out, converting each to bytes
|
||||
let output = stream.into_bytes()?;
|
||||
Ok(Value::binary(output.item, head).into_pipeline_data())
|
||||
}
|
||||
_ => {
|
||||
let args = Arguments { cell_paths };
|
||||
operate(action, args, input, call.head, engine_state.ctrlc.clone())
|
||||
}
|
||||
if let PipelineData::ByteStream(stream, ..) = input {
|
||||
// TODO: in the future, we may want this to stream out, converting each to bytes
|
||||
Ok(Value::binary(stream.into_bytes()?, head).into_pipeline_data())
|
||||
} else {
|
||||
let args = Arguments { cell_paths };
|
||||
operate(action, args, input, call.head, engine_state.ctrlc.clone())
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -10,7 +10,7 @@ impl Command for EachWhile {
|
||||
}
|
||||
|
||||
fn usage(&self) -> &str {
|
||||
"Run a block on each row of the input list until a null is found, then create a new list with the results."
|
||||
"Run a closure on each row of the input list until a null is found, then create a new list with the results."
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
@ -78,38 +78,40 @@ impl Command for EachWhile {
|
||||
| PipelineData::ListStream(..) => {
|
||||
let mut closure = ClosureEval::new(engine_state, stack, closure);
|
||||
Ok(input
|
||||
.into_iter()
|
||||
.map_while(move |value| match closure.run_with_value(value) {
|
||||
Ok(data) => {
|
||||
let value = data.into_value(head);
|
||||
(!value.is_nothing()).then_some(value)
|
||||
}
|
||||
Err(_) => None,
|
||||
})
|
||||
.fuse()
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
}
|
||||
PipelineData::ExternalStream { stdout: None, .. } => Ok(PipelineData::empty()),
|
||||
PipelineData::ExternalStream {
|
||||
stdout: Some(stream),
|
||||
..
|
||||
} => {
|
||||
let mut closure = ClosureEval::new(engine_state, stack, closure);
|
||||
Ok(stream
|
||||
.into_iter()
|
||||
.map_while(move |value| {
|
||||
let value = value.ok()?;
|
||||
match closure.run_with_value(value) {
|
||||
Ok(data) => {
|
||||
let value = data.into_value(head);
|
||||
(!value.is_nothing()).then_some(value)
|
||||
}
|
||||
match closure
|
||||
.run_with_value(value)
|
||||
.and_then(|data| data.into_value(head))
|
||||
{
|
||||
Ok(value) => (!value.is_nothing()).then_some(value),
|
||||
Err(_) => None,
|
||||
}
|
||||
})
|
||||
.fuse()
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
}
|
||||
PipelineData::ByteStream(stream, ..) => {
|
||||
let span = stream.span();
|
||||
if let Some(chunks) = stream.chunks() {
|
||||
let mut closure = ClosureEval::new(engine_state, stack, closure);
|
||||
Ok(chunks
|
||||
.map_while(move |value| {
|
||||
let value = value.ok()?;
|
||||
match closure
|
||||
.run_with_value(value)
|
||||
.and_then(|data| data.into_value(span))
|
||||
{
|
||||
Ok(value) => (!value.is_nothing()).then_some(value),
|
||||
Err(_) => None,
|
||||
}
|
||||
})
|
||||
.fuse()
|
||||
.into_pipeline_data(head, engine_state.ctrlc.clone()))
|
||||
} else {
|
||||
Ok(PipelineData::Empty)
|
||||
}
|
||||
}
|
||||
// This match allows non-iterables to be accepted,
|
||||
// which is currently considered undesirable (Nov 2022).
|
||||
PipelineData::Value(value, ..) => {
|
||||
|
@ -56,7 +56,7 @@ impl Command for RollDown {
|
||||
let by: Option<usize> = call.get_flag(engine_state, stack, "by")?;
|
||||
let metadata = input.metadata();
|
||||
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
let rotated_value = vertical_rotate_value(value, by, VerticalDirection::Down)?;
|
||||
|
||||
Ok(rotated_value.into_pipeline_data().set_metadata(metadata))
|
||||
|
@ -94,7 +94,7 @@ impl Command for RollLeft {
|
||||
let metadata = input.metadata();
|
||||
|
||||
let cells_only = call.has_flag(engine_state, stack, "cells-only")?;
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
let rotated_value =
|
||||
horizontal_rotate_value(value, by, cells_only, &HorizontalDirection::Left)?;
|
||||
|
||||
|
@ -94,7 +94,7 @@ impl Command for RollRight {
|
||||
let metadata = input.metadata();
|
||||
|
||||
let cells_only = call.has_flag(engine_state, stack, "cells-only")?;
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
let rotated_value =
|
||||
horizontal_rotate_value(value, by, cells_only, &HorizontalDirection::Right)?;
|
||||
|
||||
|
@ -56,7 +56,7 @@ impl Command for RollUp {
|
||||
let by: Option<usize> = call.get_flag(engine_state, stack, "by")?;
|
||||
let metadata = input.metadata();
|
||||
|
||||
let value = input.into_value(call.head);
|
||||
let value = input.into_value(call.head)?;
|
||||
let rotated_value = vertical_rotate_value(value, by, VerticalDirection::Up)?;
|
||||
|
||||
Ok(rotated_value.into_pipeline_data().set_metadata(metadata))
|
||||
|
@ -151,7 +151,7 @@ impl Iterator for UpdateCellIterator {
|
||||
fn eval_value(closure: &mut ClosureEval, span: Span, value: Value) -> Value {
|
||||
closure
|
||||
.run_with_value(value)
|
||||
.map(|data| data.into_value(span))
|
||||
.and_then(|data| data.into_value(span))
|
||||
.unwrap_or_else(|err| Value::error(err, span))
|
||||
}
|
||||
|
||||
|
@ -39,7 +39,7 @@ impl Command for FormatPattern {
|
||||
let mut working_set = StateWorkingSet::new(engine_state);
|
||||
|
||||
let specified_pattern: Result<Value, ShellError> = call.req(engine_state, stack, 0);
|
||||
let input_val = input.into_value(call.head);
|
||||
let input_val = input.into_value(call.head)?;
|
||||
// add '$it' variable to support format like this: $it.column1.column2.
|
||||
let it_id = working_set.add_variable(b"$it".to_vec(), call.head, Type::Any, false);
|
||||
stack.add_var(it_id, input_val.clone());
|
||||
|
@ -19,102 +19,102 @@ fn basic_string_fails() {
|
||||
assert_eq!(actual.out, "");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn short_stream_binary() {
|
||||
let actual = nu!(r#"
|
||||
nu --testbin repeater (0x[01]) 5 | bytes starts-with 0x[010101]
|
||||
"#);
|
||||
// #[test]
|
||||
// fn short_stream_binary() {
|
||||
// let actual = nu!(r#"
|
||||
// nu --testbin repeater (0x[01]) 5 | bytes starts-with 0x[010101]
|
||||
// "#);
|
||||
|
||||
assert_eq!(actual.out, "true");
|
||||
}
|
||||
// assert_eq!(actual.out, "true");
|
||||
// }
|
||||
|
||||
#[test]
|
||||
fn short_stream_mismatch() {
|
||||
let actual = nu!(r#"
|
||||
nu --testbin repeater (0x[010203]) 5 | bytes starts-with 0x[010204]
|
||||
"#);
|
||||
// #[test]
|
||||
// fn short_stream_mismatch() {
|
||||
// let actual = nu!(r#"
|
||||
// nu --testbin repeater (0x[010203]) 5 | bytes starts-with 0x[010204]
|
||||
// "#);
|
||||
|
||||
assert_eq!(actual.out, "false");
|
||||
}
|
||||
// assert_eq!(actual.out, "false");
|
||||
// }
|
||||
|
||||
#[test]
|
||||
fn short_stream_binary_overflow() {
|
||||
let actual = nu!(r#"
|
||||
nu --testbin repeater (0x[01]) 5 | bytes starts-with 0x[010101010101]
|
||||
"#);
|
||||
// #[test]
|
||||
// fn short_stream_binary_overflow() {
|
||||
// let actual = nu!(r#"
|
||||
// nu --testbin repeater (0x[01]) 5 | bytes starts-with 0x[010101010101]
|
||||
// "#);
|
||||
|
||||
assert_eq!(actual.out, "false");
|
||||
}
|
||||
// assert_eq!(actual.out, "false");
|
||||
// }
|
||||
|
||||
#[test]
|
||||
fn long_stream_binary() {
let actual = nu!(r#"
nu --testbin repeater (0x[01]) 32768 | bytes starts-with 0x[010101]
"#);
// #[test]
// fn long_stream_binary() {
// let actual = nu!(r#"
// nu --testbin repeater (0x[01]) 32768 | bytes starts-with 0x[010101]
// "#);

assert_eq!(actual.out, "true");
}
// assert_eq!(actual.out, "true");
// }

#[test]
fn long_stream_binary_overflow() {
// .. ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
let actual = nu!(r#"
nu --testbin repeater (0x[01]) 32768 | bytes starts-with (0..32768 | each {|| 0x[01] } | bytes collect)
"#);
// #[test]
// fn long_stream_binary_overflow() {
// // .. ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
// let actual = nu!(r#"
// nu --testbin repeater (0x[01]) 32768 | bytes starts-with (0..32768 | each {|| 0x[01] } | bytes collect)
// "#);

assert_eq!(actual.out, "false");
}
// assert_eq!(actual.out, "false");
// }

#[test]
fn long_stream_binary_exact() {
// ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
let actual = nu!(r#"
nu --testbin repeater (0x[01020304]) 8192 | bytes starts-with (0..<8192 | each {|| 0x[01020304] } | bytes collect)
"#);
// #[test]
// fn long_stream_binary_exact() {
// // ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
// let actual = nu!(r#"
// nu --testbin repeater (0x[01020304]) 8192 | bytes starts-with (0..<8192 | each {|| 0x[01020304] } | bytes collect)
// "#);

assert_eq!(actual.out, "true");
}
// assert_eq!(actual.out, "true");
// }

#[test]
fn long_stream_string_exact() {
// ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
let actual = nu!(r#"
nu --testbin repeater hell 8192 | bytes starts-with (0..<8192 | each {|| "hell" | into binary } | bytes collect)
"#);
// #[test]
// fn long_stream_string_exact() {
// // ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
// let actual = nu!(r#"
// nu --testbin repeater hell 8192 | bytes starts-with (0..<8192 | each {|| "hell" | into binary } | bytes collect)
// "#);

assert_eq!(actual.out, "true");
}
// assert_eq!(actual.out, "true");
// }

#[test]
fn long_stream_mixed_exact() {
// ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
let actual = nu!(r#"
let binseg = (0..<2048 | each {|| 0x[003d9fbf] } | bytes collect)
let strseg = (0..<2048 | each {|| "hell" | into binary } | bytes collect)
// #[test]
// fn long_stream_mixed_exact() {
// // ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
// let actual = nu!(r#"
// let binseg = (0..<2048 | each {|| 0x[003d9fbf] } | bytes collect)
// let strseg = (0..<2048 | each {|| "hell" | into binary } | bytes collect)

nu --testbin repeat_bytes 003d9fbf 2048 68656c6c 2048 | bytes starts-with (bytes build $binseg $strseg)
"#);
// nu --testbin repeat_bytes 003d9fbf 2048 68656c6c 2048 | bytes starts-with (bytes build $binseg $strseg)
// "#);

assert_eq!(
actual.err, "",
"invocation failed. command line limit likely reached"
);
assert_eq!(actual.out, "true");
}
// assert_eq!(
// actual.err, "",
// "invocation failed. command line limit likely reached"
// );
// assert_eq!(actual.out, "true");
// }

#[test]
fn long_stream_mixed_overflow() {
// ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
let actual = nu!(r#"
let binseg = (0..<2048 | each {|| 0x[003d9fbf] } | bytes collect)
let strseg = (0..<2048 | each {|| "hell" | into binary } | bytes collect)
// #[test]
// fn long_stream_mixed_overflow() {
// // ranges are inclusive..inclusive, so we don't need to +1 to check for an overflow
// let actual = nu!(r#"
// let binseg = (0..<2048 | each {|| 0x[003d9fbf] } | bytes collect)
// let strseg = (0..<2048 | each {|| "hell" | into binary } | bytes collect)

nu --testbin repeat_bytes 003d9fbf 2048 68656c6c 2048 | bytes starts-with (bytes build $binseg $strseg 0x[01])
"#);
// nu --testbin repeat_bytes 003d9fbf 2048 68656c6c 2048 | bytes starts-with (bytes build $binseg $strseg 0x[01])
// "#);

assert_eq!(
actual.err, "",
"invocation failed. command line limit likely reached"
);
assert_eq!(actual.out, "false");
}
// assert_eq!(
// actual.err, "",
// "invocation failed. command line limit likely reached"
// );
// assert_eq!(actual.out, "false");
// }

@ -43,7 +43,7 @@ impl Command for Collect {
stack.captures_to_stack_preserve_out_dest(closure.captures.clone());

let metadata = input.metadata();
let input = input.into_value(call.head);
let input = input.into_value(call.head)?;

let mut saved_positional = None;
if let Some(var) = block.signature.get_positional(0) {

@ -1,5 +1,5 @@
use nu_engine::command_prelude::*;
use nu_protocol::{engine::StateWorkingSet, PipelineMetadata};
use nu_protocol::{engine::StateWorkingSet, ByteStreamSource, PipelineMetadata};

#[derive(Clone)]
pub struct Describe;
@ -162,73 +162,38 @@ fn run(
let metadata = input.metadata();

let description = match input {
PipelineData::ExternalStream {
ref stdout,
ref stderr,
ref exit_code,
..
} => {
if options.detailed {
let stdout = if stdout.is_some() {
Value::record(
record! {
"type" => Value::string("stream", head),
"origin" => Value::string("external", head),
"subtype" => Value::string("any", head),
},
head,
)
} else {
Value::nothing(head)
};

let stderr = if stderr.is_some() {
Value::record(
record! {
"type" => Value::string("stream", head),
"origin" => Value::string("external", head),
"subtype" => Value::string("any", head),
},
head,
)
} else {
Value::nothing(head)
};

let exit_code = if exit_code.is_some() {
Value::record(
record! {
"type" => Value::string("stream", head),
"origin" => Value::string("external", head),
"subtype" => Value::string("int", head),
},
head,
)
} else {
Value::nothing(head)
PipelineData::ByteStream(stream, ..) => {
let description = if options.detailed {
let origin = match stream.source() {
ByteStreamSource::Read(_) => "unknown",
ByteStreamSource::File(_) => "file",
ByteStreamSource::Child(_) => "external",
};

Value::record(
record! {
"type" => Value::string("stream", head),
"origin" => Value::string("external", head),
"stdout" => stdout,
"stderr" => stderr,
"exit_code" => exit_code,
"type" => Value::string("byte stream", head),
"origin" => Value::string(origin, head),
"metadata" => metadata_to_value(metadata, head),
},
head,
)
} else {
Value::string("raw input", head)
Value::string("byte stream", head)
};

if !options.no_collect {
stream.drain()?;
}

description
}
PipelineData::ListStream(_, _) => {
PipelineData::ListStream(stream, ..) => {
if options.detailed {
let subtype = if options.no_collect {
Value::string("any", head)
} else {
describe_value(input.into_value(head), head, engine_state)
describe_value(stream.into_value(), head, engine_state)
};
Value::record(
record! {
@ -242,19 +207,19 @@ fn run(
} else if options.no_collect {
Value::string("stream", head)
} else {
let value = input.into_value(head);
let value = stream.into_value();
let base_description = value.get_type().to_string();
Value::string(format!("{} (stream)", base_description), head)
}
}
_ => {
let value = input.into_value(head);
PipelineData::Value(value, ..) => {
if !options.detailed {
Value::string(value.get_type().to_string(), head)
} else {
describe_value(value, head, engine_state)
}
}
PipelineData::Empty => Value::string(Type::Nothing.to_string(), head),
};

Ok(description.into_pipeline_data())

@ -1,6 +1,13 @@
use nu_engine::{command_prelude::*, get_eval_block_with_early_return, redirect_env};
use nu_protocol::{engine::Closure, ListStream, OutDest, RawStream};
use std::thread;
use nu_protocol::{
engine::Closure,
process::{ChildPipe, ChildProcess, ExitStatus},
ByteStream, ByteStreamSource, OutDest,
};
use std::{
io::{Cursor, Read},
thread,
};

#[derive(Clone)]
pub struct Do;
@ -86,115 +93,91 @@ impl Command for Do {
}

match result {
Ok(PipelineData::ExternalStream {
stdout,
stderr,
exit_code,
span,
metadata,
trim_end_newline,
}) if capture_errors => {
// Use a thread to receive stdout message.
// Or we may get a deadlock if child process sends out too much bytes to stderr.
//
// For example: in normal linux system, stderr pipe's limit is 65535 bytes.
// if child process sends out 65536 bytes, the process will be hanged because no consumer
// consumes the first 65535 bytes
// So we need a thread to receive stdout message, then the current thread can continue to consume
// stderr messages.
let stdout_handler = stdout
.map(|stdout_stream| {
thread::Builder::new()
.name("stderr redirector".to_string())
.spawn(move || {
let ctrlc = stdout_stream.ctrlc.clone();
let span = stdout_stream.span;
RawStream::new(
Box::new(std::iter::once(
stdout_stream.into_bytes().map(|s| s.item),
)),
ctrlc,
span,
None,
)
Ok(PipelineData::ByteStream(stream, metadata)) if capture_errors => {
let span = stream.span();
match stream.into_child() {
Ok(mut child) => {
// Use a thread to receive stdout message.
// Or we may get a deadlock if child process sends out too much bytes to stderr.
//
// For example: in normal linux system, stderr pipe's limit is 65535 bytes.
// if child process sends out 65536 bytes, the process will be hanged because no consumer
// consumes the first 65535 bytes
// So we need a thread to receive stdout message, then the current thread can continue to consume
// stderr messages.
let stdout_handler = child
.stdout
.take()
.map(|mut stdout| {
thread::Builder::new()
.name("stdout consumer".to_string())
.spawn(move || {
let mut buf = Vec::new();
stdout.read_to_end(&mut buf)?;
Ok::<_, ShellError>(buf)
})
.err_span(head)
})
.err_span(head)
})
.transpose()?;
.transpose()?;

// Intercept stderr so we can return it in the error if the exit code is non-zero.
// The threading issues mentioned above dictate why we also need to intercept stdout.
let mut stderr_ctrlc = None;
let stderr_msg = match stderr {
None => "".to_string(),
Some(stderr_stream) => {
stderr_ctrlc.clone_from(&stderr_stream.ctrlc);
stderr_stream.into_string().map(|s| s.item)?
}
};
// Intercept stderr so we can return it in the error if the exit code is non-zero.
// The threading issues mentioned above dictate why we also need to intercept stdout.
let stderr_msg = match child.stderr.take() {
None => String::new(),
Some(mut stderr) => {
let mut buf = String::new();
stderr.read_to_string(&mut buf).err_span(span)?;
buf
}
};

let stdout = if let Some(handle) = stdout_handler {
match handle.join() {
Err(err) => {
let stdout = if let Some(handle) = stdout_handler {
match handle.join() {
Err(err) => {
return Err(ShellError::ExternalCommand {
label: "Fail to receive external commands stdout message"
.to_string(),
help: format!("{err:?}"),
span,
});
}
Ok(res) => Some(res?),
}
} else {
None
};

if child.wait()? != ExitStatus::Exited(0) {
return Err(ShellError::ExternalCommand {
label: "Fail to receive external commands stdout message"
.to_string(),
help: format!("{err:?}"),
label: "External command failed".to_string(),
help: stderr_msg,
span,
});
}
Ok(res) => Some(res),
}
} else {
None
};

let exit_code: Vec<Value> = match exit_code {
None => vec![],
Some(exit_code_stream) => exit_code_stream.into_iter().collect(),
};
if let Some(Value::Int { val: code, .. }) = exit_code.last() {
if *code != 0 {
return Err(ShellError::ExternalCommand {
label: "External command failed".to_string(),
help: stderr_msg,
span,
});
let mut child = ChildProcess::from_raw(None, None, None, span);
if let Some(stdout) = stdout {
child.stdout = Some(ChildPipe::Tee(Box::new(Cursor::new(stdout))));
}
if !stderr_msg.is_empty() {
child.stderr = Some(ChildPipe::Tee(Box::new(Cursor::new(stderr_msg))));
}
Ok(PipelineData::ByteStream(
ByteStream::child(child, span),
metadata,
))
}
Err(stream) => Ok(PipelineData::ByteStream(stream, metadata)),
}

Ok(PipelineData::ExternalStream {
stdout,
stderr: Some(RawStream::new(
Box::new(std::iter::once(Ok(stderr_msg.into_bytes()))),
stderr_ctrlc,
span,
None,
)),
exit_code: Some(ListStream::new(exit_code.into_iter(), span, None)),
span,
metadata,
trim_end_newline,
})
}
Ok(PipelineData::ExternalStream {
stdout,
stderr,
exit_code: _,
span,
metadata,
trim_end_newline,
}) if ignore_program_errors
&& !matches!(caller_stack.stdout(), OutDest::Pipe | OutDest::Capture) =>
Ok(PipelineData::ByteStream(mut stream, metadata))
if ignore_program_errors
&& !matches!(caller_stack.stdout(), OutDest::Pipe | OutDest::Capture) =>
{
Ok(PipelineData::ExternalStream {
stdout,
stderr,
exit_code: None,
span,
metadata,
trim_end_newline,
})
if let ByteStreamSource::Child(child) = stream.source_mut() {
child.set_exit_code(0)
}
Ok(PipelineData::ByteStream(stream, metadata))
}
Ok(PipelineData::Value(Value::Error { .. }, ..)) | Err(_) if ignore_shell_errors => {
Ok(PipelineData::empty())

@ -121,12 +121,14 @@ impl Command for For {
Err(err) => {
return Err(err);
}
Ok(pipeline) => {
let exit_code = pipeline.drain_with_exit_code()?;
if exit_code != 0 {
return Ok(PipelineData::new_external_stream_with_only_exit_code(
exit_code,
));
Ok(data) => {
if let Some(status) = data.drain()? {
let code = status.code();
if code != 0 {
return Ok(
PipelineData::new_external_stream_with_only_exit_code(code),
);
}
}
}
}
@ -159,12 +161,14 @@ impl Command for For {
Err(err) => {
return Err(err);
}
Ok(pipeline) => {
let exit_code = pipeline.drain_with_exit_code()?;
if exit_code != 0 {
return Ok(PipelineData::new_external_stream_with_only_exit_code(
exit_code,
));
Ok(data) => {
if let Some(status) = data.drain()? {
let code = status.code();
if code != 0 {
return Ok(
PipelineData::new_external_stream_with_only_exit_code(code),
);
}
}
}
}
@ -173,7 +177,7 @@ impl Command for For {
x => {
stack.add_var(var_id, x);

eval_block(&engine_state, stack, block, PipelineData::empty())?.into_value(head);
eval_block(&engine_state, stack, block, PipelineData::empty())?.into_value(head)?;
}
}
Ok(PipelineData::empty())

@ -61,7 +61,7 @@ impl Command for Let {
|
||||
let eval_block = get_eval_block(engine_state);
|
||||
let stack = &mut stack.start_capture();
|
||||
let pipeline_data = eval_block(engine_state, stack, block, input)?;
|
||||
let value = pipeline_data.into_value(call.head);
|
||||
let value = pipeline_data.into_value(call.head)?;
|
||||
|
||||
// if given variable type is Glob, and our result is string
|
||||
// then nushell need to convert from Value::String to Value::Glob
|
||||
|
@ -53,12 +53,12 @@ impl Command for Loop {
|
||||
Err(err) => {
|
||||
return Err(err);
|
||||
}
|
||||
Ok(pipeline) => {
|
||||
let exit_code = pipeline.drain_with_exit_code()?;
|
||||
if exit_code != 0 {
|
||||
return Ok(PipelineData::new_external_stream_with_only_exit_code(
|
||||
exit_code,
|
||||
));
|
||||
Ok(data) => {
|
||||
if let Some(status) = data.drain()? {
|
||||
let code = status.code();
|
||||
if code != 0 {
|
||||
return Ok(PipelineData::new_external_stream_with_only_exit_code(code));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -61,7 +61,7 @@ impl Command for Mut {
|
||||
let eval_block = get_eval_block(engine_state);
|
||||
let stack = &mut stack.start_capture();
|
||||
let pipeline_data = eval_block(engine_state, stack, block, input)?;
|
||||
let value = pipeline_data.into_value(call.head);
|
||||
let value = pipeline_data.into_value(call.head)?;
|
||||
|
||||
// if given variable type is Glob, and our result is string
|
||||
// then nushell need to convert from Value::String to Value::Glob
|
||||
|
@ -62,10 +62,11 @@ impl Command for Try {
|
||||
}
|
||||
// external command may fail to run
|
||||
Ok(pipeline) => {
|
||||
let (pipeline, external_failed) = pipeline.check_external_failed();
|
||||
let (pipeline, external_failed) = pipeline.check_external_failed()?;
|
||||
if external_failed {
|
||||
let exit_code = pipeline.drain_with_exit_code()?;
|
||||
stack.add_env_var("LAST_EXIT_CODE".into(), Value::int(exit_code, call.head));
|
||||
let status = pipeline.drain()?;
|
||||
let code = status.map(|status| status.code()).unwrap_or(0);
|
||||
stack.add_env_var("LAST_EXIT_CODE".into(), Value::int(code.into(), call.head));
|
||||
let err_value = Value::nothing(call.head);
|
||||
handle_catch(err_value, catch_block, engine_state, stack, eval_block)
|
||||
} else {
|
||||
|
@ -70,14 +70,16 @@ impl Command for While {
|
||||
Err(err) => {
|
||||
return Err(err);
|
||||
}
|
||||
Ok(pipeline) => {
|
||||
let exit_code = pipeline.drain_with_exit_code()?;
|
||||
if exit_code != 0 {
|
||||
return Ok(
|
||||
PipelineData::new_external_stream_with_only_exit_code(
|
||||
exit_code,
|
||||
),
|
||||
);
|
||||
Ok(data) => {
|
||||
if let Some(status) = data.drain()? {
|
||||
let code = status.code();
|
||||
if code != 0 {
|
||||
return Ok(
|
||||
PipelineData::new_external_stream_with_only_exit_code(
|
||||
code,
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -122,10 +122,9 @@ pub fn eval_block(
|
||||
|
||||
stack.add_env_var("PWD".to_string(), Value::test_string(cwd.to_string_lossy()));
|
||||
|
||||
match nu_engine::eval_block::<WithoutDebug>(engine_state, &mut stack, &block, input) {
|
||||
Err(err) => panic!("test eval error in `{}`: {:?}", "TODO", err),
|
||||
Ok(result) => result.into_value(Span::test_data()),
|
||||
}
|
||||
nu_engine::eval_block::<WithoutDebug>(engine_state, &mut stack, &block, input)
|
||||
.and_then(|data| data.into_value(Span::test_data()))
|
||||
.unwrap_or_else(|err| panic!("test eval error in `{}`: {:?}", "TODO", err))
|
||||
}
|
||||
|
||||
pub fn check_example_evaluates_to_expected_output(
|
||||
@ -223,7 +222,7 @@ impl<'a> std::fmt::Debug for DebuggableValue<'a> {
|
||||
Value::Date { val, .. } => {
|
||||
write!(f, "Date({:?})", val)
|
||||
}
|
||||
Value::Range { val, .. } => match val {
|
||||
Value::Range { val, .. } => match **val {
|
||||
Range::IntRange(range) => match range.end() {
|
||||
Bound::Included(end) => write!(
|
||||
f,
|
||||
|
@ -43,7 +43,8 @@ impl Command for PluginAdd {
|
||||
|
||||
fn extra_usage(&self) -> &str {
|
||||
r#"
|
||||
This does not load the plugin commands into the scope - see `register` for that.
|
||||
This does not load the plugin commands into the scope - see `plugin use` for
|
||||
that.
|
||||
|
||||
Instead, it runs the plugin to get its command signatures, and then edits the
|
||||
plugin registry file (by default, `$nu.plugin-path`). The changes will be
|
||||
|
@ -58,11 +58,11 @@ impl<'a> StyleComputer<'a> {
|
||||
Some(ComputableStyle::Closure(closure, span)) => {
|
||||
let result = ClosureEvalOnce::new(self.engine_state, self.stack, closure.clone())
|
||||
.debug(false)
|
||||
.run_with_value(value.clone());
|
||||
.run_with_value(value.clone())
|
||||
.and_then(|data| data.into_value(*span));
|
||||
|
||||
match result {
|
||||
Ok(v) => {
|
||||
let value = v.into_value(*span);
|
||||
Ok(value) => {
|
||||
// These should be the same color data forms supported by color_config.
|
||||
match value {
|
||||
Value::Record { .. } => color_record_to_nustyle(&value),
|
||||
@ -146,7 +146,10 @@ impl<'a> StyleComputer<'a> {
|
||||
let span = value.span();
|
||||
match value {
|
||||
Value::Closure { val, .. } => {
|
||||
map.insert(key.to_string(), ComputableStyle::Closure(val.clone(), span));
|
||||
map.insert(
|
||||
key.to_string(),
|
||||
ComputableStyle::Closure(*val.clone(), span),
|
||||
);
|
||||
}
|
||||
Value::Record { .. } => {
|
||||
map.insert(
|
||||
|
@ -60,63 +60,13 @@ impl Command for BytesStartsWith {
|
||||
pattern,
|
||||
cell_paths,
|
||||
};
|
||||
|
||||
match input {
|
||||
PipelineData::ExternalStream {
|
||||
stdout: Some(stream),
|
||||
span,
|
||||
..
|
||||
} => {
|
||||
let mut i = 0;
|
||||
|
||||
for item in stream {
|
||||
let byte_slice = match &item {
|
||||
// String and binary data are valid byte patterns
|
||||
Ok(Value::String { val, .. }) => val.as_bytes(),
|
||||
Ok(Value::Binary { val, .. }) => val,
|
||||
// If any Error value is output, echo it back
|
||||
Ok(v @ Value::Error { .. }) => return Ok(v.clone().into_pipeline_data()),
|
||||
// Unsupported data
|
||||
Ok(other) => {
|
||||
return Ok(Value::error(
|
||||
ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "string and binary".into(),
|
||||
wrong_type: other.get_type().to_string(),
|
||||
dst_span: span,
|
||||
src_span: other.span(),
|
||||
},
|
||||
span,
|
||||
)
|
||||
.into_pipeline_data());
|
||||
}
|
||||
Err(err) => return Err(err.to_owned()),
|
||||
};
|
||||
|
||||
let max = byte_slice.len().min(arg.pattern.len() - i);
|
||||
|
||||
if byte_slice[..max] == arg.pattern[i..i + max] {
|
||||
i += max;
|
||||
|
||||
if i >= arg.pattern.len() {
|
||||
return Ok(Value::bool(true, span).into_pipeline_data());
|
||||
}
|
||||
} else {
|
||||
return Ok(Value::bool(false, span).into_pipeline_data());
|
||||
}
|
||||
}
|
||||
|
||||
// We reached the end of the stream and never returned,
|
||||
// the pattern wasn't exhausted so it probably doesn't match
|
||||
Ok(Value::bool(false, span).into_pipeline_data())
|
||||
}
|
||||
_ => operate(
|
||||
starts_with,
|
||||
arg,
|
||||
input,
|
||||
call.head,
|
||||
engine_state.ctrlc.clone(),
|
||||
),
|
||||
}
|
||||
operate(
|
||||
starts_with,
|
||||
arg,
|
||||
input,
|
||||
call.head,
|
||||
engine_state.ctrlc.clone(),
|
||||
)
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
|
@ -121,7 +121,7 @@ impl Command for Histogram {
|
||||
};
|
||||
|
||||
let span = call.head;
|
||||
let data_as_value = input.into_value(span);
|
||||
let data_as_value = input.into_value(span)?;
|
||||
let value_span = data_as_value.span();
|
||||
// `input` is not a list, here we can return an error.
|
||||
run_histogram(
|
||||
|
@ -73,7 +73,7 @@ impl Command for Fill {
|
||||
}
|
||||
|
||||
fn search_terms(&self) -> Vec<&str> {
|
||||
vec!["display", "render", "format", "pad", "align"]
|
||||
vec!["display", "render", "format", "pad", "align", "repeat"]
|
||||
}
|
||||
|
||||
fn examples(&self) -> Vec<Example> {
|
||||
@ -91,9 +91,9 @@ impl Command for Fill {
|
||||
result: Some(Value::string("────────nushell", Span::test_data())),
|
||||
},
|
||||
Example {
|
||||
description: "Fill a string on both sides to a width of 15 with the character '─'",
|
||||
example: "'nushell' | fill --alignment m --character '─' --width 15",
|
||||
result: Some(Value::string("────nushell────", Span::test_data())),
|
||||
description: "Fill an empty string with 10 '─' characters",
|
||||
example: "'' | fill --character '─' --width 10",
|
||||
result: Some(Value::string("──────────", Span::test_data())),
|
||||
},
|
||||
Example {
|
||||
description:
|
||||
|
@ -127,25 +127,15 @@ fn into_binary(
|
||||
let cell_paths = call.rest(engine_state, stack, 0)?;
|
||||
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
|
||||
|
||||
match input {
|
||||
PipelineData::ExternalStream { stdout: None, .. } => {
|
||||
Ok(Value::binary(vec![], head).into_pipeline_data())
|
||||
}
|
||||
PipelineData::ExternalStream {
|
||||
stdout: Some(stream),
|
||||
..
|
||||
} => {
|
||||
// TODO: in the future, we may want this to stream out, converting each to bytes
|
||||
let output = stream.into_bytes()?;
|
||||
Ok(Value::binary(output.item, head).into_pipeline_data())
|
||||
}
|
||||
_ => {
|
||||
let args = Arguments {
|
||||
cell_paths,
|
||||
compact: call.has_flag(engine_state, stack, "compact")?,
|
||||
};
|
||||
operate(action, args, input, call.head, engine_state.ctrlc.clone())
|
||||
}
|
||||
if let PipelineData::ByteStream(stream, ..) = input {
|
||||
// TODO: in the future, we may want this to stream out, converting each to bytes
|
||||
Ok(Value::binary(stream.into_bytes()?, head).into_pipeline_data())
|
||||
} else {
|
||||
let args = Arguments {
|
||||
cell_paths,
|
||||
compact: call.has_flag(engine_state, stack, "compact")?,
|
||||
};
|
||||
operate(action, args, input, call.head, engine_state.ctrlc.clone())
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -101,11 +101,11 @@ fn into_cell_path(call: &Call, input: PipelineData) -> Result<PipelineData, Shel
|
||||
let list: Vec<_> = stream.into_iter().collect();
|
||||
Ok(list_to_cell_path(&list, head)?.into_pipeline_data())
|
||||
}
|
||||
PipelineData::ExternalStream { span, .. } => Err(ShellError::OnlySupportsThisInputType {
|
||||
PipelineData::ByteStream(stream, ..) => Err(ShellError::OnlySupportsThisInputType {
|
||||
exp_input_type: "list, int".into(),
|
||||
wrong_type: "raw data".into(),
|
||||
wrong_type: "byte stream".into(),
|
||||
dst_span: head,
|
||||
src_span: span,
|
||||
src_span: stream.span(),
|
||||
}),
|
||||
PipelineData::Empty => Err(ShellError::PipelineEmpty { dst_span: head }),
|
||||
}
|
||||
|
@ -82,20 +82,12 @@ fn glob_helper(
|
||||
let head = call.head;
|
||||
let cell_paths = call.rest(engine_state, stack, 0)?;
|
||||
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
|
||||
let args = Arguments { cell_paths };
|
||||
match input {
|
||||
PipelineData::ExternalStream { stdout: None, .. } => {
|
||||
Ok(Value::glob(String::new(), false, head).into_pipeline_data())
|
||||
}
|
||||
PipelineData::ExternalStream {
|
||||
stdout: Some(stream),
|
||||
..
|
||||
} => {
|
||||
// TODO: in the future, we may want this to stream out, converting each to bytes
|
||||
let output = stream.into_string()?;
|
||||
Ok(Value::glob(output.item, false, head).into_pipeline_data())
|
||||
}
|
||||
_ => operate(action, args, input, head, engine_state.ctrlc.clone()),
|
||||
if let PipelineData::ByteStream(stream, ..) = input {
|
||||
// TODO: in the future, we may want this to stream out, converting each to bytes
|
||||
Ok(Value::glob(stream.into_string()?, false, head).into_pipeline_data())
|
||||
} else {
|
||||
let args = Arguments { cell_paths };
|
||||
operate(action, args, input, head, engine_state.ctrlc.clone())
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -108,7 +108,7 @@ fn into_record(
|
||||
call: &Call,
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let input = input.into_value(call.head);
|
||||
let input = input.into_value(call.head)?;
|
||||
let input_type = input.get_type();
|
||||
let span = input.span();
|
||||
let res = match input {
|
||||
|
@ -155,26 +155,18 @@ fn string_helper(
|
||||
}
|
||||
let cell_paths = call.rest(engine_state, stack, 0)?;
|
||||
let cell_paths = (!cell_paths.is_empty()).then_some(cell_paths);
|
||||
let config = engine_state.get_config().clone();
|
||||
let args = Arguments {
|
||||
decimals_value,
|
||||
cell_paths,
|
||||
config,
|
||||
};
|
||||
|
||||
match input {
|
||||
PipelineData::ExternalStream { stdout: None, .. } => {
|
||||
Ok(Value::string(String::new(), head).into_pipeline_data())
|
||||
}
|
||||
PipelineData::ExternalStream {
|
||||
stdout: Some(stream),
|
||||
..
|
||||
} => {
|
||||
// TODO: in the future, we may want this to stream out, converting each to bytes
|
||||
let output = stream.into_string()?;
|
||||
Ok(Value::string(output.item, head).into_pipeline_data())
|
||||
}
|
||||
_ => operate(action, args, input, head, engine_state.ctrlc.clone()),
|
||||
if let PipelineData::ByteStream(stream, ..) = input {
|
||||
// TODO: in the future, we may want this to stream out, converting each to bytes
|
||||
Ok(Value::string(stream.into_string()?, head).into_pipeline_data())
|
||||
} else {
|
||||
let config = engine_state.get_config().clone();
|
||||
let args = Arguments {
|
||||
decimals_value,
|
||||
cell_paths,
|
||||
config,
|
||||
};
|
||||
operate(action, args, input, head, engine_state.ctrlc.clone())
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -5,7 +5,7 @@ use commands::add_commands_decls;
|
||||
|
||||
pub use values::{
|
||||
convert_sqlite_row_to_nu_value, convert_sqlite_value_to_nu_value, open_connection_in_memory,
|
||||
open_connection_in_memory_custom, SQLiteDatabase, MEMORY_DB,
|
||||
open_connection_in_memory_custom, values_to_sql, SQLiteDatabase, MEMORY_DB,
|
||||
};
|
||||
|
||||
use nu_protocol::engine::StateWorkingSet;
|
||||
|
@ -3,5 +3,5 @@ pub mod sqlite;
|
||||
|
||||
pub use sqlite::{
|
||||
convert_sqlite_row_to_nu_value, convert_sqlite_value_to_nu_value, open_connection_in_memory,
|
||||
open_connection_in_memory_custom, SQLiteDatabase, MEMORY_DB,
|
||||
open_connection_in_memory_custom, values_to_sql, SQLiteDatabase, MEMORY_DB,
|
||||
};
|
||||
|
@ -91,7 +91,7 @@ impl SQLiteDatabase {
|
||||
}
|
||||
|
||||
pub fn try_from_pipeline(input: PipelineData, span: Span) -> Result<Self, ShellError> {
|
||||
let value = input.into_value(span);
|
||||
let value = input.into_value(span)?;
|
||||
Self::try_from_value(value)
|
||||
}
|
||||
|
||||
|
@ -29,7 +29,7 @@ impl Command for Inspect {
|
||||
input: PipelineData,
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let input_metadata = input.metadata();
|
||||
let input_val = input.into_value(call.head);
|
||||
let input_val = input.into_value(call.head)?;
|
||||
if input_val.is_nothing() {
|
||||
return Err(ShellError::PipelineEmpty {
|
||||
dst_span: call.head,
|
||||
|
@ -53,13 +53,12 @@ impl Command for TimeIt {
|
||||
eval_block(engine_state, stack, block, input)?
|
||||
} else {
|
||||
let eval_expression_with_input = get_eval_expression_with_input(engine_state);
|
||||
eval_expression_with_input(engine_state, stack, command_to_run, input)
|
||||
.map(|res| res.0)?
|
||||
eval_expression_with_input(engine_state, stack, command_to_run, input)?.0
|
||||
}
|
||||
} else {
|
||||
PipelineData::empty()
|
||||
}
|
||||
.into_value(call.head);
|
||||
.into_value(call.head)?;
|
||||
|
||||
let end_time = Instant::now();
|
||||
|
||||
|
10
crates/nu-command/src/env/with_env.rs
vendored
@ -90,10 +90,7 @@ fn with_env(
|
||||
return Err(ShellError::CantConvert {
|
||||
to_type: "record".into(),
|
||||
from_type: x.get_type().to_string(),
|
||||
span: call
|
||||
.positional_nth(1)
|
||||
.expect("already checked through .req")
|
||||
.span,
|
||||
span: x.span(),
|
||||
help: None,
|
||||
});
|
||||
}
|
||||
@ -124,10 +121,7 @@ fn with_env(
|
||||
return Err(ShellError::CantConvert {
|
||||
to_type: "record".into(),
|
||||
from_type: x.get_type().to_string(),
|
||||
span: call
|
||||
.positional_nth(1)
|
||||
.expect("already checked through .req")
|
||||
.span,
|
||||
span: x.span(),
|
||||
help: None,
|
||||
});
|
||||
}
|
||||
|
@ -1,3 +1,4 @@
|
||||
use nu_cmd_base::util::get_init_cwd;
|
||||
use nu_engine::command_prelude::*;
|
||||
use nu_utils::filesystem::{have_permission, PermissionResult};
|
||||
|
||||
@ -39,7 +40,10 @@ impl Command for Cd {
|
||||
) -> Result<PipelineData, ShellError> {
|
||||
let physical = call.has_flag(engine_state, stack, "physical")?;
|
||||
let path_val: Option<Spanned<String>> = call.opt(engine_state, stack, 0)?;
|
||||
let cwd = engine_state.cwd(Some(stack))?;
|
||||
|
||||
// If getting PWD failed, default to the initial directory. This way, the
|
||||
// user can use `cd` to recover PWD to a good state.
|
||||
let cwd = engine_state.cwd(Some(stack)).unwrap_or(get_init_cwd());
|
||||
|
||||
let path_val = {
|
||||
if let Some(path) = path_val {
|
||||
@ -52,13 +56,13 @@ impl Command for Cd {
|
||||
}
|
||||
};
|
||||
|
||||
let (path, span) = match path_val {
|
||||
let path = match path_val {
|
||||
Some(v) => {
|
||||
if v.item == "-" {
|
||||
if let Some(oldpwd) = stack.get_env_var(engine_state, "OLDPWD") {
|
||||
(oldpwd.to_path()?, v.span)
|
||||
oldpwd.to_path()?
|
||||
} else {
|
||||
(cwd, v.span)
|
||||
cwd
|
||||
}
|
||||
} else {
|
||||
// Trim whitespace from the end of path.
|
||||
@ -66,7 +70,7 @@ impl Command for Cd {
|
||||
&v.item.trim_end_matches(|x| matches!(x, '\x09'..='\x0d'));
|
||||
|
||||
// If `--physical` is specified, canonicalize the path; otherwise expand the path.
|
||||
let path = if physical {
|
||||
if physical {
|
||||
if let Ok(path) = nu_path::canonicalize_with(path_no_whitespace, &cwd) {
|
||||
if !path.is_dir() {
|
||||
return Err(ShellError::NotADirectory { span: v.span });
|
||||
@ -90,19 +94,12 @@ impl Command for Cd {
|
||||
return Err(ShellError::NotADirectory { span: v.span });
|
||||
};
|
||||
path
|
||||
};
|
||||
(path, v.span)
|
||||
}
|
||||
}
|
||||
}
|
||||
None => {
|
||||
let path = nu_path::expand_tilde("~");
|
||||
(path, call.head)
|
||||
}
|
||||
None => nu_path::expand_tilde("~"),
|
||||
};
|
||||
|
||||
// Strip the trailing slash from the new path. This is required for PWD.
|
||||
let path = nu_path::strip_trailing_slash(&path);
|
||||
|
||||
// Set OLDPWD.
|
||||
// We're using `Stack::get_env_var()` instead of `EngineState::cwd()` to avoid a conversion roundtrip.
|
||||
if let Some(oldpwd) = stack.get_env_var(engine_state, "PWD") {
|
||||
@ -113,7 +110,7 @@ impl Command for Cd {
|
||||
//FIXME: this only changes the current scope, but instead this environment variable
|
||||
//should probably be a block that loads the information from the state in the overlay
|
||||
PermissionResult::PermissionOk => {
|
||||
stack.add_env_var("PWD".into(), Value::string(path.to_string_lossy(), span));
|
||||
stack.set_cwd(path)?;
|
||||
Ok(PipelineData::empty())
|
||||
}
|
||||
PermissionResult::PermissionDenied(reason) => Err(ShellError::IOError {
|
||||
|
@ -1,8 +1,8 @@
|
||||
use super::util::get_rest_for_glob_pattern;
|
||||
#[allow(deprecated)]
|
||||
use nu_engine::{command_prelude::*, current_dir, get_eval_block};
|
||||
use nu_protocol::{BufferedReader, DataSource, NuGlob, PipelineMetadata, RawStream};
|
||||
use std::{io::BufReader, path::Path};
|
||||
use nu_protocol::{ByteStream, DataSource, NuGlob, PipelineMetadata};
|
||||
use std::path::Path;
|
||||
|
||||
#[cfg(feature = "sqlite")]
|
||||
use crate::database::SQLiteDatabase;
|
||||
@ -143,23 +143,13 @@ impl Command for Open {
|
||||
}
|
||||
};
|
||||
|
||||
let buf_reader = BufReader::new(file);
|
||||
|
||||
let file_contents = PipelineData::ExternalStream {
|
||||
stdout: Some(RawStream::new(
|
||||
Box::new(BufferedReader::new(buf_reader)),
|
||||
ctrlc.clone(),
|
||||
call_span,
|
||||
None,
|
||||
)),
|
||||
stderr: None,
|
||||
exit_code: None,
|
||||
span: call_span,
|
||||
metadata: Some(PipelineMetadata {
|
||||
let stream = PipelineData::ByteStream(
|
||||
ByteStream::file(file, call_span, ctrlc.clone()),
|
||||
Some(PipelineMetadata {
|
||||
data_source: DataSource::FilePath(path.to_path_buf()),
|
||||
}),
|
||||
trim_end_newline: false,
|
||||
};
|
||||
);
|
||||
|
||||
let exts_opt: Option<Vec<String>> = if raw {
|
||||
None
|
||||
} else {
|
||||
@ -184,9 +174,9 @@ impl Command for Open {
|
||||
let decl = engine_state.get_decl(converter_id);
|
||||
let command_output = if let Some(block_id) = decl.get_block_id() {
|
||||
let block = engine_state.get_block(block_id);
|
||||
eval_block(engine_state, stack, block, file_contents)
|
||||
eval_block(engine_state, stack, block, stream)
|
||||
} else {
|
||||
decl.run(engine_state, stack, &Call::new(call_span), file_contents)
|
||||
decl.run(engine_state, stack, &Call::new(call_span), stream)
|
||||
};
|
||||
output.push(command_output.map_err(|inner| {
|
||||
ShellError::GenericError{
|
||||
@ -198,7 +188,7 @@ impl Command for Open {
|
||||
}
|
||||
})?);
|
||||
}
|
||||
None => output.push(file_contents),
|
||||
None => output.push(stream),
|
||||
}
|
||||
}
|
||||
}
|
||||
|